@loaders.gl/parquet 4.0.0-alpha.5 → 4.0.0-alpha.6

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (596)
  1. package/dist/bundle.js +2 -2
  2. package/dist/constants.js +18 -6
  3. package/dist/dist.min.js +27 -25
  4. package/dist/dist.min.js.map +3 -3
  5. package/dist/es5/bundle.js +6 -0
  6. package/dist/es5/bundle.js.map +1 -0
  7. package/dist/es5/constants.js +17 -0
  8. package/dist/es5/constants.js.map +1 -0
  9. package/dist/es5/index.js +128 -0
  10. package/dist/es5/index.js.map +1 -0
  11. package/dist/es5/lib/arrow/convert-columns-to-row-group.js +2 -0
  12. package/dist/es5/lib/arrow/convert-columns-to-row-group.js.map +1 -0
  13. package/dist/es5/lib/arrow/convert-row-group-to-columns.js +19 -0
  14. package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -0
  15. package/dist/es5/lib/arrow/convert-schema-from-parquet.js +114 -0
  16. package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -0
  17. package/dist/es5/lib/arrow/convert-schema-to-parquet.js +47 -0
  18. package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -0
  19. package/dist/es5/lib/geo/decode-geo-metadata.js +81 -0
  20. package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -0
  21. package/dist/es5/lib/geo/geoparquet-schema.js +83 -0
  22. package/dist/es5/lib/geo/geoparquet-schema.js.map +1 -0
  23. package/dist/es5/lib/parsers/parse-parquet-to-columns.js +177 -0
  24. package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -0
  25. package/dist/es5/lib/parsers/parse-parquet-to-rows.js +172 -0
  26. package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -0
  27. package/dist/es5/lib/wasm/encode-parquet-wasm.js +43 -0
  28. package/dist/es5/lib/wasm/encode-parquet-wasm.js.map +1 -0
  29. package/dist/es5/lib/wasm/load-wasm/index.js +13 -0
  30. package/dist/es5/lib/wasm/load-wasm/index.js.map +1 -0
  31. package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js +42 -0
  32. package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -0
  33. package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js +31 -0
  34. package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js.map +1 -0
  35. package/dist/es5/lib/wasm/parse-parquet-wasm.js +60 -0
  36. package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -0
  37. package/dist/es5/lib/wip/convert-schema-deep.java.disabled +910 -0
  38. package/dist/es5/lib/wip/convert-schema-deep.rs.disabled +976 -0
  39. package/dist/es5/parquet-loader.js +44 -0
  40. package/dist/es5/parquet-loader.js.map +1 -0
  41. package/dist/es5/parquet-wasm-loader.js +30 -0
  42. package/dist/es5/parquet-wasm-loader.js.map +1 -0
  43. package/dist/es5/parquet-wasm-writer.js +26 -0
  44. package/dist/es5/parquet-wasm-writer.js.map +1 -0
  45. package/dist/es5/parquet-writer.js +24 -0
  46. package/dist/es5/parquet-writer.js.map +1 -0
  47. package/dist/es5/parquetjs/codecs/declare.js +2 -0
  48. package/dist/es5/parquetjs/codecs/declare.js.map +1 -0
  49. package/dist/es5/parquetjs/codecs/dictionary.js +23 -0
  50. package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -0
  51. package/dist/es5/parquetjs/codecs/index.js +47 -0
  52. package/dist/es5/parquetjs/codecs/index.js.map +1 -0
  53. package/dist/es5/parquetjs/codecs/plain.js +208 -0
  54. package/dist/es5/parquetjs/codecs/plain.js.map +1 -0
  55. package/dist/es5/parquetjs/codecs/rle.js +132 -0
  56. package/dist/es5/parquetjs/codecs/rle.js.map +1 -0
  57. package/dist/es5/parquetjs/compression.js +137 -0
  58. package/dist/es5/parquetjs/compression.js.map +1 -0
  59. package/dist/es5/parquetjs/encoder/parquet-encoder.js +625 -0
  60. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
  61. package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +14 -0
  62. package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -0
  63. package/dist/es5/parquetjs/parquet-thrift/BsonType.js +52 -0
  64. package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -0
  65. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js +193 -0
  66. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -0
  67. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js +198 -0
  68. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -0
  69. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js +367 -0
  70. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -0
  71. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js +99 -0
  72. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -0
  73. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +19 -0
  74. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -0
  75. package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +33 -0
  76. package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js.map +1 -0
  77. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js +152 -0
  78. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -0
  79. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js +207 -0
  80. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -0
  81. package/dist/es5/parquetjs/parquet-thrift/DateType.js +52 -0
  82. package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -0
  83. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js +96 -0
  84. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -0
  85. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js +113 -0
  86. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -0
  87. package/dist/es5/parquetjs/parquet-thrift/Encoding.js +19 -0
  88. package/dist/es5/parquetjs/parquet-thrift/Encoding.js.map +1 -0
  89. package/dist/es5/parquetjs/parquet-thrift/EnumType.js +52 -0
  90. package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -0
  91. package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +14 -0
  92. package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -0
  93. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js +239 -0
  94. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -0
  95. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js +52 -0
  96. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -0
  97. package/dist/es5/parquetjs/parquet-thrift/IntType.js +96 -0
  98. package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -0
  99. package/dist/es5/parquetjs/parquet-thrift/JsonType.js +52 -0
  100. package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -0
  101. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js +94 -0
  102. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -0
  103. package/dist/es5/parquetjs/parquet-thrift/ListType.js +52 -0
  104. package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -0
  105. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js +423 -0
  106. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -0
  107. package/dist/es5/parquetjs/parquet-thrift/MapType.js +52 -0
  108. package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -0
  109. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js +52 -0
  110. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -0
  111. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js +52 -0
  112. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -0
  113. package/dist/es5/parquetjs/parquet-thrift/NullType.js +52 -0
  114. package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -0
  115. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js +89 -0
  116. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -0
  117. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js +115 -0
  118. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -0
  119. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js +204 -0
  120. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -0
  121. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js +124 -0
  122. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -0
  123. package/dist/es5/parquetjs/parquet-thrift/PageType.js +15 -0
  124. package/dist/es5/parquetjs/parquet-thrift/PageType.js.map +1 -0
  125. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js +165 -0
  126. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -0
  127. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js +231 -0
  128. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -0
  129. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js +115 -0
  130. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -0
  131. package/dist/es5/parquetjs/parquet-thrift/Statistics.js +165 -0
  132. package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -0
  133. package/dist/es5/parquetjs/parquet-thrift/StringType.js +52 -0
  134. package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -0
  135. package/dist/es5/parquetjs/parquet-thrift/TimeType.js +97 -0
  136. package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -0
  137. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js +126 -0
  138. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -0
  139. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js +97 -0
  140. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -0
  141. package/dist/es5/parquetjs/parquet-thrift/Type.js +19 -0
  142. package/dist/es5/parquetjs/parquet-thrift/Type.js.map +1 -0
  143. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js +52 -0
  144. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -0
  145. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js +52 -0
  146. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -0
  147. package/dist/es5/parquetjs/parquet-thrift/index.js +479 -0
  148. package/dist/es5/parquetjs/parquet-thrift/index.js.map +1 -0
  149. package/dist/es5/parquetjs/parser/decoders.js +393 -0
  150. package/dist/es5/parquetjs/parser/decoders.js.map +1 -0
  151. package/dist/es5/parquetjs/parser/parquet-reader.js +610 -0
  152. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -0
  153. package/dist/es5/parquetjs/schema/declare.js +21 -0
  154. package/dist/es5/parquetjs/schema/declare.js.map +1 -0
  155. package/dist/es5/parquetjs/schema/schema.js +165 -0
  156. package/dist/es5/parquetjs/schema/schema.js.map +1 -0
  157. package/dist/es5/parquetjs/schema/shred.js +282 -0
  158. package/dist/es5/parquetjs/schema/shred.js.map +1 -0
  159. package/dist/es5/parquetjs/schema/types.js +406 -0
  160. package/dist/es5/parquetjs/schema/types.js.map +1 -0
  161. package/dist/es5/parquetjs/utils/file-utils.js +47 -0
  162. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -0
  163. package/dist/es5/parquetjs/utils/read-utils.js +120 -0
  164. package/dist/es5/parquetjs/utils/read-utils.js.map +1 -0
  165. package/dist/es5/workers/parquet-worker.js +6 -0
  166. package/dist/es5/workers/parquet-worker.js.map +1 -0
  167. package/dist/esm/bundle.js +4 -0
  168. package/dist/esm/bundle.js.map +1 -0
  169. package/dist/esm/constants.js +6 -0
  170. package/dist/esm/constants.js.map +1 -0
  171. package/dist/esm/index.js +31 -0
  172. package/dist/esm/index.js.map +1 -0
  173. package/dist/esm/lib/arrow/convert-columns-to-row-group.js +2 -0
  174. package/dist/esm/lib/arrow/convert-columns-to-row-group.js.map +1 -0
  175. package/dist/esm/lib/arrow/convert-row-group-to-columns.js +8 -0
  176. package/dist/esm/lib/arrow/convert-row-group-to-columns.js.map +1 -0
  177. package/dist/esm/lib/arrow/convert-schema-from-parquet.js +95 -0
  178. package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -0
  179. package/dist/esm/lib/arrow/convert-schema-to-parquet.js +39 -0
  180. package/dist/esm/lib/arrow/convert-schema-to-parquet.js.map +1 -0
  181. package/dist/esm/lib/geo/decode-geo-metadata.js +62 -0
  182. package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -0
  183. package/dist/esm/lib/geo/geoparquet-schema.js +76 -0
  184. package/dist/esm/lib/geo/geoparquet-schema.js.map +1 -0
  185. package/dist/esm/lib/parsers/parse-parquet-to-columns.js +39 -0
  186. package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -0
  187. package/dist/esm/lib/parsers/parse-parquet-to-rows.js +29 -0
  188. package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -0
  189. package/dist/esm/lib/wasm/encode-parquet-wasm.js +15 -0
  190. package/dist/esm/lib/wasm/encode-parquet-wasm.js.map +1 -0
  191. package/dist/esm/lib/wasm/load-wasm/index.js +2 -0
  192. package/dist/esm/lib/wasm/load-wasm/index.js.map +1 -0
  193. package/dist/esm/lib/wasm/load-wasm/load-wasm-browser.js +11 -0
  194. package/dist/esm/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -0
  195. package/dist/esm/lib/wasm/load-wasm/load-wasm-node.js +5 -0
  196. package/dist/esm/lib/wasm/load-wasm/load-wasm-node.js.map +1 -0
  197. package/dist/esm/lib/wasm/parse-parquet-wasm.js +21 -0
  198. package/dist/esm/lib/wasm/parse-parquet-wasm.js.map +1 -0
  199. package/dist/esm/lib/wip/convert-schema-deep.java.disabled +910 -0
  200. package/dist/esm/lib/wip/convert-schema-deep.rs.disabled +976 -0
  201. package/dist/esm/parquet-loader.js +36 -0
  202. package/dist/esm/parquet-loader.js.map +1 -0
  203. package/dist/esm/parquet-wasm-loader.js +22 -0
  204. package/dist/esm/parquet-wasm-loader.js.map +1 -0
  205. package/dist/esm/parquet-wasm-writer.js +19 -0
  206. package/dist/esm/parquet-wasm-writer.js.map +1 -0
  207. package/dist/esm/parquet-writer.js +17 -0
  208. package/dist/esm/parquet-writer.js.map +1 -0
  209. package/dist/esm/parquetjs/LICENSE +20 -0
  210. package/dist/esm/parquetjs/codecs/declare.js +2 -0
  211. package/dist/esm/parquetjs/codecs/declare.js.map +1 -0
  212. package/dist/esm/parquetjs/codecs/dictionary.js +13 -0
  213. package/dist/esm/parquetjs/codecs/dictionary.js.map +1 -0
  214. package/dist/esm/parquetjs/codecs/index.js +23 -0
  215. package/dist/esm/parquetjs/codecs/index.js.map +1 -0
  216. package/dist/esm/parquetjs/codecs/plain.js +200 -0
  217. package/dist/esm/parquetjs/codecs/plain.js.map +1 -0
  218. package/dist/esm/parquetjs/codecs/rle.js +119 -0
  219. package/dist/esm/parquetjs/codecs/rle.js.map +1 -0
  220. package/dist/esm/parquetjs/compression.js +61 -0
  221. package/dist/esm/parquetjs/compression.js.map +1 -0
  222. package/dist/{parquetjs/encoder/writer.js → esm/parquetjs/encoder/parquet-encoder.js} +8 -106
  223. package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
  224. package/dist/esm/parquetjs/modules.d.ts +21 -0
  225. package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js +7 -0
  226. package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -0
  227. package/dist/esm/parquetjs/parquet-thrift/BsonType.js +31 -0
  228. package/dist/esm/parquetjs/parquet-thrift/BsonType.js.map +1 -0
  229. package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js +173 -0
  230. package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -0
  231. package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js +176 -0
  232. package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -0
  233. package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js +347 -0
  234. package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -0
  235. package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js +77 -0
  236. package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -0
  237. package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js +12 -0
  238. package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -0
  239. package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js +26 -0
  240. package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js.map +1 -0
  241. package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js +132 -0
  242. package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -0
  243. package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js +187 -0
  244. package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -0
  245. package/dist/esm/parquetjs/parquet-thrift/DateType.js +31 -0
  246. package/dist/esm/parquetjs/parquet-thrift/DateType.js.map +1 -0
  247. package/dist/esm/parquetjs/parquet-thrift/DecimalType.js +76 -0
  248. package/dist/esm/parquetjs/parquet-thrift/DecimalType.js.map +1 -0
  249. package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js +93 -0
  250. package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -0
  251. package/dist/esm/parquetjs/parquet-thrift/Encoding.js +12 -0
  252. package/dist/esm/parquetjs/parquet-thrift/Encoding.js.map +1 -0
  253. package/dist/esm/parquetjs/parquet-thrift/EnumType.js +31 -0
  254. package/dist/esm/parquetjs/parquet-thrift/EnumType.js.map +1 -0
  255. package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js +7 -0
  256. package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -0
  257. package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js +219 -0
  258. package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js.map +1 -0
  259. package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js +31 -0
  260. package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -0
  261. package/dist/esm/parquetjs/parquet-thrift/IntType.js +76 -0
  262. package/dist/esm/parquetjs/parquet-thrift/IntType.js.map +1 -0
  263. package/dist/esm/parquetjs/parquet-thrift/JsonType.js +31 -0
  264. package/dist/esm/parquetjs/parquet-thrift/JsonType.js.map +1 -0
  265. package/dist/esm/parquetjs/parquet-thrift/KeyValue.js +74 -0
  266. package/dist/esm/parquetjs/parquet-thrift/KeyValue.js.map +1 -0
  267. package/dist/esm/parquetjs/parquet-thrift/ListType.js +31 -0
  268. package/dist/esm/parquetjs/parquet-thrift/ListType.js.map +1 -0
  269. package/dist/esm/parquetjs/parquet-thrift/LogicalType.js +377 -0
  270. package/dist/esm/parquetjs/parquet-thrift/LogicalType.js.map +1 -0
  271. package/dist/esm/parquetjs/parquet-thrift/MapType.js +31 -0
  272. package/dist/esm/parquetjs/parquet-thrift/MapType.js.map +1 -0
  273. package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js +31 -0
  274. package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -0
  275. package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js +31 -0
  276. package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -0
  277. package/dist/esm/parquetjs/parquet-thrift/NullType.js +31 -0
  278. package/dist/esm/parquetjs/parquet-thrift/NullType.js.map +1 -0
  279. package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js +69 -0
  280. package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -0
  281. package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js +95 -0
  282. package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -0
  283. package/dist/esm/parquetjs/parquet-thrift/PageHeader.js +184 -0
  284. package/dist/esm/parquetjs/parquet-thrift/PageHeader.js.map +1 -0
  285. package/dist/esm/parquetjs/parquet-thrift/PageLocation.js +104 -0
  286. package/dist/esm/parquetjs/parquet-thrift/PageLocation.js.map +1 -0
  287. package/dist/esm/parquetjs/parquet-thrift/PageType.js +8 -0
  288. package/dist/esm/parquetjs/parquet-thrift/PageType.js.map +1 -0
  289. package/dist/esm/parquetjs/parquet-thrift/RowGroup.js +145 -0
  290. package/dist/esm/parquetjs/parquet-thrift/RowGroup.js.map +1 -0
  291. package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js +211 -0
  292. package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js.map +1 -0
  293. package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js +95 -0
  294. package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js.map +1 -0
  295. package/dist/esm/parquetjs/parquet-thrift/Statistics.js +145 -0
  296. package/dist/esm/parquetjs/parquet-thrift/Statistics.js.map +1 -0
  297. package/dist/esm/parquetjs/parquet-thrift/StringType.js +31 -0
  298. package/dist/esm/parquetjs/parquet-thrift/StringType.js.map +1 -0
  299. package/dist/esm/parquetjs/parquet-thrift/TimeType.js +77 -0
  300. package/dist/esm/parquetjs/parquet-thrift/TimeType.js.map +1 -0
  301. package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js +102 -0
  302. package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js.map +1 -0
  303. package/dist/esm/parquetjs/parquet-thrift/TimestampType.js +77 -0
  304. package/dist/esm/parquetjs/parquet-thrift/TimestampType.js.map +1 -0
  305. package/dist/esm/parquetjs/parquet-thrift/Type.js +12 -0
  306. package/dist/esm/parquetjs/parquet-thrift/Type.js.map +1 -0
  307. package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js +31 -0
  308. package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -0
  309. package/dist/esm/parquetjs/parquet-thrift/UUIDType.js +31 -0
  310. package/dist/esm/parquetjs/parquet-thrift/UUIDType.js.map +1 -0
  311. package/dist/esm/parquetjs/parquet-thrift/index.js +44 -0
  312. package/dist/esm/parquetjs/parquet-thrift/index.js.map +1 -0
  313. package/dist/esm/parquetjs/parser/decoders.js +253 -0
  314. package/dist/esm/parquetjs/parser/decoders.js.map +1 -0
  315. package/dist/{parquetjs/parser/parquet-envelope-reader.js → esm/parquetjs/parser/parquet-reader.js} +95 -74
  316. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -0
  317. package/dist/esm/parquetjs/schema/declare.js +12 -0
  318. package/dist/esm/parquetjs/schema/declare.js.map +1 -0
  319. package/dist/esm/parquetjs/schema/schema.js +140 -0
  320. package/dist/esm/parquetjs/schema/schema.js.map +1 -0
  321. package/dist/esm/parquetjs/schema/shred.js +228 -0
  322. package/dist/esm/parquetjs/schema/shred.js.map +1 -0
  323. package/dist/esm/parquetjs/schema/types.js +397 -0
  324. package/dist/esm/parquetjs/schema/types.js.map +1 -0
  325. package/dist/esm/parquetjs/utils/file-utils.js +34 -0
  326. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -0
  327. package/dist/esm/parquetjs/utils/read-utils.js +90 -0
  328. package/dist/esm/parquetjs/utils/read-utils.js.map +1 -0
  329. package/dist/esm/workers/parquet-worker.js +4 -0
  330. package/dist/esm/workers/parquet-worker.js.map +1 -0
  331. package/dist/index.d.ts +16 -20
  332. package/dist/index.d.ts.map +1 -1
  333. package/dist/index.js +58 -15
  334. package/dist/lib/arrow/convert-columns-to-row-group.d.ts +1 -0
  335. package/dist/lib/arrow/convert-columns-to-row-group.d.ts.map +1 -0
  336. package/dist/lib/arrow/convert-columns-to-row-group.js +1 -0
  337. package/dist/lib/arrow/convert-row-group-to-columns.d.ts +4 -0
  338. package/dist/lib/arrow/convert-row-group-to-columns.d.ts.map +1 -0
  339. package/dist/lib/arrow/convert-row-group-to-columns.js +12 -0
  340. package/dist/lib/arrow/convert-schema-from-parquet.d.ts +9 -0
  341. package/dist/lib/arrow/convert-schema-from-parquet.d.ts.map +1 -0
  342. package/dist/lib/arrow/convert-schema-from-parquet.js +86 -0
  343. package/dist/lib/arrow/convert-schema-to-parquet.d.ts +7 -0
  344. package/dist/lib/arrow/convert-schema-to-parquet.d.ts.map +1 -0
  345. package/dist/lib/arrow/convert-schema-to-parquet.js +71 -0
  346. package/dist/lib/geo/decode-geo-metadata.d.ts +31 -0
  347. package/dist/lib/geo/decode-geo-metadata.d.ts.map +1 -0
  348. package/dist/lib/geo/decode-geo-metadata.js +77 -0
  349. package/dist/lib/geo/geoparquet-schema.d.ts +80 -0
  350. package/dist/lib/geo/geoparquet-schema.d.ts.map +1 -0
  351. package/dist/lib/geo/geoparquet-schema.js +69 -0
  352. package/dist/lib/parsers/parse-parquet-to-columns.d.ts +5 -0
  353. package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -0
  354. package/dist/lib/parsers/parse-parquet-to-columns.js +46 -0
  355. package/dist/lib/parsers/parse-parquet-to-rows.d.ts +5 -0
  356. package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -0
  357. package/dist/lib/parsers/parse-parquet-to-rows.js +37 -0
  358. package/dist/lib/wasm/encode-parquet-wasm.d.ts +21 -0
  359. package/dist/lib/wasm/encode-parquet-wasm.d.ts.map +1 -0
  360. package/dist/lib/wasm/encode-parquet-wasm.js +30 -0
  361. package/dist/lib/wasm/load-wasm/index.d.ts +2 -0
  362. package/dist/lib/wasm/load-wasm/index.d.ts.map +1 -0
  363. package/dist/lib/wasm/load-wasm/index.js +5 -0
  364. package/dist/lib/wasm/load-wasm/load-wasm-browser.d.ts +3 -0
  365. package/dist/lib/wasm/load-wasm/load-wasm-browser.d.ts.map +1 -0
  366. package/dist/lib/wasm/load-wasm/load-wasm-browser.js +38 -0
  367. package/dist/lib/wasm/load-wasm/load-wasm-node.d.ts +3 -0
  368. package/dist/lib/wasm/load-wasm/load-wasm-node.d.ts.map +1 -0
  369. package/dist/lib/wasm/load-wasm/load-wasm-node.js +31 -0
  370. package/dist/lib/wasm/parse-parquet-wasm.d.ts +10 -0
  371. package/dist/lib/wasm/parse-parquet-wasm.d.ts.map +1 -0
  372. package/dist/lib/wasm/parse-parquet-wasm.js +27 -0
  373. package/dist/parquet-loader.d.ts +6 -15
  374. package/dist/parquet-loader.d.ts.map +1 -1
  375. package/dist/parquet-loader.js +38 -19
  376. package/dist/parquet-wasm-loader.d.ts +23 -0
  377. package/dist/parquet-wasm-loader.d.ts.map +1 -0
  378. package/dist/parquet-wasm-loader.js +27 -0
  379. package/dist/parquet-wasm-writer.d.ts +3 -0
  380. package/dist/parquet-wasm-writer.d.ts.map +1 -0
  381. package/dist/parquet-wasm-writer.js +23 -0
  382. package/dist/parquet-worker.js +27 -25
  383. package/dist/parquet-worker.js.map +3 -3
  384. package/dist/parquet-writer.d.ts +3 -2
  385. package/dist/parquet-writer.d.ts.map +1 -1
  386. package/dist/parquet-writer.js +18 -14
  387. package/dist/parquetjs/codecs/declare.js +2 -2
  388. package/dist/parquetjs/codecs/dictionary.js +12 -10
  389. package/dist/parquetjs/codecs/index.js +54 -22
  390. package/dist/parquetjs/codecs/plain.js +173 -232
  391. package/dist/parquetjs/codecs/rle.js +134 -140
  392. package/dist/parquetjs/compression.d.ts +3 -0
  393. package/dist/parquetjs/compression.d.ts.map +1 -1
  394. package/dist/parquetjs/compression.js +169 -48
  395. package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +15 -23
  396. package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
  397. package/dist/parquetjs/encoder/parquet-encoder.js +484 -0
  398. package/dist/parquetjs/parquet-thrift/BoundaryOrder.js +14 -7
  399. package/dist/parquetjs/parquet-thrift/BsonType.js +60 -37
  400. package/dist/parquetjs/parquet-thrift/ColumnChunk.js +209 -215
  401. package/dist/parquetjs/parquet-thrift/ColumnIndex.js +210 -211
  402. package/dist/parquetjs/parquet-thrift/ColumnMetaData.js +394 -421
  403. package/dist/parquetjs/parquet-thrift/ColumnOrder.js +102 -89
  404. package/dist/parquetjs/parquet-thrift/CompressionCodec.js +19 -12
  405. package/dist/parquetjs/parquet-thrift/ConvertedType.js +33 -26
  406. package/dist/parquetjs/parquet-thrift/DataPageHeader.js +165 -161
  407. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js +228 -234
  408. package/dist/parquetjs/parquet-thrift/DateType.js +60 -37
  409. package/dist/parquetjs/parquet-thrift/DecimalType.js +104 -90
  410. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js +121 -112
  411. package/dist/parquetjs/parquet-thrift/Encoding.js +19 -12
  412. package/dist/parquetjs/parquet-thrift/EnumType.js +60 -37
  413. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js +14 -7
  414. package/dist/parquetjs/parquet-thrift/FileMetaData.js +253 -263
  415. package/dist/parquetjs/parquet-thrift/IndexPageHeader.js +60 -37
  416. package/dist/parquetjs/parquet-thrift/IntType.js +104 -90
  417. package/dist/parquetjs/parquet-thrift/JsonType.js +60 -37
  418. package/dist/parquetjs/parquet-thrift/KeyValue.js +101 -88
  419. package/dist/parquetjs/parquet-thrift/ListType.js +60 -37
  420. package/dist/parquetjs/parquet-thrift/LogicalType.js +366 -449
  421. package/dist/parquetjs/parquet-thrift/MapType.js +60 -37
  422. package/dist/parquetjs/parquet-thrift/MicroSeconds.js +60 -37
  423. package/dist/parquetjs/parquet-thrift/MilliSeconds.js +60 -37
  424. package/dist/parquetjs/parquet-thrift/NullType.js +60 -37
  425. package/dist/parquetjs/parquet-thrift/OffsetIndex.js +96 -80
  426. package/dist/parquetjs/parquet-thrift/PageEncodingStats.js +126 -114
  427. package/dist/parquetjs/parquet-thrift/PageHeader.js +218 -231
  428. package/dist/parquetjs/parquet-thrift/PageLocation.js +140 -123
  429. package/dist/parquetjs/parquet-thrift/PageType.js +15 -8
  430. package/dist/parquetjs/parquet-thrift/RowGroup.js +179 -171
  431. package/dist/parquetjs/parquet-thrift/SchemaElement.js +241 -268
  432. package/dist/parquetjs/parquet-thrift/SortingColumn.js +126 -114
  433. package/dist/parquetjs/parquet-thrift/Statistics.js +175 -178
  434. package/dist/parquetjs/parquet-thrift/StringType.js +60 -37
  435. package/dist/parquetjs/parquet-thrift/TimeType.js +105 -91
  436. package/dist/parquetjs/parquet-thrift/TimeUnit.js +124 -119
  437. package/dist/parquetjs/parquet-thrift/TimestampType.js +105 -91
  438. package/dist/parquetjs/parquet-thrift/Type.js +19 -12
  439. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js +60 -37
  440. package/dist/parquetjs/parquet-thrift/UUIDType.js +60 -37
  441. package/dist/parquetjs/parquet-thrift/index.js +65 -44
  442. package/dist/parquetjs/parser/decoders.d.ts +2 -2
  443. package/dist/parquetjs/parser/decoders.d.ts.map +1 -1
  444. package/dist/parquetjs/parser/decoders.js +301 -283
  445. package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
  446. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
  447. package/dist/parquetjs/parser/parquet-reader.js +193 -113
  448. package/dist/parquetjs/schema/declare.d.ts +26 -18
  449. package/dist/parquetjs/schema/declare.d.ts.map +1 -1
  450. package/dist/parquetjs/schema/declare.js +11 -12
  451. package/dist/parquetjs/schema/schema.d.ts +4 -4
  452. package/dist/parquetjs/schema/schema.d.ts.map +1 -1
  453. package/dist/parquetjs/schema/schema.js +148 -162
  454. package/dist/parquetjs/schema/shred.d.ts +33 -12
  455. package/dist/parquetjs/schema/shred.d.ts.map +1 -1
  456. package/dist/parquetjs/schema/shred.js +340 -147
  457. package/dist/parquetjs/schema/types.d.ts +2 -2
  458. package/dist/parquetjs/schema/types.d.ts.map +1 -1
  459. package/dist/parquetjs/schema/types.js +355 -415
  460. package/dist/parquetjs/utils/file-utils.d.ts +5 -4
  461. package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
  462. package/dist/parquetjs/utils/file-utils.js +37 -28
  463. package/dist/parquetjs/utils/read-utils.js +99 -95
  464. package/dist/workers/parquet-worker.js +5 -4
  465. package/package.json +17 -12
  466. package/src/index.ts +58 -7
  467. package/src/lib/arrow/convert-columns-to-row-group.ts +0 -0
  468. package/src/lib/arrow/convert-row-group-to-columns.ts +15 -0
  469. package/src/lib/arrow/convert-schema-from-parquet.ts +104 -0
  470. package/src/lib/arrow/convert-schema-to-parquet.ts +90 -0
  471. package/src/lib/geo/decode-geo-metadata.ts +108 -0
  472. package/src/lib/geo/geoparquet-schema.ts +69 -0
  473. package/src/lib/parsers/parse-parquet-to-columns.ts +60 -0
  474. package/src/lib/parsers/parse-parquet-to-rows.ts +45 -0
  475. package/src/lib/wasm/encode-parquet-wasm.ts +40 -0
  476. package/src/lib/wasm/load-wasm/index.ts +1 -0
  477. package/src/lib/wasm/load-wasm/load-wasm-browser.ts +15 -0
  478. package/src/lib/wasm/load-wasm/load-wasm-node.ts +5 -0
  479. package/src/lib/wasm/parse-parquet-wasm.ts +42 -0
  480. package/src/lib/wip/convert-schema-deep.java.disabled +910 -0
  481. package/src/lib/wip/convert-schema-deep.rs.disabled +976 -0
  482. package/src/parquet-loader.ts +30 -3
  483. package/src/parquet-wasm-loader.ts +36 -0
  484. package/src/parquet-wasm-writer.ts +24 -0
  485. package/src/parquet-writer.ts +4 -1
  486. package/src/parquetjs/compression.ts +24 -7
  487. package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +33 -38
  488. package/src/parquetjs/parser/decoders.ts +3 -3
  489. package/src/parquetjs/parser/parquet-reader.ts +239 -122
  490. package/src/parquetjs/schema/declare.ts +22 -13
  491. package/src/parquetjs/schema/schema.ts +8 -8
  492. package/src/parquetjs/schema/shred.ts +239 -71
  493. package/src/parquetjs/schema/types.ts +25 -30
  494. package/src/parquetjs/utils/file-utils.ts +3 -4
  495. package/dist/bundle.js.map +0 -1
  496. package/dist/constants.js.map +0 -1
  497. package/dist/index.js.map +0 -1
  498. package/dist/lib/convert-schema.d.ts +0 -8
  499. package/dist/lib/convert-schema.d.ts.map +0 -1
  500. package/dist/lib/convert-schema.js +0 -71
  501. package/dist/lib/convert-schema.js.map +0 -1
  502. package/dist/lib/parse-parquet.d.ts +0 -4
  503. package/dist/lib/parse-parquet.d.ts.map +0 -1
  504. package/dist/lib/parse-parquet.js +0 -28
  505. package/dist/lib/parse-parquet.js.map +0 -1
  506. package/dist/lib/read-array-buffer.d.ts +0 -19
  507. package/dist/lib/read-array-buffer.d.ts.map +0 -1
  508. package/dist/lib/read-array-buffer.js +0 -9
  509. package/dist/lib/read-array-buffer.js.map +0 -1
  510. package/dist/parquet-loader.js.map +0 -1
  511. package/dist/parquet-writer.js.map +0 -1
  512. package/dist/parquetjs/codecs/declare.js.map +0 -1
  513. package/dist/parquetjs/codecs/dictionary.js.map +0 -1
  514. package/dist/parquetjs/codecs/index.js.map +0 -1
  515. package/dist/parquetjs/codecs/plain.js.map +0 -1
  516. package/dist/parquetjs/codecs/rle.js.map +0 -1
  517. package/dist/parquetjs/compression.js.map +0 -1
  518. package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
  519. package/dist/parquetjs/encoder/writer.js.map +0 -1
  520. package/dist/parquetjs/file.d.ts +0 -10
  521. package/dist/parquetjs/file.d.ts.map +0 -1
  522. package/dist/parquetjs/file.js +0 -80
  523. package/dist/parquetjs/file.js.map +0 -1
  524. package/dist/parquetjs/parquet-thrift/BoundaryOrder.js.map +0 -1
  525. package/dist/parquetjs/parquet-thrift/BsonType.js.map +0 -1
  526. package/dist/parquetjs/parquet-thrift/ColumnChunk.js.map +0 -1
  527. package/dist/parquetjs/parquet-thrift/ColumnIndex.js.map +0 -1
  528. package/dist/parquetjs/parquet-thrift/ColumnMetaData.js.map +0 -1
  529. package/dist/parquetjs/parquet-thrift/ColumnOrder.js.map +0 -1
  530. package/dist/parquetjs/parquet-thrift/CompressionCodec.js.map +0 -1
  531. package/dist/parquetjs/parquet-thrift/ConvertedType.js.map +0 -1
  532. package/dist/parquetjs/parquet-thrift/DataPageHeader.js.map +0 -1
  533. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +0 -1
  534. package/dist/parquetjs/parquet-thrift/DateType.js.map +0 -1
  535. package/dist/parquetjs/parquet-thrift/DecimalType.js.map +0 -1
  536. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +0 -1
  537. package/dist/parquetjs/parquet-thrift/Encoding.js.map +0 -1
  538. package/dist/parquetjs/parquet-thrift/EnumType.js.map +0 -1
  539. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js.map +0 -1
  540. package/dist/parquetjs/parquet-thrift/FileMetaData.js.map +0 -1
  541. package/dist/parquetjs/parquet-thrift/IndexPageHeader.js.map +0 -1
  542. package/dist/parquetjs/parquet-thrift/IntType.js.map +0 -1
  543. package/dist/parquetjs/parquet-thrift/JsonType.js.map +0 -1
  544. package/dist/parquetjs/parquet-thrift/KeyValue.js.map +0 -1
  545. package/dist/parquetjs/parquet-thrift/ListType.js.map +0 -1
  546. package/dist/parquetjs/parquet-thrift/LogicalType.js.map +0 -1
  547. package/dist/parquetjs/parquet-thrift/MapType.js.map +0 -1
  548. package/dist/parquetjs/parquet-thrift/MicroSeconds.js.map +0 -1
  549. package/dist/parquetjs/parquet-thrift/MilliSeconds.js.map +0 -1
  550. package/dist/parquetjs/parquet-thrift/NullType.js.map +0 -1
  551. package/dist/parquetjs/parquet-thrift/OffsetIndex.js.map +0 -1
  552. package/dist/parquetjs/parquet-thrift/PageEncodingStats.js.map +0 -1
  553. package/dist/parquetjs/parquet-thrift/PageHeader.js.map +0 -1
  554. package/dist/parquetjs/parquet-thrift/PageLocation.js.map +0 -1
  555. package/dist/parquetjs/parquet-thrift/PageType.js.map +0 -1
  556. package/dist/parquetjs/parquet-thrift/RowGroup.js.map +0 -1
  557. package/dist/parquetjs/parquet-thrift/SchemaElement.js.map +0 -1
  558. package/dist/parquetjs/parquet-thrift/SortingColumn.js.map +0 -1
  559. package/dist/parquetjs/parquet-thrift/Statistics.js.map +0 -1
  560. package/dist/parquetjs/parquet-thrift/StringType.js.map +0 -1
  561. package/dist/parquetjs/parquet-thrift/TimeType.js.map +0 -1
  562. package/dist/parquetjs/parquet-thrift/TimeUnit.js.map +0 -1
  563. package/dist/parquetjs/parquet-thrift/TimestampType.js.map +0 -1
  564. package/dist/parquetjs/parquet-thrift/Type.js.map +0 -1
  565. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +0 -1
  566. package/dist/parquetjs/parquet-thrift/UUIDType.js.map +0 -1
  567. package/dist/parquetjs/parquet-thrift/index.js.map +0 -1
  568. package/dist/parquetjs/parser/decoders.js.map +0 -1
  569. package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
  570. package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
  571. package/dist/parquetjs/parser/parquet-cursor.js +0 -90
  572. package/dist/parquetjs/parser/parquet-cursor.js.map +0 -1
  573. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
  574. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
  575. package/dist/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
  576. package/dist/parquetjs/parser/parquet-reader.js.map +0 -1
  577. package/dist/parquetjs/schema/declare.js.map +0 -1
  578. package/dist/parquetjs/schema/schema.js.map +0 -1
  579. package/dist/parquetjs/schema/shred.js.map +0 -1
  580. package/dist/parquetjs/schema/types.js.map +0 -1
  581. package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
  582. package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
  583. package/dist/parquetjs/utils/buffer-utils.js +0 -12
  584. package/dist/parquetjs/utils/buffer-utils.js.map +0 -1
  585. package/dist/parquetjs/utils/file-utils.js.map +0 -1
  586. package/dist/parquetjs/utils/read-utils.js.map +0 -1
  587. package/dist/workers/parquet-worker.js.map +0 -1
  588. package/src/lib/convert-schema.ts +0 -95
  589. package/src/lib/parse-parquet.ts +0 -27
  590. package/src/lib/read-array-buffer.ts +0 -31
  591. package/src/parquetjs/file.ts +0 -90
  592. package/src/parquetjs/parser/parquet-cursor.ts +0 -94
  593. package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
  594. package/src/parquetjs/utils/buffer-utils.ts +0 -18
  595. /package/dist/{parquetjs → es5/parquetjs}/LICENSE +0 -0
  596. /package/dist/{parquetjs → es5/parquetjs}/modules.d.ts +0 -0
package/dist/esm/parquetjs/parser/decoders.js
@@ -0,0 +1,253 @@
+ import { PARQUET_CODECS } from '../codecs';
+ import { ConvertedType, Encoding, FieldRepetitionType, PageType, Type } from '../parquet-thrift';
+ import { decompress } from '../compression';
+ import { PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING } from '../../constants';
+ import { decodePageHeader, getThriftEnum, getBitWidth } from '../utils/read-utils';
+ export async function decodeDataPages(buffer, options) {
+   const cursor = {
+     buffer,
+     offset: 0,
+     size: buffer.length
+   };
+   const data = {
+     rlevels: [],
+     dlevels: [],
+     values: [],
+     pageHeaders: [],
+     count: 0
+   };
+   let dictionary = options.dictionary || [];
+   while (cursor.offset < cursor.size && (!options.numValues || data.dlevels.length < Number(options.numValues))) {
+     const page = await decodePage(cursor, options);
+     if (page.dictionary) {
+       dictionary = page.dictionary;
+       continue;
+     }
+     if (dictionary.length) {
+       page.values = page.values.map(value => dictionary[value]);
+     }
+     for (let index = 0; index < page.rlevels.length; index++) {
+       data.rlevels.push(page.rlevels[index]);
+       data.dlevels.push(page.dlevels[index]);
+       const value = page.values[index];
+       if (value !== undefined) {
+         data.values.push(value);
+       }
+     }
+     data.count += page.count;
+     data.pageHeaders.push(page.pageHeader);
+   }
+   return data;
+ }
+ export async function decodePage(cursor, options) {
+   let page;
+   const {
+     pageHeader,
+     length
+   } = decodePageHeader(cursor.buffer, cursor.offset);
+   cursor.offset += length;
+   const pageType = getThriftEnum(PageType, pageHeader.type);
+   switch (pageType) {
+     case 'DATA_PAGE':
+       page = await decodeDataPage(cursor, pageHeader, options);
+       break;
+     case 'DATA_PAGE_V2':
+       page = await decodeDataPageV2(cursor, pageHeader, options);
+       break;
+     case 'DICTIONARY_PAGE':
+       page = {
+         dictionary: await decodeDictionaryPage(cursor, pageHeader, options),
+         pageHeader
+       };
+       break;
+     default:
+       throw new Error("invalid page type: ".concat(pageType));
+   }
+   return page;
+ }
+ export function decodeSchema(schemaElements, offset, len) {
+   const schema = {};
+   let next = offset;
+   for (let i = 0; i < len; i++) {
+     const schemaElement = schemaElements[next];
+     const repetitionType = next > 0 ? getThriftEnum(FieldRepetitionType, schemaElement.repetition_type) : 'ROOT';
+     let optional = false;
+     let repeated = false;
+     switch (repetitionType) {
+       case 'REQUIRED':
+         break;
+       case 'OPTIONAL':
+         optional = true;
+         break;
+       case 'REPEATED':
+         repeated = true;
+         break;
+       default:
+         throw new Error('parquet: unknown repetition type');
+     }
+     if (schemaElement.num_children > 0) {
+       const res = decodeSchema(schemaElements, next + 1, schemaElement.num_children);
+       next = res.next;
+       schema[schemaElement.name] = {
+         optional,
+         repeated,
+         fields: res.schema
+       };
+     } else {
+       const type = getThriftEnum(Type, schemaElement.type);
+       let logicalType = type;
+       if (schemaElement.converted_type) {
+         logicalType = getThriftEnum(ConvertedType, schemaElement.converted_type);
+       }
+       switch (logicalType) {
+         case 'DECIMAL':
+           logicalType = "".concat(logicalType, "_").concat(type);
+           break;
+         default:
+       }
+       schema[schemaElement.name] = {
+         type: logicalType,
+         typeLength: schemaElement.type_length,
+         presision: schemaElement.precision,
+         scale: schemaElement.scale,
+         optional,
+         repeated
+       };
+       next++;
+     }
+   }
+   return {
+     schema,
+     offset,
+     next
+   };
+ }
+ function decodeValues(type, encoding, cursor, count, opts) {
+   if (!(encoding in PARQUET_CODECS)) {
+     throw new Error("invalid encoding: ".concat(encoding));
+   }
+   return PARQUET_CODECS[encoding].decodeValues(type, cursor, count, opts);
+ }
+ async function decodeDataPage(cursor, header, options) {
+   var _header$data_page_hea, _header$data_page_hea2, _header$data_page_hea3, _header$data_page_hea4;
+   const cursorEnd = cursor.offset + header.compressed_page_size;
+   const valueCount = (_header$data_page_hea = header.data_page_header) === null || _header$data_page_hea === void 0 ? void 0 : _header$data_page_hea.num_values;
+   let dataCursor = cursor;
+   if (options.compression !== 'UNCOMPRESSED') {
+     const valuesBuf = await decompress(options.compression, cursor.buffer.slice(cursor.offset, cursorEnd), header.uncompressed_page_size);
+     dataCursor = {
+       buffer: valuesBuf,
+       offset: 0,
+       size: valuesBuf.length
+     };
+     cursor.offset = cursorEnd;
+   }
+   const rLevelEncoding = getThriftEnum(Encoding, (_header$data_page_hea2 = header.data_page_header) === null || _header$data_page_hea2 === void 0 ? void 0 : _header$data_page_hea2.repetition_level_encoding);
+   let rLevels = new Array(valueCount);
+   if (options.column.rLevelMax > 0) {
+     rLevels = decodeValues(PARQUET_RDLVL_TYPE, rLevelEncoding, dataCursor, valueCount, {
+       bitWidth: getBitWidth(options.column.rLevelMax),
+       disableEnvelope: false
+     });
+   } else {
+     rLevels.fill(0);
+   }
+   const dLevelEncoding = getThriftEnum(Encoding, (_header$data_page_hea3 = header.data_page_header) === null || _header$data_page_hea3 === void 0 ? void 0 : _header$data_page_hea3.definition_level_encoding);
+   let dLevels = new Array(valueCount);
+   if (options.column.dLevelMax > 0) {
+     dLevels = decodeValues(PARQUET_RDLVL_TYPE, dLevelEncoding, dataCursor, valueCount, {
+       bitWidth: getBitWidth(options.column.dLevelMax),
+       disableEnvelope: false
+     });
+   } else {
+     dLevels.fill(0);
+   }
+   let valueCountNonNull = 0;
+   for (const dlvl of dLevels) {
+     if (dlvl === options.column.dLevelMax) {
+       valueCountNonNull++;
+     }
+   }
+   const valueEncoding = getThriftEnum(Encoding, (_header$data_page_hea4 = header.data_page_header) === null || _header$data_page_hea4 === void 0 ? void 0 : _header$data_page_hea4.encoding);
+   const decodeOptions = {
+     typeLength: options.column.typeLength,
+     bitWidth: options.column.typeLength
+   };
+   const values = decodeValues(options.column.primitiveType, valueEncoding, dataCursor, valueCountNonNull, decodeOptions);
+   return {
+     dlevels: dLevels,
+     rlevels: rLevels,
+     values,
+     count: valueCount,
+     pageHeader: header
+   };
+ }
+ async function decodeDataPageV2(cursor, header, opts) {
+   var _header$data_page_hea5, _header$data_page_hea6, _header$data_page_hea7, _header$data_page_hea8;
+   const cursorEnd = cursor.offset + header.compressed_page_size;
+   const valueCount = (_header$data_page_hea5 = header.data_page_header_v2) === null || _header$data_page_hea5 === void 0 ? void 0 : _header$data_page_hea5.num_values;
+   const valueCountNonNull = valueCount - ((_header$data_page_hea6 = header.data_page_header_v2) === null || _header$data_page_hea6 === void 0 ? void 0 : _header$data_page_hea6.num_nulls);
+   const valueEncoding = getThriftEnum(Encoding, (_header$data_page_hea7 = header.data_page_header_v2) === null || _header$data_page_hea7 === void 0 ? void 0 : _header$data_page_hea7.encoding);
+   let rLevels = new Array(valueCount);
+   if (opts.column.rLevelMax > 0) {
+     rLevels = decodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, cursor, valueCount, {
+       bitWidth: getBitWidth(opts.column.rLevelMax),
+       disableEnvelope: true
+     });
+   } else {
+     rLevels.fill(0);
+   }
+   let dLevels = new Array(valueCount);
+   if (opts.column.dLevelMax > 0) {
+     dLevels = decodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, cursor, valueCount, {
+       bitWidth: getBitWidth(opts.column.dLevelMax),
+       disableEnvelope: true
+     });
+   } else {
+     dLevels.fill(0);
+   }
+   let valuesBufCursor = cursor;
+   if ((_header$data_page_hea8 = header.data_page_header_v2) !== null && _header$data_page_hea8 !== void 0 && _header$data_page_hea8.is_compressed) {
+     const valuesBuf = await decompress(opts.compression, cursor.buffer.slice(cursor.offset, cursorEnd), header.uncompressed_page_size);
+     valuesBufCursor = {
+       buffer: valuesBuf,
+       offset: 0,
+       size: valuesBuf.length
+     };
+     cursor.offset = cursorEnd;
+   }
+   const decodeOptions = {
+     typeLength: opts.column.typeLength,
+     bitWidth: opts.column.typeLength
+   };
+   const values = decodeValues(opts.column.primitiveType, valueEncoding, valuesBufCursor, valueCountNonNull, decodeOptions);
+   return {
+     dlevels: dLevels,
+     rlevels: rLevels,
+     values,
+     count: valueCount,
+     pageHeader: header
+   };
+ }
+ async function decodeDictionaryPage(cursor, pageHeader, options) {
+   var _pageHeader$dictionar;
+   const cursorEnd = cursor.offset + pageHeader.compressed_page_size;
+   let dictCursor = {
+     offset: 0,
+     buffer: cursor.buffer.slice(cursor.offset, cursorEnd),
+     size: cursorEnd - cursor.offset
+   };
+   cursor.offset = cursorEnd;
+   if (options.compression !== 'UNCOMPRESSED') {
+     const valuesBuf = await decompress(options.compression, dictCursor.buffer.slice(dictCursor.offset, cursorEnd), pageHeader.uncompressed_page_size);
+     dictCursor = {
+       buffer: valuesBuf,
+       offset: 0,
+       size: valuesBuf.length
+     };
+     cursor.offset = cursorEnd;
+   }
+   const numValues = (pageHeader === null || pageHeader === void 0 ? void 0 : (_pageHeader$dictionar = pageHeader.dictionary_page_header) === null || _pageHeader$dictionar === void 0 ? void 0 : _pageHeader$dictionar.num_values) || 0;
+   return decodeValues(options.column.primitiveType, options.column.encoding, dictCursor, numValues, options).map(d => d.toString());
+ }
+ //# sourceMappingURL=decoders.js.map
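
The hunk above adds the reworked page decoders for the parquetjs reader. As a rough illustration only (not part of the diff), the TypeScript sketch below shows how decodeDataPages might be driven for a single column chunk. The relative import path, the readColumnChunk wrapper and the concrete column-descriptor values are assumptions; the option names (compression, column.primitiveType, encoding, typeLength, rLevelMax, dLevelMax) mirror what the decoder reads.

// Illustrative sketch, not part of the package diff. Assumes the module above is
// importable as './decoders' and that the column descriptor is taken from the
// file's schema and column-chunk metadata.
import {decodeDataPages} from './decoders';

async function readColumnChunk(chunkBuffer: Buffer) {
  const data = await decodeDataPages(chunkBuffer, {
    compression: 'UNCOMPRESSED', // any other codec is routed through decompress()
    column: {
      primitiveType: 'INT64',    // physical type from the schema element
      encoding: 'PLAIN',         // used when a DICTIONARY_PAGE is decoded
      typeLength: 0,
      rLevelMax: 0,              // flat column: repetition levels are all zero
      dLevelMax: 1               // optional column: one definition level
    }
  });
  // rlevels/dlevels describe repetition and nullability; values holds the decoded data
  return {values: data.values, valueCount: data.count};
}
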
package/dist/esm/parquetjs/parser/decoders.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"decoders.js","names":["PARQUET_CODECS","ConvertedType","Encoding","FieldRepetitionType","PageType","Type","decompress","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING","decodePageHeader","getThriftEnum","getBitWidth","decodeDataPages","buffer","options","cursor","offset","size","length","data","rlevels","dlevels","values","pageHeaders","count","dictionary","numValues","Number","page","decodePage","map","value","index","push","undefined","pageHeader","pageType","type","decodeDataPage","decodeDataPageV2","decodeDictionaryPage","Error","concat","decodeSchema","schemaElements","len","schema","next","i","schemaElement","repetitionType","repetition_type","optional","repeated","num_children","res","name","fields","logicalType","converted_type","typeLength","type_length","presision","precision","scale","decodeValues","encoding","opts","header","_header$data_page_hea","_header$data_page_hea2","_header$data_page_hea3","_header$data_page_hea4","cursorEnd","compressed_page_size","valueCount","data_page_header","num_values","dataCursor","compression","valuesBuf","slice","uncompressed_page_size","rLevelEncoding","repetition_level_encoding","rLevels","Array","column","rLevelMax","bitWidth","disableEnvelope","fill","dLevelEncoding","definition_level_encoding","dLevels","dLevelMax","valueCountNonNull","dlvl","valueEncoding","decodeOptions","primitiveType","_header$data_page_hea5","_header$data_page_hea6","_header$data_page_hea7","_header$data_page_hea8","data_page_header_v2","num_nulls","valuesBufCursor","is_compressed","_pageHeader$dictionar","dictCursor","dictionary_page_header","d","toString"],"sources":["../../../../src/parquetjs/parser/decoders.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {\n ParquetCodec,\n ParquetColumnChunk,\n ParquetOptions,\n ParquetPageData,\n ParquetType,\n PrimitiveType,\n SchemaDefinition\n} from '../schema/declare';\nimport {CursorBuffer, ParquetCodecOptions, PARQUET_CODECS} from '../codecs';\nimport {\n ConvertedType,\n Encoding,\n FieldRepetitionType,\n PageHeader,\n PageType,\n SchemaElement,\n Type\n} from '../parquet-thrift';\nimport {decompress} from '../compression';\nimport {PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING} from '../../constants';\nimport {decodePageHeader, getThriftEnum, getBitWidth} from '../utils/read-utils';\n\n/**\n * Decode data pages\n * @param buffer - input data\n * @param column - parquet column\n * @param compression - compression type\n * @returns parquet data page data\n */\nexport async function decodeDataPages(\n buffer: Buffer,\n options: ParquetOptions\n): Promise<ParquetColumnChunk> {\n const cursor: CursorBuffer = {\n buffer,\n offset: 0,\n size: buffer.length\n };\n\n const data: ParquetColumnChunk = {\n rlevels: [],\n dlevels: [],\n values: [],\n pageHeaders: [],\n count: 0\n };\n\n let dictionary = options.dictionary || [];\n\n while (\n // @ts-ignore size can be undefined\n cursor.offset < cursor.size &&\n (!options.numValues || data.dlevels.length < Number(options.numValues))\n ) {\n // Looks like we have to decode these in sequence due to cursor updates?\n const page = await decodePage(cursor, options);\n\n if (page.dictionary) {\n dictionary = page.dictionary;\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (dictionary.length) {\n // eslint-disable-next-line no-loop-func\n page.values = page.values.map((value) => dictionary[value]);\n }\n\n for (let index = 0; index < page.rlevels.length; index++) {\n 
data.rlevels.push(page.rlevels[index]);\n data.dlevels.push(page.dlevels[index]);\n const value = page.values[index];\n\n if (value !== undefined) {\n data.values.push(value);\n }\n }\n\n data.count += page.count;\n data.pageHeaders.push(page.pageHeader);\n }\n\n return data;\n}\n\n/**\n * Decode parquet page based on page type\n * @param cursor\n * @param options\n */\nexport async function decodePage(\n cursor: CursorBuffer,\n options: ParquetOptions\n): Promise<ParquetPageData> {\n let page;\n const {pageHeader, length} = decodePageHeader(cursor.buffer, cursor.offset);\n cursor.offset += length;\n\n const pageType = getThriftEnum(PageType, pageHeader.type);\n\n switch (pageType) {\n case 'DATA_PAGE':\n page = await decodeDataPage(cursor, pageHeader, options);\n break;\n case 'DATA_PAGE_V2':\n page = await decodeDataPageV2(cursor, pageHeader, options);\n break;\n case 'DICTIONARY_PAGE':\n page = {\n dictionary: await decodeDictionaryPage(cursor, pageHeader, options),\n pageHeader\n };\n break;\n default:\n throw new Error(`invalid page type: ${pageType}`);\n }\n\n return page;\n}\n\n/**\n * Decode parquet schema\n * @param schemaElements input schema elements data\n * @param offset offset to read from\n * @param len length of data\n * @returns result.offset\n * result.next - offset at the end of function\n * result.schema - schema read from the input data\n * @todo output offset is the same as input - possibly excess output field\n */\nexport function decodeSchema(\n schemaElements: SchemaElement[],\n offset: number,\n len: number\n): {\n offset: number;\n next: number;\n schema: SchemaDefinition;\n} {\n const schema: SchemaDefinition = {};\n let next = offset;\n for (let i = 0; i < len; i++) {\n const schemaElement = schemaElements[next];\n\n const repetitionType =\n next > 0 ? getThriftEnum(FieldRepetitionType, schemaElement.repetition_type!) : 'ROOT';\n\n let optional = false;\n let repeated = false;\n switch (repetitionType) {\n case 'REQUIRED':\n break;\n case 'OPTIONAL':\n optional = true;\n break;\n case 'REPEATED':\n repeated = true;\n break;\n default:\n throw new Error('parquet: unknown repetition type');\n }\n\n if (schemaElement.num_children! 
> 0) {\n const res = decodeSchema(schemaElements, next + 1, schemaElement.num_children!);\n next = res.next;\n schema[schemaElement.name] = {\n // type: undefined,\n optional,\n repeated,\n fields: res.schema\n };\n } else {\n const type = getThriftEnum(Type, schemaElement.type!);\n let logicalType = type;\n\n if (schemaElement.converted_type) {\n logicalType = getThriftEnum(ConvertedType, schemaElement.converted_type);\n }\n\n switch (logicalType) {\n case 'DECIMAL':\n logicalType = `${logicalType}_${type}` as ParquetType;\n break;\n default:\n }\n\n schema[schemaElement.name] = {\n type: logicalType as ParquetType,\n typeLength: schemaElement.type_length,\n presision: schemaElement.precision,\n scale: schemaElement.scale,\n optional,\n repeated\n };\n next++;\n }\n }\n return {schema, offset, next};\n}\n\n/**\n * Decode a consecutive array of data using one of the parquet encodings\n */\nfunction decodeValues(\n type: PrimitiveType,\n encoding: ParquetCodec,\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): any[] {\n if (!(encoding in PARQUET_CODECS)) {\n throw new Error(`invalid encoding: ${encoding}`);\n }\n return PARQUET_CODECS[encoding].decodeValues(type, cursor, count, opts);\n}\n\n/**\n * Do decoding of parquet dataPage from column chunk\n * @param cursor\n * @param header\n * @param options\n */\nasync function decodeDataPage(\n cursor: CursorBuffer,\n header: PageHeader,\n options: ParquetOptions\n): Promise<ParquetPageData> {\n const cursorEnd = cursor.offset + header.compressed_page_size;\n const valueCount = header.data_page_header?.num_values;\n\n /* uncompress page */\n let dataCursor = cursor;\n\n if (options.compression !== 'UNCOMPRESSED') {\n const valuesBuf = await decompress(\n options.compression,\n cursor.buffer.slice(cursor.offset, cursorEnd),\n header.uncompressed_page_size\n );\n dataCursor = {\n buffer: valuesBuf,\n offset: 0,\n size: valuesBuf.length\n };\n cursor.offset = cursorEnd;\n }\n\n /* read repetition levels */\n const rLevelEncoding = getThriftEnum(\n Encoding,\n header.data_page_header?.repetition_level_encoding!\n ) as ParquetCodec;\n // tslint:disable-next-line:prefer-array-literal\n let rLevels = new Array(valueCount);\n\n if (options.column.rLevelMax > 0) {\n rLevels = decodeValues(PARQUET_RDLVL_TYPE, rLevelEncoding, dataCursor, valueCount!, {\n bitWidth: getBitWidth(options.column.rLevelMax),\n disableEnvelope: false\n // column: opts.column\n });\n } else {\n rLevels.fill(0);\n }\n\n /* read definition levels */\n const dLevelEncoding = getThriftEnum(\n Encoding,\n header.data_page_header?.definition_level_encoding!\n ) as ParquetCodec;\n // tslint:disable-next-line:prefer-array-literal\n let dLevels = new Array(valueCount);\n if (options.column.dLevelMax > 0) {\n dLevels = decodeValues(PARQUET_RDLVL_TYPE, dLevelEncoding, dataCursor, valueCount!, {\n bitWidth: getBitWidth(options.column.dLevelMax),\n disableEnvelope: false\n // column: opts.column\n });\n } else {\n dLevels.fill(0);\n }\n let valueCountNonNull = 0;\n for (const dlvl of dLevels) {\n if (dlvl === options.column.dLevelMax) {\n valueCountNonNull++;\n }\n }\n\n /* read values */\n const valueEncoding = getThriftEnum(Encoding, header.data_page_header?.encoding!) 
as ParquetCodec;\n const decodeOptions = {\n typeLength: options.column.typeLength,\n bitWidth: options.column.typeLength\n };\n\n const values = decodeValues(\n options.column.primitiveType!,\n valueEncoding,\n dataCursor,\n valueCountNonNull,\n decodeOptions\n );\n\n return {\n dlevels: dLevels,\n rlevels: rLevels,\n values,\n count: valueCount!,\n pageHeader: header\n };\n}\n\n/**\n * Do decoding of parquet dataPage in version 2 from column chunk\n * @param cursor\n * @param header\n * @param opts\n * @returns\n */\nasync function decodeDataPageV2(\n cursor: CursorBuffer,\n header: PageHeader,\n opts: any\n): Promise<ParquetPageData> {\n const cursorEnd = cursor.offset + header.compressed_page_size;\n\n const valueCount = header.data_page_header_v2?.num_values;\n // @ts-ignore\n const valueCountNonNull = valueCount - header.data_page_header_v2?.num_nulls;\n const valueEncoding = getThriftEnum(\n Encoding,\n header.data_page_header_v2?.encoding!\n ) as ParquetCodec;\n\n /* read repetition levels */\n // tslint:disable-next-line:prefer-array-literal\n let rLevels = new Array(valueCount);\n if (opts.column.rLevelMax > 0) {\n rLevels = decodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, cursor, valueCount!, {\n bitWidth: getBitWidth(opts.column.rLevelMax),\n disableEnvelope: true\n });\n } else {\n rLevels.fill(0);\n }\n\n /* read definition levels */\n // tslint:disable-next-line:prefer-array-literal\n let dLevels = new Array(valueCount);\n if (opts.column.dLevelMax > 0) {\n dLevels = decodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, cursor, valueCount!, {\n bitWidth: getBitWidth(opts.column.dLevelMax),\n disableEnvelope: true\n });\n } else {\n dLevels.fill(0);\n }\n\n /* read values */\n let valuesBufCursor = cursor;\n\n if (header.data_page_header_v2?.is_compressed) {\n const valuesBuf = await decompress(\n opts.compression,\n cursor.buffer.slice(cursor.offset, cursorEnd),\n header.uncompressed_page_size\n );\n\n valuesBufCursor = {\n buffer: valuesBuf,\n offset: 0,\n size: valuesBuf.length\n };\n\n cursor.offset = cursorEnd;\n }\n\n const decodeOptions = {\n typeLength: opts.column.typeLength,\n bitWidth: opts.column.typeLength\n };\n\n const values = decodeValues(\n opts.column.primitiveType!,\n valueEncoding,\n valuesBufCursor,\n valueCountNonNull,\n decodeOptions\n );\n\n return {\n dlevels: dLevels,\n rlevels: rLevels,\n values,\n count: valueCount!,\n pageHeader: header\n };\n}\n\n/**\n * Do decoding of dictionary page which helps to iterate over all indexes and get dataPage values.\n * @param cursor\n * @param pageHeader\n * @param options\n */\nasync function decodeDictionaryPage(\n cursor: CursorBuffer,\n pageHeader: PageHeader,\n options: ParquetOptions\n): Promise<string[]> {\n const cursorEnd = cursor.offset + pageHeader.compressed_page_size;\n\n let dictCursor = {\n offset: 0,\n buffer: cursor.buffer.slice(cursor.offset, cursorEnd),\n size: cursorEnd - cursor.offset\n };\n\n cursor.offset = cursorEnd;\n\n if (options.compression !== 'UNCOMPRESSED') {\n const valuesBuf = await decompress(\n options.compression,\n dictCursor.buffer.slice(dictCursor.offset, cursorEnd),\n pageHeader.uncompressed_page_size\n );\n\n dictCursor = {\n buffer: valuesBuf,\n offset: 0,\n size: valuesBuf.length\n };\n\n cursor.offset = cursorEnd;\n }\n\n const numValues = pageHeader?.dictionary_page_header?.num_values || 0;\n\n return decodeValues(\n options.column.primitiveType!,\n options.column.encoding!,\n dictCursor,\n numValues,\n options as ParquetCodecOptions\n ).map((d) => 
d.toString());\n}\n"],"mappings":"AAUA,SAA2CA,cAAc,QAAO,WAAW;AAC3E,SACEC,aAAa,EACbC,QAAQ,EACRC,mBAAmB,EAEnBC,QAAQ,EAERC,IAAI,QACC,mBAAmB;AAC1B,SAAQC,UAAU,QAAO,gBAAgB;AACzC,SAAQC,kBAAkB,EAAEC,sBAAsB,QAAO,iBAAiB;AAC1E,SAAQC,gBAAgB,EAAEC,aAAa,EAAEC,WAAW,QAAO,qBAAqB;AAShF,OAAO,eAAeC,eAAeA,CACnCC,MAAc,EACdC,OAAuB,EACM;EAC7B,MAAMC,MAAoB,GAAG;IAC3BF,MAAM;IACNG,MAAM,EAAE,CAAC;IACTC,IAAI,EAAEJ,MAAM,CAACK;EACf,CAAC;EAED,MAAMC,IAAwB,GAAG;IAC/BC,OAAO,EAAE,EAAE;IACXC,OAAO,EAAE,EAAE;IACXC,MAAM,EAAE,EAAE;IACVC,WAAW,EAAE,EAAE;IACfC,KAAK,EAAE;EACT,CAAC;EAED,IAAIC,UAAU,GAAGX,OAAO,CAACW,UAAU,IAAI,EAAE;EAEzC,OAEEV,MAAM,CAACC,MAAM,GAAGD,MAAM,CAACE,IAAI,KAC1B,CAACH,OAAO,CAACY,SAAS,IAAIP,IAAI,CAACE,OAAO,CAACH,MAAM,GAAGS,MAAM,CAACb,OAAO,CAACY,SAAS,CAAC,CAAC,EACvE;IAEA,MAAME,IAAI,GAAG,MAAMC,UAAU,CAACd,MAAM,EAAED,OAAO,CAAC;IAE9C,IAAIc,IAAI,CAACH,UAAU,EAAE;MACnBA,UAAU,GAAGG,IAAI,CAACH,UAAU;MAE5B;IACF;IAEA,IAAIA,UAAU,CAACP,MAAM,EAAE;MAErBU,IAAI,CAACN,MAAM,GAAGM,IAAI,CAACN,MAAM,CAACQ,GAAG,CAAEC,KAAK,IAAKN,UAAU,CAACM,KAAK,CAAC,CAAC;IAC7D;IAEA,KAAK,IAAIC,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGJ,IAAI,CAACR,OAAO,CAACF,MAAM,EAAEc,KAAK,EAAE,EAAE;MACxDb,IAAI,CAACC,OAAO,CAACa,IAAI,CAACL,IAAI,CAACR,OAAO,CAACY,KAAK,CAAC,CAAC;MACtCb,IAAI,CAACE,OAAO,CAACY,IAAI,CAACL,IAAI,CAACP,OAAO,CAACW,KAAK,CAAC,CAAC;MACtC,MAAMD,KAAK,GAAGH,IAAI,CAACN,MAAM,CAACU,KAAK,CAAC;MAEhC,IAAID,KAAK,KAAKG,SAAS,EAAE;QACvBf,IAAI,CAACG,MAAM,CAACW,IAAI,CAACF,KAAK,CAAC;MACzB;IACF;IAEAZ,IAAI,CAACK,KAAK,IAAII,IAAI,CAACJ,KAAK;IACxBL,IAAI,CAACI,WAAW,CAACU,IAAI,CAACL,IAAI,CAACO,UAAU,CAAC;EACxC;EAEA,OAAOhB,IAAI;AACb;AAOA,OAAO,eAAeU,UAAUA,CAC9Bd,MAAoB,EACpBD,OAAuB,EACG;EAC1B,IAAIc,IAAI;EACR,MAAM;IAACO,UAAU;IAAEjB;EAAM,CAAC,GAAGT,gBAAgB,CAACM,MAAM,CAACF,MAAM,EAAEE,MAAM,CAACC,MAAM,CAAC;EAC3ED,MAAM,CAACC,MAAM,IAAIE,MAAM;EAEvB,MAAMkB,QAAQ,GAAG1B,aAAa,CAACN,QAAQ,EAAE+B,UAAU,CAACE,IAAI,CAAC;EAEzD,QAAQD,QAAQ;IACd,KAAK,WAAW;MACdR,IAAI,GAAG,MAAMU,cAAc,CAACvB,MAAM,EAAEoB,UAAU,EAAErB,OAAO,CAAC;MACxD;IACF,KAAK,cAAc;MACjBc,IAAI,GAAG,MAAMW,gBAAgB,CAACxB,MAAM,EAAEoB,UAAU,EAAErB,OAAO,CAAC;MAC1D;IACF,KAAK,iBAAiB;MACpBc,IAAI,GAAG;QACLH,UAAU,EAAE,MAAMe,oBAAoB,CAACzB,MAAM,EAAEoB,UAAU,EAAErB,OAAO,CAAC;QACnEqB;MACF,CAAC;MACD;IACF;MACE,MAAM,IAAIM,KAAK,uBAAAC,MAAA,CAAuBN,QAAQ,CAAE,CAAC;EACrD;EAEA,OAAOR,IAAI;AACb;AAYA,OAAO,SAASe,YAAYA,CAC1BC,cAA+B,EAC/B5B,MAAc,EACd6B,GAAW,EAKX;EACA,MAAMC,MAAwB,GAAG,CAAC,CAAC;EACnC,IAAIC,IAAI,GAAG/B,MAAM;EACjB,KAAK,IAAIgC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,GAAG,EAAEG,CAAC,EAAE,EAAE;IAC5B,MAAMC,aAAa,GAAGL,cAAc,CAACG,IAAI,CAAC;IAE1C,MAAMG,cAAc,GAClBH,IAAI,GAAG,CAAC,GAAGrC,aAAa,CAACP,mBAAmB,EAAE8C,aAAa,CAACE,eAAgB,CAAC,GAAG,MAAM;IAExF,IAAIC,QAAQ,GAAG,KAAK;IACpB,IAAIC,QAAQ,GAAG,KAAK;IACpB,QAAQH,cAAc;MACpB,KAAK,UAAU;QACb;MACF,KAAK,UAAU;QACbE,QAAQ,GAAG,IAAI;QACf;MACF,KAAK,UAAU;QACbC,QAAQ,GAAG,IAAI;QACf;MACF;QACE,MAAM,IAAIZ,KAAK,CAAC,kCAAkC,CAAC;IACvD;IAEA,IAAIQ,aAAa,CAACK,YAAY,GAAI,CAAC,EAAE;MACnC,MAAMC,GAAG,GAAGZ,YAAY,CAACC,cAAc,EAAEG,IAAI,GAAG,CAAC,EAAEE,aAAa,CAACK,YAAa,CAAC;MAC/EP,IAAI,GAAGQ,GAAG,CAACR,IAAI;MACfD,MAAM,CAACG,aAAa,CAACO,IAAI,CAAC,GAAG;QAE3BJ,QAAQ;QACRC,QAAQ;QACRI,MAAM,EAAEF,GAAG,CAACT;MACd,CAAC;IACH,CAAC,MAAM;MACL,MAAMT,IAAI,GAAG3B,aAAa,CAACL,IAAI,EAAE4C,aAAa,CAACZ,IAAK,CAAC;MACrD,IAAIqB,WAAW,GAAGrB,IAAI;MAEtB,IAAIY,aAAa,CAACU,cAAc,EAAE;QAChCD,WAAW,GAAGhD,aAAa,CAACT,aAAa,EAAEgD,aAAa,CAACU,cAAc,CAAC;MAC1E;MAEA,QAAQD,WAAW;QACjB,KAAK,SAAS;UACZA,WAAW,MAAAhB,MAAA,CAAMgB,WAAW,OAAAhB,MAAA,CAAIL,IAAI,CAAiB;UACrD;QACF;MACF;MAEAS,MAAM,CAACG,aAAa,CAACO,IAAI,CAAC,GAAG;QAC3BnB,IAAI,EAAEqB,WAA0B;QAChCE,UAAU,EAAEX,aAAa,CAACY,WAAW;QACrCC,SAAS,EAAEb,aAAa,CAACc,SAAS;QAClCC,KAAK,
EAAEf,aAAa,CAACe,KAAK;QAC1BZ,QAAQ;QACRC;MACF,CAAC;MACDN,IAAI,EAAE;IACR;EACF;EACA,OAAO;IAACD,MAAM;IAAE9B,MAAM;IAAE+B;EAAI,CAAC;AAC/B;AAKA,SAASkB,YAAYA,CACnB5B,IAAmB,EACnB6B,QAAsB,EACtBnD,MAAoB,EACpBS,KAAa,EACb2C,IAAyB,EAClB;EACP,IAAI,EAAED,QAAQ,IAAIlE,cAAc,CAAC,EAAE;IACjC,MAAM,IAAIyC,KAAK,sBAAAC,MAAA,CAAsBwB,QAAQ,CAAE,CAAC;EAClD;EACA,OAAOlE,cAAc,CAACkE,QAAQ,CAAC,CAACD,YAAY,CAAC5B,IAAI,EAAEtB,MAAM,EAAES,KAAK,EAAE2C,IAAI,CAAC;AACzE;AAQA,eAAe7B,cAAcA,CAC3BvB,MAAoB,EACpBqD,MAAkB,EAClBtD,OAAuB,EACG;EAAA,IAAAuD,qBAAA,EAAAC,sBAAA,EAAAC,sBAAA,EAAAC,sBAAA;EAC1B,MAAMC,SAAS,GAAG1D,MAAM,CAACC,MAAM,GAAGoD,MAAM,CAACM,oBAAoB;EAC7D,MAAMC,UAAU,IAAAN,qBAAA,GAAGD,MAAM,CAACQ,gBAAgB,cAAAP,qBAAA,uBAAvBA,qBAAA,CAAyBQ,UAAU;EAGtD,IAAIC,UAAU,GAAG/D,MAAM;EAEvB,IAAID,OAAO,CAACiE,WAAW,KAAK,cAAc,EAAE;IAC1C,MAAMC,SAAS,GAAG,MAAM1E,UAAU,CAChCQ,OAAO,CAACiE,WAAW,EACnBhE,MAAM,CAACF,MAAM,CAACoE,KAAK,CAAClE,MAAM,CAACC,MAAM,EAAEyD,SAAS,CAAC,EAC7CL,MAAM,CAACc,sBACT,CAAC;IACDJ,UAAU,GAAG;MACXjE,MAAM,EAAEmE,SAAS;MACjBhE,MAAM,EAAE,CAAC;MACTC,IAAI,EAAE+D,SAAS,CAAC9D;IAClB,CAAC;IACDH,MAAM,CAACC,MAAM,GAAGyD,SAAS;EAC3B;EAGA,MAAMU,cAAc,GAAGzE,aAAa,CAClCR,QAAQ,GAAAoE,sBAAA,GACRF,MAAM,CAACQ,gBAAgB,cAAAN,sBAAA,uBAAvBA,sBAAA,CAAyBc,yBAC3B,CAAiB;EAEjB,IAAIC,OAAO,GAAG,IAAIC,KAAK,CAACX,UAAU,CAAC;EAEnC,IAAI7D,OAAO,CAACyE,MAAM,CAACC,SAAS,GAAG,CAAC,EAAE;IAChCH,OAAO,GAAGpB,YAAY,CAAC1D,kBAAkB,EAAE4E,cAAc,EAAEL,UAAU,EAAEH,UAAU,EAAG;MAClFc,QAAQ,EAAE9E,WAAW,CAACG,OAAO,CAACyE,MAAM,CAACC,SAAS,CAAC;MAC/CE,eAAe,EAAE;IAEnB,CAAC,CAAC;EACJ,CAAC,MAAM;IACLL,OAAO,CAACM,IAAI,CAAC,CAAC,CAAC;EACjB;EAGA,MAAMC,cAAc,GAAGlF,aAAa,CAClCR,QAAQ,GAAAqE,sBAAA,GACRH,MAAM,CAACQ,gBAAgB,cAAAL,sBAAA,uBAAvBA,sBAAA,CAAyBsB,yBAC3B,CAAiB;EAEjB,IAAIC,OAAO,GAAG,IAAIR,KAAK,CAACX,UAAU,CAAC;EACnC,IAAI7D,OAAO,CAACyE,MAAM,CAACQ,SAAS,GAAG,CAAC,EAAE;IAChCD,OAAO,GAAG7B,YAAY,CAAC1D,kBAAkB,EAAEqF,cAAc,EAAEd,UAAU,EAAEH,UAAU,EAAG;MAClFc,QAAQ,EAAE9E,WAAW,CAACG,OAAO,CAACyE,MAAM,CAACQ,SAAS,CAAC;MAC/CL,eAAe,EAAE;IAEnB,CAAC,CAAC;EACJ,CAAC,MAAM;IACLI,OAAO,CAACH,IAAI,CAAC,CAAC,CAAC;EACjB;EACA,IAAIK,iBAAiB,GAAG,CAAC;EACzB,KAAK,MAAMC,IAAI,IAAIH,OAAO,EAAE;IAC1B,IAAIG,IAAI,KAAKnF,OAAO,CAACyE,MAAM,CAACQ,SAAS,EAAE;MACrCC,iBAAiB,EAAE;IACrB;EACF;EAGA,MAAME,aAAa,GAAGxF,aAAa,CAACR,QAAQ,GAAAsE,sBAAA,GAAEJ,MAAM,CAACQ,gBAAgB,cAAAJ,sBAAA,uBAAvBA,sBAAA,CAAyBN,QAAS,CAAiB;EACjG,MAAMiC,aAAa,GAAG;IACpBvC,UAAU,EAAE9C,OAAO,CAACyE,MAAM,CAAC3B,UAAU;IACrC6B,QAAQ,EAAE3E,OAAO,CAACyE,MAAM,CAAC3B;EAC3B,CAAC;EAED,MAAMtC,MAAM,GAAG2C,YAAY,CACzBnD,OAAO,CAACyE,MAAM,CAACa,aAAa,EAC5BF,aAAa,EACbpB,UAAU,EACVkB,iBAAiB,EACjBG,aACF,CAAC;EAED,OAAO;IACL9E,OAAO,EAAEyE,OAAO;IAChB1E,OAAO,EAAEiE,OAAO;IAChB/D,MAAM;IACNE,KAAK,EAAEmD,UAAW;IAClBxC,UAAU,EAAEiC;EACd,CAAC;AACH;AASA,eAAe7B,gBAAgBA,CAC7BxB,MAAoB,EACpBqD,MAAkB,EAClBD,IAAS,EACiB;EAAA,IAAAkC,sBAAA,EAAAC,sBAAA,EAAAC,sBAAA,EAAAC,sBAAA;EAC1B,MAAM/B,SAAS,GAAG1D,MAAM,CAACC,MAAM,GAAGoD,MAAM,CAACM,oBAAoB;EAE7D,MAAMC,UAAU,IAAA0B,sBAAA,GAAGjC,MAAM,CAACqC,mBAAmB,cAAAJ,sBAAA,uBAA1BA,sBAAA,CAA4BxB,UAAU;EAEzD,MAAMmB,iBAAiB,GAAGrB,UAAU,KAAA2B,sBAAA,GAAGlC,MAAM,CAACqC,mBAAmB,cAAAH,sBAAA,uBAA1BA,sBAAA,CAA4BI,SAAS;EAC5E,MAAMR,aAAa,GAAGxF,aAAa,CACjCR,QAAQ,GAAAqG,sBAAA,GACRnC,MAAM,CAACqC,mBAAmB,cAAAF,sBAAA,uBAA1BA,sBAAA,CAA4BrC,QAC9B,CAAiB;EAIjB,IAAImB,OAAO,GAAG,IAAIC,KAAK,CAACX,UAAU,CAAC;EACnC,IAAIR,IAAI,CAACoB,MAAM,CAACC,SAAS,GAAG,CAAC,EAAE;IAC7BH,OAAO,GAAGpB,YAAY,CAAC1D,kBAAkB,EAAEC,sBAAsB,EAAEO,MAAM,EAAE4D,UAAU,EAAG;MACtFc,QAAQ,EAAE9E,WAAW,CAACwD,IAAI,CAACoB,MAAM,CAACC,SAAS,CAAC;MAC5CE,eAAe,EAAE;IACnB,CAAC,CAAC;EACJ,CAAC,MAAM;IACLL,OAAO,CAACM,IAAI,CAAC,CAAC,CAAC;EACjB;EAIA,IAAIG,OAAO,GAAG,IAAIR,KAAK,CAACX,UAAU,
CAAC;EACnC,IAAIR,IAAI,CAACoB,MAAM,CAACQ,SAAS,GAAG,CAAC,EAAE;IAC7BD,OAAO,GAAG7B,YAAY,CAAC1D,kBAAkB,EAAEC,sBAAsB,EAAEO,MAAM,EAAE4D,UAAU,EAAG;MACtFc,QAAQ,EAAE9E,WAAW,CAACwD,IAAI,CAACoB,MAAM,CAACQ,SAAS,CAAC;MAC5CL,eAAe,EAAE;IACnB,CAAC,CAAC;EACJ,CAAC,MAAM;IACLI,OAAO,CAACH,IAAI,CAAC,CAAC,CAAC;EACjB;EAGA,IAAIgB,eAAe,GAAG5F,MAAM;EAE5B,KAAAyF,sBAAA,GAAIpC,MAAM,CAACqC,mBAAmB,cAAAD,sBAAA,eAA1BA,sBAAA,CAA4BI,aAAa,EAAE;IAC7C,MAAM5B,SAAS,GAAG,MAAM1E,UAAU,CAChC6D,IAAI,CAACY,WAAW,EAChBhE,MAAM,CAACF,MAAM,CAACoE,KAAK,CAAClE,MAAM,CAACC,MAAM,EAAEyD,SAAS,CAAC,EAC7CL,MAAM,CAACc,sBACT,CAAC;IAEDyB,eAAe,GAAG;MAChB9F,MAAM,EAAEmE,SAAS;MACjBhE,MAAM,EAAE,CAAC;MACTC,IAAI,EAAE+D,SAAS,CAAC9D;IAClB,CAAC;IAEDH,MAAM,CAACC,MAAM,GAAGyD,SAAS;EAC3B;EAEA,MAAM0B,aAAa,GAAG;IACpBvC,UAAU,EAAEO,IAAI,CAACoB,MAAM,CAAC3B,UAAU;IAClC6B,QAAQ,EAAEtB,IAAI,CAACoB,MAAM,CAAC3B;EACxB,CAAC;EAED,MAAMtC,MAAM,GAAG2C,YAAY,CACzBE,IAAI,CAACoB,MAAM,CAACa,aAAa,EACzBF,aAAa,EACbS,eAAe,EACfX,iBAAiB,EACjBG,aACF,CAAC;EAED,OAAO;IACL9E,OAAO,EAAEyE,OAAO;IAChB1E,OAAO,EAAEiE,OAAO;IAChB/D,MAAM;IACNE,KAAK,EAAEmD,UAAW;IAClBxC,UAAU,EAAEiC;EACd,CAAC;AACH;AAQA,eAAe5B,oBAAoBA,CACjCzB,MAAoB,EACpBoB,UAAsB,EACtBrB,OAAuB,EACJ;EAAA,IAAA+F,qBAAA;EACnB,MAAMpC,SAAS,GAAG1D,MAAM,CAACC,MAAM,GAAGmB,UAAU,CAACuC,oBAAoB;EAEjE,IAAIoC,UAAU,GAAG;IACf9F,MAAM,EAAE,CAAC;IACTH,MAAM,EAAEE,MAAM,CAACF,MAAM,CAACoE,KAAK,CAAClE,MAAM,CAACC,MAAM,EAAEyD,SAAS,CAAC;IACrDxD,IAAI,EAAEwD,SAAS,GAAG1D,MAAM,CAACC;EAC3B,CAAC;EAEDD,MAAM,CAACC,MAAM,GAAGyD,SAAS;EAEzB,IAAI3D,OAAO,CAACiE,WAAW,KAAK,cAAc,EAAE;IAC1C,MAAMC,SAAS,GAAG,MAAM1E,UAAU,CAChCQ,OAAO,CAACiE,WAAW,EACnB+B,UAAU,CAACjG,MAAM,CAACoE,KAAK,CAAC6B,UAAU,CAAC9F,MAAM,EAAEyD,SAAS,CAAC,EACrDtC,UAAU,CAAC+C,sBACb,CAAC;IAED4B,UAAU,GAAG;MACXjG,MAAM,EAAEmE,SAAS;MACjBhE,MAAM,EAAE,CAAC;MACTC,IAAI,EAAE+D,SAAS,CAAC9D;IAClB,CAAC;IAEDH,MAAM,CAACC,MAAM,GAAGyD,SAAS;EAC3B;EAEA,MAAM/C,SAAS,GAAG,CAAAS,UAAU,aAAVA,UAAU,wBAAA0E,qBAAA,GAAV1E,UAAU,CAAE4E,sBAAsB,cAAAF,qBAAA,uBAAlCA,qBAAA,CAAoChC,UAAU,KAAI,CAAC;EAErE,OAAOZ,YAAY,CACjBnD,OAAO,CAACyE,MAAM,CAACa,aAAa,EAC5BtF,OAAO,CAACyE,MAAM,CAACrB,QAAQ,EACvB4C,UAAU,EACVpF,SAAS,EACTZ,OACF,CAAC,CAACgB,GAAG,CAAEkF,CAAC,IAAKA,CAAC,CAACC,QAAQ,CAAC,CAAC,CAAC;AAC5B"}
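The decoders embedded in the source map above (decodePage, decodeDataPage, decodeDataPageV2, decodeDictionaryPage) handle dictionary encoding in two steps: a DICTIONARY_PAGE is decoded once into an array of values, and the data pages then store integer indices into that array. Below is a minimal TypeScript sketch of that index-resolution step; the helper name is hypothetical and the logic is simplified, but it follows the dictionary handling visible in the decoders above.

```ts
/**
 * Hypothetical helper, illustrative only.
 * `dictionary` is the value array produced by decoding a DICTIONARY_PAGE;
 * `pageValues` are the raw values decoded from a DATA_PAGE / DATA_PAGE_V2.
 */
function resolveDictionaryIndices(dictionary: string[], pageValues: number[]): unknown[] {
  if (dictionary.length === 0) {
    // No dictionary page (e.g. dictionary_page_offset === 0): the data page
    // already holds materialized values, so return them unchanged.
    return pageValues;
  }
  // Otherwise every data-page value is an index into the dictionary.
  return pageValues.map((index) => dictionary[index]);
}
```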
@@ -1,93 +1,141 @@
  import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+ import { ParquetSchema } from '../schema/schema';
+ import { decodeSchema } from './decoders';
+ import { materializeRows } from '../schema/shred';
  import { PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED } from '../../constants';
  import { CompressionCodec, Type } from '../parquet-thrift';
  import { decodeFileMetadata, getThriftEnum, fieldIndexOf } from '../utils/read-utils';
  import { decodeDataPages, decodePage } from './decoders';
- const DEFAULT_DICTIONARY_SIZE = 1e6;
- export class ParquetEnvelopeReader {
- static async openBuffer(buffer) {
- const readFn = (position, length) => Promise.resolve(buffer.slice(position, position + length));
-
- const closeFn = () => Promise.resolve();
-
- return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);
+ const DEFAULT_PROPS = {
+ defaultDictionarySize: 1e6
+ };
+ export class ParquetReader {
+ constructor(file, props) {
+ _defineProperty(this, "props", void 0);
+ _defineProperty(this, "file", void 0);
+ _defineProperty(this, "metadata", null);
+ this.file = file;
+ this.props = {
+ ...DEFAULT_PROPS,
+ ...props
+ };
+ }
+ close() {
+ this.file.close();
+ }
+ async *rowIterator(props) {
+ for await (const rows of this.rowBatchIterator(props)) {
+ for (const row of rows) {
+ yield row;
+ }
+ }
+ }
+ async *rowBatchIterator(props) {
+ const schema = await this.getSchema();
+ for await (const rowGroup of this.rowGroupIterator(props)) {
+ yield materializeRows(schema, rowGroup);
+ }
+ }
+ async *rowGroupIterator(props) {
+ const columnList = ((props === null || props === void 0 ? void 0 : props.columnList) || []).map(x => Array.isArray(x) ? x : [x]);
+ const metadata = await this.getFileMetadata();
+ const schema = await this.getSchema();
+ const rowGroupCount = (metadata === null || metadata === void 0 ? void 0 : metadata.row_groups.length) || 0;
+ for (let rowGroupIndex = 0; rowGroupIndex < rowGroupCount; rowGroupIndex++) {
+ const rowGroup = await this.readRowGroup(schema, metadata.row_groups[rowGroupIndex], columnList);
+ yield rowGroup;
+ }
  }
-
- constructor(read, close, fileSize, options) {
- _defineProperty(this, "read", void 0);
-
- _defineProperty(this, "close", void 0);
-
- _defineProperty(this, "fileSize", void 0);
-
- _defineProperty(this, "defaultDictionarySize", void 0);
-
- this.read = read;
- this.close = close;
- this.fileSize = fileSize;
- this.defaultDictionarySize = (options === null || options === void 0 ? void 0 : options.defaultDictionarySize) || DEFAULT_DICTIONARY_SIZE;
+ async getRowCount() {
+ const metadata = await this.getFileMetadata();
+ return Number(metadata.num_rows);
+ }
+ async getSchema() {
+ const metadata = await this.getFileMetadata();
+ const root = metadata.schema[0];
+ const {
+ schema: schemaDefinition
+ } = decodeSchema(metadata.schema, 1, root.num_children);
+ const schema = new ParquetSchema(schemaDefinition);
+ return schema;
+ }
+ async getSchemaMetadata() {
+ const metadata = await this.getFileMetadata();
+ const md = {};
+ for (const kv of metadata.key_value_metadata) {
+ md[kv.key] = kv.value;
+ }
+ return md;
+ }
+ async getFileMetadata() {
+ if (!this.metadata) {
+ await this.readHeader();
+ this.metadata = this.readFooter();
+ }
+ return this.metadata;
  }
-
  async readHeader() {
- const buffer = await this.read(0, PARQUET_MAGIC.length);
+ const buffer = await this.file.read(0, PARQUET_MAGIC.length);
  const magic = buffer.toString();
-
  switch (magic) {
  case PARQUET_MAGIC:
  break;
-
  case PARQUET_MAGIC_ENCRYPTED:
  throw new Error('Encrypted parquet file not supported');
-
  default:
  throw new Error("Invalid parquet file (magic=".concat(magic, ")"));
  }
  }
-
+ async readFooter() {
+ const trailerLen = PARQUET_MAGIC.length + 4;
+ const trailerBuf = await this.file.read(this.file.size - trailerLen, trailerLen);
+ const magic = trailerBuf.slice(4).toString();
+ if (magic !== PARQUET_MAGIC) {
+ throw new Error("Not a valid parquet file (magic=\"".concat(magic, ")"));
+ }
+ const metadataSize = trailerBuf.readUInt32LE(0);
+ const metadataOffset = this.file.size - metadataSize - trailerLen;
+ if (metadataOffset < PARQUET_MAGIC.length) {
+ throw new Error("Invalid metadata size ".concat(metadataOffset));
+ }
+ const metadataBuf = await this.file.read(metadataOffset, metadataSize);
+ const {
+ metadata
+ } = decodeFileMetadata(metadataBuf);
+ return metadata;
+ }
  async readRowGroup(schema, rowGroup, columnList) {
  const buffer = {
  rowCount: Number(rowGroup.num_rows),
  columnData: {}
  };
-
  for (const colChunk of rowGroup.columns) {
  const colMetadata = colChunk.meta_data;
  const colKey = colMetadata === null || colMetadata === void 0 ? void 0 : colMetadata.path_in_schema;
-
  if (columnList.length > 0 && fieldIndexOf(columnList, colKey) < 0) {
  continue;
  }
-
  buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
  }
-
  return buffer;
  }
-
  async readColumnChunk(schema, colChunk) {
  var _colChunk$meta_data, _colChunk$meta_data2, _colChunk$meta_data3, _colChunk$meta_data4, _colChunk$meta_data5, _colChunk$meta_data7, _colChunk$meta_data8, _options$dictionary;
-
  if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
  throw new Error('external references are not supported');
  }
-
  const field = schema.findField((_colChunk$meta_data = colChunk.meta_data) === null || _colChunk$meta_data === void 0 ? void 0 : _colChunk$meta_data.path_in_schema);
  const type = getThriftEnum(Type, (_colChunk$meta_data2 = colChunk.meta_data) === null || _colChunk$meta_data2 === void 0 ? void 0 : _colChunk$meta_data2.type);
-
  if (type !== field.primitiveType) {
  throw new Error("chunk type not matching schema: ".concat(type));
  }
-
  const compression = getThriftEnum(CompressionCodec, (_colChunk$meta_data3 = colChunk.meta_data) === null || _colChunk$meta_data3 === void 0 ? void 0 : _colChunk$meta_data3.codec);
  const pagesOffset = Number((_colChunk$meta_data4 = colChunk.meta_data) === null || _colChunk$meta_data4 === void 0 ? void 0 : _colChunk$meta_data4.data_page_offset);
  let pagesSize = Number((_colChunk$meta_data5 = colChunk.meta_data) === null || _colChunk$meta_data5 === void 0 ? void 0 : _colChunk$meta_data5.total_compressed_size);
-
  if (!colChunk.file_path) {
  var _colChunk$meta_data6;
-
- pagesSize = Math.min(this.fileSize - pagesOffset, Number((_colChunk$meta_data6 = colChunk.meta_data) === null || _colChunk$meta_data6 === void 0 ? void 0 : _colChunk$meta_data6.total_compressed_size));
+ pagesSize = Math.min(this.file.size - pagesOffset, Number((_colChunk$meta_data6 = colChunk.meta_data) === null || _colChunk$meta_data6 === void 0 ? void 0 : _colChunk$meta_data6.total_compressed_size));
  }
-
  const options = {
  type,
  rLevelMax: field.rLevelMax,
@@ -99,26 +147,23 @@ export class ParquetEnvelopeReader {
  };
  let dictionary;
  const dictionaryPageOffset = colChunk === null || colChunk === void 0 ? void 0 : (_colChunk$meta_data8 = colChunk.meta_data) === null || _colChunk$meta_data8 === void 0 ? void 0 : _colChunk$meta_data8.dictionary_page_offset;
-
  if (dictionaryPageOffset) {
  const dictionaryOffset = Number(dictionaryPageOffset);
  dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
  }
-
  dictionary = (_options$dictionary = options.dictionary) !== null && _options$dictionary !== void 0 && _options$dictionary.length ? options.dictionary : dictionary;
- const pagesBuf = await this.read(pagesOffset, pagesSize);
- return await decodeDataPages(pagesBuf, { ...options,
+ const pagesBuf = await this.file.read(pagesOffset, pagesSize);
+ return await decodeDataPages(pagesBuf, {
+ ...options,
  dictionary
  });
  }
-
  async getDictionary(dictionaryPageOffset, options, pagesOffset) {
  if (dictionaryPageOffset === 0) {
  return [];
  }
-
- const dictionarySize = Math.min(this.fileSize - dictionaryPageOffset, this.defaultDictionarySize);
- const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);
+ const dictionarySize = Math.min(this.file.size - dictionaryPageOffset, this.props.defaultDictionarySize);
+ const pagesBuf = await this.file.read(dictionaryPageOffset, dictionarySize);
  const cursor = {
  buffer: pagesBuf,
  offset: 0,
@@ -127,29 +172,5 @@ export class ParquetEnvelopeReader {
  const decodedPage = await decodePage(cursor, options);
  return decodedPage.dictionary;
  }
-
- async readFooter() {
- const trailerLen = PARQUET_MAGIC.length + 4;
- const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);
- const magic = trailerBuf.slice(4).toString();
-
- if (magic !== PARQUET_MAGIC) {
- throw new Error("Not a valid parquet file (magic=\"".concat(magic, ")"));
- }
-
- const metadataSize = trailerBuf.readUInt32LE(0);
- const metadataOffset = this.fileSize - metadataSize - trailerLen;
-
- if (metadataOffset < PARQUET_MAGIC.length) {
- throw new Error("Invalid metadata size ".concat(metadataOffset));
- }
-
- const metadataBuf = await this.read(metadataOffset, metadataSize);
- const {
- metadata
- } = decodeFileMetadata(metadataBuf);
- return metadata;
- }
-
  }
- //# sourceMappingURL=parquet-envelope-reader.js.map
+ //# sourceMappingURL=parquet-reader.js.map
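Taken together, these hunks replace the callback-based ParquetEnvelopeReader with a ParquetReader that wraps a file object and relies only on file.size, file.read(offset, length) and file.close(). The following is a rough usage sketch built from the methods visible in this diff; the import path and the in-memory file object are assumptions for illustration (the class is compiled from src/parquetjs/parser/parquet-reader.ts per the source map below), not a documented API.

```ts
import {Buffer} from 'node:buffer';
// Assumed import path; the diff only shows the compiled internal module.
import {ParquetReader} from '@loaders.gl/parquet';

async function dumpRows(data: ArrayBuffer): Promise<void> {
  // Minimal in-memory stand-in for the ReadableFile-style object the new constructor expects.
  const file = {
    size: data.byteLength,
    read: (offset: number, length: number) => Promise.resolve(Buffer.from(data, offset, length)),
    close: () => Promise.resolve()
  };

  const reader = new ParquetReader(file, {defaultDictionarySize: 1e6});
  console.log('rows:', await reader.getRowCount());
  for await (const row of reader.rowIterator()) {
    console.log(row); // rowBatchIterator() yields whole row batches instead
  }
  reader.close();
}
```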
@@ -0,0 +1 @@
+ {"version":3,"file":"parquet-reader.js","names":["ParquetSchema","decodeSchema","materializeRows","PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","CompressionCodec","Type","decodeFileMetadata","getThriftEnum","fieldIndexOf","decodeDataPages","decodePage","DEFAULT_PROPS","defaultDictionarySize","ParquetReader","constructor","file","props","_defineProperty","close","rowIterator","rows","rowBatchIterator","row","schema","getSchema","rowGroup","rowGroupIterator","columnList","map","x","Array","isArray","metadata","getFileMetadata","rowGroupCount","row_groups","length","rowGroupIndex","readRowGroup","getRowCount","Number","num_rows","root","schemaDefinition","num_children","getSchemaMetadata","md","kv","key_value_metadata","key","value","readHeader","readFooter","buffer","read","magic","toString","Error","concat","trailerLen","trailerBuf","size","slice","metadataSize","readUInt32LE","metadataOffset","metadataBuf","rowCount","columnData","colChunk","columns","colMetadata","meta_data","colKey","path_in_schema","join","readColumnChunk","_colChunk$meta_data","_colChunk$meta_data2","_colChunk$meta_data3","_colChunk$meta_data4","_colChunk$meta_data5","_colChunk$meta_data7","_colChunk$meta_data8","_options$dictionary","file_path","undefined","field","findField","type","primitiveType","compression","codec","pagesOffset","data_page_offset","pagesSize","total_compressed_size","_colChunk$meta_data6","Math","min","options","rLevelMax","dLevelMax","column","numValues","num_values","dictionary","dictionaryPageOffset","dictionary_page_offset","dictionaryOffset","getDictionary","pagesBuf","dictionarySize","cursor","offset","decodedPage"],"sources":["../../../../src/parquetjs/parser/parquet-reader.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport type {ReadableFile} from '@loaders.gl/loader-utils';\n\nimport {ParquetSchema} from '../schema/schema';\nimport {decodeSchema} from './decoders';\nimport {materializeRows} from '../schema/shred';\n\nimport {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';\nimport {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';\nimport {\n ParquetRowGroup,\n ParquetCompression,\n ParquetColumnChunk,\n PrimitiveType,\n ParquetOptions\n} from '../schema/declare';\nimport {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';\nimport {decodeDataPages, decodePage} from './decoders';\n\nexport type ParquetReaderProps = {\n defaultDictionarySize?: number;\n};\n\n/** Properties for initializing a ParquetRowGroupReader */\nexport type ParquetIterationProps = {\n /** Filter allowing some columns to be dropped */\n columnList?: string[] | string[][];\n};\n\nconst DEFAULT_PROPS: Required<ParquetReaderProps> = {\n defaultDictionarySize: 1e6\n};\n\n/**\n * The parquet envelope reader allows direct, unbuffered access to the individual\n * sections of the parquet file, namely the header, footer and the row groups.\n * This class is intended for advanced/internal users; if you just want to retrieve\n * rows from a parquet file use the ParquetReader instead\n */\nexport class ParquetReader {\n props: Required<ParquetReaderProps>;\n file: ReadableFile;\n metadata: Promise<FileMetaData> | null = null;\n\n constructor(file: ReadableFile, props?: ParquetReaderProps) {\n this.file = file;\n this.props = {...DEFAULT_PROPS, ...props};\n }\n\n close(): void {\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.file.close();\n 
}\n\n // HIGH LEVEL METHODS\n\n /** Yield one row at a time */\n async *rowIterator(props?: ParquetIterationProps) {\n for await (const rows of this.rowBatchIterator(props)) {\n // yield *rows\n for (const row of rows) {\n yield row;\n }\n }\n }\n\n /** Yield one batch of rows at a time */\n async *rowBatchIterator(props?: ParquetIterationProps) {\n const schema = await this.getSchema();\n for await (const rowGroup of this.rowGroupIterator(props)) {\n yield materializeRows(schema, rowGroup);\n }\n }\n\n /** Iterate over the raw row groups */\n async *rowGroupIterator(props?: ParquetIterationProps) {\n // Ensure strings are nested in arrays\n const columnList: string[][] = (props?.columnList || []).map((x) =>\n Array.isArray(x) ? x : [x]\n );\n\n const metadata = await this.getFileMetadata();\n const schema = await this.getSchema();\n\n const rowGroupCount = metadata?.row_groups.length || 0;\n\n for (let rowGroupIndex = 0; rowGroupIndex < rowGroupCount; rowGroupIndex++) {\n const rowGroup = await this.readRowGroup(\n schema,\n metadata.row_groups[rowGroupIndex],\n columnList\n );\n yield rowGroup;\n }\n }\n\n async getRowCount(): Promise<number> {\n const metadata = await this.getFileMetadata();\n return Number(metadata.num_rows);\n }\n\n async getSchema(): Promise<ParquetSchema> {\n const metadata = await this.getFileMetadata();\n const root = metadata.schema[0];\n const {schema: schemaDefinition} = decodeSchema(metadata.schema, 1, root.num_children!);\n const schema = new ParquetSchema(schemaDefinition);\n return schema;\n }\n\n /**\n * Returns the user (key/value) metadata for this file\n * In parquet this is not stored on the schema like it is in arrow\n */\n async getSchemaMetadata(): Promise<Record<string, string>> {\n const metadata = await this.getFileMetadata();\n const md: Record<string, string> = {};\n for (const kv of metadata.key_value_metadata!) 
{\n md[kv.key] = kv.value!;\n }\n return md;\n }\n\n async getFileMetadata(): Promise<FileMetaData> {\n if (!this.metadata) {\n await this.readHeader();\n this.metadata = this.readFooter();\n }\n return this.metadata;\n }\n\n // LOW LEVEL METHODS\n\n /** Metadata is stored in the footer */\n async readHeader(): Promise<void> {\n const buffer = await this.file.read(0, PARQUET_MAGIC.length);\n const magic = buffer.toString();\n switch (magic) {\n case PARQUET_MAGIC:\n break;\n case PARQUET_MAGIC_ENCRYPTED:\n throw new Error('Encrypted parquet file not supported');\n default:\n throw new Error(`Invalid parquet file (magic=${magic})`);\n }\n }\n\n /** Metadata is stored in the footer */\n async readFooter(): Promise<FileMetaData> {\n const trailerLen = PARQUET_MAGIC.length + 4;\n const trailerBuf = await this.file.read(this.file.size - trailerLen, trailerLen);\n\n const magic = trailerBuf.slice(4).toString();\n if (magic !== PARQUET_MAGIC) {\n throw new Error(`Not a valid parquet file (magic=\"${magic})`);\n }\n\n const metadataSize = trailerBuf.readUInt32LE(0);\n const metadataOffset = this.file.size - metadataSize - trailerLen;\n if (metadataOffset < PARQUET_MAGIC.length) {\n throw new Error(`Invalid metadata size ${metadataOffset}`);\n }\n\n const metadataBuf = await this.file.read(metadataOffset, metadataSize);\n // let metadata = new parquet_thrift.FileMetaData();\n // parquet_util.decodeThrift(metadata, metadataBuf);\n const {metadata} = decodeFileMetadata(metadataBuf);\n return metadata;\n }\n\n /** Data is stored in row groups (similar to Apache Arrow record batches) */\n async readRowGroup(\n schema: ParquetSchema,\n rowGroup: RowGroup,\n columnList: string[][]\n ): Promise<ParquetRowGroup> {\n const buffer: ParquetRowGroup = {\n rowCount: Number(rowGroup.num_rows),\n columnData: {}\n };\n for (const colChunk of rowGroup.columns) {\n const colMetadata = colChunk.meta_data;\n const colKey = colMetadata?.path_in_schema;\n if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {\n continue; // eslint-disable-line no-continue\n }\n buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);\n }\n return buffer;\n }\n\n /**\n * Each row group contains column chunks for all the columns.\n */\n async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetColumnChunk> {\n if (colChunk.file_path !== undefined && colChunk.file_path !== null) {\n throw new Error('external references are not supported');\n }\n\n const field = schema.findField(colChunk.meta_data?.path_in_schema!);\n const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) 
as any;\n\n if (type !== field.primitiveType) {\n throw new Error(`chunk type not matching schema: ${type}`);\n }\n\n const compression: ParquetCompression = getThriftEnum(\n CompressionCodec,\n colChunk.meta_data?.codec!\n ) as any;\n\n const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);\n let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);\n\n if (!colChunk.file_path) {\n pagesSize = Math.min(\n this.file.size - pagesOffset,\n Number(colChunk.meta_data?.total_compressed_size)\n );\n }\n\n const options: ParquetOptions = {\n type,\n rLevelMax: field.rLevelMax,\n dLevelMax: field.dLevelMax,\n compression,\n column: field,\n numValues: colChunk.meta_data?.num_values,\n dictionary: []\n };\n\n let dictionary;\n\n const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;\n\n if (dictionaryPageOffset) {\n const dictionaryOffset = Number(dictionaryPageOffset);\n // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.\n dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);\n }\n\n dictionary = options.dictionary?.length ? options.dictionary : dictionary;\n const pagesBuf = await this.file.read(pagesOffset, pagesSize);\n return await decodeDataPages(pagesBuf, {...options, dictionary});\n }\n\n /**\n * Getting dictionary for allows to flatten values by indices.\n * @param dictionaryPageOffset\n * @param options\n * @param pagesOffset\n * @returns\n */\n async getDictionary(\n dictionaryPageOffset: number,\n options: ParquetOptions,\n pagesOffset: number\n ): Promise<string[]> {\n if (dictionaryPageOffset === 0) {\n // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);\n // pagesBuf = await this.read(pagesOffset, dictionarySize);\n\n // In this case we are working with parquet-mr files format. 
Problem is described below:\n // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding\n // We need to get dictionary page from column chunk if it exists.\n // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.\n return [];\n }\n\n const dictionarySize = Math.min(\n this.file.size - dictionaryPageOffset,\n this.props.defaultDictionarySize\n );\n const pagesBuf = await this.file.read(dictionaryPageOffset, dictionarySize);\n\n const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};\n const decodedPage = await decodePage(cursor, options);\n\n return decodedPage.dictionary!;\n }\n}\n"],"mappings":";AAGA,SAAQA,aAAa,QAAO,kBAAkB;AAC9C,SAAQC,YAAY,QAAO,YAAY;AACvC,SAAQC,eAAe,QAAO,iBAAiB;AAE/C,SAAQC,aAAa,EAAEC,uBAAuB,QAAO,iBAAiB;AACtE,SAAqBC,gBAAgB,EAA0BC,IAAI,QAAO,mBAAmB;AAQ7F,SAAQC,kBAAkB,EAAEC,aAAa,EAAEC,YAAY,QAAO,qBAAqB;AACnF,SAAQC,eAAe,EAAEC,UAAU,QAAO,YAAY;AAYtD,MAAMC,aAA2C,GAAG;EAClDC,qBAAqB,EAAE;AACzB,CAAC;AAQD,OAAO,MAAMC,aAAa,CAAC;EAKzBC,WAAWA,CAACC,IAAkB,EAAEC,KAA0B,EAAE;IAAAC,eAAA;IAAAA,eAAA;IAAAA,eAAA,mBAFnB,IAAI;IAG3C,IAAI,CAACF,IAAI,GAAGA,IAAI;IAChB,IAAI,CAACC,KAAK,GAAG;MAAC,GAAGL,aAAa;MAAE,GAAGK;IAAK,CAAC;EAC3C;EAEAE,KAAKA,CAAA,EAAS;IAEZ,IAAI,CAACH,IAAI,CAACG,KAAK,CAAC,CAAC;EACnB;EAKA,OAAOC,WAAWA,CAACH,KAA6B,EAAE;IAChD,WAAW,MAAMI,IAAI,IAAI,IAAI,CAACC,gBAAgB,CAACL,KAAK,CAAC,EAAE;MAErD,KAAK,MAAMM,GAAG,IAAIF,IAAI,EAAE;QACtB,MAAME,GAAG;MACX;IACF;EACF;EAGA,OAAOD,gBAAgBA,CAACL,KAA6B,EAAE;IACrD,MAAMO,MAAM,GAAG,MAAM,IAAI,CAACC,SAAS,CAAC,CAAC;IACrC,WAAW,MAAMC,QAAQ,IAAI,IAAI,CAACC,gBAAgB,CAACV,KAAK,CAAC,EAAE;MACzD,MAAMf,eAAe,CAACsB,MAAM,EAAEE,QAAQ,CAAC;IACzC;EACF;EAGA,OAAOC,gBAAgBA,CAACV,KAA6B,EAAE;IAErD,MAAMW,UAAsB,GAAG,CAAC,CAAAX,KAAK,aAALA,KAAK,uBAALA,KAAK,CAAEW,UAAU,KAAI,EAAE,EAAEC,GAAG,CAAEC,CAAC,IAC7DC,KAAK,CAACC,OAAO,CAACF,CAAC,CAAC,GAAGA,CAAC,GAAG,CAACA,CAAC,CAC3B,CAAC;IAED,MAAMG,QAAQ,GAAG,MAAM,IAAI,CAACC,eAAe,CAAC,CAAC;IAC7C,MAAMV,MAAM,GAAG,MAAM,IAAI,CAACC,SAAS,CAAC,CAAC;IAErC,MAAMU,aAAa,GAAG,CAAAF,QAAQ,aAARA,QAAQ,uBAARA,QAAQ,CAAEG,UAAU,CAACC,MAAM,KAAI,CAAC;IAEtD,KAAK,IAAIC,aAAa,GAAG,CAAC,EAAEA,aAAa,GAAGH,aAAa,EAAEG,aAAa,EAAE,EAAE;MAC1E,MAAMZ,QAAQ,GAAG,MAAM,IAAI,CAACa,YAAY,CACtCf,MAAM,EACNS,QAAQ,CAACG,UAAU,CAACE,aAAa,CAAC,EAClCV,UACF,CAAC;MACD,MAAMF,QAAQ;IAChB;EACF;EAEA,MAAMc,WAAWA,CAAA,EAAoB;IACnC,MAAMP,QAAQ,GAAG,MAAM,IAAI,CAACC,eAAe,CAAC,CAAC;IAC7C,OAAOO,MAAM,CAACR,QAAQ,CAACS,QAAQ,CAAC;EAClC;EAEA,MAAMjB,SAASA,CAAA,EAA2B;IACxC,MAAMQ,QAAQ,GAAG,MAAM,IAAI,CAACC,eAAe,CAAC,CAAC;IAC7C,MAAMS,IAAI,GAAGV,QAAQ,CAACT,MAAM,CAAC,CAAC,CAAC;IAC/B,MAAM;MAACA,MAAM,EAAEoB;IAAgB,CAAC,GAAG3C,YAAY,CAACgC,QAAQ,CAACT,MAAM,EAAE,CAAC,EAAEmB,IAAI,CAACE,YAAa,CAAC;IACvF,MAAMrB,MAAM,GAAG,IAAIxB,aAAa,CAAC4C,gBAAgB,CAAC;IAClD,OAAOpB,MAAM;EACf;EAMA,MAAMsB,iBAAiBA,CAAA,EAAoC;IACzD,MAAMb,QAAQ,GAAG,MAAM,IAAI,CAACC,eAAe,CAAC,CAAC;IAC7C,MAAMa,EAA0B,GAAG,CAAC,CAAC;IACrC,KAAK,MAAMC,EAAE,IAAIf,QAAQ,CAACgB,kBAAkB,EAAG;MAC7CF,EAAE,CAACC,EAAE,CAACE,GAAG,CAAC,GAAGF,EAAE,CAACG,KAAM;IACxB;IACA,OAAOJ,EAAE;EACX;EAEA,MAAMb,eAAeA,CAAA,EAA0B;IAC7C,IAAI,CAAC,IAAI,CAACD,QAAQ,EAAE;MAClB,MAAM,IAAI,CAACmB,UAAU,CAAC,CAAC;MACvB,IAAI,CAACnB,QAAQ,GAAG,IAAI,CAACoB,UAAU,CAAC,CAAC;IACnC;IACA,OAAO,IAAI,CAACpB,QAAQ;EACtB;EAKA,MAAMmB,UAAUA,CAAA,EAAkB;IAChC,MAAME,MAAM,GAAG,MAAM,IAAI,CAACtC,IAAI,CAACuC,IAAI,CAAC,CAAC,EAAEpD,aAAa,CAACkC,MAAM,CAAC;IAC5D,MAAMmB,KAAK,GAAGF,MAAM,CAACG,QAAQ,CAAC,CAAC;IAC/B,QAAQD,KAAK;MACX,KAAKrD,aAAa;QAChB;MACF,KAAKC,uBAAuB;QAC1B,MAAM,IAAIsD,KAAK,CAAC,sCAAsC,CAAC;MACzD;QACE,MAAM,IAAIA,KAAK,gCAAAC,MAAA,CAAgCH,KAAK,MAAG,CAAC;
IAC5D;EACF;EAGA,MAAMH,UAAUA,CAAA,EAA0B;IACxC,MAAMO,UAAU,GAAGzD,aAAa,CAACkC,MAAM,GAAG,CAAC;IAC3C,MAAMwB,UAAU,GAAG,MAAM,IAAI,CAAC7C,IAAI,CAACuC,IAAI,CAAC,IAAI,CAACvC,IAAI,CAAC8C,IAAI,GAAGF,UAAU,EAAEA,UAAU,CAAC;IAEhF,MAAMJ,KAAK,GAAGK,UAAU,CAACE,KAAK,CAAC,CAAC,CAAC,CAACN,QAAQ,CAAC,CAAC;IAC5C,IAAID,KAAK,KAAKrD,aAAa,EAAE;MAC3B,MAAM,IAAIuD,KAAK,sCAAAC,MAAA,CAAqCH,KAAK,MAAG,CAAC;IAC/D;IAEA,MAAMQ,YAAY,GAAGH,UAAU,CAACI,YAAY,CAAC,CAAC,CAAC;IAC/C,MAAMC,cAAc,GAAG,IAAI,CAAClD,IAAI,CAAC8C,IAAI,GAAGE,YAAY,GAAGJ,UAAU;IACjE,IAAIM,cAAc,GAAG/D,aAAa,CAACkC,MAAM,EAAE;MACzC,MAAM,IAAIqB,KAAK,0BAAAC,MAAA,CAA0BO,cAAc,CAAE,CAAC;IAC5D;IAEA,MAAMC,WAAW,GAAG,MAAM,IAAI,CAACnD,IAAI,CAACuC,IAAI,CAACW,cAAc,EAAEF,YAAY,CAAC;IAGtE,MAAM;MAAC/B;IAAQ,CAAC,GAAG1B,kBAAkB,CAAC4D,WAAW,CAAC;IAClD,OAAOlC,QAAQ;EACjB;EAGA,MAAMM,YAAYA,CAChBf,MAAqB,EACrBE,QAAkB,EAClBE,UAAsB,EACI;IAC1B,MAAM0B,MAAuB,GAAG;MAC9Bc,QAAQ,EAAE3B,MAAM,CAACf,QAAQ,CAACgB,QAAQ,CAAC;MACnC2B,UAAU,EAAE,CAAC;IACf,CAAC;IACD,KAAK,MAAMC,QAAQ,IAAI5C,QAAQ,CAAC6C,OAAO,EAAE;MACvC,MAAMC,WAAW,GAAGF,QAAQ,CAACG,SAAS;MACtC,MAAMC,MAAM,GAAGF,WAAW,aAAXA,WAAW,uBAAXA,WAAW,CAAEG,cAAc;MAC1C,IAAI/C,UAAU,CAACS,MAAM,GAAG,CAAC,IAAI5B,YAAY,CAACmB,UAAU,EAAE8C,MAAO,CAAC,GAAG,CAAC,EAAE;QAClE;MACF;MACApB,MAAM,CAACe,UAAU,CAACK,MAAM,CAAEE,IAAI,CAAC,CAAC,CAAC,GAAG,MAAM,IAAI,CAACC,eAAe,CAACrD,MAAM,EAAE8C,QAAQ,CAAC;IAClF;IACA,OAAOhB,MAAM;EACf;EAKA,MAAMuB,eAAeA,CAACrD,MAAqB,EAAE8C,QAAqB,EAA+B;IAAA,IAAAQ,mBAAA,EAAAC,oBAAA,EAAAC,oBAAA,EAAAC,oBAAA,EAAAC,oBAAA,EAAAC,oBAAA,EAAAC,oBAAA,EAAAC,mBAAA;IAC/F,IAAIf,QAAQ,CAACgB,SAAS,KAAKC,SAAS,IAAIjB,QAAQ,CAACgB,SAAS,KAAK,IAAI,EAAE;MACnE,MAAM,IAAI5B,KAAK,CAAC,uCAAuC,CAAC;IAC1D;IAEA,MAAM8B,KAAK,GAAGhE,MAAM,CAACiE,SAAS,EAAAX,mBAAA,GAACR,QAAQ,CAACG,SAAS,cAAAK,mBAAA,uBAAlBA,mBAAA,CAAoBH,cAAe,CAAC;IACnE,MAAMe,IAAmB,GAAGlF,aAAa,CAACF,IAAI,GAAAyE,oBAAA,GAAET,QAAQ,CAACG,SAAS,cAAAM,oBAAA,uBAAlBA,oBAAA,CAAoBW,IAAK,CAAQ;IAEjF,IAAIA,IAAI,KAAKF,KAAK,CAACG,aAAa,EAAE;MAChC,MAAM,IAAIjC,KAAK,oCAAAC,MAAA,CAAoC+B,IAAI,CAAE,CAAC;IAC5D;IAEA,MAAME,WAA+B,GAAGpF,aAAa,CACnDH,gBAAgB,GAAA2E,oBAAA,GAChBV,QAAQ,CAACG,SAAS,cAAAO,oBAAA,uBAAlBA,oBAAA,CAAoBa,KACtB,CAAQ;IAER,MAAMC,WAAW,GAAGrD,MAAM,EAAAwC,oBAAA,GAACX,QAAQ,CAACG,SAAS,cAAAQ,oBAAA,uBAAlBA,oBAAA,CAAoBc,gBAAiB,CAAC;IACjE,IAAIC,SAAS,GAAGvD,MAAM,EAAAyC,oBAAA,GAACZ,QAAQ,CAACG,SAAS,cAAAS,oBAAA,uBAAlBA,oBAAA,CAAoBe,qBAAsB,CAAC;IAElE,IAAI,CAAC3B,QAAQ,CAACgB,SAAS,EAAE;MAAA,IAAAY,oBAAA;MACvBF,SAAS,GAAGG,IAAI,CAACC,GAAG,CAClB,IAAI,CAACpF,IAAI,CAAC8C,IAAI,GAAGgC,WAAW,EAC5BrD,MAAM,EAAAyD,oBAAA,GAAC5B,QAAQ,CAACG,SAAS,cAAAyB,oBAAA,uBAAlBA,oBAAA,CAAoBD,qBAAqB,CAClD,CAAC;IACH;IAEA,MAAMI,OAAuB,GAAG;MAC9BX,IAAI;MACJY,SAAS,EAAEd,KAAK,CAACc,SAAS;MAC1BC,SAAS,EAAEf,KAAK,CAACe,SAAS;MAC1BX,WAAW;MACXY,MAAM,EAAEhB,KAAK;MACbiB,SAAS,GAAAtB,oBAAA,GAAEb,QAAQ,CAACG,SAAS,cAAAU,oBAAA,uBAAlBA,oBAAA,CAAoBuB,UAAU;MACzCC,UAAU,EAAE;IACd,CAAC;IAED,IAAIA,UAAU;IAEd,MAAMC,oBAAoB,GAAGtC,QAAQ,aAARA,QAAQ,wBAAAc,oBAAA,GAARd,QAAQ,CAAEG,SAAS,cAAAW,oBAAA,uBAAnBA,oBAAA,CAAqByB,sBAAsB;IAExE,IAAID,oBAAoB,EAAE;MACxB,MAAME,gBAAgB,GAAGrE,MAAM,CAACmE,oBAAoB,CAAC;MAErDD,UAAU,GAAG,MAAM,IAAI,CAACI,aAAa,CAACD,gBAAgB,EAAET,OAAO,EAAEP,WAAW,CAAC;IAC/E;IAEAa,UAAU,GAAG,CAAAtB,mBAAA,GAAAgB,OAAO,CAACM,UAAU,cAAAtB,mBAAA,eAAlBA,mBAAA,CAAoBhD,MAAM,GAAGgE,OAAO,CAACM,UAAU,GAAGA,UAAU;IACzE,MAAMK,QAAQ,GAAG,MAAM,IAAI,CAAChG,IAAI,CAACuC,IAAI,CAACuC,WAAW,EAAEE,SAAS,CAAC;IAC7D,OAAO,MAAMtF,eAAe,CAACsG,QAAQ,EAAE;MAAC,GAAGX,OAAO;MAAEM;IAAU,CAAC,CAAC;EAClE;EASA,MAAMI,aAAaA,CACjBH,oBAA4B,EAC5BP,OAAuB,EACvBP,WAAmB,EACA;IACnB,IAAIc,oBAAoB,KAAK,CAAC,EAAE;MAQ9B,OAAO,EAAE;IACX;IAEA,MAAMK,cAAc,GAAGd,IAAI,CAACC,GAAG,CAC7B,IAAI,CA
ACpF,IAAI,CAAC8C,IAAI,GAAG8C,oBAAoB,EACrC,IAAI,CAAC3F,KAAK,CAACJ,qBACb,CAAC;IACD,MAAMmG,QAAQ,GAAG,MAAM,IAAI,CAAChG,IAAI,CAACuC,IAAI,CAACqD,oBAAoB,EAAEK,cAAc,CAAC;IAE3E,MAAMC,MAAM,GAAG;MAAC5D,MAAM,EAAE0D,QAAQ;MAAEG,MAAM,EAAE,CAAC;MAAErD,IAAI,EAAEkD,QAAQ,CAAC3E;IAAM,CAAC;IACnE,MAAM+E,WAAW,GAAG,MAAMzG,UAAU,CAACuG,MAAM,EAAEb,OAAO,CAAC;IAErD,OAAOe,WAAW,CAACT,UAAU;EAC/B;AACF"}
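The readFooter method embedded in the source map above encodes the standard Parquet trailer layout: the file ends with a 4-byte little-endian footer length followed by the "PAR1" magic, so the Thrift-encoded FileMetaData sits immediately before those last 8 bytes. A standalone sketch of that arithmetic follows (hypothetical helper name, Node Buffer assumed, mirroring the compiled code rather than adding anything new).

```ts
import {Buffer} from 'node:buffer';

const PARQUET_MAGIC = 'PAR1';

/** Hypothetical helper: locate the FileMetaData block from the file size and the last 8 bytes. */
function locateFooterMetadata(fileSize: number, trailer: Buffer): {offset: number; size: number} {
  const trailerLen = PARQUET_MAGIC.length + 4; // 4-byte length word + "PAR1"
  if (trailer.slice(4).toString() !== PARQUET_MAGIC) {
    throw new Error('Not a valid parquet file');
  }
  const size = trailer.readUInt32LE(0); // metadata length, little-endian
  const offset = fileSize - size - trailerLen; // metadata ends right before the trailer
  if (offset < PARQUET_MAGIC.length) {
    throw new Error(`Invalid metadata size ${offset}`);
  }
  return {offset, size};
}
```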
@@ -0,0 +1,12 @@
+ import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+ export class ParquetRowGroup {
+ constructor() {
+ let rowCount = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
+ let columnData = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+ _defineProperty(this, "rowCount", void 0);
+ _defineProperty(this, "columnData", void 0);
+ this.rowCount = rowCount;
+ this.columnData = columnData;
+ }
+ }
+ //# sourceMappingURL=declare.js.map
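The new declare.js above is the compiled form of the ParquetRowGroup container described in the declare.ts source that follows: a rowCount plus a map from column key to column chunk, where each chunk carries repetition levels, definition levels, values, a count and the raw page headers. An illustrative sketch of those shapes (all values are made up; the interface mirrors ParquetColumnChunk from the source map below):

```ts
// Mirrors the ParquetColumnChunk interface declared in src/parquetjs/schema/declare.ts.
interface ParquetColumnChunk {
  dlevels: number[];
  rlevels: number[];
  values: unknown[];
  count: number;
  pageHeaders: unknown[]; // PageHeader[] in the real declaration
}

// Illustrative only: one flat, required column with three values, so all
// repetition and definition levels are 0.
const chunk: ParquetColumnChunk = {
  dlevels: [0, 0, 0],
  rlevels: [0, 0, 0],
  values: ['a', 'b', 'c'],
  count: 3,
  pageHeaders: []
};

const rowGroup = {rowCount: 3, columnData: {name: chunk}};
console.log(rowGroup.rowCount, Object.keys(rowGroup.columnData));
```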
@@ -0,0 +1 @@
+ {"version":3,"file":"declare.js","names":["ParquetRowGroup","constructor","rowCount","arguments","length","undefined","columnData","_defineProperty"],"sources":["../../../../src/parquetjs/schema/declare.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport Int64 from 'node-int64';\nimport type {PageHeader} from '../parquet-thrift';\n\nexport type ParquetCodec = 'PLAIN' | 'RLE' | 'PLAIN_DICTIONARY';\nexport type ParquetCompression =\n | 'UNCOMPRESSED'\n | 'GZIP'\n | 'SNAPPY'\n | 'LZO'\n | 'BROTLI'\n | 'LZ4'\n | 'LZ4_RAW'\n | 'ZSTD';\nexport type RepetitionType = 'REQUIRED' | 'OPTIONAL' | 'REPEATED';\nexport type ParquetType = PrimitiveType | OriginalType;\n\n/**\n * Physical type\n */\nexport type PrimitiveType =\n // Base Types\n | 'BOOLEAN' // 0\n | 'INT32' // 1\n | 'INT64' // 2\n | 'INT96' // 3\n | 'FLOAT' // 4\n | 'DOUBLE' // 5\n | 'BYTE_ARRAY' // 6,\n | 'FIXED_LEN_BYTE_ARRAY'; // 7\n\n/**\n * Logical type\n */\nexport type OriginalType =\n // Converted Types\n | 'UTF8' // 0\n // | 'MAP' // 1\n // | 'MAP_KEY_VALUE' // 2\n // | 'LIST' // 3\n // | 'ENUM' // 4\n // | 'DECIMAL' // 5\n | 'DECIMAL_INT32' // 5\n | 'DECIMAL_INT64' // 5\n | 'DECIMAL_BYTE_ARRAY' // 5\n | 'DECIMAL_FIXED_LEN_BYTE_ARRAY' // 5\n | 'DATE' // 6\n | 'TIME_MILLIS' // 7\n | 'TIME_MICROS' // 8\n | 'TIMESTAMP_MILLIS' // 9\n | 'TIMESTAMP_MICROS' // 10\n | 'UINT_8' // 11\n | 'UINT_16' // 12\n | 'UINT_32' // 13\n | 'UINT_64' // 14\n | 'INT_8' // 15\n | 'INT_16' // 16\n | 'INT_32' // 17\n | 'INT_64' // 18\n | 'JSON' // 19\n | 'BSON' // 20\n | 'INTERVAL'; // 21\n\nexport type ParquetDictionary = string[];\n\nexport interface SchemaDefinition {\n [string: string]: FieldDefinition;\n}\n\nexport interface FieldDefinition {\n type?: ParquetType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n optional?: boolean;\n repeated?: boolean;\n fields?: SchemaDefinition;\n}\n\nexport interface ParquetField {\n name: string;\n path: string[];\n key: string;\n primitiveType?: PrimitiveType;\n originalType?: OriginalType;\n repetitionType: RepetitionType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n rLevelMax: number;\n dLevelMax: number;\n isNested?: boolean;\n fieldCount?: number;\n fields?: Record<string, ParquetField>;\n}\n\n/** @todo better name, this is an internal type? 
*/\nexport interface ParquetOptions {\n type: ParquetType;\n rLevelMax: number;\n dLevelMax: number;\n compression: ParquetCompression;\n column: ParquetField;\n numValues?: Int64;\n dictionary?: ParquetDictionary;\n}\n\nexport interface ParquetPageData {\n dlevels: number[];\n rlevels: number[];\n /** Actual column chunks */\n values: any[]; // ArrayLike<any>;\n count: number;\n dictionary?: ParquetDictionary;\n /** The \"raw\" page header from the file */\n pageHeader: PageHeader;\n}\n\nexport interface ParquetRow {\n [key: string]: any;\n}\n\n/** @\n * Holds data for one row group (column chunks) */\nexport class ParquetRowGroup {\n /** Number of rows in this page */\n rowCount: number;\n /** Map of Column chunks */\n columnData: Record<string, ParquetColumnChunk>;\n\n constructor(rowCount: number = 0, columnData: Record<string, ParquetColumnChunk> = {}) {\n this.rowCount = rowCount;\n this.columnData = columnData;\n }\n}\n\n/** Holds the data for one column chunk */\nexport interface ParquetColumnChunk {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n pageHeaders: PageHeader[];\n}\n"],"mappings":";AAgIA,OAAO,MAAMA,eAAe,CAAC;EAM3BC,WAAWA,CAAA,EAA4E;IAAA,IAA3EC,QAAgB,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC;IAAA,IAAEG,UAA8C,GAAAH,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC,CAAC;IAAAI,eAAA;IAAAA,eAAA;IACnF,IAAI,CAACL,QAAQ,GAAGA,QAAQ;IACxB,IAAI,CAACI,UAAU,GAAGA,UAAU;EAC9B;AACF"}