@loaders.gl/parquet 3.3.0-alpha.5 → 3.3.0-alpha.7

This diff compares the contents of publicly available package versions as released to one of the supported registries, and is provided for informational purposes only.
Files changed (361)
  1. package/dist/dist.min.js +14 -14
  2. package/dist/dist.min.js.map +2 -2
  3. package/dist/es5/bundle.js +0 -1
  4. package/dist/es5/bundle.js.map +1 -1
  5. package/dist/es5/constants.js +3 -1
  6. package/dist/es5/constants.js.map +1 -1
  7. package/dist/es5/index.js +23 -39
  8. package/dist/es5/index.js.map +1 -1
  9. package/dist/es5/lib/convert-schema.js +2 -11
  10. package/dist/es5/lib/convert-schema.js.map +1 -1
  11. package/dist/es5/lib/parse-parquet.js +29 -72
  12. package/dist/es5/lib/parse-parquet.js.map +1 -1
  13. package/dist/es5/lib/read-array-buffer.js +0 -10
  14. package/dist/es5/lib/read-array-buffer.js.map +1 -1
  15. package/dist/es5/lib/wasm/encode-parquet-wasm.js +0 -11
  16. package/dist/es5/lib/wasm/encode-parquet-wasm.js.map +1 -1
  17. package/dist/es5/lib/wasm/load-wasm/index.js +0 -1
  18. package/dist/es5/lib/wasm/load-wasm/index.js.map +1 -1
  19. package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js +0 -14
  20. package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
  21. package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js +0 -10
  22. package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
  23. package/dist/es5/lib/wasm/parse-parquet-wasm.js +1 -19
  24. package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
  25. package/dist/es5/parquet-loader.js +2 -1
  26. package/dist/es5/parquet-loader.js.map +1 -1
  27. package/dist/es5/parquet-wasm-loader.js +2 -1
  28. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  29. package/dist/es5/parquet-wasm-writer.js +1 -3
  30. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  31. package/dist/es5/parquet-writer.js +1 -2
  32. package/dist/es5/parquet-writer.js.map +1 -1
  33. package/dist/es5/parquetjs/codecs/declare.js.map +1 -1
  34. package/dist/es5/parquetjs/codecs/dictionary.js +2 -9
  35. package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
  36. package/dist/es5/parquetjs/codecs/index.js +0 -8
  37. package/dist/es5/parquetjs/codecs/index.js.map +1 -1
  38. package/dist/es5/parquetjs/codecs/plain.js +1 -77
  39. package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
  40. package/dist/es5/parquetjs/codecs/rle.js +1 -39
  41. package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
  42. package/dist/es5/parquetjs/compression.js +5 -30
  43. package/dist/es5/parquetjs/compression.js.map +1 -1
  44. package/dist/es5/parquetjs/encoder/writer.js +31 -149
  45. package/dist/es5/parquetjs/encoder/writer.js.map +1 -1
  46. package/dist/es5/parquetjs/file.js +3 -12
  47. package/dist/es5/parquetjs/file.js.map +1 -1
  48. package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +0 -1
  49. package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -1
  50. package/dist/es5/parquetjs/parquet-thrift/BsonType.js +0 -15
  51. package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
  52. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js +0 -48
  53. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
  54. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js +0 -47
  55. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
  56. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js +0 -82
  57. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
  58. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js +0 -25
  59. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
  60. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +0 -1
  61. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
  62. package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +0 -1
  63. package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js.map +1 -1
  64. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js +0 -39
  65. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
  66. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js +0 -51
  67. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
  68. package/dist/es5/parquetjs/parquet-thrift/DateType.js +0 -15
  69. package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
  70. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js +0 -26
  71. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
  72. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js +0 -30
  73. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
  74. package/dist/es5/parquetjs/parquet-thrift/Encoding.js +0 -1
  75. package/dist/es5/parquetjs/parquet-thrift/Encoding.js.map +1 -1
  76. package/dist/es5/parquetjs/parquet-thrift/EnumType.js +0 -15
  77. package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
  78. package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +0 -1
  79. package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -1
  80. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js +0 -59
  81. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
  82. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js +0 -15
  83. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
  84. package/dist/es5/parquetjs/parquet-thrift/IntType.js +0 -26
  85. package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
  86. package/dist/es5/parquetjs/parquet-thrift/JsonType.js +0 -15
  87. package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
  88. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js +0 -26
  89. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
  90. package/dist/es5/parquetjs/parquet-thrift/ListType.js +0 -15
  91. package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
  92. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js +0 -85
  93. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
  94. package/dist/es5/parquetjs/parquet-thrift/MapType.js +0 -15
  95. package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
  96. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js +0 -15
  97. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
  98. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js +0 -15
  99. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
  100. package/dist/es5/parquetjs/parquet-thrift/NullType.js +0 -15
  101. package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
  102. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js +0 -25
  103. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
  104. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js +0 -30
  105. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
  106. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js +0 -54
  107. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
  108. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js +0 -31
  109. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
  110. package/dist/es5/parquetjs/parquet-thrift/PageType.js +0 -1
  111. package/dist/es5/parquetjs/parquet-thrift/PageType.js.map +1 -1
  112. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js +0 -41
  113. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
  114. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js +0 -59
  115. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
  116. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js +0 -30
  117. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
  118. package/dist/es5/parquetjs/parquet-thrift/Statistics.js +0 -42
  119. package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
  120. package/dist/es5/parquetjs/parquet-thrift/StringType.js +0 -15
  121. package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
  122. package/dist/es5/parquetjs/parquet-thrift/TimeType.js +0 -27
  123. package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
  124. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js +0 -30
  125. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
  126. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js +0 -27
  127. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
  128. package/dist/es5/parquetjs/parquet-thrift/Type.js +0 -1
  129. package/dist/es5/parquetjs/parquet-thrift/Type.js.map +1 -1
  130. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -15
  131. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
  132. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js +0 -15
  133. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
  134. package/dist/es5/parquetjs/parquet-thrift/index.js +0 -86
  135. package/dist/es5/parquetjs/parquet-thrift/index.js.map +1 -1
  136. package/dist/es5/parquetjs/parser/decoders.js +3 -82
  137. package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
  138. package/dist/es5/parquetjs/parser/parquet-cursor.js +5 -37
  139. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +1 -1
  140. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +2 -88
  141. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +1 -1
  142. package/dist/es5/parquetjs/parser/parquet-reader.js +14 -67
  143. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  144. package/dist/es5/parquetjs/schema/declare.js +3 -7
  145. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  146. package/dist/es5/parquetjs/schema/schema.js +6 -34
  147. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  148. package/dist/es5/parquetjs/schema/shred.js +11 -41
  149. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  150. package/dist/es5/parquetjs/schema/types.js +3 -84
  151. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  152. package/dist/es5/parquetjs/utils/buffer-utils.js +0 -2
  153. package/dist/es5/parquetjs/utils/buffer-utils.js.map +1 -1
  154. package/dist/es5/parquetjs/utils/file-utils.js +1 -7
  155. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  156. package/dist/es5/parquetjs/utils/read-utils.js +6 -38
  157. package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
  158. package/dist/es5/workers/parquet-worker.js +0 -2
  159. package/dist/es5/workers/parquet-worker.js.map +1 -1
  160. package/dist/esm/bundle.js +1 -1
  161. package/dist/esm/bundle.js.map +1 -1
  162. package/dist/esm/constants.js +3 -0
  163. package/dist/esm/constants.js.map +1 -1
  164. package/dist/esm/index.js +10 -2
  165. package/dist/esm/index.js.map +1 -1
  166. package/dist/esm/lib/convert-schema.js +1 -7
  167. package/dist/esm/lib/convert-schema.js.map +1 -1
  168. package/dist/esm/lib/parse-parquet.js +2 -5
  169. package/dist/esm/lib/parse-parquet.js.map +1 -1
  170. package/dist/esm/lib/read-array-buffer.js +2 -1
  171. package/dist/esm/lib/read-array-buffer.js.map +1 -1
  172. package/dist/esm/lib/wasm/encode-parquet-wasm.js +1 -1
  173. package/dist/esm/lib/wasm/encode-parquet-wasm.js.map +1 -1
  174. package/dist/esm/lib/wasm/load-wasm/index.js.map +1 -1
  175. package/dist/esm/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
  176. package/dist/esm/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
  177. package/dist/esm/lib/wasm/parse-parquet-wasm.js +2 -3
  178. package/dist/esm/lib/wasm/parse-parquet-wasm.js.map +1 -1
  179. package/dist/esm/parquet-loader.js +3 -1
  180. package/dist/esm/parquet-loader.js.map +1 -1
  181. package/dist/esm/parquet-wasm-loader.js +3 -1
  182. package/dist/esm/parquet-wasm-loader.js.map +1 -1
  183. package/dist/esm/parquet-wasm-writer.js +2 -1
  184. package/dist/esm/parquet-wasm-writer.js.map +1 -1
  185. package/dist/esm/parquet-writer.js +2 -2
  186. package/dist/esm/parquet-writer.js.map +1 -1
  187. package/dist/esm/parquetjs/codecs/declare.js.map +1 -1
  188. package/dist/esm/parquetjs/codecs/dictionary.js +2 -1
  189. package/dist/esm/parquetjs/codecs/dictionary.js.map +1 -1
  190. package/dist/esm/parquetjs/codecs/index.js +2 -0
  191. package/dist/esm/parquetjs/codecs/index.js.map +1 -1
  192. package/dist/esm/parquetjs/codecs/plain.js +2 -68
  193. package/dist/esm/parquetjs/codecs/plain.js.map +1 -1
  194. package/dist/esm/parquetjs/codecs/rle.js +3 -29
  195. package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
  196. package/dist/esm/parquetjs/compression.js +9 -5
  197. package/dist/esm/parquetjs/compression.js.map +1 -1
  198. package/dist/esm/parquetjs/encoder/writer.js +21 -51
  199. package/dist/esm/parquetjs/encoder/writer.js.map +1 -1
  200. package/dist/esm/parquetjs/file.js +1 -0
  201. package/dist/esm/parquetjs/file.js.map +1 -1
  202. package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js +1 -1
  203. package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -1
  204. package/dist/esm/parquetjs/parquet-thrift/BsonType.js +1 -8
  205. package/dist/esm/parquetjs/parquet-thrift/BsonType.js.map +1 -1
  206. package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js +0 -44
  207. package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
  208. package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js +0 -42
  209. package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
  210. package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js +0 -82
  211. package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
  212. package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js +0 -18
  213. package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
  214. package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js +1 -1
  215. package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
  216. package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js +1 -1
  217. package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js.map +1 -1
  218. package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js +0 -34
  219. package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
  220. package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js +0 -49
  221. package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
  222. package/dist/esm/parquetjs/parquet-thrift/DateType.js +1 -8
  223. package/dist/esm/parquetjs/parquet-thrift/DateType.js.map +1 -1
  224. package/dist/esm/parquetjs/parquet-thrift/DecimalType.js +0 -19
  225. package/dist/esm/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
  226. package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js +0 -24
  227. package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
  228. package/dist/esm/parquetjs/parquet-thrift/Encoding.js +1 -1
  229. package/dist/esm/parquetjs/parquet-thrift/Encoding.js.map +1 -1
  230. package/dist/esm/parquetjs/parquet-thrift/EnumType.js +1 -8
  231. package/dist/esm/parquetjs/parquet-thrift/EnumType.js.map +1 -1
  232. package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js +1 -1
  233. package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -1
  234. package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js +2 -53
  235. package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
  236. package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js +1 -8
  237. package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
  238. package/dist/esm/parquetjs/parquet-thrift/IntType.js +0 -19
  239. package/dist/esm/parquetjs/parquet-thrift/IntType.js.map +1 -1
  240. package/dist/esm/parquetjs/parquet-thrift/JsonType.js +1 -8
  241. package/dist/esm/parquetjs/parquet-thrift/JsonType.js.map +1 -1
  242. package/dist/esm/parquetjs/parquet-thrift/KeyValue.js +0 -19
  243. package/dist/esm/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
  244. package/dist/esm/parquetjs/parquet-thrift/ListType.js +1 -8
  245. package/dist/esm/parquetjs/parquet-thrift/ListType.js.map +1 -1
  246. package/dist/esm/parquetjs/parquet-thrift/LogicalType.js +0 -90
  247. package/dist/esm/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
  248. package/dist/esm/parquetjs/parquet-thrift/MapType.js +1 -8
  249. package/dist/esm/parquetjs/parquet-thrift/MapType.js.map +1 -1
  250. package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js +1 -8
  251. package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
  252. package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js +1 -8
  253. package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
  254. package/dist/esm/parquetjs/parquet-thrift/NullType.js +1 -8
  255. package/dist/esm/parquetjs/parquet-thrift/NullType.js.map +1 -1
  256. package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js +0 -16
  257. package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
  258. package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js +0 -24
  259. package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
  260. package/dist/esm/parquetjs/parquet-thrift/PageHeader.js +0 -49
  261. package/dist/esm/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
  262. package/dist/esm/parquetjs/parquet-thrift/PageLocation.js +0 -24
  263. package/dist/esm/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
  264. package/dist/esm/parquetjs/parquet-thrift/PageType.js +1 -1
  265. package/dist/esm/parquetjs/parquet-thrift/PageType.js.map +1 -1
  266. package/dist/esm/parquetjs/parquet-thrift/RowGroup.js +0 -33
  267. package/dist/esm/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
  268. package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js +0 -59
  269. package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
  270. package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js +0 -24
  271. package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
  272. package/dist/esm/parquetjs/parquet-thrift/Statistics.js +0 -38
  273. package/dist/esm/parquetjs/parquet-thrift/Statistics.js.map +1 -1
  274. package/dist/esm/parquetjs/parquet-thrift/StringType.js +1 -8
  275. package/dist/esm/parquetjs/parquet-thrift/StringType.js.map +1 -1
  276. package/dist/esm/parquetjs/parquet-thrift/TimeType.js +0 -19
  277. package/dist/esm/parquetjs/parquet-thrift/TimeType.js.map +1 -1
  278. package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js +0 -24
  279. package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
  280. package/dist/esm/parquetjs/parquet-thrift/TimestampType.js +0 -19
  281. package/dist/esm/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
  282. package/dist/esm/parquetjs/parquet-thrift/Type.js +1 -1
  283. package/dist/esm/parquetjs/parquet-thrift/Type.js.map +1 -1
  284. package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js +1 -8
  285. package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
  286. package/dist/esm/parquetjs/parquet-thrift/UUIDType.js +1 -8
  287. package/dist/esm/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
  288. package/dist/esm/parquetjs/parquet-thrift/index.js +1 -0
  289. package/dist/esm/parquetjs/parquet-thrift/index.js.map +1 -1
  290. package/dist/esm/parquetjs/parser/decoders.js +9 -39
  291. package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
  292. package/dist/esm/parquetjs/parser/parquet-cursor.js +1 -13
  293. package/dist/esm/parquetjs/parser/parquet-cursor.js.map +1 -1
  294. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +6 -32
  295. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +1 -1
  296. package/dist/esm/parquetjs/parser/parquet-reader.js +1 -18
  297. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  298. package/dist/esm/parquetjs/schema/declare.js +4 -4
  299. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  300. package/dist/esm/parquetjs/schema/schema.js +3 -29
  301. package/dist/esm/parquetjs/schema/schema.js.map +1 -1
  302. package/dist/esm/parquetjs/schema/shred.js +7 -22
  303. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  304. package/dist/esm/parquetjs/schema/types.js +3 -78
  305. package/dist/esm/parquetjs/schema/types.js.map +1 -1
  306. package/dist/esm/parquetjs/utils/buffer-utils.js +2 -1
  307. package/dist/esm/parquetjs/utils/buffer-utils.js.map +1 -1
  308. package/dist/esm/parquetjs/utils/file-utils.js +1 -0
  309. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
  310. package/dist/esm/parquetjs/utils/read-utils.js +5 -12
  311. package/dist/esm/parquetjs/utils/read-utils.js.map +1 -1
  312. package/dist/esm/workers/parquet-worker.js.map +1 -1
  313. package/dist/lib/wasm/load-wasm/load-wasm-browser.js +5 -1
  314. package/dist/lib/wasm/load-wasm/load-wasm-node.js +5 -1
  315. package/dist/parquet-worker.js +14 -14
  316. package/dist/parquet-worker.js.map +2 -2
  317. package/dist/parquetjs/codecs/index.js +5 -1
  318. package/dist/parquetjs/encoder/writer.d.ts +1 -0
  319. package/dist/parquetjs/encoder/writer.d.ts.map +1 -1
  320. package/dist/parquetjs/encoder/writer.js +5 -1
  321. package/dist/parquetjs/parquet-thrift/BsonType.js +5 -1
  322. package/dist/parquetjs/parquet-thrift/ColumnChunk.js +5 -1
  323. package/dist/parquetjs/parquet-thrift/ColumnIndex.js +5 -1
  324. package/dist/parquetjs/parquet-thrift/ColumnMetaData.js +5 -1
  325. package/dist/parquetjs/parquet-thrift/ColumnOrder.js +5 -1
  326. package/dist/parquetjs/parquet-thrift/DataPageHeader.js +5 -1
  327. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js +5 -1
  328. package/dist/parquetjs/parquet-thrift/DateType.js +5 -1
  329. package/dist/parquetjs/parquet-thrift/DecimalType.js +5 -1
  330. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js +5 -1
  331. package/dist/parquetjs/parquet-thrift/EnumType.js +5 -1
  332. package/dist/parquetjs/parquet-thrift/FileMetaData.js +5 -1
  333. package/dist/parquetjs/parquet-thrift/IndexPageHeader.js +5 -1
  334. package/dist/parquetjs/parquet-thrift/IntType.js +5 -1
  335. package/dist/parquetjs/parquet-thrift/JsonType.js +5 -1
  336. package/dist/parquetjs/parquet-thrift/KeyValue.js +5 -1
  337. package/dist/parquetjs/parquet-thrift/ListType.js +5 -1
  338. package/dist/parquetjs/parquet-thrift/LogicalType.js +5 -1
  339. package/dist/parquetjs/parquet-thrift/MapType.js +5 -1
  340. package/dist/parquetjs/parquet-thrift/MicroSeconds.js +5 -1
  341. package/dist/parquetjs/parquet-thrift/MilliSeconds.js +5 -1
  342. package/dist/parquetjs/parquet-thrift/NullType.js +5 -1
  343. package/dist/parquetjs/parquet-thrift/OffsetIndex.js +5 -1
  344. package/dist/parquetjs/parquet-thrift/PageEncodingStats.js +5 -1
  345. package/dist/parquetjs/parquet-thrift/PageHeader.js +5 -1
  346. package/dist/parquetjs/parquet-thrift/PageLocation.js +5 -1
  347. package/dist/parquetjs/parquet-thrift/RowGroup.js +5 -1
  348. package/dist/parquetjs/parquet-thrift/SchemaElement.js +5 -1
  349. package/dist/parquetjs/parquet-thrift/SortingColumn.js +5 -1
  350. package/dist/parquetjs/parquet-thrift/Statistics.js +5 -1
  351. package/dist/parquetjs/parquet-thrift/StringType.js +5 -1
  352. package/dist/parquetjs/parquet-thrift/TimeType.js +5 -1
  353. package/dist/parquetjs/parquet-thrift/TimeUnit.js +5 -1
  354. package/dist/parquetjs/parquet-thrift/TimestampType.js +5 -1
  355. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js +5 -1
  356. package/dist/parquetjs/parquet-thrift/UUIDType.js +5 -1
  357. package/dist/parquetjs/parquet-thrift/index.js +5 -1
  358. package/dist/parquetjs/schema/shred.js +5 -1
  359. package/dist/parquetjs/utils/file-utils.d.ts +2 -0
  360. package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
  361. package/package.json +6 -6
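
The file list above is dominated by rebuilt dist/ output (es5/, esm/, and their source maps); the diffs reproduced below show the resulting changes in the ESM parser files. As a point of reference, a minimal consumer sketch of exercising the rebuilt loader (not part of this diff; it assumes @loaders.gl/core at a matching version, and the URL is a placeholder):

// Minimal sketch of loading a Parquet file through this package (illustrative, not from the diff).
import {load} from '@loaders.gl/core';
import {ParquetLoader} from '@loaders.gl/parquet';

async function main(): Promise<void> {
  // ParquetLoader is the loader object this package exports (see dist/esm/parquet-loader.js above).
  const data = await load('https://example.com/data.parquet', ParquetLoader);
  console.log(data);
}

main().catch(console.error);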
package/dist/esm/parquetjs/parser/parquet-envelope-reader.js
@@ -1,93 +1,74 @@
  import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+
  import { PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED } from '../../constants';
  import { CompressionCodec, Type } from '../parquet-thrift';
  import { decodeFileMetadata, getThriftEnum, fieldIndexOf } from '../utils/read-utils';
  import { decodeDataPages, decodePage } from './decoders';
  const DEFAULT_DICTIONARY_SIZE = 1e6;
+
  export class ParquetEnvelopeReader {
+
    static async openBuffer(buffer) {
      const readFn = (position, length) => Promise.resolve(buffer.slice(position, position + length));
-
      const closeFn = () => Promise.resolve();
-
      return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);
    }
-
    constructor(read, close, fileSize, options) {
      _defineProperty(this, "read", void 0);
-
      _defineProperty(this, "close", void 0);
-
      _defineProperty(this, "fileSize", void 0);
-
      _defineProperty(this, "defaultDictionarySize", void 0);
-
      this.read = read;
      this.close = close;
      this.fileSize = fileSize;
      this.defaultDictionarySize = (options === null || options === void 0 ? void 0 : options.defaultDictionarySize) || DEFAULT_DICTIONARY_SIZE;
    }
-
    async readHeader() {
      const buffer = await this.read(0, PARQUET_MAGIC.length);
      const magic = buffer.toString();
-
      switch (magic) {
        case PARQUET_MAGIC:
          break;
-
        case PARQUET_MAGIC_ENCRYPTED:
          throw new Error('Encrypted parquet file not supported');
-
        default:
          throw new Error("Invalid parquet file (magic=".concat(magic, ")"));
      }
    }
-
    async readRowGroup(schema, rowGroup, columnList) {
      const buffer = {
        rowCount: Number(rowGroup.num_rows),
        columnData: {}
      };
-
      for (const colChunk of rowGroup.columns) {
        const colMetadata = colChunk.meta_data;
        const colKey = colMetadata === null || colMetadata === void 0 ? void 0 : colMetadata.path_in_schema;
-
        if (columnList.length > 0 && fieldIndexOf(columnList, colKey) < 0) {
          continue;
        }
 
        buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
      }
-
      return buffer;
    }
 
    async readColumnChunk(schema, colChunk) {
      var _colChunk$meta_data, _colChunk$meta_data2, _colChunk$meta_data3, _colChunk$meta_data4, _colChunk$meta_data5, _colChunk$meta_data7, _colChunk$meta_data8, _options$dictionary;
-
      if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
        throw new Error('external references are not supported');
      }
-
      const field = schema.findField((_colChunk$meta_data = colChunk.meta_data) === null || _colChunk$meta_data === void 0 ? void 0 : _colChunk$meta_data.path_in_schema);
      const type = getThriftEnum(Type, (_colChunk$meta_data2 = colChunk.meta_data) === null || _colChunk$meta_data2 === void 0 ? void 0 : _colChunk$meta_data2.type);
-
      if (type !== field.primitiveType) {
        throw new Error("chunk type not matching schema: ".concat(type));
      }
-
      const compression = getThriftEnum(CompressionCodec, (_colChunk$meta_data3 = colChunk.meta_data) === null || _colChunk$meta_data3 === void 0 ? void 0 : _colChunk$meta_data3.codec);
      const pagesOffset = Number((_colChunk$meta_data4 = colChunk.meta_data) === null || _colChunk$meta_data4 === void 0 ? void 0 : _colChunk$meta_data4.data_page_offset);
      let pagesSize = Number((_colChunk$meta_data5 = colChunk.meta_data) === null || _colChunk$meta_data5 === void 0 ? void 0 : _colChunk$meta_data5.total_compressed_size);
-
      if (!colChunk.file_path) {
        var _colChunk$meta_data6;
-
        pagesSize = Math.min(this.fileSize - pagesOffset, Number((_colChunk$meta_data6 = colChunk.meta_data) === null || _colChunk$meta_data6 === void 0 ? void 0 : _colChunk$meta_data6.total_compressed_size));
      }
-
      const options = {
        type,
        rLevelMax: field.rLevelMax,
@@ -99,24 +80,23 @@ export class ParquetEnvelopeReader {
      };
      let dictionary;
      const dictionaryPageOffset = colChunk === null || colChunk === void 0 ? void 0 : (_colChunk$meta_data8 = colChunk.meta_data) === null || _colChunk$meta_data8 === void 0 ? void 0 : _colChunk$meta_data8.dictionary_page_offset;
-
      if (dictionaryPageOffset) {
        const dictionaryOffset = Number(dictionaryPageOffset);
        dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
      }
-
      dictionary = (_options$dictionary = options.dictionary) !== null && _options$dictionary !== void 0 && _options$dictionary.length ? options.dictionary : dictionary;
      const pagesBuf = await this.read(pagesOffset, pagesSize);
-     return await decodeDataPages(pagesBuf, { ...options,
+     return await decodeDataPages(pagesBuf, {
+       ...options,
        dictionary
      });
    }
 
    async getDictionary(dictionaryPageOffset, options, pagesOffset) {
      if (dictionaryPageOffset === 0) {
+
        return [];
      }
-
      const dictionarySize = Math.min(this.fileSize - dictionaryPageOffset, this.defaultDictionarySize);
      const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);
      const cursor = {
@@ -127,29 +107,23 @@ export class ParquetEnvelopeReader {
      const decodedPage = await decodePage(cursor, options);
      return decodedPage.dictionary;
    }
-
    async readFooter() {
      const trailerLen = PARQUET_MAGIC.length + 4;
      const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);
      const magic = trailerBuf.slice(4).toString();
-
      if (magic !== PARQUET_MAGIC) {
        throw new Error("Not a valid parquet file (magic=\"".concat(magic, ")"));
      }
-
      const metadataSize = trailerBuf.readUInt32LE(0);
      const metadataOffset = this.fileSize - metadataSize - trailerLen;
-
      if (metadataOffset < PARQUET_MAGIC.length) {
        throw new Error("Invalid metadata size ".concat(metadataOffset));
      }
-
      const metadataBuf = await this.read(metadataOffset, metadataSize);
      const {
        metadata
      } = decodeFileMetadata(metadataBuf);
      return metadata;
    }
-
  }
  //# sourceMappingURL=parquet-envelope-reader.js.map
package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map
@@ -1 +1 @@
- [previous minified source map; VLQ "mappings" blob omitted]
+ [regenerated minified source map: key order is now "version"/"file"/"names"/"sources"/"sourcesContent"/"mappings"; the embedded parquet-envelope-reader.ts source is unchanged; VLQ "mappings" blob omitted]
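
The readFooter() logic above is only reformatted by this release, and it encodes the standard Parquet trailer layout: a file ends with the thrift-encoded FileMetaData block, a little-endian uint32 giving that block's length, and the 4-byte PAR1 magic. A standalone sketch of the same parse (hypothetical helper, assuming Node's fs/promises and PARQUET_MAGIC === 'PAR1'):

// Reads the FileMetaData length from a Parquet file trailer (sketch; mirrors readFooter() above).
import {promises as fs} from 'fs';

async function readFooterLength(path: string): Promise<number> {
  const handle = await fs.open(path, 'r');
  try {
    const {size} = await handle.stat();
    const trailer = Buffer.alloc(8); // 4-byte LE length + 4-byte "PAR1" magic
    await handle.read(trailer, 0, 8, size - 8);
    if (trailer.slice(4).toString() !== 'PAR1') {
      throw new Error('not a parquet file');
    }
    return trailer.readUInt32LE(0); // byte length of the thrift-encoded FileMetaData
  } finally {
    await handle.close();
  }
}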
@@ -1,12 +1,11 @@
1
1
  import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
2
-
3
2
  let _Symbol$asyncIterator;
4
-
5
3
  import { ParquetEnvelopeReader } from './parquet-envelope-reader';
6
4
  import { ParquetSchema } from '../schema/schema';
7
5
  import { ParquetCursor } from './parquet-cursor';
8
6
  import { PARQUET_VERSION } from '../../constants';
9
7
  import { decodeSchema } from './decoders';
8
+
10
9
  _Symbol$asyncIterator = Symbol.asyncIterator;
11
10
  export class ParquetReader {
12
11
  static async openBlob(blob) {
@@ -14,12 +13,9 @@ export class ParquetReader {
14
13
  const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();
15
14
  return Buffer.from(arrayBuffer);
16
15
  };
17
-
18
16
  const closeFn = async () => {};
19
-
20
17
  const size = blob.size;
21
18
  const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);
22
-
23
19
  try {
24
20
  await envelopeReader.readHeader();
25
21
  const metadata = await envelopeReader.readFooter();
@@ -32,12 +28,9 @@ export class ParquetReader {

  static async openArrayBuffer(arrayBuffer) {
  const readFn = async (start, length) => Buffer.from(arrayBuffer, start, length);
-
  const closeFn = async () => {};
-
  const size = arrayBuffer.byteLength;
  const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);
-
  try {
  await envelopeReader.readHeader();
  const metadata = await envelopeReader.readFooter();
@@ -47,10 +40,8 @@ export class ParquetReader {
  throw err;
  }
  }
-
  static async openBuffer(buffer) {
  const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);
-
  try {
  await envelopeReader.readHeader();
  const metadata = await envelopeReader.readFooter();
@@ -60,18 +51,13 @@ export class ParquetReader {
  throw err;
  }
  }
-
  constructor(metadata, envelopeReader) {
  _defineProperty(this, "metadata", void 0);
-
  _defineProperty(this, "envelopeReader", void 0);
-
  _defineProperty(this, "schema", void 0);
-
  if (metadata.version !== PARQUET_VERSION) {
  throw new Error('invalid parquet version');
  }
-
  this.metadata = metadata;
  this.envelopeReader = envelopeReader;
  const root = this.metadata.schema[0];
@@ -104,17 +90,14 @@ export class ParquetReader {

  getMetadata() {
  const md = {};
-
  for (const kv of this.metadata.key_value_metadata) {
  md[kv.key] = kv.value;
  }
-
  return md;
  }

  [_Symbol$asyncIterator]() {
  return this.getCursor()[Symbol.asyncIterator]();
  }
-
  }
  //# sourceMappingURL=parquet-reader.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/parquetjs/parser/parquet-reader.ts"],"names":["ParquetEnvelopeReader","ParquetSchema","ParquetCursor","PARQUET_VERSION","decodeSchema","Symbol","asyncIterator","ParquetReader","openBlob","blob","readFn","start","length","arrayBuffer","slice","Buffer","from","closeFn","size","envelopeReader","readHeader","metadata","readFooter","err","close","openArrayBuffer","byteLength","openBuffer","buffer","constructor","version","Error","root","schema","num_children","getCursor","columnList","map","x","Array","isArray","getRowCount","Number","num_rows","getSchema","getMetadata","md","kv","key_value_metadata","key","value"],"mappings":";;;;AACA,SAAQA,qBAAR,QAAoC,2BAApC;AAEA,SAAQC,aAAR,QAA4B,kBAA5B;AACA,SAAQC,aAAR,QAA4B,kBAA5B;AACA,SAAQC,eAAR,QAA8B,iBAA9B;AACA,SAAQC,YAAR,QAA2B,YAA3B;wBAyJGC,MAAM,CAACC,a;AAhJV,OAAO,MAAMC,aAAN,CAAmD;AAInC,eAARC,QAAQ,CAAIC,IAAJ,EAA2C;AAC9D,UAAMC,MAAM,GAAG,OAAOC,KAAP,EAAsBC,MAAtB,KAAyC;AACtD,YAAMC,WAAW,GAAG,MAAMJ,IAAI,CAACK,KAAL,CAAWH,KAAX,EAAkBA,KAAK,GAAGC,MAA1B,EAAkCC,WAAlC,EAA1B;AACA,aAAOE,MAAM,CAACC,IAAP,CAAYH,WAAZ,CAAP;AACD,KAHD;;AAIA,UAAMI,OAAO,GAAG,YAAY,CAAE,CAA9B;;AACA,UAAMC,IAAI,GAAGT,IAAI,CAACS,IAAlB;AACA,UAAMC,cAAc,GAAG,IAAInB,qBAAJ,CAA0BU,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAAvB;;AACA,QAAI;AACF,YAAMC,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAkBc,QAAlB,EAA4BF,cAA5B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAK2B,eAAfE,eAAe,CAAIZ,WAAJ,EAAyD;AACnF,UAAMH,MAAM,GAAG,OAAOC,KAAP,EAAsBC,MAAtB,KAAyCG,MAAM,CAACC,IAAP,CAAYH,WAAZ,EAAyBF,KAAzB,EAAgCC,MAAhC,CAAxD;;AACA,UAAMK,OAAO,GAAG,YAAY,CAAE,CAA9B;;AACA,UAAMC,IAAI,GAAGL,WAAW,CAACa,UAAzB;AACA,UAAMP,cAAc,GAAG,IAAInB,qBAAJ,CAA0BU,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAAvB;;AACA,QAAI;AACF,YAAMC,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAkBc,QAAlB,EAA4BF,cAA5B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAEsB,eAAVI,UAAU,CAAIC,MAAJ,EAA+C;AACpE,UAAMT,cAAc,GAAG,MAAMnB,qBAAqB,CAAC2B,UAAtB,CAAiCC,MAAjC,CAA7B;;AACA,QAAI;AACF,YAAMT,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAqBc,QAArB,EAA+BF,cAA/B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAYDM,EAAAA,WAAW,CAACR,QAAD,EAAyBF,cAAzB,EAAgE;AAAA;;AAAA;;AAAA;;AACzE,QAAIE,QAAQ,CAACS,OAAT,KAAqB3B,eAAzB,EAA0C;AACxC,YAAM,IAAI4B,KAAJ,CAAU,yBAAV,CAAN;AACD;;AAED,SAAKV,QAAL,GAAgBA,QAAhB;AACA,SAAKF,cAAL,GAAsBA,cAAtB;AACA,UAAMa,IAAI,GAAG,KAAKX,QAAL,CAAcY,MAAd,CAAqB,CAArB,CAAb;AACA,UAAM;AAACA,MAAAA;AAAD,QAAW7B,YAAY,CAAC,KAAKiB,QAAL,CAAcY,MAAf,EAAuB,CAAvB,EAA0BD,IAAI,CAACE,YAA/B,CAA7B;AACA,SAAKD,MAAL,GAAc,IAAIhC,aAAJ,CAAkBgC,MAAlB,CAAd;AACD;;AAMU,QAALT,KAAK,GAAkB;AAC3B,UAAM,KAAKL,cAAL,CAAoBK,KAApB,EAAN;AAGD;;AAeDW,EAAAA,SAAS,CAACC,UAAD,EAAgE;AACvE,QAAI,CAACA,UAAL,EAAiB;AAEfA,MAAAA,UAAU,GAAG,EAAb;AACD;;AAGDA,IAAAA,UAAU,GAAGA,UAAU,CAACC,GAAX,CAAgBC,CAAD,IAAQC,KAAK,CAACC,OAAN,CAAcF,CAAd,IAAmBA,CAAnB,GAAuB,CAACA,CAAD,CAA9C,CAAb;AAEA,WAAO,IAAIpC,aAAJ,CACL,KAAKmB,QADA,EAEL,KAAKF,cAFA,EAGL,KAAKc,MAHA,EAILG,UAJK,CAAP;AAMD;;AAMDK,EAAAA,WAAW,GAAW;AACpB,WAAOC,MAAM,CAAC,KAAKrB,QAAL,CAAcsB,QAAf,CAAb;AACD;;AAKDC,EAAAA,SAAS,GAAkB;AACzB,WAAO,KAAKX,MAAZ;AACD;;AAKDY,EAAAA,WAAW,GAA2B;AACpC,UAAMC,EAA0B,GAAG,EAAnC;;AACA,SAAK,MAAMC,EAAX,IAAiB,KAAK1B,QAAL,CAAc2B,kBAA/B,EAAoD;AAClDF,MAAAA,EAAE,CAACC,EAAE,CAACE,GAAJ,CAAF,GAAaF,EAAE,CAACG,KAAhB;AACD;;AACD,WAAOJ,EAAP;AACD;;AAMD,4BAA2C;AACzC,WAA
O,KAAKX,SAAL,GAAiB9B,MAAM,CAACC,aAAxB,GAAP;AACD;;AAlJuD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetEnvelopeReader} from './parquet-envelope-reader';\nimport {FileMetaData} from '../parquet-thrift';\nimport {ParquetSchema} from '../schema/schema';\nimport {ParquetCursor} from './parquet-cursor';\nimport {PARQUET_VERSION} from '../../constants';\nimport {decodeSchema} from './decoders';\n\n/**\n * A parquet reader allows retrieving the rows from a parquet file in order.\n * The basic usage is to create a reader and then retrieve a cursor/iterator\n * which allows you to consume row after row until all rows have been read. It is\n * important that you call close() after you are finished reading the file to\n * avoid leaking file descriptors.\n */\nexport class ParquetReader<T> implements AsyncIterable<T> {\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openBlob<T>(blob: Blob): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => {\n const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();\n return Buffer.from(arrayBuffer);\n };\n const closeFn = async () => {};\n const size = blob.size;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => Buffer.from(arrayBuffer, start, length);\n const closeFn = async () => {};\n const size = arrayBuffer.byteLength;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n static async openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n public metadata: FileMetaData;\n public envelopeReader: ParquetEnvelopeReader;\n public schema: ParquetSchema;\n\n /**\n * Create a new parquet reader from the file metadata and an envelope reader.\n * It is not recommended to call this constructor directly except for advanced\n * and internal use cases. Consider using one of the open{File,Buffer} methods\n * instead\n */\n constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader) {\n if (metadata.version !== PARQUET_VERSION) {\n throw new Error('invalid parquet version');\n }\n\n this.metadata = metadata;\n this.envelopeReader = envelopeReader;\n const root = this.metadata.schema[0];\n const {schema} = decodeSchema(this.metadata.schema, 1, root.num_children!);\n this.schema = new ParquetSchema(schema);\n }\n\n /**\n * Close this parquet reader. 
You MUST call this method once you're finished\n * reading rows\n */\n async close(): Promise<void> {\n await this.envelopeReader.close();\n // this.envelopeReader = null;\n // this.metadata = null;\n }\n\n /**\n * Return a cursor to the file. You may open more than one cursor and use\n * them concurrently. All cursors become invalid once close() is called on\n * the reader object.\n *\n * The required_columns parameter controls which columns are actually read\n * from disk. An empty array or no value implies all columns. A list of column\n * names means that only those columns should be loaded from disk.\n */\n getCursor(): ParquetCursor<T>;\n // @ts-ignore\n getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;\n getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;\n getCursor(columnList?: (string | string[])[]): ParquetCursor<Partial<T>> {\n if (!columnList) {\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = [];\n }\n\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));\n\n return new ParquetCursor<T>(\n this.metadata,\n this.envelopeReader,\n this.schema,\n columnList as string[][]\n );\n }\n\n /**\n * Return the number of rows in this file. Note that the number of rows is\n * not neccessarily equal to the number of rows in each column.\n */\n getRowCount(): number {\n return Number(this.metadata.num_rows);\n }\n\n /**\n * Returns the ParquetSchema for this file\n */\n getSchema(): ParquetSchema {\n return this.schema;\n }\n\n /**\n * Returns the user (key/value) metadata for this file\n */\n getMetadata(): Record<string, string> {\n const md: Record<string, string> = {};\n for (const kv of this.metadata.key_value_metadata!) {\n md[kv.key] = kv.value!;\n }\n return md;\n }\n\n /**\n * Implement AsyncIterable\n */\n // tslint:disable-next-line:function-name\n [Symbol.asyncIterator](): AsyncIterator<T> {\n return this.getCursor()[Symbol.asyncIterator]();\n }\n}\n"],"file":"parquet-reader.js"}
+ {"version":3,"file":"parquet-reader.js","names":["ParquetEnvelopeReader","ParquetSchema","ParquetCursor","PARQUET_VERSION","decodeSchema","Symbol","asyncIterator","ParquetReader","openBlob","blob","readFn","start","length","arrayBuffer","slice","Buffer","from","closeFn","size","envelopeReader","readHeader","metadata","readFooter","err","close","openArrayBuffer","byteLength","openBuffer","buffer","constructor","version","Error","root","schema","num_children","getCursor","columnList","map","x","Array","isArray","getRowCount","Number","num_rows","getSchema","getMetadata","md","kv","key_value_metadata","key","value"],"sources":["../../../../src/parquetjs/parser/parquet-reader.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetEnvelopeReader} from './parquet-envelope-reader';\nimport {FileMetaData} from '../parquet-thrift';\nimport {ParquetSchema} from '../schema/schema';\nimport {ParquetCursor} from './parquet-cursor';\nimport {PARQUET_VERSION} from '../../constants';\nimport {decodeSchema} from './decoders';\n\n/**\n * A parquet reader allows retrieving the rows from a parquet file in order.\n * The basic usage is to create a reader and then retrieve a cursor/iterator\n * which allows you to consume row after row until all rows have been read. It is\n * important that you call close() after you are finished reading the file to\n * avoid leaking file descriptors.\n */\nexport class ParquetReader<T> implements AsyncIterable<T> {\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openBlob<T>(blob: Blob): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => {\n const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();\n return Buffer.from(arrayBuffer);\n };\n const closeFn = async () => {};\n const size = blob.size;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => Buffer.from(arrayBuffer, start, length);\n const closeFn = async () => {};\n const size = arrayBuffer.byteLength;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n static async openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n public metadata: FileMetaData;\n public envelopeReader: ParquetEnvelopeReader;\n public schema: ParquetSchema;\n\n /**\n * Create a new parquet reader from the file metadata and an envelope reader.\n * It is not recommended to call this constructor directly except for advanced\n * and internal use cases. 
Consider using one of the open{File,Buffer} methods\n * instead\n */\n constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader) {\n if (metadata.version !== PARQUET_VERSION) {\n throw new Error('invalid parquet version');\n }\n\n this.metadata = metadata;\n this.envelopeReader = envelopeReader;\n const root = this.metadata.schema[0];\n const {schema} = decodeSchema(this.metadata.schema, 1, root.num_children!);\n this.schema = new ParquetSchema(schema);\n }\n\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n async close(): Promise<void> {\n await this.envelopeReader.close();\n // this.envelopeReader = null;\n // this.metadata = null;\n }\n\n /**\n * Return a cursor to the file. You may open more than one cursor and use\n * them concurrently. All cursors become invalid once close() is called on\n * the reader object.\n *\n * The required_columns parameter controls which columns are actually read\n * from disk. An empty array or no value implies all columns. A list of column\n * names means that only those columns should be loaded from disk.\n */\n getCursor(): ParquetCursor<T>;\n // @ts-ignore\n getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;\n getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;\n getCursor(columnList?: (string | string[])[]): ParquetCursor<Partial<T>> {\n if (!columnList) {\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = [];\n }\n\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));\n\n return new ParquetCursor<T>(\n this.metadata,\n this.envelopeReader,\n this.schema,\n columnList as string[][]\n );\n }\n\n /**\n * Return the number of rows in this file. Note that the number of rows is\n * not neccessarily equal to the number of rows in each column.\n */\n getRowCount(): number {\n return Number(this.metadata.num_rows);\n }\n\n /**\n * Returns the ParquetSchema for this file\n */\n getSchema(): ParquetSchema {\n return this.schema;\n }\n\n /**\n * Returns the user (key/value) metadata for this file\n */\n getMetadata(): Record<string, string> {\n const md: Record<string, string> = {};\n for (const kv of this.metadata.key_value_metadata!) 
{\n md[kv.key] = kv.value!;\n }\n return md;\n }\n\n /**\n * Implement AsyncIterable\n */\n // tslint:disable-next-line:function-name\n [Symbol.asyncIterator](): AsyncIterator<T> {\n return this.getCursor()[Symbol.asyncIterator]();\n }\n}\n"],"mappings":";;AACA,SAAQA,qBAAqB,QAAO,2BAA2B;AAE/D,SAAQC,aAAa,QAAO,kBAAkB;AAC9C,SAAQC,aAAa,QAAO,kBAAkB;AAC9C,SAAQC,eAAe,QAAO,iBAAiB;AAC/C,SAAQC,YAAY,QAAO,YAAY;;AAAC,wBAyJrCC,MAAM,CAACC,aAAa;AAhJvB,OAAO,MAAMC,aAAa,CAAgC;EAIxD,aAAaC,QAAQ,CAAIC,IAAU,EAA6B;IAC9D,MAAMC,MAAM,GAAG,OAAOC,KAAa,EAAEC,MAAc,KAAK;MACtD,MAAMC,WAAW,GAAG,MAAMJ,IAAI,CAACK,KAAK,CAACH,KAAK,EAAEA,KAAK,GAAGC,MAAM,CAAC,CAACC,WAAW,EAAE;MACzE,OAAOE,MAAM,CAACC,IAAI,CAACH,WAAW,CAAC;IACjC,CAAC;IACD,MAAMI,OAAO,GAAG,YAAY,CAAC,CAAC;IAC9B,MAAMC,IAAI,GAAGT,IAAI,CAACS,IAAI;IACtB,MAAMC,cAAc,GAAG,IAAInB,qBAAqB,CAACU,MAAM,EAAEO,OAAO,EAAEC,IAAI,CAAC;IACvE,IAAI;MACF,MAAMC,cAAc,CAACC,UAAU,EAAE;MACjC,MAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAU,EAAE;MAClD,OAAO,IAAIf,aAAa,CAACc,QAAQ,EAAEF,cAAc,CAAC;IACpD,CAAC,CAAC,OAAOI,GAAG,EAAE;MACZ,MAAMJ,cAAc,CAACK,KAAK,EAAE;MAC5B,MAAMD,GAAG;IACX;EACF;;EAKA,aAAaE,eAAe,CAAIZ,WAAwB,EAA6B;IACnF,MAAMH,MAAM,GAAG,OAAOC,KAAa,EAAEC,MAAc,KAAKG,MAAM,CAACC,IAAI,CAACH,WAAW,EAAEF,KAAK,EAAEC,MAAM,CAAC;IAC/F,MAAMK,OAAO,GAAG,YAAY,CAAC,CAAC;IAC9B,MAAMC,IAAI,GAAGL,WAAW,CAACa,UAAU;IACnC,MAAMP,cAAc,GAAG,IAAInB,qBAAqB,CAACU,MAAM,EAAEO,OAAO,EAAEC,IAAI,CAAC;IACvE,IAAI;MACF,MAAMC,cAAc,CAACC,UAAU,EAAE;MACjC,MAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAU,EAAE;MAClD,OAAO,IAAIf,aAAa,CAACc,QAAQ,EAAEF,cAAc,CAAC;IACpD,CAAC,CAAC,OAAOI,GAAG,EAAE;MACZ,MAAMJ,cAAc,CAACK,KAAK,EAAE;MAC5B,MAAMD,GAAG;IACX;EACF;EAEA,aAAaI,UAAU,CAAIC,MAAc,EAA6B;IACpE,MAAMT,cAAc,GAAG,MAAMnB,qBAAqB,CAAC2B,UAAU,CAACC,MAAM,CAAC;IACrE,IAAI;MACF,MAAMT,cAAc,CAACC,UAAU,EAAE;MACjC,MAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAU,EAAE;MAClD,OAAO,IAAIf,aAAa,CAAIc,QAAQ,EAAEF,cAAc,CAAC;IACvD,CAAC,CAAC,OAAOI,GAAG,EAAE;MACZ,MAAMJ,cAAc,CAACK,KAAK,EAAE;MAC5B,MAAMD,GAAG;IACX;EACF;EAYAM,WAAW,CAACR,QAAsB,EAAEF,cAAqC,EAAE;IAAA;IAAA;IAAA;IACzE,IAAIE,QAAQ,CAACS,OAAO,KAAK3B,eAAe,EAAE;MACxC,MAAM,IAAI4B,KAAK,CAAC,yBAAyB,CAAC;IAC5C;IAEA,IAAI,CAACV,QAAQ,GAAGA,QAAQ;IACxB,IAAI,CAACF,cAAc,GAAGA,cAAc;IACpC,MAAMa,IAAI,GAAG,IAAI,CAACX,QAAQ,CAACY,MAAM,CAAC,CAAC,CAAC;IACpC,MAAM;MAACA;IAAM,CAAC,GAAG7B,YAAY,CAAC,IAAI,CAACiB,QAAQ,CAACY,MAAM,EAAE,CAAC,EAAED,IAAI,CAACE,YAAY,CAAE;IAC1E,IAAI,CAACD,MAAM,GAAG,IAAIhC,aAAa,CAACgC,MAAM,CAAC;EACzC;;EAMA,MAAMT,KAAK,GAAkB;IAC3B,MAAM,IAAI,CAACL,cAAc,CAACK,KAAK,EAAE;EAGnC;;EAeAW,SAAS,CAACC,UAAkC,EAA6B;IACvE,IAAI,CAACA,UAAU,EAAE;MAEfA,UAAU,GAAG,EAAE;IACjB;;IAGAA,UAAU,GAAGA,UAAU,CAACC,GAAG,CAAEC,CAAC,IAAMC,KAAK,CAACC,OAAO,CAACF,CAAC,CAAC,GAAGA,CAAC,GAAG,CAACA,CAAC,CAAE,CAAC;IAEhE,OAAO,IAAIpC,aAAa,CACtB,IAAI,CAACmB,QAAQ,EACb,IAAI,CAACF,cAAc,EACnB,IAAI,CAACc,MAAM,EACXG,UAAU,CACX;EACH;;EAMAK,WAAW,GAAW;IACpB,OAAOC,MAAM,CAAC,IAAI,CAACrB,QAAQ,CAACsB,QAAQ,CAAC;EACvC;;EAKAC,SAAS,GAAkB;IACzB,OAAO,IAAI,CAACX,MAAM;EACpB;;EAKAY,WAAW,GAA2B;IACpC,MAAMC,EAA0B,GAAG,CAAC,CAAC;IACrC,KAAK,MAAMC,EAAE,IAAI,IAAI,CAAC1B,QAAQ,CAAC2B,kBAAkB,EAAG;MAClDF,EAAE,CAACC,EAAE,CAACE,GAAG,CAAC,GAAGF,EAAE,CAACG,KAAM;IACxB;IACA,OAAOJ,EAAE;EACX;;EAMA,0BAA2C;IACzC,OAAO,IAAI,CAACX,SAAS,EAAE,CAAC9B,MAAM,CAACC,aAAa,CAAC,EAAE;EACjD;AACF"}
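Note on the parquet-reader.js hunks above: every change appears to be whitespace-only (the newer Babel output simply stops emitting blank separator lines), so the ParquetReader API visible in the source map's sourcesContent is unchanged. As a reading aid, here is a minimal usage sketch of that API; the deep import path is an assumption for illustration, since the package's public entry point may re-export these names differently.

```ts
// Sketch only: reads every row from a Parquet ArrayBuffer using the
// ParquetReader API shown in the sourcesContent above. The deep import
// path is assumed for illustration.
import {ParquetReader} from '@loaders.gl/parquet/dist/es5/parquetjs/parser/parquet-reader';

async function readAllRows(arrayBuffer: ArrayBuffer): Promise<unknown[]> {
  const reader = await ParquetReader.openArrayBuffer<Record<string, unknown>>(arrayBuffer);
  const rows: unknown[] = [];
  try {
    // ParquetReader implements AsyncIterable, so for-await yields rows in order.
    for await (const row of reader) {
      rows.push(row);
    }
  } finally {
    // Per the class doc comment, close() must be called to avoid leaking
    // file descriptors.
    await reader.close();
  }
  return rows;
}
```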
@@ -1,13 +1,13 @@
  import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+
  export class ParquetBuffer {
- constructor(rowCount = 0, columnData = {}) {
+ constructor() {
+ let rowCount = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
+ let columnData = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
  _defineProperty(this, "rowCount", void 0);
-
  _defineProperty(this, "columnData", void 0);
-
  this.rowCount = rowCount;
  this.columnData = columnData;
  }
-
  }
  //# sourceMappingURL=declare.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/parquetjs/schema/declare.ts"],"names":["ParquetBuffer","constructor","rowCount","columnData"],"mappings":";AAmIA,OAAO,MAAMA,aAAN,CAAoB;AAGzBC,EAAAA,WAAW,CAACC,QAAgB,GAAG,CAApB,EAAuBC,UAAuC,GAAG,EAAjE,EAAqE;AAAA;;AAAA;;AAC9E,SAAKD,QAAL,GAAgBA,QAAhB;AACA,SAAKC,UAAL,GAAkBA,UAAlB;AACD;;AANwB","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport Int64 from 'node-int64';\nimport type {PageHeader} from '../parquet-thrift';\n\nexport type ParquetCodec = 'PLAIN' | 'RLE' | 'PLAIN_DICTIONARY';\nexport type ParquetCompression =\n | 'UNCOMPRESSED'\n | 'GZIP'\n | 'SNAPPY'\n | 'LZO'\n | 'BROTLI'\n | 'LZ4'\n | 'LZ4_RAW'\n | 'ZSTD';\nexport type RepetitionType = 'REQUIRED' | 'OPTIONAL' | 'REPEATED';\nexport type ParquetType = PrimitiveType | OriginalType;\n\n/**\n * Physical type\n */\nexport type PrimitiveType =\n // Base Types\n | 'BOOLEAN' // 0\n | 'INT32' // 1\n | 'INT64' // 2\n | 'INT96' // 3\n | 'FLOAT' // 4\n | 'DOUBLE' // 5\n | 'BYTE_ARRAY' // 6,\n | 'FIXED_LEN_BYTE_ARRAY'; // 7\n\n/**\n * Logical type\n */\nexport type OriginalType =\n // Converted Types\n | 'UTF8' // 0\n // | 'MAP' // 1\n // | 'MAP_KEY_VALUE' // 2\n // | 'LIST' // 3\n // | 'ENUM' // 4\n // | 'DECIMAL' // 5\n | 'DECIMAL_INT32' // 5\n | 'DECIMAL_INT64' // 5\n | 'DECIMAL_BYTE_ARRAY' // 5\n | 'DECIMAL_FIXED_LEN_BYTE_ARRAY' // 5\n | 'DATE' // 6\n | 'TIME_MILLIS' // 7\n | 'TIME_MICROS' // 8\n | 'TIMESTAMP_MILLIS' // 9\n | 'TIMESTAMP_MICROS' // 10\n | 'UINT_8' // 11\n | 'UINT_16' // 12\n | 'UINT_32' // 13\n | 'UINT_64' // 14\n | 'INT_8' // 15\n | 'INT_16' // 16\n | 'INT_32' // 17\n | 'INT_64' // 18\n | 'JSON' // 19\n | 'BSON' // 20\n | 'INTERVAL'; // 21\n\nexport type ParquetDictionary = string[];\n\nexport interface SchemaDefinition {\n [string: string]: FieldDefinition;\n}\n\nexport interface FieldDefinition {\n type?: ParquetType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n optional?: boolean;\n repeated?: boolean;\n fields?: SchemaDefinition;\n}\n\nexport interface ParquetField {\n name: string;\n path: string[];\n key: string;\n primitiveType?: PrimitiveType;\n originalType?: OriginalType;\n repetitionType: RepetitionType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n rLevelMax: number;\n dLevelMax: number;\n isNested?: boolean;\n fieldCount?: number;\n fields?: Record<string, ParquetField>;\n}\n\nexport interface ParquetOptions {\n type: ParquetType;\n rLevelMax: number;\n dLevelMax: number;\n compression: ParquetCompression;\n column: ParquetField;\n numValues?: Int64;\n dictionary?: ParquetDictionary;\n}\n\nexport interface ParquetData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n pageHeaders: PageHeader[];\n}\n\nexport interface ParquetPageData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n dictionary?: ParquetDictionary;\n pageHeader: PageHeader;\n}\n\nexport interface ParquetRecord {\n [key: string]: any;\n}\n\nexport class ParquetBuffer {\n rowCount: number;\n columnData: Record<string, ParquetData>;\n constructor(rowCount: number = 0, columnData: Record<string, ParquetData> = {}) {\n this.rowCount = rowCount;\n this.columnData = columnData;\n }\n}\n"],"file":"declare.js"}
+ {"version":3,"file":"declare.js","names":["ParquetBuffer","constructor","rowCount","columnData"],"sources":["../../../../src/parquetjs/schema/declare.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport Int64 from 'node-int64';\nimport type {PageHeader} from '../parquet-thrift';\n\nexport type ParquetCodec = 'PLAIN' | 'RLE' | 'PLAIN_DICTIONARY';\nexport type ParquetCompression =\n | 'UNCOMPRESSED'\n | 'GZIP'\n | 'SNAPPY'\n | 'LZO'\n | 'BROTLI'\n | 'LZ4'\n | 'LZ4_RAW'\n | 'ZSTD';\nexport type RepetitionType = 'REQUIRED' | 'OPTIONAL' | 'REPEATED';\nexport type ParquetType = PrimitiveType | OriginalType;\n\n/**\n * Physical type\n */\nexport type PrimitiveType =\n // Base Types\n | 'BOOLEAN' // 0\n | 'INT32' // 1\n | 'INT64' // 2\n | 'INT96' // 3\n | 'FLOAT' // 4\n | 'DOUBLE' // 5\n | 'BYTE_ARRAY' // 6,\n | 'FIXED_LEN_BYTE_ARRAY'; // 7\n\n/**\n * Logical type\n */\nexport type OriginalType =\n // Converted Types\n | 'UTF8' // 0\n // | 'MAP' // 1\n // | 'MAP_KEY_VALUE' // 2\n // | 'LIST' // 3\n // | 'ENUM' // 4\n // | 'DECIMAL' // 5\n | 'DECIMAL_INT32' // 5\n | 'DECIMAL_INT64' // 5\n | 'DECIMAL_BYTE_ARRAY' // 5\n | 'DECIMAL_FIXED_LEN_BYTE_ARRAY' // 5\n | 'DATE' // 6\n | 'TIME_MILLIS' // 7\n | 'TIME_MICROS' // 8\n | 'TIMESTAMP_MILLIS' // 9\n | 'TIMESTAMP_MICROS' // 10\n | 'UINT_8' // 11\n | 'UINT_16' // 12\n | 'UINT_32' // 13\n | 'UINT_64' // 14\n | 'INT_8' // 15\n | 'INT_16' // 16\n | 'INT_32' // 17\n | 'INT_64' // 18\n | 'JSON' // 19\n | 'BSON' // 20\n | 'INTERVAL'; // 21\n\nexport type ParquetDictionary = string[];\n\nexport interface SchemaDefinition {\n [string: string]: FieldDefinition;\n}\n\nexport interface FieldDefinition {\n type?: ParquetType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n optional?: boolean;\n repeated?: boolean;\n fields?: SchemaDefinition;\n}\n\nexport interface ParquetField {\n name: string;\n path: string[];\n key: string;\n primitiveType?: PrimitiveType;\n originalType?: OriginalType;\n repetitionType: RepetitionType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n rLevelMax: number;\n dLevelMax: number;\n isNested?: boolean;\n fieldCount?: number;\n fields?: Record<string, ParquetField>;\n}\n\nexport interface ParquetOptions {\n type: ParquetType;\n rLevelMax: number;\n dLevelMax: number;\n compression: ParquetCompression;\n column: ParquetField;\n numValues?: Int64;\n dictionary?: ParquetDictionary;\n}\n\nexport interface ParquetData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n pageHeaders: PageHeader[];\n}\n\nexport interface ParquetPageData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n dictionary?: ParquetDictionary;\n pageHeader: PageHeader;\n}\n\nexport interface ParquetRecord {\n [key: string]: any;\n}\n\nexport class ParquetBuffer {\n rowCount: number;\n columnData: Record<string, ParquetData>;\n constructor(rowCount: number = 0, columnData: Record<string, ParquetData> = {}) {\n this.rowCount = rowCount;\n this.columnData = columnData;\n }\n}\n"],"mappings":";;AAmIA,OAAO,MAAMA,aAAa,CAAC;EAGzBC,WAAW,GAAqE;IAAA,IAApEC,QAAgB,uEAAG,CAAC;IAAA,IAAEC,UAAuC,uEAAG,CAAC,CAAC;IAAA;IAAA;IAC5E,IAAI,CAACD,QAAQ,GAAGA,QAAQ;IACxB,IAAI,CAACC,UAAU,GAAGA,UAAU;EAC9B;AACF"}
@@ -1,16 +1,15 @@
  import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+
  import { PARQUET_CODECS } from '../codecs';
  import { PARQUET_COMPRESSION_METHODS } from '../compression';
  import { materializeRecords, shredBuffer, shredRecord } from './shred';
  import { PARQUET_LOGICAL_TYPES } from './types';
+
  export class ParquetSchema {
  constructor(schema) {
  _defineProperty(this, "schema", void 0);
-
  _defineProperty(this, "fields", void 0);
-
  _defineProperty(this, "fieldList", void 0);
-
  this.schema = schema;
  this.fields = buildFields(schema, 0, 0, []);
  this.fieldList = listFields(this.fields);
@@ -24,11 +23,9 @@ export class ParquetSchema {
  }

  let n = this.fields;
-
  for (; path.length > 1; path.shift()) {
  n = n[path[0]].fields;
  }
-
  return n[path[0]];
  }

@@ -36,45 +33,34 @@ export class ParquetSchema {
  if (typeof path === 'string') {
  path = path.split(',');
  }
-
  const branch = [];
  let n = this.fields;
-
  for (; path.length > 0; path.shift()) {
  branch.push(n[path[0]]);
-
  if (path.length > 1) {
  n = n[path[0]].fields;
  }
  }
-
  return branch;
  }
-
  shredRecord(record, buffer) {
  shredRecord(this, record, buffer);
  }
-
  materializeRecords(buffer) {
  return materializeRecords(this, buffer);
  }
-
  compress(type) {
  setCompress(this.schema, type);
  setCompress(this.fields, type);
  return this;
  }
-
  buffer() {
  return shredBuffer(this);
  }
-
  }
-
  function setCompress(schema, type) {
  for (const name in schema) {
  const node = schema[name];
-
  if (node.fields) {
  setCompress(node.fields, type);
  } else {
@@ -85,20 +71,18 @@ function setCompress(schema, type) {

  function buildFields(schema, rLevelParentMax, dLevelParentMax, path) {
  const fieldList = {};
-
  for (const name in schema) {
  const opts = schema[name];
+
  const required = !opts.optional;
  const repeated = Boolean(opts.repeated);
  let rLevelMax = rLevelParentMax;
  let dLevelMax = dLevelParentMax;
  let repetitionType = 'REQUIRED';
-
  if (!required) {
  repetitionType = 'OPTIONAL';
  dLevelMax++;
  }
-
  if (repeated) {
  repetitionType = 'REPEATED';
  rLevelMax++;
@@ -122,19 +106,14 @@ function buildFields(schema, rLevelParentMax, dLevelParentMax, path) {
  }

  const typeDef = PARQUET_LOGICAL_TYPES[opts.type];
-
  if (!typeDef) {
  throw new Error("invalid parquet type: ".concat(opts.type));
  }
-
  opts.encoding = opts.encoding || 'PLAIN';
-
  if (!(opts.encoding in PARQUET_CODECS)) {
  throw new Error("unsupported parquet encoding: ".concat(opts.encoding));
  }
-
  opts.compression = opts.compression || 'UNCOMPRESSED';
-
  if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {
  throw new Error("unsupported compression method: ".concat(opts.compression));
  }
@@ -156,21 +135,16 @@ function buildFields(schema, rLevelParentMax, dLevelParentMax, path) {
  dLevelMax
  };
  }
-
  return fieldList;
  }
-
  function listFields(fields) {
  let list = [];
-
  for (const k in fields) {
  list.push(fields[k]);
-
  if (fields[k].isNested) {
  list = list.concat(listFields(fields[k].fields));
  }
  }
-
  return list;
  }
  //# sourceMappingURL=schema.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/parquetjs/schema/schema.ts"],"names":["PARQUET_CODECS","PARQUET_COMPRESSION_METHODS","materializeRecords","shredBuffer","shredRecord","PARQUET_LOGICAL_TYPES","ParquetSchema","constructor","schema","fields","buildFields","fieldList","listFields","findField","path","split","slice","n","length","shift","findFieldBranch","branch","push","record","buffer","compress","type","setCompress","name","node","compression","rLevelParentMax","dLevelParentMax","opts","required","optional","repeated","Boolean","rLevelMax","dLevelMax","repetitionType","cpath","concat","key","join","isNested","fieldCount","Object","keys","typeDef","Error","encoding","primitiveType","originalType","typeLength","presision","scale","list","k"],"mappings":";AAEA,SAAQA,cAAR,QAA6B,WAA7B;AACA,SAAQC,2BAAR,QAA0C,gBAA1C;AAUA,SAAQC,kBAAR,EAA4BC,WAA5B,EAAyCC,WAAzC,QAA2D,SAA3D;AACA,SAAQC,qBAAR,QAAoC,SAApC;AAKA,OAAO,MAAMC,aAAN,CAAoB;AAQzBC,EAAAA,WAAW,CAACC,MAAD,EAA2B;AAAA;;AAAA;;AAAA;;AACpC,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKC,MAAL,GAAcC,WAAW,CAACF,MAAD,EAAS,CAAT,EAAY,CAAZ,EAAe,EAAf,CAAzB;AACA,SAAKG,SAAL,GAAiBC,UAAU,CAAC,KAAKH,MAAN,CAA3B;AACD;;AAKDI,EAAAA,SAAS,CAACC,IAAD,EAAwC;AAC/C,QAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,MAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD,KAHD,MAGO;AAELD,MAAAA,IAAI,GAAGA,IAAI,CAACE,KAAL,CAAW,CAAX,CAAP;AACD;;AAED,QAAIC,CAAC,GAAG,KAAKR,MAAb;;AACA,WAAOK,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCF,MAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWL,MAAf;AACD;;AAED,WAAOQ,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAR;AACD;;AAKDM,EAAAA,eAAe,CAACN,IAAD,EAA0C;AACvD,QAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,MAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD;;AACD,UAAMM,MAAsB,GAAG,EAA/B;AACA,QAAIJ,CAAC,GAAG,KAAKR,MAAb;;AACA,WAAOK,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCE,MAAAA,MAAM,CAACC,IAAP,CAAYL,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAb;;AACA,UAAIA,IAAI,CAACI,MAAL,GAAc,CAAlB,EAAqB;AACnBD,QAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWL,MAAf;AACD;AACF;;AACD,WAAOY,MAAP;AACD;;AAEDjB,EAAAA,WAAW,CAACmB,MAAD,EAAwBC,MAAxB,EAAqD;AAC9DpB,IAAAA,WAAW,CAAC,IAAD,EAAOmB,MAAP,EAAeC,MAAf,CAAX;AACD;;AAEDtB,EAAAA,kBAAkB,CAACsB,MAAD,EAAyC;AACzD,WAAOtB,kBAAkB,CAAC,IAAD,EAAOsB,MAAP,CAAzB;AACD;;AAEDC,EAAAA,QAAQ,CAACC,IAAD,EAAiC;AACvCC,IAAAA,WAAW,CAAC,KAAKnB,MAAN,EAAckB,IAAd,CAAX;AACAC,IAAAA,WAAW,CAAC,KAAKlB,MAAN,EAAciB,IAAd,CAAX;AACA,WAAO,IAAP;AACD;;AAEDF,EAAAA,MAAM,GAAkB;AACtB,WAAOrB,WAAW,CAAC,IAAD,CAAlB;AACD;;AArEwB;;AAwE3B,SAASwB,WAAT,CAAqBnB,MAArB,EAAkCkB,IAAlC,EAA4D;AAC1D,OAAK,MAAME,IAAX,IAAmBpB,MAAnB,EAA2B;AACzB,UAAMqB,IAAI,GAAGrB,MAAM,CAACoB,IAAD,CAAnB;;AACA,QAAIC,IAAI,CAACpB,MAAT,EAAiB;AACfkB,MAAAA,WAAW,CAACE,IAAI,CAACpB,MAAN,EAAciB,IAAd,CAAX;AACD,KAFD,MAEO;AACLG,MAAAA,IAAI,CAACC,WAAL,GAAmBJ,IAAnB;AACD;AACF;AACF;;AAGD,SAAShB,WAAT,CACEF,MADF,EAEEuB,eAFF,EAGEC,eAHF,EAIElB,IAJF,EAKgC;AAC9B,QAAMH,SAAuC,GAAG,EAAhD;;AAEA,OAAK,MAAMiB,IAAX,IAAmBpB,MAAnB,EAA2B;AACzB,UAAMyB,IAAI,GAAGzB,MAAM,CAACoB,IAAD,CAAnB;AAGA,UAAMM,QAAQ,GAAG,CAACD,IAAI,CAACE,QAAvB;AACA,UAAMC,QAAQ,GAAGC,OAAO,CAACJ,IAAI,CAACG,QAAN,CAAxB;AACA,QAAIE,SAAS,GAAGP,eAAhB;AACA,QAAIQ,SAAS,GAAGP,eAAhB;AAEA,QAAIQ,cAA8B,GAAG,UAArC;;AACA,QAAI,CAACN,QAAL,EAAe;AACbM,MAAAA,cAAc,GAAG,UAAjB;AACAD,MAAAA,SAAS;AACV;;AACD,QAAIH,QAAJ,EAAc;AACZI,MAAAA,cAAc,GAAG,UAAjB;AACAF,MAAAA,SAAS;AACT,UAAIJ,QAAJ,EAAcK,SAAS;AACxB;;AAGD,QAAIN,IAAI,CAACxB,MAAT,EAAiB;AACf,YAAMgC,KAAK,GAAG3B,IAAI,CAAC4B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAjB,MAAAA,SAAS,CAACiB,IAAD,CAAT,GAAkB;AAChBA,QAAAA,IADgB;AAEhBd,QAAAA,IAAI,EAAE2
B,KAFU;AAGhBE,QAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EAHW;AAIhBJ,QAAAA,cAJgB;AAKhBF,QAAAA,SALgB;AAMhBC,QAAAA,SANgB;AAOhBM,QAAAA,QAAQ,EAAE,IAPM;AAQhBC,QAAAA,UAAU,EAAEC,MAAM,CAACC,IAAP,CAAYf,IAAI,CAACxB,MAAjB,EAAyBS,MARrB;AAShBT,QAAAA,MAAM,EAAEC,WAAW,CAACuB,IAAI,CAACxB,MAAN,EAAc6B,SAAd,EAAyBC,SAAzB,EAAoCE,KAApC;AATH,OAAlB;AAWA;AACD;;AAED,UAAMQ,OAAY,GAAG5C,qBAAqB,CAAC4B,IAAI,CAACP,IAAN,CAA1C;;AACA,QAAI,CAACuB,OAAL,EAAc;AACZ,YAAM,IAAIC,KAAJ,iCAAmCjB,IAAI,CAACP,IAAxC,EAAN;AACD;;AAEDO,IAAAA,IAAI,CAACkB,QAAL,GAAgBlB,IAAI,CAACkB,QAAL,IAAiB,OAAjC;;AACA,QAAI,EAAElB,IAAI,CAACkB,QAAL,IAAiBnD,cAAnB,CAAJ,EAAwC;AACtC,YAAM,IAAIkD,KAAJ,yCAA2CjB,IAAI,CAACkB,QAAhD,EAAN;AACD;;AAEDlB,IAAAA,IAAI,CAACH,WAAL,GAAmBG,IAAI,CAACH,WAAL,IAAoB,cAAvC;;AACA,QAAI,EAAEG,IAAI,CAACH,WAAL,IAAoB7B,2BAAtB,CAAJ,EAAwD;AACtD,YAAM,IAAIiD,KAAJ,2CAA6CjB,IAAI,CAACH,WAAlD,EAAN;AACD;;AAGD,UAAMW,KAAK,GAAG3B,IAAI,CAAC4B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAjB,IAAAA,SAAS,CAACiB,IAAD,CAAT,GAAkB;AAChBA,MAAAA,IADgB;AAEhBwB,MAAAA,aAAa,EAAEH,OAAO,CAACG,aAFP;AAGhBC,MAAAA,YAAY,EAAEJ,OAAO,CAACI,YAHN;AAIhBvC,MAAAA,IAAI,EAAE2B,KAJU;AAKhBE,MAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EALW;AAMhBJ,MAAAA,cANgB;AAOhBW,MAAAA,QAAQ,EAAElB,IAAI,CAACkB,QAPC;AAQhBrB,MAAAA,WAAW,EAAEG,IAAI,CAACH,WARF;AAShBwB,MAAAA,UAAU,EAAErB,IAAI,CAACqB,UAAL,IAAmBL,OAAO,CAACK,UATvB;AAUhBC,MAAAA,SAAS,EAAEtB,IAAI,CAACsB,SAVA;AAWhBC,MAAAA,KAAK,EAAEvB,IAAI,CAACuB,KAXI;AAYhBlB,MAAAA,SAZgB;AAahBC,MAAAA;AAbgB,KAAlB;AAeD;;AACD,SAAO5B,SAAP;AACD;;AAED,SAASC,UAAT,CAAoBH,MAApB,EAA0E;AACxE,MAAIgD,IAAoB,GAAG,EAA3B;;AACA,OAAK,MAAMC,CAAX,IAAgBjD,MAAhB,EAAwB;AACtBgD,IAAAA,IAAI,CAACnC,IAAL,CAAUb,MAAM,CAACiD,CAAD,CAAhB;;AACA,QAAIjD,MAAM,CAACiD,CAAD,CAAN,CAAUb,QAAd,EAAwB;AACtBY,MAAAA,IAAI,GAAGA,IAAI,CAACf,MAAL,CAAY9B,UAAU,CAACH,MAAM,CAACiD,CAAD,CAAN,CAAUjD,MAAX,CAAtB,CAAP;AACD;AACF;;AACD,SAAOgD,IAAP;AACD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {PARQUET_CODECS} from '../codecs';\nimport {PARQUET_COMPRESSION_METHODS} from '../compression';\nimport {\n FieldDefinition,\n ParquetBuffer,\n ParquetCompression,\n ParquetField,\n ParquetRecord,\n RepetitionType,\n SchemaDefinition\n} from './declare';\nimport {materializeRecords, shredBuffer, shredRecord} from './shred';\nimport {PARQUET_LOGICAL_TYPES} from './types';\n\n/**\n * A parquet file schema\n */\nexport class ParquetSchema {\n public schema: Record<string, FieldDefinition>;\n public fields: Record<string, ParquetField>;\n public fieldList: ParquetField[];\n\n /**\n * Create a new schema from a JSON schema definition\n */\n constructor(schema: SchemaDefinition) {\n this.schema = schema;\n this.fields = buildFields(schema, 0, 0, []);\n this.fieldList = listFields(this.fields);\n }\n\n /**\n * Retrieve a field definition\n */\n findField(path: string | string[]): ParquetField {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n } else {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.slice(0); // clone array\n }\n\n let n = this.fields;\n for (; path.length > 1; path.shift()) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n\n return n[path[0]];\n }\n\n /**\n * Retrieve a field definition and all the field's ancestors\n */\n findFieldBranch(path: string | string[]): ParquetField[] {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n }\n const branch: ParquetField[] = [];\n let n = this.fields;\n for (; 
path.length > 0; path.shift()) {\n branch.push(n[path[0]]);\n if (path.length > 1) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n }\n return branch;\n }\n\n shredRecord(record: ParquetRecord, buffer: ParquetBuffer): void {\n shredRecord(this, record, buffer);\n }\n\n materializeRecords(buffer: ParquetBuffer): ParquetRecord[] {\n return materializeRecords(this, buffer);\n }\n\n compress(type: ParquetCompression): this {\n setCompress(this.schema, type);\n setCompress(this.fields, type);\n return this;\n }\n\n buffer(): ParquetBuffer {\n return shredBuffer(this);\n }\n}\n\nfunction setCompress(schema: any, type: ParquetCompression) {\n for (const name in schema) {\n const node = schema[name];\n if (node.fields) {\n setCompress(node.fields, type);\n } else {\n node.compression = type;\n }\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction buildFields(\n schema: SchemaDefinition,\n rLevelParentMax: number,\n dLevelParentMax: number,\n path: string[]\n): Record<string, ParquetField> {\n const fieldList: Record<string, ParquetField> = {};\n\n for (const name in schema) {\n const opts = schema[name];\n\n /* field repetition type */\n const required = !opts.optional;\n const repeated = Boolean(opts.repeated);\n let rLevelMax = rLevelParentMax;\n let dLevelMax = dLevelParentMax;\n\n let repetitionType: RepetitionType = 'REQUIRED';\n if (!required) {\n repetitionType = 'OPTIONAL';\n dLevelMax++;\n }\n if (repeated) {\n repetitionType = 'REPEATED';\n rLevelMax++;\n if (required) dLevelMax++;\n }\n\n /* nested field */\n if (opts.fields) {\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n rLevelMax,\n dLevelMax,\n isNested: true,\n fieldCount: Object.keys(opts.fields).length,\n fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)\n };\n continue; // eslint-disable-line no-continue\n }\n\n const typeDef: any = PARQUET_LOGICAL_TYPES[opts.type!];\n if (!typeDef) {\n throw new Error(`invalid parquet type: ${opts.type}`);\n }\n\n opts.encoding = opts.encoding || 'PLAIN';\n if (!(opts.encoding in PARQUET_CODECS)) {\n throw new Error(`unsupported parquet encoding: ${opts.encoding}`);\n }\n\n opts.compression = opts.compression || 'UNCOMPRESSED';\n if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`unsupported compression method: ${opts.compression}`);\n }\n\n /* add to schema */\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n primitiveType: typeDef.primitiveType,\n originalType: typeDef.originalType,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n encoding: opts.encoding,\n compression: opts.compression,\n typeLength: opts.typeLength || typeDef.typeLength,\n presision: opts.presision,\n scale: opts.scale,\n rLevelMax,\n dLevelMax\n };\n }\n return fieldList;\n}\n\nfunction listFields(fields: Record<string, ParquetField>): ParquetField[] {\n let list: ParquetField[] = [];\n for (const k in fields) {\n list.push(fields[k]);\n if (fields[k].isNested) {\n list = list.concat(listFields(fields[k].fields!));\n }\n }\n return list;\n}\n"],"file":"schema.js"}
+ {"version":3,"file":"schema.js","names":["PARQUET_CODECS","PARQUET_COMPRESSION_METHODS","materializeRecords","shredBuffer","shredRecord","PARQUET_LOGICAL_TYPES","ParquetSchema","constructor","schema","fields","buildFields","fieldList","listFields","findField","path","split","slice","n","length","shift","findFieldBranch","branch","push","record","buffer","compress","type","setCompress","name","node","compression","rLevelParentMax","dLevelParentMax","opts","required","optional","repeated","Boolean","rLevelMax","dLevelMax","repetitionType","cpath","concat","key","join","isNested","fieldCount","Object","keys","typeDef","Error","encoding","primitiveType","originalType","typeLength","presision","scale","list","k"],"sources":["../../../../src/parquetjs/schema/schema.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {PARQUET_CODECS} from '../codecs';\nimport {PARQUET_COMPRESSION_METHODS} from '../compression';\nimport {\n FieldDefinition,\n ParquetBuffer,\n ParquetCompression,\n ParquetField,\n ParquetRecord,\n RepetitionType,\n SchemaDefinition\n} from './declare';\nimport {materializeRecords, shredBuffer, shredRecord} from './shred';\nimport {PARQUET_LOGICAL_TYPES} from './types';\n\n/**\n * A parquet file schema\n */\nexport class ParquetSchema {\n public schema: Record<string, FieldDefinition>;\n public fields: Record<string, ParquetField>;\n public fieldList: ParquetField[];\n\n /**\n * Create a new schema from a JSON schema definition\n */\n constructor(schema: SchemaDefinition) {\n this.schema = schema;\n this.fields = buildFields(schema, 0, 0, []);\n this.fieldList = listFields(this.fields);\n }\n\n /**\n * Retrieve a field definition\n */\n findField(path: string | string[]): ParquetField {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n } else {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.slice(0); // clone array\n }\n\n let n = this.fields;\n for (; path.length > 1; path.shift()) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n\n return n[path[0]];\n }\n\n /**\n * Retrieve a field definition and all the field's ancestors\n */\n findFieldBranch(path: string | string[]): ParquetField[] {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n }\n const branch: ParquetField[] = [];\n let n = this.fields;\n for (; path.length > 0; path.shift()) {\n branch.push(n[path[0]]);\n if (path.length > 1) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n }\n return branch;\n }\n\n shredRecord(record: ParquetRecord, buffer: ParquetBuffer): void {\n shredRecord(this, record, buffer);\n }\n\n materializeRecords(buffer: ParquetBuffer): ParquetRecord[] {\n return materializeRecords(this, buffer);\n }\n\n compress(type: ParquetCompression): this {\n setCompress(this.schema, type);\n setCompress(this.fields, type);\n return this;\n }\n\n buffer(): ParquetBuffer {\n return shredBuffer(this);\n }\n}\n\nfunction setCompress(schema: any, type: ParquetCompression) {\n for (const name in schema) {\n const node = schema[name];\n if (node.fields) {\n setCompress(node.fields, type);\n } else {\n node.compression = type;\n }\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction buildFields(\n schema: SchemaDefinition,\n rLevelParentMax: number,\n dLevelParentMax: number,\n path: string[]\n): Record<string, ParquetField> 
{\n const fieldList: Record<string, ParquetField> = {};\n\n for (const name in schema) {\n const opts = schema[name];\n\n /* field repetition type */\n const required = !opts.optional;\n const repeated = Boolean(opts.repeated);\n let rLevelMax = rLevelParentMax;\n let dLevelMax = dLevelParentMax;\n\n let repetitionType: RepetitionType = 'REQUIRED';\n if (!required) {\n repetitionType = 'OPTIONAL';\n dLevelMax++;\n }\n if (repeated) {\n repetitionType = 'REPEATED';\n rLevelMax++;\n if (required) dLevelMax++;\n }\n\n /* nested field */\n if (opts.fields) {\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n rLevelMax,\n dLevelMax,\n isNested: true,\n fieldCount: Object.keys(opts.fields).length,\n fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)\n };\n continue; // eslint-disable-line no-continue\n }\n\n const typeDef: any = PARQUET_LOGICAL_TYPES[opts.type!];\n if (!typeDef) {\n throw new Error(`invalid parquet type: ${opts.type}`);\n }\n\n opts.encoding = opts.encoding || 'PLAIN';\n if (!(opts.encoding in PARQUET_CODECS)) {\n throw new Error(`unsupported parquet encoding: ${opts.encoding}`);\n }\n\n opts.compression = opts.compression || 'UNCOMPRESSED';\n if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`unsupported compression method: ${opts.compression}`);\n }\n\n /* add to schema */\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n primitiveType: typeDef.primitiveType,\n originalType: typeDef.originalType,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n encoding: opts.encoding,\n compression: opts.compression,\n typeLength: opts.typeLength || typeDef.typeLength,\n presision: opts.presision,\n scale: opts.scale,\n rLevelMax,\n dLevelMax\n };\n }\n return fieldList;\n}\n\nfunction listFields(fields: Record<string, ParquetField>): ParquetField[] {\n let list: ParquetField[] = [];\n for (const k in fields) {\n list.push(fields[k]);\n if (fields[k].isNested) {\n list = list.concat(listFields(fields[k].fields!));\n }\n }\n return 
list;\n}\n"],"mappings":";;AAEA,SAAQA,cAAc,QAAO,WAAW;AACxC,SAAQC,2BAA2B,QAAO,gBAAgB;AAU1D,SAAQC,kBAAkB,EAAEC,WAAW,EAAEC,WAAW,QAAO,SAAS;AACpE,SAAQC,qBAAqB,QAAO,SAAS;;AAK7C,OAAO,MAAMC,aAAa,CAAC;EAQzBC,WAAW,CAACC,MAAwB,EAAE;IAAA;IAAA;IAAA;IACpC,IAAI,CAACA,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,MAAM,GAAGC,WAAW,CAACF,MAAM,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;IAC3C,IAAI,CAACG,SAAS,GAAGC,UAAU,CAAC,IAAI,CAACH,MAAM,CAAC;EAC1C;;EAKAI,SAAS,CAACC,IAAuB,EAAgB;IAC/C,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;MAE5BA,IAAI,GAAGA,IAAI,CAACC,KAAK,CAAC,GAAG,CAAC;IACxB,CAAC,MAAM;MAELD,IAAI,GAAGA,IAAI,CAACE,KAAK,CAAC,CAAC,CAAC;IACtB;;IAEA,IAAIC,CAAC,GAAG,IAAI,CAACR,MAAM;IACnB,OAAOK,IAAI,CAACI,MAAM,GAAG,CAAC,EAAEJ,IAAI,CAACK,KAAK,EAAE,EAAE;MACpCF,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAC,CAAC,CAAC,CAACL,MAAsC;IACvD;IAEA,OAAOQ,CAAC,CAACH,IAAI,CAAC,CAAC,CAAC,CAAC;EACnB;;EAKAM,eAAe,CAACN,IAAuB,EAAkB;IACvD,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;MAE5BA,IAAI,GAAGA,IAAI,CAACC,KAAK,CAAC,GAAG,CAAC;IACxB;IACA,MAAMM,MAAsB,GAAG,EAAE;IACjC,IAAIJ,CAAC,GAAG,IAAI,CAACR,MAAM;IACnB,OAAOK,IAAI,CAACI,MAAM,GAAG,CAAC,EAAEJ,IAAI,CAACK,KAAK,EAAE,EAAE;MACpCE,MAAM,CAACC,IAAI,CAACL,CAAC,CAACH,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;MACvB,IAAIA,IAAI,CAACI,MAAM,GAAG,CAAC,EAAE;QACnBD,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAC,CAAC,CAAC,CAACL,MAAsC;MACvD;IACF;IACA,OAAOY,MAAM;EACf;EAEAjB,WAAW,CAACmB,MAAqB,EAAEC,MAAqB,EAAQ;IAC9DpB,WAAW,CAAC,IAAI,EAAEmB,MAAM,EAAEC,MAAM,CAAC;EACnC;EAEAtB,kBAAkB,CAACsB,MAAqB,EAAmB;IACzD,OAAOtB,kBAAkB,CAAC,IAAI,EAAEsB,MAAM,CAAC;EACzC;EAEAC,QAAQ,CAACC,IAAwB,EAAQ;IACvCC,WAAW,CAAC,IAAI,CAACnB,MAAM,EAAEkB,IAAI,CAAC;IAC9BC,WAAW,CAAC,IAAI,CAAClB,MAAM,EAAEiB,IAAI,CAAC;IAC9B,OAAO,IAAI;EACb;EAEAF,MAAM,GAAkB;IACtB,OAAOrB,WAAW,CAAC,IAAI,CAAC;EAC1B;AACF;AAEA,SAASwB,WAAW,CAACnB,MAAW,EAAEkB,IAAwB,EAAE;EAC1D,KAAK,MAAME,IAAI,IAAIpB,MAAM,EAAE;IACzB,MAAMqB,IAAI,GAAGrB,MAAM,CAACoB,IAAI,CAAC;IACzB,IAAIC,IAAI,CAACpB,MAAM,EAAE;MACfkB,WAAW,CAACE,IAAI,CAACpB,MAAM,EAAEiB,IAAI,CAAC;IAChC,CAAC,MAAM;MACLG,IAAI,CAACC,WAAW,GAAGJ,IAAI;IACzB;EACF;AACF;;AAGA,SAAShB,WAAW,CAClBF,MAAwB,EACxBuB,eAAuB,EACvBC,eAAuB,EACvBlB,IAAc,EACgB;EAC9B,MAAMH,SAAuC,GAAG,CAAC,CAAC;EAElD,KAAK,MAAMiB,IAAI,IAAIpB,MAAM,EAAE;IACzB,MAAMyB,IAAI,GAAGzB,MAAM,CAACoB,IAAI,CAAC;;IAGzB,MAAMM,QAAQ,GAAG,CAACD,IAAI,CAACE,QAAQ;IAC/B,MAAMC,QAAQ,GAAGC,OAAO,CAACJ,IAAI,CAACG,QAAQ,CAAC;IACvC,IAAIE,SAAS,GAAGP,eAAe;IAC/B,IAAIQ,SAAS,GAAGP,eAAe;IAE/B,IAAIQ,cAA8B,GAAG,UAAU;IAC/C,IAAI,CAACN,QAAQ,EAAE;MACbM,cAAc,GAAG,UAAU;MAC3BD,SAAS,EAAE;IACb;IACA,IAAIH,QAAQ,EAAE;MACZI,cAAc,GAAG,UAAU;MAC3BF,SAAS,EAAE;MACX,IAAIJ,QAAQ,EAAEK,SAAS,EAAE;IAC3B;;IAGA,IAAIN,IAAI,CAACxB,MAAM,EAAE;MACf,MAAMgC,KAAK,GAAG3B,IAAI,CAAC4B,MAAM,CAAC,CAACd,IAAI,CAAC,CAAC;MACjCjB,SAAS,CAACiB,IAAI,CAAC,GAAG;QAChBA,IAAI;QACJd,IAAI,EAAE2B,KAAK;QACXE,GAAG,EAAEF,KAAK,CAACG,IAAI,EAAE;QACjBJ,cAAc;QACdF,SAAS;QACTC,SAAS;QACTM,QAAQ,EAAE,IAAI;QACdC,UAAU,EAAEC,MAAM,CAACC,IAAI,CAACf,IAAI,CAACxB,MAAM,CAAC,CAACS,MAAM;QAC3CT,MAAM,EAAEC,WAAW,CAACuB,IAAI,CAACxB,MAAM,EAAE6B,SAAS,EAAEC,SAAS,EAAEE,KAAK;MAC9D,CAAC;MACD;IACF;;IAEA,MAAMQ,OAAY,GAAG5C,qBAAqB,CAAC4B,IAAI,CAACP,IAAI,CAAE;IACtD,IAAI,CAACuB,OAAO,EAAE;MACZ,MAAM,IAAIC,KAAK,iCAA0BjB,IAAI,CAACP,IAAI,EAAG;IACvD;IAEAO,IAAI,CAACkB,QAAQ,GAAGlB,IAAI,CAACkB,QAAQ,IAAI,OAAO;IACxC,IAAI,EAAElB,IAAI,CAACkB,QAAQ,IAAInD,cAAc,CAAC,EAAE;MACtC,MAAM,IAAIkD,KAAK,yCAAkCjB,IAAI,CAACkB,QAAQ,EAAG;IACnE;IAEAlB,IAAI,CAACH,WAAW,GAAGG,IAAI,CAACH,WAAW,IAAI,cAAc;IACrD,IAAI,EAAEG,IAAI,CAACH,WAAW,IAAI7B,2BAA2B,CAAC,EAAE;MACtD,MAAM,IAAIiD,KAAK,2CAAoCjB,IAAI,CAACH,WAAW,EAAG;IACxE;;IAGA,MAAMW,KAAK,GAAG3B,IAAI,CAAC4B,MAAM,CAAC,CAACd,IAAI,CAAC,CAAC;IACjCjB,SAAS,CAACiB,IAAI,CAAC,GA
AG;MAChBA,IAAI;MACJwB,aAAa,EAAEH,OAAO,CAACG,aAAa;MACpCC,YAAY,EAAEJ,OAAO,CAACI,YAAY;MAClCvC,IAAI,EAAE2B,KAAK;MACXE,GAAG,EAAEF,KAAK,CAACG,IAAI,EAAE;MACjBJ,cAAc;MACdW,QAAQ,EAAElB,IAAI,CAACkB,QAAQ;MACvBrB,WAAW,EAAEG,IAAI,CAACH,WAAW;MAC7BwB,UAAU,EAAErB,IAAI,CAACqB,UAAU,IAAIL,OAAO,CAACK,UAAU;MACjDC,SAAS,EAAEtB,IAAI,CAACsB,SAAS;MACzBC,KAAK,EAAEvB,IAAI,CAACuB,KAAK;MACjBlB,SAAS;MACTC;IACF,CAAC;EACH;EACA,OAAO5B,SAAS;AAClB;AAEA,SAASC,UAAU,CAACH,MAAoC,EAAkB;EACxE,IAAIgD,IAAoB,GAAG,EAAE;EAC7B,KAAK,MAAMC,CAAC,IAAIjD,MAAM,EAAE;IACtBgD,IAAI,CAACnC,IAAI,CAACb,MAAM,CAACiD,CAAC,CAAC,CAAC;IACpB,IAAIjD,MAAM,CAACiD,CAAC,CAAC,CAACb,QAAQ,EAAE;MACtBY,IAAI,GAAGA,IAAI,CAACf,MAAM,CAAC9B,UAAU,CAACH,MAAM,CAACiD,CAAC,CAAC,CAACjD,MAAM,CAAE,CAAC;IACnD;EACF;EACA,OAAOgD,IAAI;AACb"}
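For context on the schema.js changes above (again output-formatting only), the sourcesContent shows how buildFields() validates each field's type, encoding, and compression before building the field list. Below is a minimal construction sketch based on that source; the deep import path and the exact set of keys accepted by PARQUET_LOGICAL_TYPES are assumptions.

```ts
// Sketch only: constructing a ParquetSchema as buildFields() expects.
// Unknown types, encodings, or compression methods make the constructor
// throw, per the checks in the hunk above.
import {ParquetSchema} from '@loaders.gl/parquet/dist/es5/parquetjs/schema/schema';

const schema = new ParquetSchema({
  name: {type: 'UTF8'},                               // REQUIRED by default
  count: {type: 'INT64', optional: true},             // OPTIONAL: dLevelMax++
  tags: {type: 'UTF8', repeated: true},               // REPEATED: rLevelMax++
  payload: {type: 'BYTE_ARRAY', compression: 'GZIP'}  // per-column compression
});

console.log(schema.fieldList.map((f) => f.key)); // ['name', 'count', 'tags', 'payload']
```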