@loaders.gl/parquet 3.1.3 → 4.0.0-alpha.5

This diff compares the published contents of two publicly available versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (438)
  1. package/dist/bundle.js +2 -2
  2. package/dist/bundle.js.map +1 -0
  3. package/dist/constants.js +6 -18
  4. package/dist/constants.js.map +1 -0
  5. package/dist/dist.min.js +17 -8
  6. package/dist/dist.min.js.map +3 -3
  7. package/dist/index.js +14 -29
  8. package/dist/index.js.map +1 -0
  9. package/dist/lib/convert-schema.js +63 -62
  10. package/dist/lib/convert-schema.js.map +1 -0
  11. package/dist/lib/parse-parquet.js +25 -25
  12. package/dist/lib/parse-parquet.js.map +1 -0
  13. package/dist/lib/read-array-buffer.js +8 -28
  14. package/dist/lib/read-array-buffer.js.map +1 -0
  15. package/dist/parquet-loader.js +19 -24
  16. package/dist/parquet-loader.js.map +1 -0
  17. package/dist/parquet-worker.js +18 -9
  18. package/dist/parquet-worker.js.map +3 -3
  19. package/dist/parquet-writer.js +14 -17
  20. package/dist/parquet-writer.js.map +1 -0
  21. package/dist/{es5/parquetjs → parquetjs}/LICENSE +0 -0
  22. package/dist/parquetjs/codecs/declare.js +2 -2
  23. package/dist/{es5/parquetjs → parquetjs}/codecs/declare.js.map +0 -0
  24. package/dist/parquetjs/codecs/dictionary.js +10 -12
  25. package/dist/parquetjs/codecs/dictionary.js.map +1 -0
  26. package/dist/parquetjs/codecs/index.js +22 -50
  27. package/dist/parquetjs/codecs/index.js.map +1 -0
  28. package/dist/parquetjs/codecs/plain.js +232 -173
  29. package/dist/parquetjs/codecs/plain.js.map +1 -0
  30. package/dist/parquetjs/codecs/rle.js +140 -134
  31. package/dist/parquetjs/codecs/rle.js.map +1 -0
  32. package/dist/parquetjs/compression.js +48 -154
  33. package/dist/parquetjs/compression.js.map +1 -0
  34. package/dist/parquetjs/encoder/writer.js +383 -440
  35. package/dist/parquetjs/encoder/writer.js.map +1 -0
  36. package/dist/parquetjs/file.js +66 -85
  37. package/dist/parquetjs/file.js.map +1 -0
  38. package/dist/{es5/parquetjs → parquetjs}/modules.d.ts +0 -0
  39. package/dist/parquetjs/parquet-thrift/BoundaryOrder.js +7 -14
  40. package/dist/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -0
  41. package/dist/parquetjs/parquet-thrift/BsonType.js +37 -56
  42. package/dist/parquetjs/parquet-thrift/BsonType.js.map +1 -0
  43. package/dist/parquetjs/parquet-thrift/ColumnChunk.js +215 -205
  44. package/dist/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -0
  45. package/dist/parquetjs/parquet-thrift/ColumnIndex.js +212 -207
  46. package/dist/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -0
  47. package/dist/parquetjs/parquet-thrift/ColumnMetaData.js +422 -391
  48. package/dist/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -0
  49. package/dist/parquetjs/parquet-thrift/ColumnOrder.js +90 -99
  50. package/dist/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -0
  51. package/dist/parquetjs/parquet-thrift/CompressionCodec.js +12 -19
  52. package/dist/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -0
  53. package/dist/parquetjs/parquet-thrift/ConvertedType.js +26 -33
  54. package/dist/parquetjs/parquet-thrift/ConvertedType.js.map +1 -0
  55. package/dist/parquetjs/parquet-thrift/DataPageHeader.js +162 -162
  56. package/dist/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -0
  57. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js +234 -224
  58. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -0
  59. package/dist/parquetjs/parquet-thrift/DateType.js +37 -56
  60. package/dist/parquetjs/parquet-thrift/DateType.js.map +1 -0
  61. package/dist/parquetjs/parquet-thrift/DecimalType.js +91 -101
  62. package/dist/parquetjs/parquet-thrift/DecimalType.js.map +1 -0
  63. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js +113 -118
  64. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -0
  65. package/dist/parquetjs/parquet-thrift/Encoding.js +12 -19
  66. package/dist/parquetjs/parquet-thrift/Encoding.js.map +1 -0
  67. package/dist/parquetjs/parquet-thrift/EnumType.js +37 -56
  68. package/dist/parquetjs/parquet-thrift/EnumType.js.map +1 -0
  69. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js +7 -14
  70. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -0
  71. package/dist/parquetjs/parquet-thrift/FileMetaData.js +264 -250
  72. package/dist/parquetjs/parquet-thrift/FileMetaData.js.map +1 -0
  73. package/dist/parquetjs/parquet-thrift/IndexPageHeader.js +37 -56
  74. package/dist/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -0
  75. package/dist/parquetjs/parquet-thrift/IntType.js +91 -101
  76. package/dist/parquetjs/parquet-thrift/IntType.js.map +1 -0
  77. package/dist/parquetjs/parquet-thrift/JsonType.js +37 -56
  78. package/dist/parquetjs/parquet-thrift/JsonType.js.map +1 -0
  79. package/dist/parquetjs/parquet-thrift/KeyValue.js +89 -98
  80. package/dist/parquetjs/parquet-thrift/KeyValue.js.map +1 -0
  81. package/dist/parquetjs/parquet-thrift/ListType.js +37 -56
  82. package/dist/parquetjs/parquet-thrift/ListType.js.map +1 -0
  83. package/dist/parquetjs/parquet-thrift/LogicalType.js +450 -363
  84. package/dist/parquetjs/parquet-thrift/LogicalType.js.map +1 -0
  85. package/dist/parquetjs/parquet-thrift/MapType.js +37 -56
  86. package/dist/parquetjs/parquet-thrift/MapType.js.map +1 -0
  87. package/dist/parquetjs/parquet-thrift/MicroSeconds.js +37 -56
  88. package/dist/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -0
  89. package/dist/parquetjs/parquet-thrift/MilliSeconds.js +37 -56
  90. package/dist/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -0
  91. package/dist/parquetjs/parquet-thrift/NullType.js +37 -56
  92. package/dist/parquetjs/parquet-thrift/NullType.js.map +1 -0
  93. package/dist/parquetjs/parquet-thrift/OffsetIndex.js +80 -92
  94. package/dist/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -0
  95. package/dist/parquetjs/parquet-thrift/PageEncodingStats.js +115 -123
  96. package/dist/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -0
  97. package/dist/parquetjs/parquet-thrift/PageHeader.js +231 -214
  98. package/dist/parquetjs/parquet-thrift/PageHeader.js.map +1 -0
  99. package/dist/parquetjs/parquet-thrift/PageLocation.js +124 -137
  100. package/dist/parquetjs/parquet-thrift/PageLocation.js.map +1 -0
  101. package/dist/parquetjs/parquet-thrift/PageType.js +8 -15
  102. package/dist/parquetjs/parquet-thrift/PageType.js.map +1 -0
  103. package/dist/parquetjs/parquet-thrift/RowGroup.js +172 -176
  104. package/dist/parquetjs/parquet-thrift/RowGroup.js.map +1 -0
  105. package/dist/parquetjs/parquet-thrift/SchemaElement.js +268 -237
  106. package/dist/parquetjs/parquet-thrift/SchemaElement.js.map +1 -0
  107. package/dist/parquetjs/parquet-thrift/SortingColumn.js +115 -123
  108. package/dist/parquetjs/parquet-thrift/SortingColumn.js.map +1 -0
  109. package/dist/parquetjs/parquet-thrift/Statistics.js +179 -172
  110. package/dist/parquetjs/parquet-thrift/Statistics.js.map +1 -0
  111. package/dist/parquetjs/parquet-thrift/StringType.js +37 -56
  112. package/dist/parquetjs/parquet-thrift/StringType.js.map +1 -0
  113. package/dist/parquetjs/parquet-thrift/TimeType.js +92 -102
  114. package/dist/parquetjs/parquet-thrift/TimeType.js.map +1 -0
  115. package/dist/parquetjs/parquet-thrift/TimeUnit.js +120 -121
  116. package/dist/parquetjs/parquet-thrift/TimeUnit.js.map +1 -0
  117. package/dist/parquetjs/parquet-thrift/TimestampType.js +92 -102
  118. package/dist/parquetjs/parquet-thrift/TimestampType.js.map +1 -0
  119. package/dist/parquetjs/parquet-thrift/Type.js +12 -19
  120. package/dist/parquetjs/parquet-thrift/Type.js.map +1 -0
  121. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js +37 -56
  122. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -0
  123. package/dist/parquetjs/parquet-thrift/UUIDType.js +37 -56
  124. package/dist/parquetjs/parquet-thrift/UUIDType.js.map +1 -0
  125. package/dist/parquetjs/parquet-thrift/index.js +44 -61
  126. package/dist/parquetjs/parquet-thrift/index.js.map +1 -0
  127. package/dist/parquetjs/parser/decoders.js +283 -301
  128. package/dist/{es5/parquetjs → parquetjs}/parser/decoders.js.map +1 -1
  129. package/dist/parquetjs/parser/parquet-cursor.js +85 -69
  130. package/dist/parquetjs/parser/parquet-cursor.js.map +1 -0
  131. package/dist/parquetjs/parser/parquet-envelope-reader.js +146 -127
  132. package/dist/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
  133. package/dist/parquetjs/parser/parquet-reader.js +113 -127
  134. package/dist/parquetjs/parser/parquet-reader.js.map +1 -0
  135. package/dist/parquetjs/schema/declare.js +12 -9
  136. package/dist/parquetjs/schema/declare.js.map +1 -0
  137. package/dist/parquetjs/schema/schema.js +162 -148
  138. package/dist/{es5/parquetjs → parquetjs}/schema/schema.js.map +1 -1
  139. package/dist/parquetjs/schema/shred.js +151 -214
  140. package/dist/parquetjs/schema/shred.js.map +1 -0
  141. package/dist/parquetjs/schema/types.js +415 -357
  142. package/dist/parquetjs/schema/types.js.map +1 -0
  143. package/dist/parquetjs/utils/buffer-utils.js +10 -20
  144. package/dist/parquetjs/utils/buffer-utils.js.map +1 -0
  145. package/dist/parquetjs/utils/file-utils.js +28 -40
  146. package/dist/parquetjs/utils/file-utils.js.map +1 -0
  147. package/dist/parquetjs/utils/read-utils.js +95 -99
  148. package/dist/parquetjs/utils/read-utils.js.map +1 -0
  149. package/dist/workers/parquet-worker.js +4 -5
  150. package/dist/workers/parquet-worker.js.map +1 -0
  151. package/package.json +8 -8
  152. package/dist/es5/bundle.js +0 -7
  153. package/dist/es5/bundle.js.map +0 -1
  154. package/dist/es5/constants.js +0 -17
  155. package/dist/es5/constants.js.map +0 -1
  156. package/dist/es5/index.js +0 -82
  157. package/dist/es5/index.js.map +0 -1
  158. package/dist/es5/lib/convert-schema.js +0 -82
  159. package/dist/es5/lib/convert-schema.js.map +0 -1
  160. package/dist/es5/lib/parse-parquet.js +0 -173
  161. package/dist/es5/lib/parse-parquet.js.map +0 -1
  162. package/dist/es5/lib/read-array-buffer.js +0 -53
  163. package/dist/es5/lib/read-array-buffer.js.map +0 -1
  164. package/dist/es5/parquet-loader.js +0 -30
  165. package/dist/es5/parquet-loader.js.map +0 -1
  166. package/dist/es5/parquet-writer.js +0 -25
  167. package/dist/es5/parquet-writer.js.map +0 -1
  168. package/dist/es5/parquetjs/codecs/declare.js +0 -2
  169. package/dist/es5/parquetjs/codecs/dictionary.js +0 -30
  170. package/dist/es5/parquetjs/codecs/dictionary.js.map +0 -1
  171. package/dist/es5/parquetjs/codecs/index.js +0 -56
  172. package/dist/es5/parquetjs/codecs/index.js.map +0 -1
  173. package/dist/es5/parquetjs/codecs/plain.js +0 -287
  174. package/dist/es5/parquetjs/codecs/plain.js.map +0 -1
  175. package/dist/es5/parquetjs/codecs/rle.js +0 -174
  176. package/dist/es5/parquetjs/codecs/rle.js.map +0 -1
  177. package/dist/es5/parquetjs/compression.js +0 -167
  178. package/dist/es5/parquetjs/compression.js.map +0 -1
  179. package/dist/es5/parquetjs/encoder/writer.js +0 -875
  180. package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
  181. package/dist/es5/parquetjs/file.js +0 -103
  182. package/dist/es5/parquetjs/file.js.map +0 -1
  183. package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +0 -15
  184. package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js.map +0 -1
  185. package/dist/es5/parquetjs/parquet-thrift/BsonType.js +0 -67
  186. package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +0 -1
  187. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js +0 -241
  188. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +0 -1
  189. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js +0 -245
  190. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +0 -1
  191. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js +0 -449
  192. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +0 -1
  193. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js +0 -124
  194. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +0 -1
  195. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +0 -20
  196. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js.map +0 -1
  197. package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +0 -34
  198. package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js.map +0 -1
  199. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js +0 -191
  200. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +0 -1
  201. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js +0 -258
  202. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +0 -1
  203. package/dist/es5/parquetjs/parquet-thrift/DateType.js +0 -67
  204. package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +0 -1
  205. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js +0 -122
  206. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +0 -1
  207. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js +0 -143
  208. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +0 -1
  209. package/dist/es5/parquetjs/parquet-thrift/Encoding.js +0 -20
  210. package/dist/es5/parquetjs/parquet-thrift/Encoding.js.map +0 -1
  211. package/dist/es5/parquetjs/parquet-thrift/EnumType.js +0 -67
  212. package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +0 -1
  213. package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +0 -15
  214. package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js.map +0 -1
  215. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js +0 -298
  216. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +0 -1
  217. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js +0 -67
  218. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +0 -1
  219. package/dist/es5/parquetjs/parquet-thrift/IntType.js +0 -122
  220. package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +0 -1
  221. package/dist/es5/parquetjs/parquet-thrift/JsonType.js +0 -67
  222. package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +0 -1
  223. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js +0 -120
  224. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +0 -1
  225. package/dist/es5/parquetjs/parquet-thrift/ListType.js +0 -67
  226. package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +0 -1
  227. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js +0 -508
  228. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +0 -1
  229. package/dist/es5/parquetjs/parquet-thrift/MapType.js +0 -67
  230. package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +0 -1
  231. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js +0 -67
  232. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +0 -1
  233. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js +0 -67
  234. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +0 -1
  235. package/dist/es5/parquetjs/parquet-thrift/NullType.js +0 -67
  236. package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +0 -1
  237. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js +0 -114
  238. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +0 -1
  239. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js +0 -145
  240. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +0 -1
  241. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js +0 -258
  242. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +0 -1
  243. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js +0 -155
  244. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +0 -1
  245. package/dist/es5/parquetjs/parquet-thrift/PageType.js +0 -16
  246. package/dist/es5/parquetjs/parquet-thrift/PageType.js.map +0 -1
  247. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js +0 -206
  248. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +0 -1
  249. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js +0 -290
  250. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +0 -1
  251. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js +0 -145
  252. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +0 -1
  253. package/dist/es5/parquetjs/parquet-thrift/Statistics.js +0 -207
  254. package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +0 -1
  255. package/dist/es5/parquetjs/parquet-thrift/StringType.js +0 -67
  256. package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +0 -1
  257. package/dist/es5/parquetjs/parquet-thrift/TimeType.js +0 -124
  258. package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +0 -1
  259. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js +0 -156
  260. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +0 -1
  261. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js +0 -124
  262. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +0 -1
  263. package/dist/es5/parquetjs/parquet-thrift/Type.js +0 -20
  264. package/dist/es5/parquetjs/parquet-thrift/Type.js.map +0 -1
  265. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -67
  266. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +0 -1
  267. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js +0 -67
  268. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +0 -1
  269. package/dist/es5/parquetjs/parquet-thrift/index.js +0 -565
  270. package/dist/es5/parquetjs/parquet-thrift/index.js.map +0 -1
  271. package/dist/es5/parquetjs/parser/decoders.js +0 -489
  272. package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -215
  273. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
  274. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -413
  275. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
  276. package/dist/es5/parquetjs/parser/parquet-reader.js +0 -364
  277. package/dist/es5/parquetjs/parser/parquet-reader.js.map +0 -1
  278. package/dist/es5/parquetjs/schema/declare.js +0 -25
  279. package/dist/es5/parquetjs/schema/declare.js.map +0 -1
  280. package/dist/es5/parquetjs/schema/schema.js +0 -203
  281. package/dist/es5/parquetjs/schema/shred.js +0 -223
  282. package/dist/es5/parquetjs/schema/shred.js.map +0 -1
  283. package/dist/es5/parquetjs/schema/types.js +0 -492
  284. package/dist/es5/parquetjs/schema/types.js.map +0 -1
  285. package/dist/es5/parquetjs/utils/buffer-utils.js +0 -21
  286. package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
  287. package/dist/es5/parquetjs/utils/file-utils.js +0 -55
  288. package/dist/es5/parquetjs/utils/file-utils.js.map +0 -1
  289. package/dist/es5/parquetjs/utils/read-utils.js +0 -159
  290. package/dist/es5/parquetjs/utils/read-utils.js.map +0 -1
  291. package/dist/es5/workers/parquet-worker.js +0 -8
  292. package/dist/es5/workers/parquet-worker.js.map +0 -1
  293. package/dist/esm/bundle.js +0 -5
  294. package/dist/esm/bundle.js.map +0 -1
  295. package/dist/esm/constants.js +0 -6
  296. package/dist/esm/constants.js.map +0 -1
  297. package/dist/esm/index.js +0 -15
  298. package/dist/esm/index.js.map +0 -1
  299. package/dist/esm/lib/convert-schema.js +0 -71
  300. package/dist/esm/lib/convert-schema.js.map +0 -1
  301. package/dist/esm/lib/parse-parquet.js +0 -28
  302. package/dist/esm/lib/parse-parquet.js.map +0 -1
  303. package/dist/esm/lib/read-array-buffer.js +0 -9
  304. package/dist/esm/lib/read-array-buffer.js.map +0 -1
  305. package/dist/esm/parquet-loader.js +0 -22
  306. package/dist/esm/parquet-loader.js.map +0 -1
  307. package/dist/esm/parquet-writer.js +0 -18
  308. package/dist/esm/parquet-writer.js.map +0 -1
  309. package/dist/esm/parquetjs/LICENSE +0 -20
  310. package/dist/esm/parquetjs/codecs/declare.js +0 -2
  311. package/dist/esm/parquetjs/codecs/declare.js.map +0 -1
  312. package/dist/esm/parquetjs/codecs/dictionary.js +0 -12
  313. package/dist/esm/parquetjs/codecs/dictionary.js.map +0 -1
  314. package/dist/esm/parquetjs/codecs/index.js +0 -23
  315. package/dist/esm/parquetjs/codecs/index.js.map +0 -1
  316. package/dist/esm/parquetjs/codecs/plain.js +0 -270
  317. package/dist/esm/parquetjs/codecs/plain.js.map +0 -1
  318. package/dist/esm/parquetjs/codecs/rle.js +0 -151
  319. package/dist/esm/parquetjs/codecs/rle.js.map +0 -1
  320. package/dist/esm/parquetjs/compression.js +0 -62
  321. package/dist/esm/parquetjs/compression.js.map +0 -1
  322. package/dist/esm/parquetjs/encoder/writer.js +0 -421
  323. package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
  324. package/dist/esm/parquetjs/file.js +0 -80
  325. package/dist/esm/parquetjs/file.js.map +0 -1
  326. package/dist/esm/parquetjs/modules.d.ts +0 -21
  327. package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js +0 -8
  328. package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js.map +0 -1
  329. package/dist/esm/parquetjs/parquet-thrift/BsonType.js +0 -39
  330. package/dist/esm/parquetjs/parquet-thrift/BsonType.js.map +0 -1
  331. package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js +0 -217
  332. package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js.map +0 -1
  333. package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js +0 -218
  334. package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js.map +0 -1
  335. package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js +0 -429
  336. package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js.map +0 -1
  337. package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js +0 -95
  338. package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js.map +0 -1
  339. package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js +0 -13
  340. package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js.map +0 -1
  341. package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js +0 -27
  342. package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js.map +0 -1
  343. package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js +0 -166
  344. package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js.map +0 -1
  345. package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js +0 -236
  346. package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +0 -1
  347. package/dist/esm/parquetjs/parquet-thrift/DateType.js +0 -39
  348. package/dist/esm/parquetjs/parquet-thrift/DateType.js.map +0 -1
  349. package/dist/esm/parquetjs/parquet-thrift/DecimalType.js +0 -95
  350. package/dist/esm/parquetjs/parquet-thrift/DecimalType.js.map +0 -1
  351. package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js +0 -117
  352. package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +0 -1
  353. package/dist/esm/parquetjs/parquet-thrift/Encoding.js +0 -13
  354. package/dist/esm/parquetjs/parquet-thrift/Encoding.js.map +0 -1
  355. package/dist/esm/parquetjs/parquet-thrift/EnumType.js +0 -39
  356. package/dist/esm/parquetjs/parquet-thrift/EnumType.js.map +0 -1
  357. package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js +0 -8
  358. package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js.map +0 -1
  359. package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js +0 -270
  360. package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js.map +0 -1
  361. package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js +0 -39
  362. package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js.map +0 -1
  363. package/dist/esm/parquetjs/parquet-thrift/IntType.js +0 -95
  364. package/dist/esm/parquetjs/parquet-thrift/IntType.js.map +0 -1
  365. package/dist/esm/parquetjs/parquet-thrift/JsonType.js +0 -39
  366. package/dist/esm/parquetjs/parquet-thrift/JsonType.js.map +0 -1
  367. package/dist/esm/parquetjs/parquet-thrift/KeyValue.js +0 -93
  368. package/dist/esm/parquetjs/parquet-thrift/KeyValue.js.map +0 -1
  369. package/dist/esm/parquetjs/parquet-thrift/ListType.js +0 -39
  370. package/dist/esm/parquetjs/parquet-thrift/ListType.js.map +0 -1
  371. package/dist/esm/parquetjs/parquet-thrift/LogicalType.js +0 -467
  372. package/dist/esm/parquetjs/parquet-thrift/LogicalType.js.map +0 -1
  373. package/dist/esm/parquetjs/parquet-thrift/MapType.js +0 -39
  374. package/dist/esm/parquetjs/parquet-thrift/MapType.js.map +0 -1
  375. package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js +0 -39
  376. package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js.map +0 -1
  377. package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js +0 -39
  378. package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js.map +0 -1
  379. package/dist/esm/parquetjs/parquet-thrift/NullType.js +0 -39
  380. package/dist/esm/parquetjs/parquet-thrift/NullType.js.map +0 -1
  381. package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js +0 -85
  382. package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js.map +0 -1
  383. package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js +0 -119
  384. package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js.map +0 -1
  385. package/dist/esm/parquetjs/parquet-thrift/PageHeader.js +0 -233
  386. package/dist/esm/parquetjs/parquet-thrift/PageHeader.js.map +0 -1
  387. package/dist/esm/parquetjs/parquet-thrift/PageLocation.js +0 -128
  388. package/dist/esm/parquetjs/parquet-thrift/PageLocation.js.map +0 -1
  389. package/dist/esm/parquetjs/parquet-thrift/PageType.js +0 -9
  390. package/dist/esm/parquetjs/parquet-thrift/PageType.js.map +0 -1
  391. package/dist/esm/parquetjs/parquet-thrift/RowGroup.js +0 -178
  392. package/dist/esm/parquetjs/parquet-thrift/RowGroup.js.map +0 -1
  393. package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js +0 -270
  394. package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js.map +0 -1
  395. package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js +0 -119
  396. package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js.map +0 -1
  397. package/dist/esm/parquetjs/parquet-thrift/Statistics.js +0 -183
  398. package/dist/esm/parquetjs/parquet-thrift/Statistics.js.map +0 -1
  399. package/dist/esm/parquetjs/parquet-thrift/StringType.js +0 -39
  400. package/dist/esm/parquetjs/parquet-thrift/StringType.js.map +0 -1
  401. package/dist/esm/parquetjs/parquet-thrift/TimeType.js +0 -96
  402. package/dist/esm/parquetjs/parquet-thrift/TimeType.js.map +0 -1
  403. package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js +0 -126
  404. package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js.map +0 -1
  405. package/dist/esm/parquetjs/parquet-thrift/TimestampType.js +0 -96
  406. package/dist/esm/parquetjs/parquet-thrift/TimestampType.js.map +0 -1
  407. package/dist/esm/parquetjs/parquet-thrift/Type.js +0 -13
  408. package/dist/esm/parquetjs/parquet-thrift/Type.js.map +0 -1
  409. package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -39
  410. package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +0 -1
  411. package/dist/esm/parquetjs/parquet-thrift/UUIDType.js +0 -39
  412. package/dist/esm/parquetjs/parquet-thrift/UUIDType.js.map +0 -1
  413. package/dist/esm/parquetjs/parquet-thrift/index.js +0 -44
  414. package/dist/esm/parquetjs/parquet-thrift/index.js.map +0 -1
  415. package/dist/esm/parquetjs/parser/decoders.js +0 -300
  416. package/dist/esm/parquetjs/parser/decoders.js.map +0 -1
  417. package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -90
  418. package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
  419. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -155
  420. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
  421. package/dist/esm/parquetjs/parser/parquet-reader.js +0 -120
  422. package/dist/esm/parquetjs/parser/parquet-reader.js.map +0 -1
  423. package/dist/esm/parquetjs/schema/declare.js +0 -13
  424. package/dist/esm/parquetjs/schema/declare.js.map +0 -1
  425. package/dist/esm/parquetjs/schema/schema.js +0 -176
  426. package/dist/esm/parquetjs/schema/schema.js.map +0 -1
  427. package/dist/esm/parquetjs/schema/shred.js +0 -162
  428. package/dist/esm/parquetjs/schema/shred.js.map +0 -1
  429. package/dist/esm/parquetjs/schema/types.js +0 -476
  430. package/dist/esm/parquetjs/schema/types.js.map +0 -1
  431. package/dist/esm/parquetjs/utils/buffer-utils.js +0 -12
  432. package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
  433. package/dist/esm/parquetjs/utils/file-utils.js +0 -34
  434. package/dist/esm/parquetjs/utils/file-utils.js.map +0 -1
  435. package/dist/esm/parquetjs/utils/read-utils.js +0 -105
  436. package/dist/esm/parquetjs/utils/read-utils.js.map +0 -1
  437. package/dist/esm/workers/parquet-worker.js +0 -4
  438. package/dist/esm/workers/parquet-worker.js.map +0 -1
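Note on the file list above: the 3.1.3 package shipped three parallel builds (dist/, dist/es5/ and dist/esm/). In 4.0.0-alpha.5 the dist/es5/ and dist/esm/ trees are deleted and the dist/ root becomes the single build, now emitted as ES modules (visible in the shred.js hunk further down); those deletions account for the bulk of the 438 entries. Code that imports through the package entry point rather than deep dist/ paths should be unaffected by the reorganization. A minimal usage sketch follows; it assumes @loaders.gl/core is installed alongside this package, and the file name is a placeholder.

// Minimal sketch: load a Parquet file through the public entry points.
// 'data.parquet' is an illustrative URL, not part of this package.
import {load} from '@loaders.gl/core';
import {ParquetLoader} from '@loaders.gl/parquet';

async function readParquet() {
  // load() fetches the resource and dispatches it to ParquetLoader for parsing
  const data = await load('data.parquet', ParquetLoader);
  return data;
}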
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/parquetjs/schema/schema.ts"],"names":["ParquetSchema","schema","fields","buildFields","fieldList","listFields","path","split","slice","n","length","shift","branch","push","record","buffer","type","setCompress","name","node","compression","rLevelParentMax","dLevelParentMax","opts","required","optional","repeated","Boolean","rLevelMax","dLevelMax","repetitionType","cpath","concat","key","join","isNested","fieldCount","Object","keys","typeDef","PARQUET_LOGICAL_TYPES","Error","encoding","PARQUET_CODECS","PARQUET_COMPRESSION_METHODS","primitiveType","originalType","typeLength","presision","scale","list","k"],"mappings":";;;;;;;;;;;;;;;AAEA;;AACA;;AAUA;;AACA;;IAKaA,a;AAQX,yBAAYC,MAAZ,EAAsC;AAAA;AAAA;AAAA;AAAA;AACpC,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKC,MAAL,GAAcC,WAAW,CAACF,MAAD,EAAS,CAAT,EAAY,CAAZ,EAAe,EAAf,CAAzB;AACA,SAAKG,SAAL,GAAiBC,UAAU,CAAC,KAAKH,MAAN,CAA3B;AACD;;;;WAKD,mBAAUI,IAAV,EAAiD;AAC/C,UAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,QAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD,OAHD,MAGO;AAELD,QAAAA,IAAI,GAAGA,IAAI,CAACE,KAAL,CAAW,CAAX,CAAP;AACD;;AAED,UAAIC,CAAC,GAAG,KAAKP,MAAb;;AACA,aAAOI,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCF,QAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWJ,MAAf;AACD;;AAED,aAAOO,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAR;AACD;;;WAKD,yBAAgBA,IAAhB,EAAyD;AACvD,UAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,QAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD;;AACD,UAAMK,MAAsB,GAAG,EAA/B;AACA,UAAIH,CAAC,GAAG,KAAKP,MAAb;;AACA,aAAOI,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCC,QAAAA,MAAM,CAACC,IAAP,CAAYJ,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAb;;AACA,YAAIA,IAAI,CAACI,MAAL,GAAc,CAAlB,EAAqB;AACnBD,UAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWJ,MAAf;AACD;AACF;;AACD,aAAOU,MAAP;AACD;;;WAED,qBAAYE,MAAZ,EAAmCC,MAAnC,EAAgE;AAC9D,8BAAY,IAAZ,EAAkBD,MAAlB,EAA0BC,MAA1B;AACD;;;WAED,4BAAmBA,MAAnB,EAA2D;AACzD,aAAO,+BAAmB,IAAnB,EAAyBA,MAAzB,CAAP;AACD;;;WAED,kBAASC,IAAT,EAAyC;AACvCC,MAAAA,WAAW,CAAC,KAAKhB,MAAN,EAAce,IAAd,CAAX;AACAC,MAAAA,WAAW,CAAC,KAAKf,MAAN,EAAcc,IAAd,CAAX;AACA,aAAO,IAAP;AACD;;;WAED,kBAAwB;AACtB,aAAO,wBAAY,IAAZ,CAAP;AACD;;;;;;;AAGH,SAASC,WAAT,CAAqBhB,MAArB,EAAkCe,IAAlC,EAA4D;AAC1D,OAAK,IAAME,IAAX,IAAmBjB,MAAnB,EAA2B;AACzB,QAAMkB,IAAI,GAAGlB,MAAM,CAACiB,IAAD,CAAnB;;AACA,QAAIC,IAAI,CAACjB,MAAT,EAAiB;AACfe,MAAAA,WAAW,CAACE,IAAI,CAACjB,MAAN,EAAcc,IAAd,CAAX;AACD,KAFD,MAEO;AACLG,MAAAA,IAAI,CAACC,WAAL,GAAmBJ,IAAnB;AACD;AACF;AACF;;AAGD,SAASb,WAAT,CACEF,MADF,EAEEoB,eAFF,EAGEC,eAHF,EAIEhB,IAJF,EAKgC;AAC9B,MAAMF,SAAuC,GAAG,EAAhD;;AAEA,OAAK,IAAMc,IAAX,IAAmBjB,MAAnB,EAA2B;AACzB,QAAMsB,IAAI,GAAGtB,MAAM,CAACiB,IAAD,CAAnB;AAGA,QAAMM,QAAQ,GAAG,CAACD,IAAI,CAACE,QAAvB;AACA,QAAMC,QAAQ,GAAGC,OAAO,CAACJ,IAAI,CAACG,QAAN,CAAxB;AACA,QAAIE,SAAS,GAAGP,eAAhB;AACA,QAAIQ,SAAS,GAAGP,eAAhB;AAEA,QAAIQ,cAA8B,GAAG,UAArC;;AACA,QAAI,CAACN,QAAL,EAAe;AACbM,MAAAA,cAAc,GAAG,UAAjB;AACAD,MAAAA,SAAS;AACV;;AACD,QAAIH,QAAJ,EAAc;AACZI,MAAAA,cAAc,GAAG,UAAjB;AACAF,MAAAA,SAAS;AACT,UAAIJ,QAAJ,EAAcK,SAAS;AACxB;;AAGD,QAAIN,IAAI,CAACrB,MAAT,EAAiB;AACf,UAAM6B,MAAK,GAAGzB,IAAI,CAAC0B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;;AACAd,MAAAA,SAAS,CAACc,IAAD,CAAT,GAAkB;AAChBA,QAAAA,IAAI,EAAJA,IADgB;AAEhBZ,QAAAA,IAAI,EAAEyB,MAFU;AAGhBE,QAAAA,GAAG,EAAEF,MAAK,CAACG,IAAN,EAHW;AAIhBJ,QAAAA,cAAc,EAAdA,cAJgB;AAKhBF,QAAAA,SAAS,EAATA,SALgB;AAMhBC,QAAAA,SAAS,EAATA,SANgB;AAOhBM,QAAAA,QAAQ,EAAE,IAPM;AAQhBC,QAAAA,UAAU,EAAEC,MAAM,CAACC,IAAP,CAAYf,IAAI,CAACrB,MAAjB,EAAyBQ,MARrB;AAShBR,QAAAA,MAAM,EAAEC,WAAW,CAACoB,IAAI,CAACrB,MAAN,EAAc0B,SAAd,EAAyBC,SAAzB,EAAo
CE,MAApC;AATH,OAAlB;AAWA;AACD;;AAED,QAAMQ,OAAY,GAAGC,6BAAsBjB,IAAI,CAACP,IAA3B,CAArB;;AACA,QAAI,CAACuB,OAAL,EAAc;AACZ,YAAM,IAAIE,KAAJ,iCAAmClB,IAAI,CAACP,IAAxC,EAAN;AACD;;AAEDO,IAAAA,IAAI,CAACmB,QAAL,GAAgBnB,IAAI,CAACmB,QAAL,IAAiB,OAAjC;;AACA,QAAI,EAAEnB,IAAI,CAACmB,QAAL,IAAiBC,sBAAnB,CAAJ,EAAwC;AACtC,YAAM,IAAIF,KAAJ,yCAA2ClB,IAAI,CAACmB,QAAhD,EAAN;AACD;;AAEDnB,IAAAA,IAAI,CAACH,WAAL,GAAmBG,IAAI,CAACH,WAAL,IAAoB,cAAvC;;AACA,QAAI,EAAEG,IAAI,CAACH,WAAL,IAAoBwB,wCAAtB,CAAJ,EAAwD;AACtD,YAAM,IAAIH,KAAJ,2CAA6ClB,IAAI,CAACH,WAAlD,EAAN;AACD;;AAGD,QAAMW,KAAK,GAAGzB,IAAI,CAAC0B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAd,IAAAA,SAAS,CAACc,IAAD,CAAT,GAAkB;AAChBA,MAAAA,IAAI,EAAJA,IADgB;AAEhB2B,MAAAA,aAAa,EAAEN,OAAO,CAACM,aAFP;AAGhBC,MAAAA,YAAY,EAAEP,OAAO,CAACO,YAHN;AAIhBxC,MAAAA,IAAI,EAAEyB,KAJU;AAKhBE,MAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EALW;AAMhBJ,MAAAA,cAAc,EAAdA,cANgB;AAOhBY,MAAAA,QAAQ,EAAEnB,IAAI,CAACmB,QAPC;AAQhBtB,MAAAA,WAAW,EAAEG,IAAI,CAACH,WARF;AAShB2B,MAAAA,UAAU,EAAExB,IAAI,CAACwB,UAAL,IAAmBR,OAAO,CAACQ,UATvB;AAUhBC,MAAAA,SAAS,EAAEzB,IAAI,CAACyB,SAVA;AAWhBC,MAAAA,KAAK,EAAE1B,IAAI,CAAC0B,KAXI;AAYhBrB,MAAAA,SAAS,EAATA,SAZgB;AAahBC,MAAAA,SAAS,EAATA;AAbgB,KAAlB;AAeD;;AACD,SAAOzB,SAAP;AACD;;AAED,SAASC,UAAT,CAAoBH,MAApB,EAA0E;AACxE,MAAIgD,IAAoB,GAAG,EAA3B;;AACA,OAAK,IAAMC,CAAX,IAAgBjD,MAAhB,EAAwB;AACtBgD,IAAAA,IAAI,CAACrC,IAAL,CAAUX,MAAM,CAACiD,CAAD,CAAhB;;AACA,QAAIjD,MAAM,CAACiD,CAAD,CAAN,CAAUhB,QAAd,EAAwB;AACtBe,MAAAA,IAAI,GAAGA,IAAI,CAAClB,MAAL,CAAY3B,UAAU,CAACH,MAAM,CAACiD,CAAD,CAAN,CAAUjD,MAAX,CAAtB,CAAP;AACD;AACF;;AACD,SAAOgD,IAAP;AACD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {PARQUET_CODECS} from '../codecs';\nimport {PARQUET_COMPRESSION_METHODS} from '../compression';\nimport {\n FieldDefinition,\n ParquetBuffer,\n ParquetCompression,\n ParquetField,\n ParquetRecord,\n RepetitionType,\n SchemaDefinition\n} from './declare';\nimport {materializeRecords, shredBuffer, shredRecord} from './shred';\nimport {PARQUET_LOGICAL_TYPES} from './types';\n\n/**\n * A parquet file schema\n */\nexport class ParquetSchema {\n public schema: Record<string, FieldDefinition>;\n public fields: Record<string, ParquetField>;\n public fieldList: ParquetField[];\n\n /**\n * Create a new schema from a JSON schema definition\n */\n constructor(schema: SchemaDefinition) {\n this.schema = schema;\n this.fields = buildFields(schema, 0, 0, []);\n this.fieldList = listFields(this.fields);\n }\n\n /**\n * Retrieve a field definition\n */\n findField(path: string | string[]): ParquetField {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n } else {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.slice(0); // clone array\n }\n\n let n = this.fields;\n for (; path.length > 1; path.shift()) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n\n return n[path[0]];\n }\n\n /**\n * Retrieve a field definition and all the field's ancestors\n */\n findFieldBranch(path: string | string[]): ParquetField[] {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n }\n const branch: ParquetField[] = [];\n let n = this.fields;\n for (; path.length > 0; path.shift()) {\n branch.push(n[path[0]]);\n if (path.length > 1) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n }\n return branch;\n }\n\n shredRecord(record: ParquetRecord, buffer: ParquetBuffer): void {\n shredRecord(this, 
record, buffer);\n }\n\n materializeRecords(buffer: ParquetBuffer): ParquetRecord[] {\n return materializeRecords(this, buffer);\n }\n\n compress(type: ParquetCompression): this {\n setCompress(this.schema, type);\n setCompress(this.fields, type);\n return this;\n }\n\n buffer(): ParquetBuffer {\n return shredBuffer(this);\n }\n}\n\nfunction setCompress(schema: any, type: ParquetCompression) {\n for (const name in schema) {\n const node = schema[name];\n if (node.fields) {\n setCompress(node.fields, type);\n } else {\n node.compression = type;\n }\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction buildFields(\n schema: SchemaDefinition,\n rLevelParentMax: number,\n dLevelParentMax: number,\n path: string[]\n): Record<string, ParquetField> {\n const fieldList: Record<string, ParquetField> = {};\n\n for (const name in schema) {\n const opts = schema[name];\n\n /* field repetition type */\n const required = !opts.optional;\n const repeated = Boolean(opts.repeated);\n let rLevelMax = rLevelParentMax;\n let dLevelMax = dLevelParentMax;\n\n let repetitionType: RepetitionType = 'REQUIRED';\n if (!required) {\n repetitionType = 'OPTIONAL';\n dLevelMax++;\n }\n if (repeated) {\n repetitionType = 'REPEATED';\n rLevelMax++;\n if (required) dLevelMax++;\n }\n\n /* nested field */\n if (opts.fields) {\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n rLevelMax,\n dLevelMax,\n isNested: true,\n fieldCount: Object.keys(opts.fields).length,\n fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)\n };\n continue; // eslint-disable-line no-continue\n }\n\n const typeDef: any = PARQUET_LOGICAL_TYPES[opts.type!];\n if (!typeDef) {\n throw new Error(`invalid parquet type: ${opts.type}`);\n }\n\n opts.encoding = opts.encoding || 'PLAIN';\n if (!(opts.encoding in PARQUET_CODECS)) {\n throw new Error(`unsupported parquet encoding: ${opts.encoding}`);\n }\n\n opts.compression = opts.compression || 'UNCOMPRESSED';\n if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`unsupported compression method: ${opts.compression}`);\n }\n\n /* add to schema */\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n primitiveType: typeDef.primitiveType,\n originalType: typeDef.originalType,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n encoding: opts.encoding,\n compression: opts.compression,\n typeLength: opts.typeLength || typeDef.typeLength,\n presision: opts.presision,\n scale: opts.scale,\n rLevelMax,\n dLevelMax\n };\n }\n return fieldList;\n}\n\nfunction listFields(fields: Record<string, ParquetField>): ParquetField[] {\n let list: ParquetField[] = [];\n for (const k in fields) {\n list.push(fields[k]);\n if (fields[k].isNested) {\n list = list.concat(listFields(fields[k].fields!));\n }\n }\n return list;\n}\n"],"file":"schema.js"}
+ {"version":3,"sources":["../../../src/parquetjs/schema/schema.ts"],"names":["PARQUET_CODECS","PARQUET_COMPRESSION_METHODS","materializeRecords","shredBuffer","shredRecord","PARQUET_LOGICAL_TYPES","ParquetSchema","constructor","schema","fields","buildFields","fieldList","listFields","findField","path","split","slice","n","length","shift","findFieldBranch","branch","push","record","buffer","compress","type","setCompress","name","node","compression","rLevelParentMax","dLevelParentMax","opts","required","optional","repeated","Boolean","rLevelMax","dLevelMax","repetitionType","cpath","concat","key","join","isNested","fieldCount","Object","keys","typeDef","Error","encoding","primitiveType","originalType","typeLength","presision","scale","list","k"],"mappings":";AAEA,SAAQA,cAAR,QAA6B,WAA7B;AACA,SAAQC,2BAAR,QAA0C,gBAA1C;AAUA,SAAQC,kBAAR,EAA4BC,WAA5B,EAAyCC,WAAzC,QAA2D,SAA3D;AACA,SAAQC,qBAAR,QAAoC,SAApC;AAKA,OAAO,MAAMC,aAAN,CAAoB;AAQzBC,EAAAA,WAAW,CAACC,MAAD,EAA2B;AAAA;;AAAA;;AAAA;;AACpC,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKC,MAAL,GAAcC,WAAW,CAACF,MAAD,EAAS,CAAT,EAAY,CAAZ,EAAe,EAAf,CAAzB;AACA,SAAKG,SAAL,GAAiBC,UAAU,CAAC,KAAKH,MAAN,CAA3B;AACD;;AAKDI,EAAAA,SAAS,CAACC,IAAD,EAAwC;AAC/C,QAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,MAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD,KAHD,MAGO;AAELD,MAAAA,IAAI,GAAGA,IAAI,CAACE,KAAL,CAAW,CAAX,CAAP;AACD;;AAED,QAAIC,CAAC,GAAG,KAAKR,MAAb;;AACA,WAAOK,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCF,MAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWL,MAAf;AACD;;AAED,WAAOQ,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAR;AACD;;AAKDM,EAAAA,eAAe,CAACN,IAAD,EAA0C;AACvD,QAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,MAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD;;AACD,UAAMM,MAAsB,GAAG,EAA/B;AACA,QAAIJ,CAAC,GAAG,KAAKR,MAAb;;AACA,WAAOK,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCE,MAAAA,MAAM,CAACC,IAAP,CAAYL,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAb;;AACA,UAAIA,IAAI,CAACI,MAAL,GAAc,CAAlB,EAAqB;AACnBD,QAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWL,MAAf;AACD;AACF;;AACD,WAAOY,MAAP;AACD;;AAEDjB,EAAAA,WAAW,CAACmB,MAAD,EAAwBC,MAAxB,EAAqD;AAC9DpB,IAAAA,WAAW,CAAC,IAAD,EAAOmB,MAAP,EAAeC,MAAf,CAAX;AACD;;AAEDtB,EAAAA,kBAAkB,CAACsB,MAAD,EAAyC;AACzD,WAAOtB,kBAAkB,CAAC,IAAD,EAAOsB,MAAP,CAAzB;AACD;;AAEDC,EAAAA,QAAQ,CAACC,IAAD,EAAiC;AACvCC,IAAAA,WAAW,CAAC,KAAKnB,MAAN,EAAckB,IAAd,CAAX;AACAC,IAAAA,WAAW,CAAC,KAAKlB,MAAN,EAAciB,IAAd,CAAX;AACA,WAAO,IAAP;AACD;;AAEDF,EAAAA,MAAM,GAAkB;AACtB,WAAOrB,WAAW,CAAC,IAAD,CAAlB;AACD;;AArEwB;;AAwE3B,SAASwB,WAAT,CAAqBnB,MAArB,EAAkCkB,IAAlC,EAA4D;AAC1D,OAAK,MAAME,IAAX,IAAmBpB,MAAnB,EAA2B;AACzB,UAAMqB,IAAI,GAAGrB,MAAM,CAACoB,IAAD,CAAnB;;AACA,QAAIC,IAAI,CAACpB,MAAT,EAAiB;AACfkB,MAAAA,WAAW,CAACE,IAAI,CAACpB,MAAN,EAAciB,IAAd,CAAX;AACD,KAFD,MAEO;AACLG,MAAAA,IAAI,CAACC,WAAL,GAAmBJ,IAAnB;AACD;AACF;AACF;;AAGD,SAAShB,WAAT,CACEF,MADF,EAEEuB,eAFF,EAGEC,eAHF,EAIElB,IAJF,EAKgC;AAC9B,QAAMH,SAAuC,GAAG,EAAhD;;AAEA,OAAK,MAAMiB,IAAX,IAAmBpB,MAAnB,EAA2B;AACzB,UAAMyB,IAAI,GAAGzB,MAAM,CAACoB,IAAD,CAAnB;AAGA,UAAMM,QAAQ,GAAG,CAACD,IAAI,CAACE,QAAvB;AACA,UAAMC,QAAQ,GAAGC,OAAO,CAACJ,IAAI,CAACG,QAAN,CAAxB;AACA,QAAIE,SAAS,GAAGP,eAAhB;AACA,QAAIQ,SAAS,GAAGP,eAAhB;AAEA,QAAIQ,cAA8B,GAAG,UAArC;;AACA,QAAI,CAACN,QAAL,EAAe;AACbM,MAAAA,cAAc,GAAG,UAAjB;AACAD,MAAAA,SAAS;AACV;;AACD,QAAIH,QAAJ,EAAc;AACZI,MAAAA,cAAc,GAAG,UAAjB;AACAF,MAAAA,SAAS;AACT,UAAIJ,QAAJ,EAAcK,SAAS;AACxB;;AAGD,QAAIN,IAAI,CAACxB,MAAT,EAAiB;AACf,YAAMgC,KAAK,GAAG3B,IAAI,CAAC4B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAjB,MAAAA,SAAS,CAACiB,IAAD,CAAT,GAAkB;AAChBA,QAAAA,IADgB;AAEhBd,QAAAA,IAAI,EAAE2B,K
AFU;AAGhBE,QAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EAHW;AAIhBJ,QAAAA,cAJgB;AAKhBF,QAAAA,SALgB;AAMhBC,QAAAA,SANgB;AAOhBM,QAAAA,QAAQ,EAAE,IAPM;AAQhBC,QAAAA,UAAU,EAAEC,MAAM,CAACC,IAAP,CAAYf,IAAI,CAACxB,MAAjB,EAAyBS,MARrB;AAShBT,QAAAA,MAAM,EAAEC,WAAW,CAACuB,IAAI,CAACxB,MAAN,EAAc6B,SAAd,EAAyBC,SAAzB,EAAoCE,KAApC;AATH,OAAlB;AAWA;AACD;;AAED,UAAMQ,OAAY,GAAG5C,qBAAqB,CAAC4B,IAAI,CAACP,IAAN,CAA1C;;AACA,QAAI,CAACuB,OAAL,EAAc;AACZ,YAAM,IAAIC,KAAJ,iCAAmCjB,IAAI,CAACP,IAAxC,EAAN;AACD;;AAEDO,IAAAA,IAAI,CAACkB,QAAL,GAAgBlB,IAAI,CAACkB,QAAL,IAAiB,OAAjC;;AACA,QAAI,EAAElB,IAAI,CAACkB,QAAL,IAAiBnD,cAAnB,CAAJ,EAAwC;AACtC,YAAM,IAAIkD,KAAJ,yCAA2CjB,IAAI,CAACkB,QAAhD,EAAN;AACD;;AAEDlB,IAAAA,IAAI,CAACH,WAAL,GAAmBG,IAAI,CAACH,WAAL,IAAoB,cAAvC;;AACA,QAAI,EAAEG,IAAI,CAACH,WAAL,IAAoB7B,2BAAtB,CAAJ,EAAwD;AACtD,YAAM,IAAIiD,KAAJ,2CAA6CjB,IAAI,CAACH,WAAlD,EAAN;AACD;;AAGD,UAAMW,KAAK,GAAG3B,IAAI,CAAC4B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAjB,IAAAA,SAAS,CAACiB,IAAD,CAAT,GAAkB;AAChBA,MAAAA,IADgB;AAEhBwB,MAAAA,aAAa,EAAEH,OAAO,CAACG,aAFP;AAGhBC,MAAAA,YAAY,EAAEJ,OAAO,CAACI,YAHN;AAIhBvC,MAAAA,IAAI,EAAE2B,KAJU;AAKhBE,MAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EALW;AAMhBJ,MAAAA,cANgB;AAOhBW,MAAAA,QAAQ,EAAElB,IAAI,CAACkB,QAPC;AAQhBrB,MAAAA,WAAW,EAAEG,IAAI,CAACH,WARF;AAShBwB,MAAAA,UAAU,EAAErB,IAAI,CAACqB,UAAL,IAAmBL,OAAO,CAACK,UATvB;AAUhBC,MAAAA,SAAS,EAAEtB,IAAI,CAACsB,SAVA;AAWhBC,MAAAA,KAAK,EAAEvB,IAAI,CAACuB,KAXI;AAYhBlB,MAAAA,SAZgB;AAahBC,MAAAA;AAbgB,KAAlB;AAeD;;AACD,SAAO5B,SAAP;AACD;;AAED,SAASC,UAAT,CAAoBH,MAApB,EAA0E;AACxE,MAAIgD,IAAoB,GAAG,EAA3B;;AACA,OAAK,MAAMC,CAAX,IAAgBjD,MAAhB,EAAwB;AACtBgD,IAAAA,IAAI,CAACnC,IAAL,CAAUb,MAAM,CAACiD,CAAD,CAAhB;;AACA,QAAIjD,MAAM,CAACiD,CAAD,CAAN,CAAUb,QAAd,EAAwB;AACtBY,MAAAA,IAAI,GAAGA,IAAI,CAACf,MAAL,CAAY9B,UAAU,CAACH,MAAM,CAACiD,CAAD,CAAN,CAAUjD,MAAX,CAAtB,CAAP;AACD;AACF;;AACD,SAAOgD,IAAP;AACD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {PARQUET_CODECS} from '../codecs';\nimport {PARQUET_COMPRESSION_METHODS} from '../compression';\nimport {\n FieldDefinition,\n ParquetBuffer,\n ParquetCompression,\n ParquetField,\n ParquetRecord,\n RepetitionType,\n SchemaDefinition\n} from './declare';\nimport {materializeRecords, shredBuffer, shredRecord} from './shred';\nimport {PARQUET_LOGICAL_TYPES} from './types';\n\n/**\n * A parquet file schema\n */\nexport class ParquetSchema {\n public schema: Record<string, FieldDefinition>;\n public fields: Record<string, ParquetField>;\n public fieldList: ParquetField[];\n\n /**\n * Create a new schema from a JSON schema definition\n */\n constructor(schema: SchemaDefinition) {\n this.schema = schema;\n this.fields = buildFields(schema, 0, 0, []);\n this.fieldList = listFields(this.fields);\n }\n\n /**\n * Retrieve a field definition\n */\n findField(path: string | string[]): ParquetField {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n } else {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.slice(0); // clone array\n }\n\n let n = this.fields;\n for (; path.length > 1; path.shift()) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n\n return n[path[0]];\n }\n\n /**\n * Retrieve a field definition and all the field's ancestors\n */\n findFieldBranch(path: string | string[]): ParquetField[] {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n }\n const branch: ParquetField[] = [];\n let n = this.fields;\n for (; 
path.length > 0; path.shift()) {\n branch.push(n[path[0]]);\n if (path.length > 1) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n }\n return branch;\n }\n\n shredRecord(record: ParquetRecord, buffer: ParquetBuffer): void {\n shredRecord(this, record, buffer);\n }\n\n materializeRecords(buffer: ParquetBuffer): ParquetRecord[] {\n return materializeRecords(this, buffer);\n }\n\n compress(type: ParquetCompression): this {\n setCompress(this.schema, type);\n setCompress(this.fields, type);\n return this;\n }\n\n buffer(): ParquetBuffer {\n return shredBuffer(this);\n }\n}\n\nfunction setCompress(schema: any, type: ParquetCompression) {\n for (const name in schema) {\n const node = schema[name];\n if (node.fields) {\n setCompress(node.fields, type);\n } else {\n node.compression = type;\n }\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction buildFields(\n schema: SchemaDefinition,\n rLevelParentMax: number,\n dLevelParentMax: number,\n path: string[]\n): Record<string, ParquetField> {\n const fieldList: Record<string, ParquetField> = {};\n\n for (const name in schema) {\n const opts = schema[name];\n\n /* field repetition type */\n const required = !opts.optional;\n const repeated = Boolean(opts.repeated);\n let rLevelMax = rLevelParentMax;\n let dLevelMax = dLevelParentMax;\n\n let repetitionType: RepetitionType = 'REQUIRED';\n if (!required) {\n repetitionType = 'OPTIONAL';\n dLevelMax++;\n }\n if (repeated) {\n repetitionType = 'REPEATED';\n rLevelMax++;\n if (required) dLevelMax++;\n }\n\n /* nested field */\n if (opts.fields) {\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n rLevelMax,\n dLevelMax,\n isNested: true,\n fieldCount: Object.keys(opts.fields).length,\n fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)\n };\n continue; // eslint-disable-line no-continue\n }\n\n const typeDef: any = PARQUET_LOGICAL_TYPES[opts.type!];\n if (!typeDef) {\n throw new Error(`invalid parquet type: ${opts.type}`);\n }\n\n opts.encoding = opts.encoding || 'PLAIN';\n if (!(opts.encoding in PARQUET_CODECS)) {\n throw new Error(`unsupported parquet encoding: ${opts.encoding}`);\n }\n\n opts.compression = opts.compression || 'UNCOMPRESSED';\n if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`unsupported compression method: ${opts.compression}`);\n }\n\n /* add to schema */\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n primitiveType: typeDef.primitiveType,\n originalType: typeDef.originalType,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n encoding: opts.encoding,\n compression: opts.compression,\n typeLength: opts.typeLength || typeDef.typeLength,\n presision: opts.presision,\n scale: opts.scale,\n rLevelMax,\n dLevelMax\n };\n }\n return fieldList;\n}\n\nfunction listFields(fields: Record<string, ParquetField>): ParquetField[] {\n let list: ParquetField[] = [];\n for (const k in fields) {\n list.push(fields[k]);\n if (fields[k].isNested) {\n list = list.concat(listFields(fields[k].fields!));\n }\n }\n return list;\n}\n"],"file":"schema.js"}
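The hunk above is the relocated source map for dist/parquetjs/schema/schema.js (entry 138 in the file list): its sources path loses one ../ segment because the file moves out of dist/es5/, and the mappings change because the emitted schema.js differs in the new build. The hunk that follows is dist/parquetjs/schema/shred.js (entry 139), rewritten from CommonJS output to ES-module output. The JSDoc removed in that hunk describes the intermediate buffer that shredRecord fills and materializeRecords consumes. As a reading aid, here is a type-level sketch of that structure; the property names come from the code in the hunk, while the interface names are invented for illustration and are not part of the package.

// Sketch of the shredded-record buffer described in the removed JSDoc below.
interface ShreddedColumnData {
  rlevels: number[];      // repetition level of each emitted <value, r, d> tuple
  dlevels: number[];      // definition level of each emitted tuple
  values: unknown[];      // primitive values produced by Types.toPrimitive()
  pageHeaders: unknown[]; // reserved for page metadata
  count: number;          // number of tuples recorded for this column
}

interface ShreddedBuffer {
  rowCount: number;                               // records shredded into this buffer
  columnData: Record<string, ShreddedColumnData>; // keyed by field.key (the joined column path)
}

In the code below, shredRecord appends one record's tuples per column into such a buffer, and materializeColumn walks the rlevels/dlevels back into nested records.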
@@ -1,225 +1,162 @@
- "use strict";
- // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- }));
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
- }) : function(o, v) {
- o["default"] = v;
- });
- var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.materializeRecords = exports.shredRecord = exports.shredBuffer = exports.ParquetBuffer = void 0;
- const declare_1 = require("./declare");
- Object.defineProperty(exports, "ParquetBuffer", { enumerable: true, get: function () { return declare_1.ParquetBuffer; } });
- const Types = __importStar(require("./types"));
- function shredBuffer(schema) {
- const columnData = {};
- for (const field of schema.fieldList) {
- columnData[field.key] = {
- dlevels: [],
- rlevels: [],
- values: [],
- pageHeaders: [],
- count: 0
- };
- }
- return { rowCount: 0, columnData };
+ import { ParquetBuffer } from './declare';
+ import * as Types from './types';
+ export { ParquetBuffer };
+ export function shredBuffer(schema) {
+ const columnData = {};
+
+ for (const field of schema.fieldList) {
+ columnData[field.key] = {
+ dlevels: [],
+ rlevels: [],
+ values: [],
+ pageHeaders: [],
+ count: 0
+ };
+ }
+
+ return {
+ rowCount: 0,
+ columnData
+ };
  }
- exports.shredBuffer = shredBuffer;
- /**
- * 'Shred' a record into a list of <value, repetition_level, definition_level>
- * tuples per column using the Google Dremel Algorithm..
- *
- * The buffer argument must point to an object into which the shredded record
- * will be returned. You may re-use the buffer for repeated calls to this function
- * to append to an existing buffer, as long as the schema is unchanged.
- *
- * The format in which the shredded records will be stored in the buffer is as
- * follows:
- *
- * buffer = {
- * columnData: [
- * 'my_col': {
- * dlevels: [d1, d2, .. dN],
- * rlevels: [r1, r2, .. rN],
- * values: [v1, v2, .. vN],
- * }, ...
- * ],
- * rowCount: X,
- * }
- */
- function shredRecord(schema, record, buffer) {
- /* shred the record, this may raise an exception */
- const data = shredBuffer(schema).columnData;
- shredRecordFields(schema.fields, record, data, 0, 0);
- /* if no error during shredding, add the shredded record to the buffer */
- if (buffer.rowCount === 0) {
- buffer.rowCount = 1;
- buffer.columnData = data;
- return;
- }
- buffer.rowCount += 1;
- for (const field of schema.fieldList) {
- Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);
- Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);
- Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);
- buffer.columnData[field.key].count += data[field.key].count;
- }
+ export function shredRecord(schema, record, buffer) {
+ const data = shredBuffer(schema).columnData;
+ shredRecordFields(schema.fields, record, data, 0, 0);
+
+ if (buffer.rowCount === 0) {
+ buffer.rowCount = 1;
+ buffer.columnData = data;
+ return;
+ }
+
+ buffer.rowCount += 1;
+
+ for (const field of schema.fieldList) {
+ Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);
+ Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);
+ Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);
+ buffer.columnData[field.key].count += data[field.key].count;
+ }
  }
- exports.shredRecord = shredRecord;
- // eslint-disable-next-line max-statements, complexity
+
  function shredRecordFields(fields, record, data, rLevel, dLevel) {
- for (const name in fields) {
- const field = fields[name];
- // fetch values
- let values = [];
- if (record &&
- field.name in record &&
- record[field.name] !== undefined &&
- record[field.name] !== null) {
- if (record[field.name].constructor === Array) {
- values = record[field.name];
- }
- else {
- values.push(record[field.name]);
- }
- }
- // check values
- if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {
- throw new Error(`missing required field: ${field.name}`);
- }
- if (values.length > 1 && field.repetitionType !== 'REPEATED') {
- throw new Error(`too many values for field: ${field.name}`);
- }
- // push null
- if (values.length === 0) {
- if (field.isNested) {
- shredRecordFields(field.fields, null, data, rLevel, dLevel);
- }
- else {
- data[field.key].count += 1;
- data[field.key].rlevels.push(rLevel);
- data[field.key].dlevels.push(dLevel);
- }
- continue; // eslint-disable-line no-continue
- }
- // push values
- for (let i = 0; i < values.length; i++) {
- const rlvl = i === 0 ? rLevel : field.rLevelMax;
- if (field.isNested) {
- shredRecordFields(field.fields, values[i], data, rlvl, field.dLevelMax);
- }
- else {
- data[field.key].count += 1;
- data[field.key].rlevels.push(rlvl);
- data[field.key].dlevels.push(field.dLevelMax);
- data[field.key].values.push(Types.toPrimitive((field.originalType || field.primitiveType), values[i]));
- }
- }
+ for (const name in fields) {
+ const field = fields[name];
+ let values = [];
+
+ if (record && field.name in record && record[field.name] !== undefined && record[field.name] !== null) {
+ if (record[field.name].constructor === Array) {
+ values = record[field.name];
+ } else {
+ values.push(record[field.name]);
+ }
  }
- }
- /**
- * 'Materialize' a list of <value, repetition_level, definition_level>
- * tuples back to nested records (objects/arrays) using the Google Dremel
- * Algorithm..
- *
- * The buffer argument must point to an object with the following structure (i.e.
- * the same structure that is returned by shredRecords):
- *
- * buffer = {
- * columnData: [
- * 'my_col': {
- * dlevels: [d1, d2, .. dN],
- * rlevels: [r1, r2, .. rN],
- * values: [v1, v2, .. vN],
- * }, ...
- * ],
- * rowCount: X,
- * }
- */
- function materializeRecords(schema, buffer) {
- const records = [];
- for (let i = 0; i < buffer.rowCount; i++)
- records.push({});
- for (const key in buffer.columnData) {
- materializeColumn(schema, buffer, key, records);
+
+ if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {
+ throw new Error("missing required field: ".concat(field.name));
+ }
+
+ if (values.length > 1 && field.repetitionType !== 'REPEATED') {
+ throw new Error("too many values for field: ".concat(field.name));
+ }
+
+ if (values.length === 0) {
+ if (field.isNested) {
+ shredRecordFields(field.fields, null, data, rLevel, dLevel);
+ } else {
+ data[field.key].count += 1;
+ data[field.key].rlevels.push(rLevel);
+ data[field.key].dlevels.push(dLevel);
+ }
+
+ continue;
+ }
+
+ for (let i = 0; i < values.length; i++) {
+ const rlvl = i === 0 ? rLevel : field.rLevelMax;
+
+ if (field.isNested) {
+ shredRecordFields(field.fields, values[i], data, rlvl, field.dLevelMax);
+ } else {
+ data[field.key].count += 1;
+ data[field.key].rlevels.push(rlvl);
+ data[field.key].dlevels.push(field.dLevelMax);
+ data[field.key].values.push(Types.toPrimitive(field.originalType || field.primitiveType, values[i]));
+ }
  }
159
- return records;
87
+ }
160
88
  }
161
- exports.materializeRecords = materializeRecords;
162
- // eslint-disable-next-line max-statements, complexity
89
+
90
+ export function materializeRecords(schema, buffer) {
91
+ const records = [];
92
+
93
+ for (let i = 0; i < buffer.rowCount; i++) records.push({});
94
+
95
+ for (const key in buffer.columnData) {
96
+ materializeColumn(schema, buffer, key, records);
97
+ }
98
+
99
+ return records;
100
+ }
101
+
163
102
  function materializeColumn(schema, buffer, key, records) {
164
- const data = buffer.columnData[key];
165
- if (!data.count)
166
- return;
167
- const field = schema.findField(key);
168
- const branch = schema.findFieldBranch(key);
169
- // tslint:disable-next-line:prefer-array-literal
170
- const rLevels = new Array(field.rLevelMax + 1).fill(0);
171
- let vIndex = 0;
172
- for (let i = 0; i < data.count; i++) {
173
- const dLevel = data.dlevels[i];
174
- const rLevel = data.rlevels[i];
175
- rLevels[rLevel]++;
176
- rLevels.fill(0, rLevel + 1);
177
- let rIndex = 0;
178
- let record = records[rLevels[rIndex++] - 1];
179
- // Internal nodes
180
- for (const step of branch) {
181
- if (step === field)
182
- break;
183
- if (dLevel < step.dLevelMax)
184
- break;
185
- if (step.repetitionType === 'REPEATED') {
186
- if (!(step.name in record)) {
187
- // eslint-disable max-depth
188
- record[step.name] = [];
189
- }
190
- const ix = rLevels[rIndex++];
191
- while (record[step.name].length <= ix) {
192
- // eslint-disable max-depth
193
- record[step.name].push({});
194
- }
195
- record = record[step.name][ix];
196
- }
197
- else {
198
- record[step.name] = record[step.name] || {};
199
- record = record[step.name];
200
- }
103
+ const data = buffer.columnData[key];
104
+ if (!data.count) return;
105
+ const field = schema.findField(key);
106
+ const branch = schema.findFieldBranch(key);
107
+ const rLevels = new Array(field.rLevelMax + 1).fill(0);
108
+ let vIndex = 0;
109
+
110
+ for (let i = 0; i < data.count; i++) {
111
+ const dLevel = data.dlevels[i];
112
+ const rLevel = data.rlevels[i];
113
+ rLevels[rLevel]++;
114
+ rLevels.fill(0, rLevel + 1);
115
+ let rIndex = 0;
116
+ let record = records[rLevels[rIndex++] - 1];
117
+
118
+ for (const step of branch) {
119
+ if (step === field) break;
120
+ if (dLevel < step.dLevelMax) break;
121
+
122
+ if (step.repetitionType === 'REPEATED') {
123
+ if (!(step.name in record)) {
124
+ record[step.name] = [];
125
+ }
126
+
127
+ const ix = rLevels[rIndex++];
128
+
129
+ while (record[step.name].length <= ix) {
130
+ record[step.name].push({});
131
+ }
132
+
133
+ record = record[step.name][ix];
134
+ } else {
135
+ record[step.name] = record[step.name] || {};
136
+ record = record[step.name];
137
+ }
138
+ }
139
+
140
+ if (dLevel === field.dLevelMax) {
141
+ const value = Types.fromPrimitive(field.originalType || field.primitiveType, data.values[vIndex], field);
142
+ vIndex++;
143
+
144
+ if (field.repetitionType === 'REPEATED') {
145
+ if (!(field.name in record)) {
146
+ record[field.name] = [];
201
147
  }
202
- // Leaf node
203
- if (dLevel === field.dLevelMax) {
204
- const value = Types.fromPrimitive(
205
- // @ts-ignore
206
- field.originalType || field.primitiveType, data.values[vIndex], field);
207
- vIndex++;
208
- if (field.repetitionType === 'REPEATED') {
209
- if (!(field.name in record)) {
210
- // eslint-disable max-depth
211
- record[field.name] = [];
212
- }
213
- const ix = rLevels[rIndex];
214
- while (record[field.name].length <= ix) {
215
- // eslint-disable max-depth
216
- record[field.name].push(null);
217
- }
218
- record[field.name][ix] = value;
219
- }
220
- else {
221
- record[field.name] = value;
222
- }
148
+
149
+ const ix = rLevels[rIndex];
150
+
151
+ while (record[field.name].length <= ix) {
152
+ record[field.name].push(null);
223
153
  }
154
+
155
+ record[field.name][ix] = value;
156
+ } else {
157
+ record[field.name] = value;
158
+ }
224
159
  }
160
+ }
225
161
  }
162
+ //# sourceMappingURL=shred.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/parquetjs/schema/shred.ts"],"names":["ParquetBuffer","Types","shredBuffer","schema","columnData","field","fieldList","key","dlevels","rlevels","values","pageHeaders","count","rowCount","shredRecord","record","buffer","data","shredRecordFields","fields","Array","prototype","push","apply","rLevel","dLevel","name","undefined","constructor","length","Boolean","repetitionType","Error","isNested","i","rlvl","rLevelMax","dLevelMax","toPrimitive","originalType","primitiveType","materializeRecords","records","materializeColumn","findField","branch","findFieldBranch","rLevels","fill","vIndex","rIndex","step","ix","value","fromPrimitive"],"mappings":"AAEA,SAAQA,aAAR,QAAsE,WAAtE;AAEA,OAAO,KAAKC,KAAZ,MAAuB,SAAvB;AAEA,SAAQD,aAAR;AAEA,OAAO,SAASE,WAAT,CAAqBC,MAArB,EAA2D;AAChE,QAAMC,UAAuC,GAAG,EAAhD;;AACA,OAAK,MAAMC,KAAX,IAAoBF,MAAM,CAACG,SAA3B,EAAsC;AACpCF,IAAAA,UAAU,CAACC,KAAK,CAACE,GAAP,CAAV,GAAwB;AACtBC,MAAAA,OAAO,EAAE,EADa;AAEtBC,MAAAA,OAAO,EAAE,EAFa;AAGtBC,MAAAA,MAAM,EAAE,EAHc;AAItBC,MAAAA,WAAW,EAAE,EAJS;AAKtBC,MAAAA,KAAK,EAAE;AALe,KAAxB;AAOD;;AACD,SAAO;AAACC,IAAAA,QAAQ,EAAE,CAAX;AAAcT,IAAAA;AAAd,GAAP;AACD;AAwBD,OAAO,SAASU,WAAT,CAAqBX,MAArB,EAA4CY,MAA5C,EAAyDC,MAAzD,EAAsF;AAE3F,QAAMC,IAAI,GAAGf,WAAW,CAACC,MAAD,CAAX,CAAoBC,UAAjC;AAEAc,EAAAA,iBAAiB,CAACf,MAAM,CAACgB,MAAR,EAAgBJ,MAAhB,EAAwBE,IAAxB,EAA8B,CAA9B,EAAiC,CAAjC,CAAjB;;AAGA,MAAID,MAAM,CAACH,QAAP,KAAoB,CAAxB,EAA2B;AACzBG,IAAAA,MAAM,CAACH,QAAP,GAAkB,CAAlB;AACAG,IAAAA,MAAM,CAACZ,UAAP,GAAoBa,IAApB;AACA;AACD;;AACDD,EAAAA,MAAM,CAACH,QAAP,IAAmB,CAAnB;;AACA,OAAK,MAAMR,KAAX,IAAoBF,MAAM,CAACG,SAA3B,EAAsC;AACpCc,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BE,OAAxD,EAAiEQ,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAjF;AACAW,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BC,OAAxD,EAAiES,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAjF;AACAY,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BG,MAAxD,EAAgEO,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBG,MAAhF;AACAM,IAAAA,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BK,KAA7B,IAAsCK,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBK,KAAtD;AACD;AACF;;AAGD,SAASM,iBAAT,CACEC,MADF,EAEEJ,MAFF,EAGEE,IAHF,EAIEO,MAJF,EAKEC,MALF,EAME;AACA,OAAK,MAAMC,IAAX,IAAmBP,MAAnB,EAA2B;AACzB,UAAMd,KAAK,GAAGc,MAAM,CAACO,IAAD,CAApB;AAGA,QAAIhB,MAAa,GAAG,EAApB;;AACA,QACEK,MAAM,IACNV,KAAK,CAACqB,IAAN,IAAcX,MADd,IAEAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,KAAuBC,SAFvB,IAGAZ,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,KAAuB,IAJzB,EAKE;AACA,UAAIX,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmBE,WAAnB,KAAmCR,KAAvC,EAA8C;AAC5CV,QAAAA,MAAM,GAAGK,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAf;AACD,OAFD,MAEO;AACLhB,QAAAA,MAAM,CAACY,IAAP,CAAYP,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAlB;AACD;AACF;;AAED,QAAIhB,MAAM,CAACmB,MAAP,KAAkB,CAAlB,IAAuBC,OAAO,CAACf,MAAD,CAA9B,IAA0CV,KAAK,CAAC0B,cAAN,KAAyB,UAAvE,EAAmF;AACjF,YAAM,IAAIC,KAAJ,mCAAqC3B,KAAK,CAACqB,IAA3C,EAAN;AACD;;AACD,QAAIhB,MAAM,CAACmB,MAAP,GAAgB,CAAhB,IAAqBxB,KAAK,CAAC0B,cAAN,KAAyB,UAAlD,EAA8D;AAC5D,YAAM,IAAIC,KAAJ,sCAAwC3B,KAAK,CAACqB,IAA9C,EAAN;AACD;;AAGD,QAAIhB,MAAM,CAACmB,MAAP,KAAkB,CAAtB,EAAyB;AACvB,UAAIxB,KAAK,CAAC4B,QAAV,EAAoB;AAClBf,QAAAA,iBAAiB,CAACb,KAAK,CAACc,MAAP,EAAgB,IAAhB,EAAsBF,IAAtB,EAA4BO,MAA5B,EAAoCC,MAApC,CAAjB;AACD,OAFD,MAEO;AACLR,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBK,KAAhB,IAAyB,CAAzB;AACAK,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAhB,CAAwBa,IAAxB,CAA6BE,MAA7B;AACAP,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAhB,CAAwBc,IAAxB,CAA6BG,MAA
7B;AACD;;AACD;AACD;;AAGD,SAAK,IAAIS,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGxB,MAAM,CAACmB,MAA3B,EAAmCK,CAAC,EAApC,EAAwC;AACtC,YAAMC,IAAI,GAAGD,CAAC,KAAK,CAAN,GAAUV,MAAV,GAAmBnB,KAAK,CAAC+B,SAAtC;;AACA,UAAI/B,KAAK,CAAC4B,QAAV,EAAoB;AAClBf,QAAAA,iBAAiB,CAACb,KAAK,CAACc,MAAP,EAAgBT,MAAM,CAACwB,CAAD,CAAtB,EAA2BjB,IAA3B,EAAiCkB,IAAjC,EAAuC9B,KAAK,CAACgC,SAA7C,CAAjB;AACD,OAFD,MAEO;AACLpB,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBK,KAAhB,IAAyB,CAAzB;AACAK,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAhB,CAAwBa,IAAxB,CAA6Ba,IAA7B;AACAlB,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAhB,CAAwBc,IAAxB,CAA6BjB,KAAK,CAACgC,SAAnC;AACApB,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBG,MAAhB,CAAuBY,IAAvB,CACErB,KAAK,CAACqC,WAAN,CAAmBjC,KAAK,CAACkC,YAAN,IAAsBlC,KAAK,CAACmC,aAA/C,EAAgE9B,MAAM,CAACwB,CAAD,CAAtE,CADF;AAGD;AACF;AACF;AACF;;AAqBD,OAAO,SAASO,kBAAT,CAA4BtC,MAA5B,EAAmDa,MAAnD,EAA2F;AAChG,QAAM0B,OAAwB,GAAG,EAAjC;;AACA,OAAK,IAAIR,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGlB,MAAM,CAACH,QAA3B,EAAqCqB,CAAC,EAAtC,EAA0CQ,OAAO,CAACpB,IAAR,CAAa,EAAb;;AAC1C,OAAK,MAAMf,GAAX,IAAkBS,MAAM,CAACZ,UAAzB,EAAqC;AACnCuC,IAAAA,iBAAiB,CAACxC,MAAD,EAASa,MAAT,EAAiBT,GAAjB,EAAsBmC,OAAtB,CAAjB;AACD;;AACD,SAAOA,OAAP;AACD;;AAGD,SAASC,iBAAT,CACExC,MADF,EAEEa,MAFF,EAGET,GAHF,EAIEmC,OAJF,EAKE;AACA,QAAMzB,IAAI,GAAGD,MAAM,CAACZ,UAAP,CAAkBG,GAAlB,CAAb;AACA,MAAI,CAACU,IAAI,CAACL,KAAV,EAAiB;AAEjB,QAAMP,KAAK,GAAGF,MAAM,CAACyC,SAAP,CAAiBrC,GAAjB,CAAd;AACA,QAAMsC,MAAM,GAAG1C,MAAM,CAAC2C,eAAP,CAAuBvC,GAAvB,CAAf;AAGA,QAAMwC,OAAiB,GAAG,IAAI3B,KAAJ,CAAUf,KAAK,CAAC+B,SAAN,GAAkB,CAA5B,EAA+BY,IAA/B,CAAoC,CAApC,CAA1B;AACA,MAAIC,MAAM,GAAG,CAAb;;AACA,OAAK,IAAIf,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGjB,IAAI,CAACL,KAAzB,EAAgCsB,CAAC,EAAjC,EAAqC;AACnC,UAAMT,MAAM,GAAGR,IAAI,CAACT,OAAL,CAAa0B,CAAb,CAAf;AACA,UAAMV,MAAM,GAAGP,IAAI,CAACR,OAAL,CAAayB,CAAb,CAAf;AACAa,IAAAA,OAAO,CAACvB,MAAD,CAAP;AACAuB,IAAAA,OAAO,CAACC,IAAR,CAAa,CAAb,EAAgBxB,MAAM,GAAG,CAAzB;AAEA,QAAI0B,MAAM,GAAG,CAAb;AACA,QAAInC,MAAM,GAAG2B,OAAO,CAACK,OAAO,CAACG,MAAM,EAAP,CAAP,GAAoB,CAArB,CAApB;;AAGA,SAAK,MAAMC,IAAX,IAAmBN,MAAnB,EAA2B;AACzB,UAAIM,IAAI,KAAK9C,KAAb,EAAoB;AACpB,UAAIoB,MAAM,GAAG0B,IAAI,CAACd,SAAlB,EAA6B;;AAC7B,UAAIc,IAAI,CAACpB,cAAL,KAAwB,UAA5B,EAAwC;AACtC,YAAI,EAAEoB,IAAI,CAACzB,IAAL,IAAaX,MAAf,CAAJ,EAA4B;AAE1BA,UAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,GAAoB,EAApB;AACD;;AACD,cAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAM,EAAP,CAAlB;;AACA,eAAOnC,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkBG,MAAlB,IAA4BuB,EAAnC,EAAuC;AAErCrC,UAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkBJ,IAAlB,CAAuB,EAAvB;AACD;;AACDP,QAAAA,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkB0B,EAAlB,CAAT;AACD,OAXD,MAWO;AACLrC,QAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,GAAoBX,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,IAAqB,EAAzC;AACAX,QAAAA,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAf;AACD;AACF;;AAGD,QAAID,MAAM,KAAKpB,KAAK,CAACgC,SAArB,EAAgC;AAC9B,YAAMgB,KAAK,GAAGpD,KAAK,CAACqD,aAAN,CAEZjD,KAAK,CAACkC,YAAN,IAAsBlC,KAAK,CAACmC,aAFhB,EAGZvB,IAAI,CAACP,MAAL,CAAYuC,MAAZ,CAHY,EAIZ5C,KAJY,CAAd;AAMA4C,MAAAA,MAAM;;AACN,UAAI5C,KAAK,CAAC0B,cAAN,KAAyB,UAA7B,EAAyC;AACvC,YAAI,EAAE1B,KAAK,CAACqB,IAAN,IAAcX,MAAhB,CAAJ,EAA6B;AAE3BA,UAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,GAAqB,EAArB;AACD;;AACD,cAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAD,CAAlB;;AACA,eAAOnC,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmBG,MAAnB,IAA6BuB,EAApC,EAAwC;AAEtCrC,UAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmBJ,IAAnB,CAAwB,IAAxB;AACD;;AACDP,QAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmB0B,EAAnB,IAAyBC,KAAzB;AACD,OAXD,MAWO;AACLtC,QAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,GAAqB2B,KAArB;AACD;AACF;AACF;AACF","sourcesContent":["// Forked from 
https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {ParquetBuffer, ParquetData, ParquetField, ParquetRecord} from './declare';\nimport {ParquetSchema} from './schema';\nimport * as Types from './types';\n\nexport {ParquetBuffer};\n\nexport function shredBuffer(schema: ParquetSchema): ParquetBuffer {\n const columnData: Record<string, ParquetData> = {};\n for (const field of schema.fieldList) {\n columnData[field.key] = {\n dlevels: [],\n rlevels: [],\n values: [],\n pageHeaders: [],\n count: 0\n };\n }\n return {rowCount: 0, columnData};\n}\n\n/**\n * 'Shred' a record into a list of <value, repetition_level, definition_level>\n * tuples per column using the Google Dremel Algorithm..\n *\n * The buffer argument must point to an object into which the shredded record\n * will be returned. You may re-use the buffer for repeated calls to this function\n * to append to an existing buffer, as long as the schema is unchanged.\n *\n * The format in which the shredded records will be stored in the buffer is as\n * follows:\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function shredRecord(schema: ParquetSchema, record: any, buffer: ParquetBuffer): void {\n /* shred the record, this may raise an exception */\n const data = shredBuffer(schema).columnData;\n\n shredRecordFields(schema.fields, record, data, 0, 0);\n\n /* if no error during shredding, add the shredded record to the buffer */\n if (buffer.rowCount === 0) {\n buffer.rowCount = 1;\n buffer.columnData = data;\n return;\n }\n buffer.rowCount += 1;\n for (const field of schema.fieldList) {\n Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);\n buffer.columnData[field.key].count += data[field.key].count;\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction shredRecordFields(\n fields: Record<string, ParquetField>,\n record: any,\n data: Record<string, ParquetData>,\n rLevel: number,\n dLevel: number\n) {\n for (const name in fields) {\n const field = fields[name];\n\n // fetch values\n let values: any[] = [];\n if (\n record &&\n field.name in record &&\n record[field.name] !== undefined &&\n record[field.name] !== null\n ) {\n if (record[field.name].constructor === Array) {\n values = record[field.name];\n } else {\n values.push(record[field.name]);\n }\n }\n // check values\n if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {\n throw new Error(`missing required field: ${field.name}`);\n }\n if (values.length > 1 && field.repetitionType !== 'REPEATED') {\n throw new Error(`too many values for field: ${field.name}`);\n }\n\n // push null\n if (values.length === 0) {\n if (field.isNested) {\n shredRecordFields(field.fields!, null, data, rLevel, dLevel);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rLevel);\n data[field.key].dlevels.push(dLevel);\n }\n continue; // eslint-disable-line no-continue\n }\n\n // push values\n for (let i = 0; i < values.length; i++) {\n const rlvl = i === 0 ? 
rLevel : field.rLevelMax;\n if (field.isNested) {\n shredRecordFields(field.fields!, values[i], data, rlvl, field.dLevelMax);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rlvl);\n data[field.key].dlevels.push(field.dLevelMax);\n data[field.key].values.push(\n Types.toPrimitive((field.originalType || field.primitiveType)!, values[i])\n );\n }\n }\n }\n}\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {\n const records: ParquetRecord[] = [];\n for (let i = 0; i < buffer.rowCount; i++) records.push({});\n for (const key in buffer.columnData) {\n materializeColumn(schema, buffer, key, records);\n }\n return records;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction materializeColumn(\n schema: ParquetSchema,\n buffer: ParquetBuffer,\n key: string,\n records: ParquetRecord[]\n) {\n const data = buffer.columnData[key];\n if (!data.count) return;\n\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n for (let i = 0; i < data.count; i++) {\n const dLevel = data.dlevels[i];\n const rLevel = data.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes\n for (const step of branch) {\n if (step === field) break;\n if (dLevel < step.dLevelMax) break;\n if (step.repetitionType === 'REPEATED') {\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n } else {\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n data.values[vIndex],\n field\n );\n vIndex++;\n if (field.repetitionType === 'REPEATED') {\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n } else {\n record[field.name] = value;\n }\n }\n }\n}\n"],"file":"shred.js"}