@loaders.gl/parquet 3.1.0-alpha.4 → 3.1.0-beta.3

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (387)
  1. package/dist/bundle.d.ts +2 -0
  2. package/dist/bundle.d.ts.map +1 -0
  3. package/dist/bundle.js +5 -0
  4. package/dist/constants.d.ts +15 -0
  5. package/dist/constants.d.ts.map +1 -0
  6. package/dist/constants.js +18 -0
  7. package/dist/dist.min.js +27 -13
  8. package/dist/dist.min.js.map +7 -1
  9. package/dist/es5/bundle.js +1 -1
  10. package/dist/es5/bundle.js.map +1 -1
  11. package/dist/es5/constants.js +5 -5
  12. package/dist/es5/constants.js.map +1 -1
  13. package/dist/es5/index.js +16 -45
  14. package/dist/es5/index.js.map +1 -1
  15. package/dist/es5/lib/convert-schema.js +13 -13
  16. package/dist/es5/lib/convert-schema.js.map +1 -1
  17. package/dist/es5/lib/parse-parquet.js +19 -154
  18. package/dist/es5/lib/parse-parquet.js.map +1 -1
  19. package/dist/es5/lib/read-array-buffer.js +6 -43
  20. package/dist/es5/lib/read-array-buffer.js.map +1 -1
  21. package/dist/es5/parquet-loader.js +4 -4
  22. package/dist/es5/parquet-loader.js.map +1 -1
  23. package/dist/es5/parquet-writer.js +4 -4
  24. package/dist/es5/parquet-writer.js.map +1 -1
  25. package/dist/es5/parquetjs/codecs/dictionary.js +2 -10
  26. package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
  27. package/dist/es5/parquetjs/codecs/index.js +4 -6
  28. package/dist/es5/parquetjs/codecs/index.js.map +1 -1
  29. package/dist/es5/parquetjs/codecs/plain.js +41 -43
  30. package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
  31. package/dist/es5/parquetjs/codecs/rle.js +25 -35
  32. package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
  33. package/dist/es5/parquetjs/compression.js +28 -122
  34. package/dist/es5/parquetjs/compression.js.map +1 -1
  35. package/dist/es5/parquetjs/encoder/writer.js +301 -737
  36. package/dist/es5/parquetjs/encoder/writer.js.map +1 -1
  37. package/dist/es5/parquetjs/file.js +15 -15
  38. package/dist/es5/parquetjs/file.js.map +1 -1
  39. package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +1 -1
  40. package/dist/es5/parquetjs/parquet-thrift/BsonType.js +31 -45
  41. package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
  42. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js +141 -152
  43. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
  44. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js +147 -160
  45. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
  46. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js +248 -259
  47. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
  48. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js +67 -79
  49. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
  50. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +1 -1
  51. package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +1 -1
  52. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js +113 -124
  53. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
  54. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js +158 -169
  55. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
  56. package/dist/es5/parquetjs/parquet-thrift/DateType.js +31 -45
  57. package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
  58. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js +68 -79
  59. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
  60. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js +83 -94
  61. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
  62. package/dist/es5/parquetjs/parquet-thrift/Encoding.js +1 -1
  63. package/dist/es5/parquetjs/parquet-thrift/EnumType.js +31 -45
  64. package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
  65. package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +1 -1
  66. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js +170 -182
  67. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
  68. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js +31 -45
  69. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
  70. package/dist/es5/parquetjs/parquet-thrift/IntType.js +68 -79
  71. package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
  72. package/dist/es5/parquetjs/parquet-thrift/JsonType.js +31 -45
  73. package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
  74. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js +68 -79
  75. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
  76. package/dist/es5/parquetjs/parquet-thrift/ListType.js +31 -45
  77. package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
  78. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js +319 -343
  79. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
  80. package/dist/es5/parquetjs/parquet-thrift/MapType.js +31 -45
  81. package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
  82. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js +31 -45
  83. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
  84. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js +31 -45
  85. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
  86. package/dist/es5/parquetjs/parquet-thrift/NullType.js +31 -45
  87. package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
  88. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js +64 -75
  89. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
  90. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js +83 -94
  91. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
  92. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js +158 -169
  93. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
  94. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js +83 -94
  95. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
  96. package/dist/es5/parquetjs/parquet-thrift/PageType.js +1 -1
  97. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js +113 -124
  98. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
  99. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js +188 -199
  100. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
  101. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js +83 -94
  102. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
  103. package/dist/es5/parquetjs/parquet-thrift/Statistics.js +124 -135
  104. package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
  105. package/dist/es5/parquetjs/parquet-thrift/StringType.js +31 -45
  106. package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
  107. package/dist/es5/parquetjs/parquet-thrift/TimeType.js +68 -79
  108. package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
  109. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js +88 -101
  110. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
  111. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js +68 -79
  112. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
  113. package/dist/es5/parquetjs/parquet-thrift/Type.js +1 -1
  114. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js +31 -45
  115. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
  116. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js +31 -45
  117. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
  118. package/dist/es5/parquetjs/parquet-thrift/index.js +43 -43
  119. package/dist/es5/parquetjs/parser/decoders.js +218 -397
  120. package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
  121. package/dist/es5/parquetjs/parser/parquet-cursor.js +62 -180
  122. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +1 -1
  123. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +124 -408
  124. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +1 -1
  125. package/dist/es5/parquetjs/parser/parquet-reader.js +91 -369
  126. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  127. package/dist/es5/parquetjs/schema/declare.js +9 -11
  128. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  129. package/dist/es5/parquetjs/schema/schema.js +73 -87
  130. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  131. package/dist/es5/parquetjs/schema/shred.js +56 -96
  132. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  133. package/dist/es5/parquetjs/schema/types.js +39 -40
  134. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  135. package/dist/es5/parquetjs/utils/buffer-utils.js +1 -1
  136. package/dist/es5/parquetjs/utils/buffer-utils.js.map +1 -1
  137. package/dist/es5/parquetjs/utils/file-utils.js +8 -65
  138. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  139. package/dist/es5/parquetjs/utils/read-utils.js +22 -50
  140. package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
  141. package/dist/esm/index.js +2 -3
  142. package/dist/esm/index.js.map +1 -1
  143. package/dist/esm/parquet-loader.js +1 -1
  144. package/dist/esm/parquet-loader.js.map +1 -1
  145. package/dist/esm/parquet-writer.js +1 -1
  146. package/dist/esm/parquet-writer.js.map +1 -1
  147. package/dist/esm/parquetjs/codecs/plain.js +3 -3
  148. package/dist/esm/parquetjs/codecs/plain.js.map +1 -1
  149. package/dist/esm/parquetjs/codecs/rle.js +1 -1
  150. package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
  151. package/dist/esm/parquetjs/compression.js +4 -13
  152. package/dist/esm/parquetjs/compression.js.map +1 -1
  153. package/dist/esm/parquetjs/encoder/writer.js +1 -1
  154. package/dist/esm/parquetjs/encoder/writer.js.map +1 -1
  155. package/dist/esm/parquetjs/parser/decoders.js +4 -4
  156. package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
  157. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +4 -13
  158. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +1 -1
  159. package/dist/esm/parquetjs/parser/parquet-reader.js +0 -13
  160. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  161. package/dist/esm/parquetjs/schema/schema.js +3 -3
  162. package/dist/esm/parquetjs/schema/schema.js.map +1 -1
  163. package/dist/esm/parquetjs/schema/shred.js +2 -2
  164. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  165. package/dist/esm/parquetjs/schema/types.js +20 -20
  166. package/dist/esm/parquetjs/schema/types.js.map +1 -1
  167. package/dist/esm/parquetjs/utils/file-utils.js +0 -45
  168. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
  169. package/dist/index.d.ts +28 -0
  170. package/dist/index.d.ts.map +1 -0
  171. package/dist/index.js +30 -0
  172. package/dist/lib/convert-schema.d.ts +8 -0
  173. package/dist/lib/convert-schema.d.ts.map +1 -0
  174. package/dist/lib/convert-schema.js +70 -0
  175. package/dist/lib/parse-parquet.d.ts +4 -0
  176. package/dist/lib/parse-parquet.d.ts.map +1 -0
  177. package/dist/lib/parse-parquet.js +28 -0
  178. package/dist/lib/read-array-buffer.d.ts +19 -0
  179. package/dist/lib/read-array-buffer.d.ts.map +1 -0
  180. package/dist/lib/read-array-buffer.js +29 -0
  181. package/dist/parquet-loader.d.ts +23 -0
  182. package/dist/parquet-loader.d.ts.map +1 -0
  183. package/dist/parquet-loader.js +27 -0
  184. package/dist/parquet-worker.js +27 -13
  185. package/dist/parquet-worker.js.map +7 -1
  186. package/dist/parquet-writer.d.ts +4 -0
  187. package/dist/parquet-writer.d.ts.map +1 -0
  188. package/dist/parquet-writer.js +21 -0
  189. package/dist/parquetjs/codecs/declare.d.ts +17 -0
  190. package/dist/parquetjs/codecs/declare.d.ts.map +1 -0
  191. package/dist/parquetjs/codecs/declare.js +2 -0
  192. package/dist/parquetjs/codecs/dictionary.d.ts +3 -0
  193. package/dist/parquetjs/codecs/dictionary.d.ts.map +1 -0
  194. package/dist/parquetjs/codecs/dictionary.js +14 -0
  195. package/dist/parquetjs/codecs/index.d.ts +5 -0
  196. package/dist/parquetjs/codecs/index.d.ts.map +1 -0
  197. package/dist/parquetjs/codecs/index.js +51 -0
  198. package/dist/parquetjs/codecs/plain.d.ts +6 -0
  199. package/dist/parquetjs/codecs/plain.d.ts.map +1 -0
  200. package/dist/parquetjs/codecs/plain.js +211 -0
  201. package/dist/parquetjs/codecs/rle.d.ts +6 -0
  202. package/dist/parquetjs/codecs/rle.d.ts.map +1 -0
  203. package/dist/parquetjs/codecs/rle.js +145 -0
  204. package/dist/parquetjs/compression.d.ts +23 -0
  205. package/dist/parquetjs/compression.d.ts.map +1 -0
  206. package/dist/parquetjs/compression.js +168 -0
  207. package/dist/parquetjs/encoder/writer.d.ts +123 -0
  208. package/dist/parquetjs/encoder/writer.d.ts.map +1 -0
  209. package/dist/parquetjs/encoder/writer.js +478 -0
  210. package/dist/parquetjs/file.d.ts +10 -0
  211. package/dist/parquetjs/file.d.ts.map +1 -0
  212. package/dist/parquetjs/file.js +99 -0
  213. package/dist/parquetjs/parquet-thrift/BoundaryOrder.d.ts +6 -0
  214. package/dist/parquetjs/parquet-thrift/BoundaryOrder.d.ts.map +1 -0
  215. package/dist/parquetjs/parquet-thrift/BoundaryOrder.js +15 -0
  216. package/dist/parquetjs/parquet-thrift/BsonType.d.ts +9 -0
  217. package/dist/parquetjs/parquet-thrift/BsonType.d.ts.map +1 -0
  218. package/dist/parquetjs/parquet-thrift/BsonType.js +58 -0
  219. package/dist/parquetjs/parquet-thrift/ColumnChunk.d.ts +25 -0
  220. package/dist/parquetjs/parquet-thrift/ColumnChunk.d.ts.map +1 -0
  221. package/dist/parquetjs/parquet-thrift/ColumnChunk.js +207 -0
  222. package/dist/parquetjs/parquet-thrift/ColumnIndex.d.ts +22 -0
  223. package/dist/parquetjs/parquet-thrift/ColumnIndex.d.ts.map +1 -0
  224. package/dist/parquetjs/parquet-thrift/ColumnIndex.js +213 -0
  225. package/dist/parquetjs/parquet-thrift/ColumnMetaData.d.ts +42 -0
  226. package/dist/parquetjs/parquet-thrift/ColumnMetaData.d.ts.map +1 -0
  227. package/dist/parquetjs/parquet-thrift/ColumnMetaData.js +398 -0
  228. package/dist/parquetjs/parquet-thrift/ColumnOrder.d.ts +13 -0
  229. package/dist/parquetjs/parquet-thrift/ColumnOrder.d.ts.map +1 -0
  230. package/dist/parquetjs/parquet-thrift/ColumnOrder.js +104 -0
  231. package/dist/parquetjs/parquet-thrift/CompressionCodec.d.ts +11 -0
  232. package/dist/parquetjs/parquet-thrift/CompressionCodec.d.ts.map +1 -0
  233. package/dist/parquetjs/parquet-thrift/CompressionCodec.js +20 -0
  234. package/dist/parquetjs/parquet-thrift/ConvertedType.d.ts +25 -0
  235. package/dist/parquetjs/parquet-thrift/ConvertedType.d.ts.map +1 -0
  236. package/dist/parquetjs/parquet-thrift/ConvertedType.js +34 -0
  237. package/dist/parquetjs/parquet-thrift/DataPageHeader.d.ts +21 -0
  238. package/dist/parquetjs/parquet-thrift/DataPageHeader.d.ts.map +1 -0
  239. package/dist/parquetjs/parquet-thrift/DataPageHeader.js +166 -0
  240. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts +27 -0
  241. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts.map +1 -0
  242. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js +226 -0
  243. package/dist/parquetjs/parquet-thrift/DateType.d.ts +9 -0
  244. package/dist/parquetjs/parquet-thrift/DateType.d.ts.map +1 -0
  245. package/dist/parquetjs/parquet-thrift/DateType.js +58 -0
  246. package/dist/parquetjs/parquet-thrift/DecimalType.d.ts +13 -0
  247. package/dist/parquetjs/parquet-thrift/DecimalType.d.ts.map +1 -0
  248. package/dist/parquetjs/parquet-thrift/DecimalType.js +105 -0
  249. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.d.ts +16 -0
  250. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.d.ts.map +1 -0
  251. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js +122 -0
  252. package/dist/parquetjs/parquet-thrift/Encoding.d.ts +11 -0
  253. package/dist/parquetjs/parquet-thrift/Encoding.d.ts.map +1 -0
  254. package/dist/parquetjs/parquet-thrift/Encoding.js +20 -0
  255. package/dist/parquetjs/parquet-thrift/EnumType.d.ts +9 -0
  256. package/dist/parquetjs/parquet-thrift/EnumType.d.ts.map +1 -0
  257. package/dist/parquetjs/parquet-thrift/EnumType.js +58 -0
  258. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.d.ts +6 -0
  259. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.d.ts.map +1 -0
  260. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js +15 -0
  261. package/dist/parquetjs/parquet-thrift/FileMetaData.d.ts +28 -0
  262. package/dist/parquetjs/parquet-thrift/FileMetaData.d.ts.map +1 -0
  263. package/dist/parquetjs/parquet-thrift/FileMetaData.js +256 -0
  264. package/dist/parquetjs/parquet-thrift/IndexPageHeader.d.ts +9 -0
  265. package/dist/parquetjs/parquet-thrift/IndexPageHeader.d.ts.map +1 -0
  266. package/dist/parquetjs/parquet-thrift/IndexPageHeader.js +58 -0
  267. package/dist/parquetjs/parquet-thrift/IntType.d.ts +13 -0
  268. package/dist/parquetjs/parquet-thrift/IntType.d.ts.map +1 -0
  269. package/dist/parquetjs/parquet-thrift/IntType.js +105 -0
  270. package/dist/parquetjs/parquet-thrift/JsonType.d.ts +9 -0
  271. package/dist/parquetjs/parquet-thrift/JsonType.d.ts.map +1 -0
  272. package/dist/parquetjs/parquet-thrift/JsonType.js +58 -0
  273. package/dist/parquetjs/parquet-thrift/KeyValue.d.ts +13 -0
  274. package/dist/parquetjs/parquet-thrift/KeyValue.d.ts.map +1 -0
  275. package/dist/parquetjs/parquet-thrift/KeyValue.js +102 -0
  276. package/dist/parquetjs/parquet-thrift/ListType.d.ts +9 -0
  277. package/dist/parquetjs/parquet-thrift/ListType.d.ts.map +1 -0
  278. package/dist/parquetjs/parquet-thrift/ListType.js +58 -0
  279. package/dist/parquetjs/parquet-thrift/LogicalType.d.ts +61 -0
  280. package/dist/parquetjs/parquet-thrift/LogicalType.d.ts.map +1 -0
  281. package/dist/parquetjs/parquet-thrift/LogicalType.js +380 -0
  282. package/dist/parquetjs/parquet-thrift/MapType.d.ts +9 -0
  283. package/dist/parquetjs/parquet-thrift/MapType.d.ts.map +1 -0
  284. package/dist/parquetjs/parquet-thrift/MapType.js +58 -0
  285. package/dist/parquetjs/parquet-thrift/MicroSeconds.d.ts +9 -0
  286. package/dist/parquetjs/parquet-thrift/MicroSeconds.d.ts.map +1 -0
  287. package/dist/parquetjs/parquet-thrift/MicroSeconds.js +58 -0
  288. package/dist/parquetjs/parquet-thrift/MilliSeconds.d.ts +9 -0
  289. package/dist/parquetjs/parquet-thrift/MilliSeconds.d.ts.map +1 -0
  290. package/dist/parquetjs/parquet-thrift/MilliSeconds.js +58 -0
  291. package/dist/parquetjs/parquet-thrift/NullType.d.ts +9 -0
  292. package/dist/parquetjs/parquet-thrift/NullType.d.ts.map +1 -0
  293. package/dist/parquetjs/parquet-thrift/NullType.js +58 -0
  294. package/dist/parquetjs/parquet-thrift/OffsetIndex.d.ts +12 -0
  295. package/dist/parquetjs/parquet-thrift/OffsetIndex.d.ts.map +1 -0
  296. package/dist/parquetjs/parquet-thrift/OffsetIndex.js +97 -0
  297. package/dist/parquetjs/parquet-thrift/PageEncodingStats.d.ts +17 -0
  298. package/dist/parquetjs/parquet-thrift/PageEncodingStats.d.ts.map +1 -0
  299. package/dist/parquetjs/parquet-thrift/PageEncodingStats.js +127 -0
  300. package/dist/parquetjs/parquet-thrift/PageHeader.d.ts +30 -0
  301. package/dist/parquetjs/parquet-thrift/PageHeader.d.ts.map +1 -0
  302. package/dist/parquetjs/parquet-thrift/PageHeader.js +216 -0
  303. package/dist/parquetjs/parquet-thrift/PageLocation.d.ts +16 -0
  304. package/dist/parquetjs/parquet-thrift/PageLocation.d.ts.map +1 -0
  305. package/dist/parquetjs/parquet-thrift/PageLocation.js +141 -0
  306. package/dist/parquetjs/parquet-thrift/PageType.d.ts +7 -0
  307. package/dist/parquetjs/parquet-thrift/PageType.d.ts.map +1 -0
  308. package/dist/parquetjs/parquet-thrift/PageType.js +16 -0
  309. package/dist/parquetjs/parquet-thrift/RowGroup.d.ts +20 -0
  310. package/dist/parquetjs/parquet-thrift/RowGroup.d.ts.map +1 -0
  311. package/dist/parquetjs/parquet-thrift/RowGroup.js +182 -0
  312. package/dist/parquetjs/parquet-thrift/SchemaElement.d.ts +33 -0
  313. package/dist/parquetjs/parquet-thrift/SchemaElement.d.ts.map +1 -0
  314. package/dist/parquetjs/parquet-thrift/SchemaElement.js +239 -0
  315. package/dist/parquetjs/parquet-thrift/SortingColumn.d.ts +15 -0
  316. package/dist/parquetjs/parquet-thrift/SortingColumn.d.ts.map +1 -0
  317. package/dist/parquetjs/parquet-thrift/SortingColumn.js +127 -0
  318. package/dist/parquetjs/parquet-thrift/Statistics.d.ts +23 -0
  319. package/dist/parquetjs/parquet-thrift/Statistics.d.ts.map +1 -0
  320. package/dist/parquetjs/parquet-thrift/Statistics.js +176 -0
  321. package/dist/parquetjs/parquet-thrift/StringType.d.ts +9 -0
  322. package/dist/parquetjs/parquet-thrift/StringType.d.ts.map +1 -0
  323. package/dist/parquetjs/parquet-thrift/StringType.js +58 -0
  324. package/dist/parquetjs/parquet-thrift/TimeType.d.ts +14 -0
  325. package/dist/parquetjs/parquet-thrift/TimeType.d.ts.map +1 -0
  326. package/dist/parquetjs/parquet-thrift/TimeType.js +106 -0
  327. package/dist/parquetjs/parquet-thrift/TimeUnit.d.ts +17 -0
  328. package/dist/parquetjs/parquet-thrift/TimeUnit.d.ts.map +1 -0
  329. package/dist/parquetjs/parquet-thrift/TimeUnit.js +127 -0
  330. package/dist/parquetjs/parquet-thrift/TimestampType.d.ts +14 -0
  331. package/dist/parquetjs/parquet-thrift/TimestampType.d.ts.map +1 -0
  332. package/dist/parquetjs/parquet-thrift/TimestampType.js +106 -0
  333. package/dist/parquetjs/parquet-thrift/Type.d.ts +11 -0
  334. package/dist/parquetjs/parquet-thrift/Type.d.ts.map +1 -0
  335. package/dist/parquetjs/parquet-thrift/Type.js +20 -0
  336. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts +9 -0
  337. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts.map +1 -0
  338. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js +58 -0
  339. package/dist/parquetjs/parquet-thrift/UUIDType.d.ts +9 -0
  340. package/dist/parquetjs/parquet-thrift/UUIDType.d.ts.map +1 -0
  341. package/dist/parquetjs/parquet-thrift/UUIDType.js +58 -0
  342. package/dist/parquetjs/parquet-thrift/index.d.ts +44 -0
  343. package/dist/parquetjs/parquet-thrift/index.d.ts.map +1 -0
  344. package/dist/parquetjs/parquet-thrift/index.js +61 -0
  345. package/dist/parquetjs/parser/decoders.d.ts +34 -0
  346. package/dist/parquetjs/parser/decoders.d.ts.map +1 -0
  347. package/dist/parquetjs/parser/decoders.js +318 -0
  348. package/dist/parquetjs/parser/parquet-cursor.d.ts +36 -0
  349. package/dist/parquetjs/parser/parquet-cursor.d.ts.map +1 -0
  350. package/dist/parquetjs/parser/parquet-cursor.js +74 -0
  351. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +40 -0
  352. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +1 -0
  353. package/dist/parquetjs/parser/parquet-envelope-reader.js +136 -0
  354. package/dist/parquetjs/parser/parquet-reader.d.ts +68 -0
  355. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -0
  356. package/dist/parquetjs/parser/parquet-reader.js +134 -0
  357. package/dist/parquetjs/schema/declare.d.ts +80 -0
  358. package/dist/parquetjs/schema/declare.d.ts.map +1 -0
  359. package/dist/parquetjs/schema/declare.js +10 -0
  360. package/dist/parquetjs/schema/schema.d.ts +26 -0
  361. package/dist/parquetjs/schema/schema.d.ts.map +1 -0
  362. package/dist/parquetjs/schema/schema.js +162 -0
  363. package/dist/parquetjs/schema/shred.d.ts +48 -0
  364. package/dist/parquetjs/schema/shred.d.ts.map +1 -0
  365. package/dist/parquetjs/schema/shred.js +225 -0
  366. package/dist/parquetjs/schema/types.d.ts +20 -0
  367. package/dist/parquetjs/schema/types.d.ts.map +1 -0
  368. package/dist/parquetjs/schema/types.js +418 -0
  369. package/dist/parquetjs/utils/buffer-utils.d.ts +10 -0
  370. package/dist/parquetjs/utils/buffer-utils.d.ts.map +1 -0
  371. package/dist/parquetjs/utils/buffer-utils.js +22 -0
  372. package/dist/parquetjs/utils/file-utils.d.ts +16 -0
  373. package/dist/parquetjs/utils/file-utils.d.ts.map +1 -0
  374. package/dist/parquetjs/utils/file-utils.js +46 -0
  375. package/dist/parquetjs/utils/read-utils.d.ts +25 -0
  376. package/dist/parquetjs/utils/read-utils.d.ts.map +1 -0
  377. package/dist/parquetjs/utils/read-utils.js +109 -0
  378. package/dist/workers/parquet-worker.d.ts +2 -0
  379. package/dist/workers/parquet-worker.d.ts.map +1 -0
  380. package/dist/workers/parquet-worker.js +5 -0
  381. package/package.json +8 -8
  382. package/src/index.ts +3 -3
  383. package/src/parquetjs/compression.ts +10 -10
  384. package/src/parquetjs/parser/decoders.ts +1 -1
  385. package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -11
  386. package/src/parquetjs/parser/parquet-reader.ts +0 -16
  387. package/src/parquetjs/utils/file-utils.ts +0 -49
package/dist/es5/bundle.js CHANGED
@@ -1,6 +1,6 @@
  "use strict";

- var moduleExports = require('./index');
+ const moduleExports = require('./index');

  globalThis.loaders = globalThis.loaders || {};
  module.exports = Object.assign(globalThis.loaders, moduleExports);
package/dist/es5/bundle.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/bundle.ts"],"names":["moduleExports","require","globalThis","loaders","module","exports","Object","assign"],"mappings":";;AACA,IAAMA,aAAa,GAAGC,OAAO,CAAC,SAAD,CAA7B;;AACAC,UAAU,CAACC,OAAX,GAAqBD,UAAU,CAACC,OAAX,IAAsB,EAA3C;AACAC,MAAM,CAACC,OAAP,GAAiBC,MAAM,CAACC,MAAP,CAAcL,UAAU,CAACC,OAAzB,EAAkCH,aAAlC,CAAjB","sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nglobalThis.loaders = globalThis.loaders || {};\nmodule.exports = Object.assign(globalThis.loaders, moduleExports);\n"],"file":"bundle.js"}
+ {"version":3,"sources":["../../src/bundle.ts"],"names":["moduleExports","require","globalThis","loaders","module","exports","Object","assign"],"mappings":";;AACA,MAAMA,aAAa,GAAGC,OAAO,CAAC,SAAD,CAA7B;;AACAC,UAAU,CAACC,OAAX,GAAqBD,UAAU,CAACC,OAAX,IAAsB,EAA3C;AACAC,MAAM,CAACC,OAAP,GAAiBC,MAAM,CAACC,MAAP,CAAcL,UAAU,CAACC,OAAzB,EAAkCH,aAAlC,CAAjB","sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nglobalThis.loaders = globalThis.loaders || {};\nmodule.exports = Object.assign(globalThis.loaders, moduleExports);\n"],"file":"bundle.js"}
package/dist/es5/constants.js CHANGED
@@ -4,14 +4,14 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.PARQUET_RDLVL_ENCODING = exports.PARQUET_RDLVL_TYPE = exports.PARQUET_VERSION = exports.PARQUET_MAGIC_ENCRYPTED = exports.PARQUET_MAGIC = void 0;
- var PARQUET_MAGIC = 'PAR1';
+ const PARQUET_MAGIC = 'PAR1';
  exports.PARQUET_MAGIC = PARQUET_MAGIC;
- var PARQUET_MAGIC_ENCRYPTED = 'PARE';
+ const PARQUET_MAGIC_ENCRYPTED = 'PARE';
  exports.PARQUET_MAGIC_ENCRYPTED = PARQUET_MAGIC_ENCRYPTED;
- var PARQUET_VERSION = 1;
+ const PARQUET_VERSION = 1;
  exports.PARQUET_VERSION = PARQUET_VERSION;
- var PARQUET_RDLVL_TYPE = 'INT32';
+ const PARQUET_RDLVL_TYPE = 'INT32';
  exports.PARQUET_RDLVL_TYPE = PARQUET_RDLVL_TYPE;
- var PARQUET_RDLVL_ENCODING = 'RLE';
+ const PARQUET_RDLVL_ENCODING = 'RLE';
  exports.PARQUET_RDLVL_ENCODING = PARQUET_RDLVL_ENCODING;
  //# sourceMappingURL=constants.js.map
package/dist/es5/constants.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/constants.ts"],"names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","PARQUET_VERSION","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING"],"mappings":";;;;;;AAIO,IAAMA,aAAa,GAAG,MAAtB;;AACA,IAAMC,uBAAuB,GAAG,MAAhC;;AAKA,IAAMC,eAAe,GAAG,CAAxB;;AAKA,IAAMC,kBAAkB,GAAG,OAA3B;;AACA,IAAMC,sBAAsB,GAAG,KAA/B","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/**\n * Parquet File Magic String\n */\nexport const PARQUET_MAGIC = 'PAR1';\nexport const PARQUET_MAGIC_ENCRYPTED = 'PARE';\n\n/**\n * Parquet File Format Version\n */\nexport const PARQUET_VERSION = 1;\n\n/**\n * Internal type used for repetition/definition levels\n */\nexport const PARQUET_RDLVL_TYPE = 'INT32';\nexport const PARQUET_RDLVL_ENCODING = 'RLE';\n"],"file":"constants.js"}
+ {"version":3,"sources":["../../src/constants.ts"],"names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","PARQUET_VERSION","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING"],"mappings":";;;;;;AAIO,MAAMA,aAAa,GAAG,MAAtB;;AACA,MAAMC,uBAAuB,GAAG,MAAhC;;AAKA,MAAMC,eAAe,GAAG,CAAxB;;AAKA,MAAMC,kBAAkB,GAAG,OAA3B;;AACA,MAAMC,sBAAsB,GAAG,KAA/B","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/**\n * Parquet File Magic String\n */\nexport const PARQUET_MAGIC = 'PAR1';\nexport const PARQUET_MAGIC_ENCRYPTED = 'PARE';\n\n/**\n * Parquet File Format Version\n */\nexport const PARQUET_VERSION = 1;\n\n/**\n * Internal type used for repetition/definition levels\n */\nexport const PARQUET_RDLVL_TYPE = 'INT32';\nexport const PARQUET_RDLVL_ENCODING = 'RLE';\n"],"file":"constants.js"}
package/dist/es5/index.js CHANGED
@@ -1,74 +1,52 @@
  "use strict";

- var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  Object.defineProperty(exports, "ParquetWorkerLoader", {
  enumerable: true,
- get: function get() {
+ get: function () {
  return _parquetLoader.ParquetLoader;
  }
  });
  Object.defineProperty(exports, "_ParquetWriter", {
  enumerable: true,
- get: function get() {
+ get: function () {
  return _parquetWriter.ParquetWriter;
  }
  });
  Object.defineProperty(exports, "preloadCompressions", {
  enumerable: true,
- get: function get() {
+ get: function () {
  return _compression.preloadCompressions;
  }
  });
- Object.defineProperty(exports, "ParquetEnvelopeReader", {
+ Object.defineProperty(exports, "ParquetSchema", {
  enumerable: true,
- get: function get() {
- return _parquetEnvelopeReader.ParquetEnvelopeReader;
+ get: function () {
+ return _schema.ParquetSchema;
  }
  });
  Object.defineProperty(exports, "ParquetReader", {
  enumerable: true,
- get: function get() {
+ get: function () {
  return _parquetReader.ParquetReader;
  }
  });
- Object.defineProperty(exports, "ParquetWriter", {
- enumerable: true,
- get: function get() {
- return _writer.ParquetWriter;
- }
- });
- Object.defineProperty(exports, "ParquetEnvelopeWriter", {
- enumerable: true,
- get: function get() {
- return _writer.ParquetEnvelopeWriter;
- }
- });
- Object.defineProperty(exports, "ParquetTransformer", {
- enumerable: true,
- get: function get() {
- return _writer.ParquetTransformer;
- }
- });
- Object.defineProperty(exports, "ParquetSchema", {
+ Object.defineProperty(exports, "ParquetEnvelopeReader", {
  enumerable: true,
- get: function get() {
- return _schema.ParquetSchema;
+ get: function () {
+ return _parquetEnvelopeReader.ParquetEnvelopeReader;
  }
  });
  Object.defineProperty(exports, "convertParquetToArrowSchema", {
  enumerable: true,
- get: function get() {
+ get: function () {
  return _convertSchema.convertParquetToArrowSchema;
  }
  });
  exports._typecheckParquetLoader = exports.ParquetLoader = void 0;

- var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-
  var _parquetLoader = require("./parquet-loader");

  var _parseParquet = require("./lib/parse-parquet");
@@ -77,26 +55,19 @@ var _parquetWriter = require("./parquet-writer");

  var _compression = require("./parquetjs/compression");

- var _parquetEnvelopeReader = require("./parquetjs/parser/parquet-envelope-reader");
+ var _schema = require("./parquetjs/schema/schema");

  var _parquetReader = require("./parquetjs/parser/parquet-reader");

- var _writer = require("./parquetjs/encoder/writer");
-
- var _schema = require("./parquetjs/schema/schema");
+ var _parquetEnvelopeReader = require("./parquetjs/parser/parquet-envelope-reader");

  var _convertSchema = require("./lib/convert-schema");

- function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
-
- function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
-
- var ParquetLoader = _objectSpread(_objectSpread({}, _parquetLoader.ParquetLoader), {}, {
+ const ParquetLoader = { ..._parquetLoader.ParquetLoader,
  parse: _parseParquet.parseParquet,
  parseFileInBatches: _parseParquet.parseParquetFileInBatches
- });
-
+ };
  exports.ParquetLoader = ParquetLoader;
- var _typecheckParquetLoader = ParquetLoader;
+ const _typecheckParquetLoader = ParquetLoader;
  exports._typecheckParquetLoader = _typecheckParquetLoader;
  //# sourceMappingURL=index.js.map
package/dist/es5/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/index.ts"],"names":["ParquetLoader","ParquetWorkerLoader","parse","parseParquet","parseFileInBatches","parseParquetFileInBatches","_typecheckParquetLoader"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA;;AACA;;AAaA;;AAIA;;AAEA;;AACA;;AACA;;AACA;;AACA;;;;;;AAlBO,IAAMA,aAAa,mCACrBC,4BADqB;AAExBC,EAAAA,KAAK,EAAEC,0BAFiB;AAGxBC,EAAAA,kBAAkB,EAAEC;AAHI,EAAnB;;;AAqBA,IAAMC,uBAAyC,GAAGN,aAAlD","sourcesContent":["import type {LoaderWithParser} from '@loaders.gl/loader-utils';\n\n// ParquetLoader\n\nimport {ParquetLoader as ParquetWorkerLoader} from './parquet-loader';\nimport {parseParquet, parseParquetFileInBatches} from './lib/parse-parquet';\n\nexport {ParquetWorkerLoader};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n ...ParquetWorkerLoader,\n parse: parseParquet,\n parseFileInBatches: parseParquetFileInBatches\n};\n\n// ParquetWriter\n\nexport {ParquetWriter as _ParquetWriter} from './parquet-writer';\n\n// EXPERIMENTAL - expose the internal parquetjs API\n\nexport {preloadCompressions} from './parquetjs/compression';\n\nexport {ParquetEnvelopeReader} from './parquetjs/parser/parquet-envelope-reader';\nexport {ParquetReader} from './parquetjs/parser/parquet-reader';\nexport {ParquetWriter, ParquetEnvelopeWriter, ParquetTransformer} from './parquetjs/encoder/writer';\nexport {ParquetSchema} from './parquetjs/schema/schema';\nexport {convertParquetToArrowSchema} from './lib/convert-schema';\n\n// TESTS\nexport const _typecheckParquetLoader: LoaderWithParser = ParquetLoader;\n"],"file":"index.js"}
+ {"version":3,"sources":["../../src/index.ts"],"names":["ParquetLoader","ParquetWorkerLoader","parse","parseParquet","parseFileInBatches","parseParquetFileInBatches","_typecheckParquetLoader"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA;;AACA;;AAaA;;AAIA;;AAEA;;AACA;;AACA;;AAEA;;AAlBO,MAAMA,aAAa,GAAG,EAC3B,GAAGC,4BADwB;AAE3BC,EAAAA,KAAK,EAAEC,0BAFoB;AAG3BC,EAAAA,kBAAkB,EAAEC;AAHO,CAAtB;;AAqBA,MAAMC,uBAAyC,GAAGN,aAAlD","sourcesContent":["import type {LoaderWithParser} from '@loaders.gl/loader-utils';\n\n// ParquetLoader\n\nimport {ParquetLoader as ParquetWorkerLoader} from './parquet-loader';\nimport {parseParquet, parseParquetFileInBatches} from './lib/parse-parquet';\n\nexport {ParquetWorkerLoader};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n ...ParquetWorkerLoader,\n parse: parseParquet,\n parseFileInBatches: parseParquetFileInBatches\n};\n\n// ParquetWriter\n\nexport {ParquetWriter as _ParquetWriter} from './parquet-writer';\n\n// EXPERIMENTAL - expose the internal parquetjs API\n\nexport {preloadCompressions} from './parquetjs/compression';\n\nexport {ParquetSchema} from './parquetjs/schema/schema';\nexport {ParquetReader} from './parquetjs/parser/parquet-reader';\nexport {ParquetEnvelopeReader} from './parquetjs/parser/parquet-envelope-reader';\n// export {ParquetWriter, ParquetEnvelopeWriter, ParquetTransformer} from './parquetjs/encoder/writer';\nexport {convertParquetToArrowSchema} from './lib/convert-schema';\n\n// TESTS\nexport const _typecheckParquetLoader: LoaderWithParser = ParquetLoader;\n"],"file":"index.js"}
package/dist/es5/lib/convert-schema.js CHANGED
@@ -8,7 +8,7 @@ exports.PARQUET_TYPE_MAPPING = void 0;

  var _schema = require("@loaders.gl/schema");

- var PARQUET_TYPE_MAPPING = {
+ const PARQUET_TYPE_MAPPING = {
  BOOLEAN: _schema.Bool,
  INT32: _schema.Int32,
  INT64: _schema.Float64,
@@ -42,16 +42,16 @@ var PARQUET_TYPE_MAPPING = {
  exports.PARQUET_TYPE_MAPPING = PARQUET_TYPE_MAPPING;

  function convertParquetToArrowSchema(parquetSchema) {
- var fields = getFields(parquetSchema.schema);
+ const fields = getFields(parquetSchema.schema);
  return new _schema.Schema(fields);
  }

  function getFieldMetadata(field) {
- var metadata = new Map();
+ const metadata = new Map();

- for (var key in field) {
+ for (const key in field) {
  if (key !== 'name') {
- var value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];
+ const value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];
  metadata.set(key, value);
  }
  }
@@ -60,19 +60,19 @@ function getFieldMetadata(field) {
  }

  function getFields(schema) {
- var fields = [];
+ const fields = [];

- for (var name in schema) {
- var field = schema[name];
+ for (const name in schema) {
+ const field = schema[name];

  if (field.fields) {
- var childField = getFields(field.fields);
- var nestedField = new _schema.Field(name, new _schema.Struct(childField), field.optional);
+ const childField = getFields(field.fields);
+ const nestedField = new _schema.Field(name, new _schema.Struct(childField), field.optional);
  fields.push(nestedField);
  } else {
- var FieldType = PARQUET_TYPE_MAPPING[field.type];
- var metadata = getFieldMetadata(field);
- var arrowField = new _schema.Field(name, new FieldType(), field.optional, metadata);
+ const FieldType = PARQUET_TYPE_MAPPING[field.type];
+ const metadata = getFieldMetadata(field);
+ const arrowField = new _schema.Field(name, new FieldType(), field.optional, metadata);
  fields.push(arrowField);
  }
  }
package/dist/es5/lib/convert-schema.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/lib/convert-schema.ts"],"names":["PARQUET_TYPE_MAPPING","BOOLEAN","Bool","INT32","Int32","INT64","Float64","INT96","FLOAT","Float32","DOUBLE","BYTE_ARRAY","Binary","FIXED_LEN_BYTE_ARRAY","UTF8","Utf8","DATE","TIME_MILLIS","Int64","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","Uint16","UINT_32","Uint32","UINT_64","Uint64","INT_8","Int8","INT_16","Int16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","convertParquetToArrowSchema","parquetSchema","fields","getFields","schema","Schema","getFieldMetadata","field","metadata","Map","key","value","stringify","set","name","childField","nestedField","Field","Struct","optional","push","FieldType","type","arrowField"],"mappings":";;;;;;;;AAGA;;AAmBO,IAAMA,oBAA8D,GAAG;AAC5EC,EAAAA,OAAO,EAAEC,YADmE;AAE5EC,EAAAA,KAAK,EAAEC,aAFqE;AAG5EC,EAAAA,KAAK,EAAEC,eAHqE;AAI5EC,EAAAA,KAAK,EAAED,eAJqE;AAK5EE,EAAAA,KAAK,EAAEC,eALqE;AAM5EC,EAAAA,MAAM,EAAEJ,eANoE;AAO5EK,EAAAA,UAAU,EAAEC,cAPgE;AAQ5EC,EAAAA,oBAAoB,EAAED,cARsD;AAS5EE,EAAAA,IAAI,EAAEC,YATsE;AAU5EC,EAAAA,IAAI,EAAEZ,aAVsE;AAW5Ea,EAAAA,WAAW,EAAEC,aAX+D;AAY5EC,EAAAA,WAAW,EAAED,aAZ+D;AAa5EE,EAAAA,gBAAgB,EAAEF,aAb0D;AAc5EG,EAAAA,gBAAgB,EAAEH,aAd0D;AAe5EI,EAAAA,MAAM,EAAElB,aAfoE;AAgB5EmB,EAAAA,OAAO,EAAEC,cAhBmE;AAiB5EC,EAAAA,OAAO,EAAEC,cAjBmE;AAkB5EC,EAAAA,OAAO,EAAEC,cAlBmE;AAmB5EC,EAAAA,KAAK,EAAEC,YAnBqE;AAoB5EC,EAAAA,MAAM,EAAEC,aApBoE;AAqB5EC,EAAAA,MAAM,EAAE7B,aArBoE;AAsB5E8B,EAAAA,MAAM,EAAEhB,aAtBoE;AAuB5EiB,EAAAA,IAAI,EAAEvB,cAvBsE;AAwB5EwB,EAAAA,IAAI,EAAExB,cAxBsE;AA0B5EyB,EAAAA,QAAQ,EAAEzB,cA1BkE;AA2B5E0B,EAAAA,aAAa,EAAE7B,eA3B6D;AA4B5E8B,EAAAA,aAAa,EAAEjC,eA5B6D;AA6B5EkC,EAAAA,kBAAkB,EAAElC,eA7BwD;AA8B5EmC,EAAAA,4BAA4B,EAAEnC;AA9B8C,CAAvE;;;AAiCA,SAASoC,2BAAT,CAAqCC,aAArC,EAA2E;AAChF,MAAMC,MAAM,GAAGC,SAAS,CAACF,aAAa,CAACG,MAAf,CAAxB;AAGA,SAAO,IAAIC,cAAJ,CAAWH,MAAX,CAAP;AACD;;AAED,SAASI,gBAAT,CAA0BC,KAA1B,EAAoE;AAClE,MAAMC,QAAQ,GAAG,IAAIC,GAAJ,EAAjB;;AAEA,OAAK,IAAMC,GAAX,IAAkBH,KAAlB,EAAyB;AACvB,QAAIG,GAAG,KAAK,MAAZ,EAAoB;AAClB,UAAMC,KAAK,GAAG,OAAOJ,KAAK,CAACG,GAAD,CAAZ,KAAsB,QAAtB,GAAiCjB,IAAI,CAACmB,SAAL,CAAeL,KAAK,CAACG,GAAD,CAApB,CAAjC,GAA8DH,KAAK,CAACG,GAAD,CAAjF;AACAF,MAAAA,QAAQ,CAACK,GAAT,CAAaH,GAAb,EAAkBC,KAAlB;AACD;AACF;;AAED,SAAOH,QAAP;AACD;;AAED,SAASL,SAAT,CAAmBC,MAAnB,EAAqD;AACnD,MAAMF,MAAe,GAAG,EAAxB;;AAEA,OAAK,IAAMY,IAAX,IAAmBV,MAAnB,EAA2B;AACzB,QAAMG,KAAK,GAAGH,MAAM,CAACU,IAAD,CAApB;;AAEA,QAAIP,KAAK,CAACL,MAAV,EAAkB;AAChB,UAAMa,UAAU,GAAGZ,SAAS,CAACI,KAAK,CAACL,MAAP,CAA5B;AACA,UAAMc,WAAW,GAAG,IAAIC,aAAJ,CAAUH,IAAV,EAAgB,IAAII,cAAJ,CAAWH,UAAX,CAAhB,EAAwCR,KAAK,CAACY,QAA9C,CAApB;AACAjB,MAAAA,MAAM,CAACkB,IAAP,CAAYJ,WAAZ;AACD,KAJD,MAIO;AACL,UAAMK,SAAS,GAAG/D,oBAAoB,CAACiD,KAAK,CAACe,IAAP,CAAtC;AACA,UAAMd,QAAQ,GAAGF,gBAAgB,CAACC,KAAD,CAAjC;AACA,UAAMgB,UAAU,GAAG,IAAIN,aAAJ,CAAUH,IAAV,EAAgB,IAAIO,SAAJ,EAAhB,EAAiCd,KAAK,CAACY,QAAvC,EAAiDX,QAAjD,CAAnB;AACAN,MAAAA,MAAM,CAACkB,IAAP,CAAYG,UAAZ;AACD;AACF;;AAED,SAAOrB,MAAP;AACD","sourcesContent":["import type {ParquetSchema} from '../parquetjs/schema/schema';\nimport type {FieldDefinition, ParquetField, ParquetType} from '../parquetjs/schema/declare';\n\nimport {\n Schema,\n Struct,\n Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n 
DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interal type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertParquetToArrowSchema(parquetSchema: ParquetSchema): Schema {\n const fields = getFields(parquetSchema.schema);\n\n // TODO add metadata if needed.\n return new Schema(fields);\n}\n\nfunction getFieldMetadata(field: ParquetField): Map<string, string> {\n const metadata = new Map();\n\n for (const key in field) {\n if (key !== 'name') {\n const value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];\n metadata.set(key, value);\n }\n }\n\n return metadata;\n}\n\nfunction getFields(schema: FieldDefinition): Field[] {\n const fields: Field[] = [];\n\n for (const name in schema) {\n const field = schema[name];\n\n if (field.fields) {\n const childField = getFields(field.fields);\n const nestedField = new Field(name, new Struct(childField), field.optional);\n fields.push(nestedField);\n } else {\n const FieldType = PARQUET_TYPE_MAPPING[field.type];\n const metadata = getFieldMetadata(field);\n const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n fields.push(arrowField);\n }\n }\n\n return fields;\n}\n"],"file":"convert-schema.js"}
+ {"version":3,"sources":["../../../src/lib/convert-schema.ts"],"names":["PARQUET_TYPE_MAPPING","BOOLEAN","Bool","INT32","Int32","INT64","Float64","INT96","FLOAT","Float32","DOUBLE","BYTE_ARRAY","Binary","FIXED_LEN_BYTE_ARRAY","UTF8","Utf8","DATE","TIME_MILLIS","Int64","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","Uint16","UINT_32","Uint32","UINT_64","Uint64","INT_8","Int8","INT_16","Int16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","convertParquetToArrowSchema","parquetSchema","fields","getFields","schema","Schema","getFieldMetadata","field","metadata","Map","key","value","stringify","set","name","childField","nestedField","Field","Struct","optional","push","FieldType","type","arrowField"],"mappings":";;;;;;;;AAGA;;AAmBO,MAAMA,oBAA8D,GAAG;AAC5EC,EAAAA,OAAO,EAAEC,YADmE;AAE5EC,EAAAA,KAAK,EAAEC,aAFqE;AAG5EC,EAAAA,KAAK,EAAEC,eAHqE;AAI5EC,EAAAA,KAAK,EAAED,eAJqE;AAK5EE,EAAAA,KAAK,EAAEC,eALqE;AAM5EC,EAAAA,MAAM,EAAEJ,eANoE;AAO5EK,EAAAA,UAAU,EAAEC,cAPgE;AAQ5EC,EAAAA,oBAAoB,EAAED,cARsD;AAS5EE,EAAAA,IAAI,EAAEC,YATsE;AAU5EC,EAAAA,IAAI,EAAEZ,aAVsE;AAW5Ea,EAAAA,WAAW,EAAEC,aAX+D;AAY5EC,EAAAA,WAAW,EAAED,aAZ+D;AAa5EE,EAAAA,gBAAgB,EAAEF,aAb0D;AAc5EG,EAAAA,gBAAgB,EAAEH,aAd0D;AAe5EI,EAAAA,MAAM,EAAElB,aAfoE;AAgB5EmB,EAAAA,OAAO,EAAEC,cAhBmE;AAiB5EC,EAAAA,OAAO,EAAEC,cAjBmE;AAkB5EC,EAAAA,OAAO,EAAEC,cAlBmE;AAmB5EC,EAAAA,KAAK,EAAEC,YAnBqE;AAoB5EC,EAAAA,MAAM,EAAEC,aApBoE;AAqB5EC,EAAAA,MAAM,EAAE7B,aArBoE;AAsB5E8B,EAAAA,MAAM,EAAEhB,aAtBoE;AAuB5EiB,EAAAA,IAAI,EAAEvB,cAvBsE;AAwB5EwB,EAAAA,IAAI,EAAExB,cAxBsE;AA0B5EyB,EAAAA,QAAQ,EAAEzB,cA1BkE;AA2B5E0B,EAAAA,aAAa,EAAE7B,eA3B6D;AA4B5E8B,EAAAA,aAAa,EAAEjC,eA5B6D;AA6B5EkC,EAAAA,kBAAkB,EAAElC,eA7BwD;AA8B5EmC,EAAAA,4BAA4B,EAAEnC;AA9B8C,CAAvE;;;AAiCA,SAASoC,2BAAT,CAAqCC,aAArC,EAA2E;AAChF,QAAMC,MAAM,GAAGC,SAAS,CAACF,aAAa,CAACG,MAAf,CAAxB;AAGA,SAAO,IAAIC,cAAJ,CAAWH,MAAX,CAAP;AACD;;AAED,SAASI,gBAAT,CAA0BC,KAA1B,EAAoE;AAClE,QAAMC,QAAQ,GAAG,IAAIC,GAAJ,EAAjB;;AAEA,OAAK,MAAMC,GAAX,IAAkBH,KAAlB,EAAyB;AACvB,QAAIG,GAAG,KAAK,MAAZ,EAAoB;AAClB,YAAMC,KAAK,GAAG,OAAOJ,KAAK,CAACG,GAAD,CAAZ,KAAsB,QAAtB,GAAiCjB,IAAI,CAACmB,SAAL,CAAeL,KAAK,CAACG,GAAD,CAApB,CAAjC,GAA8DH,KAAK,CAACG,GAAD,CAAjF;AACAF,MAAAA,QAAQ,CAACK,GAAT,CAAaH,GAAb,EAAkBC,KAAlB;AACD;AACF;;AAED,SAAOH,QAAP;AACD;;AAED,SAASL,SAAT,CAAmBC,MAAnB,EAAqD;AACnD,QAAMF,MAAe,GAAG,EAAxB;;AAEA,OAAK,MAAMY,IAAX,IAAmBV,MAAnB,EAA2B;AACzB,UAAMG,KAAK,GAAGH,MAAM,CAACU,IAAD,CAApB;;AAEA,QAAIP,KAAK,CAACL,MAAV,EAAkB;AAChB,YAAMa,UAAU,GAAGZ,SAAS,CAACI,KAAK,CAACL,MAAP,CAA5B;AACA,YAAMc,WAAW,GAAG,IAAIC,aAAJ,CAAUH,IAAV,EAAgB,IAAII,cAAJ,CAAWH,UAAX,CAAhB,EAAwCR,KAAK,CAACY,QAA9C,CAApB;AACAjB,MAAAA,MAAM,CAACkB,IAAP,CAAYJ,WAAZ;AACD,KAJD,MAIO;AACL,YAAMK,SAAS,GAAG/D,oBAAoB,CAACiD,KAAK,CAACe,IAAP,CAAtC;AACA,YAAMd,QAAQ,GAAGF,gBAAgB,CAACC,KAAD,CAAjC;AACA,YAAMgB,UAAU,GAAG,IAAIN,aAAJ,CAAUH,IAAV,EAAgB,IAAIO,SAAJ,EAAhB,EAAiCd,KAAK,CAACY,QAAvC,EAAiDX,QAAjD,CAAnB;AACAN,MAAAA,MAAM,CAACkB,IAAP,CAAYG,UAAZ;AACD;AACF;;AAED,SAAOrB,MAAP;AACD","sourcesContent":["import type {ParquetSchema} from '../parquetjs/schema/schema';\nimport type {FieldDefinition, ParquetField, ParquetType} from '../parquetjs/schema/declare';\n\nimport {\n Schema,\n Struct,\n Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n 
DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interal type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertParquetToArrowSchema(parquetSchema: ParquetSchema): Schema {\n const fields = getFields(parquetSchema.schema);\n\n // TODO add metadata if needed.\n return new Schema(fields);\n}\n\nfunction getFieldMetadata(field: ParquetField): Map<string, string> {\n const metadata = new Map();\n\n for (const key in field) {\n if (key !== 'name') {\n const value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];\n metadata.set(key, value);\n }\n }\n\n return metadata;\n}\n\nfunction getFields(schema: FieldDefinition): Field[] {\n const fields: Field[] = [];\n\n for (const name in schema) {\n const field = schema[name];\n\n if (field.fields) {\n const childField = getFields(field.fields);\n const nestedField = new Field(name, new Struct(childField), field.optional);\n fields.push(nestedField);\n } else {\n const FieldType = PARQUET_TYPE_MAPPING[field.type];\n const metadata = getFieldMetadata(field);\n const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n fields.push(arrowField);\n }\n }\n\n return fields;\n}\n"],"file":"convert-schema.js"}
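convertParquetToArrowSchema is behaviorally unchanged: it still walks the ParquetSchema field tree and maps each ParquetType through PARQUET_TYPE_MAPPING to a @loaders.gl/schema Field. A usage sketch; the column definitions below are invented for illustration and the ParquetSchema constructor shape is an assumption based on the parquetjs fork:

    import {ParquetSchema, convertParquetToArrowSchema} from '@loaders.gl/parquet';

    // Hypothetical columns, just to exercise the conversion
    const parquetSchema = new ParquetSchema({
      name: {type: 'UTF8'},
      quantity: {type: 'INT64', optional: true}
    });

    // UTF8 maps to Utf8, INT64 maps to Float64 per PARQUET_TYPE_MAPPING above
    const schema = convertParquetToArrowSchema(parquetSchema);
    console.log(schema.fields.map((field) => field.name)); // ['name', 'quantity']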
package/dist/es5/lib/parse-parquet.js CHANGED
@@ -1,173 +1,38 @@
  "use strict";

- var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.parseParquet = parseParquet;
  exports.parseParquetFileInBatches = parseParquetFileInBatches;

- var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-
- var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-
- var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
-
- var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
-
- var _asyncIterator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncIterator"));
-
  var _parquetReader = require("../parquetjs/parser/parquet-reader");

- function parseParquet(_x3, _x4) {
- return _parseParquet.apply(this, arguments);
- }
-
- function _parseParquet() {
- _parseParquet = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee2(arrayBuffer, options) {
- var blob, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, _value, batch;
-
- return _regenerator.default.wrap(function _callee2$(_context2) {
- while (1) {
- switch (_context2.prev = _context2.next) {
- case 0:
- blob = new Blob([arrayBuffer]);
- _iteratorNormalCompletion = true;
- _didIteratorError = false;
- _context2.prev = 3;
- _iterator = (0, _asyncIterator2.default)(parseParquetFileInBatches(blob, options));
-
- case 5:
- _context2.next = 7;
- return _iterator.next();
-
- case 7:
- _step = _context2.sent;
- _iteratorNormalCompletion = _step.done;
- _context2.next = 11;
- return _step.value;
-
- case 11:
- _value = _context2.sent;
-
- if (_iteratorNormalCompletion) {
- _context2.next = 18;
- break;
- }
-
- batch = _value;
- return _context2.abrupt("return", batch);
-
- case 15:
- _iteratorNormalCompletion = true;
- _context2.next = 5;
- break;
-
- case 18:
- _context2.next = 24;
- break;
+ async function parseParquet(arrayBuffer, options) {
+ const blob = new Blob([arrayBuffer]);

- case 20:
- _context2.prev = 20;
- _context2.t0 = _context2["catch"](3);
- _didIteratorError = true;
- _iteratorError = _context2.t0;
+ for await (const batch of parseParquetFileInBatches(blob, options)) {
+ return batch;
+ }

- case 24:
- _context2.prev = 24;
- _context2.prev = 25;
-
- if (!(!_iteratorNormalCompletion && _iterator.return != null)) {
- _context2.next = 29;
- break;
- }
-
- _context2.next = 29;
- return _iterator.return();
-
- case 29:
- _context2.prev = 29;
-
- if (!_didIteratorError) {
- _context2.next = 32;
- break;
- }
-
- throw _iteratorError;
-
- case 32:
- return _context2.finish(29);
-
- case 33:
- return _context2.finish(24);
-
- case 34:
- return _context2.abrupt("return", null);
-
- case 35:
- case "end":
- return _context2.stop();
- }
- }
- }, _callee2, null, [[3, 20, 24, 34], [25,, 29, 33]]);
- }));
- return _parseParquet.apply(this, arguments);
- }
-
- function parseParquetFileInBatches(_x, _x2) {
- return _parseParquetFileInBatches.apply(this, arguments);
+ return null;
  }

- function _parseParquetFileInBatches() {
- _parseParquetFileInBatches = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(blob, options) {
- var reader, rows, cursor, record;
- return _regenerator.default.wrap(function _callee$(_context) {
- while (1) {
- switch (_context.prev = _context.next) {
- case 0:
- _context.next = 2;
- return (0, _awaitAsyncGenerator2.default)(_parquetReader.ParquetReader.openBlob(blob));
-
- case 2:
- reader = _context.sent;
- rows = [];
- _context.prev = 4;
- cursor = reader.getCursor();
-
- case 6:
- _context.next = 8;
- return (0, _awaitAsyncGenerator2.default)(cursor.next());
-
- case 8:
- if (!(record = _context.sent)) {
- _context.next = 12;
- break;
- }
-
- rows.push(record);
- _context.next = 6;
- break;
-
- case 12:
- _context.prev = 12;
- _context.next = 15;
- return (0, _awaitAsyncGenerator2.default)(reader.close());
+ async function* parseParquetFileInBatches(blob, options) {
+ const reader = await _parquetReader.ParquetReader.openBlob(blob);
+ const rows = [];

- case 15:
- return _context.finish(12);
+ try {
+ const cursor = reader.getCursor();
+ let record;

- case 16:
- _context.next = 18;
- return rows;
+ while (record = await cursor.next()) {
+ rows.push(record);
+ }
+ } finally {
+ await reader.close();
+ }

- case 18:
- case "end":
- return _context.stop();
- }
- }
- }, _callee, null, [[4,, 12, 16]]);
- }));
- return _parseParquetFileInBatches.apply(this, arguments);
+ yield rows;
  }
  //# sourceMappingURL=parse-parquet.js.map
package/dist/es5/lib/parse-parquet.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/lib/parse-parquet.ts"],"names":["parseParquet","arrayBuffer","options","blob","Blob","parseParquetFileInBatches","batch","ParquetReader","openBlob","reader","rows","cursor","getCursor","next","record","push","close"],"mappings":";;;;;;;;;;;;;;;;;;;;AAGA;;SAEsBA,Y;;;;;4EAAf,kBAA4BC,WAA5B,EAAsDC,OAAtD;AAAA;;AAAA;AAAA;AAAA;AAAA;AACCC,YAAAA,IADD,GACQ,IAAIC,IAAJ,CAAS,CAACH,WAAD,CAAT,CADR;AAAA;AAAA;AAAA;AAAA,qDAEqBI,yBAAyB,CAACF,IAAD,EAAOD,OAAP,CAF9C;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAEYI,YAAAA,KAFZ;AAAA,8CAGIA,KAHJ;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA,8CAKE,IALF;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,G;;;;SAQgBD,yB;;;;;2FAAhB,iBAA0CF,IAA1C,EAAsDD,OAAtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sDACgBK,6BAAcC,QAAd,CAAuBL,IAAvB,CADhB;;AAAA;AACCM,YAAAA,MADD;AAECC,YAAAA,IAFD,GAEiB,EAFjB;AAAA;AAIGC,YAAAA,MAJH,GAIYF,MAAM,CAACG,SAAP,EAJZ;;AAAA;AAAA;AAAA,sDAMoBD,MAAM,CAACE,IAAP,EANpB;;AAAA;AAAA,kBAMKC,MANL;AAAA;AAAA;AAAA;;AAODJ,YAAAA,IAAI,CAACK,IAAL,CAAUD,MAAV;AAPC;AAAA;;AAAA;AAAA;AAAA;AAAA,sDAUGL,MAAM,CAACO,KAAP,EAVH;;AAAA;AAAA;;AAAA;AAAA;AAYL,mBAAMN,IAAN;;AAZK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,G","sourcesContent":["// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../parquet-loader';\n\nimport {ParquetReader} from '../parquetjs/parser/parquet-reader';\n\nexport async function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions) {\n const reader = await ParquetReader.openBlob(blob);\n const rows: any[][] = [];\n try {\n const cursor = reader.getCursor();\n let record: any[] | null;\n while ((record = await cursor.next())) {\n rows.push(record);\n }\n } finally {\n await reader.close();\n }\n yield rows;\n}\n"],"file":"parse-parquet.js"}
+ {"version":3,"sources":["../../../src/lib/parse-parquet.ts"],"names":["parseParquet","arrayBuffer","options","blob","Blob","batch","parseParquetFileInBatches","reader","ParquetReader","openBlob","rows","cursor","getCursor","record","next","push","close"],"mappings":";;;;;;;;AAGA;;AAEO,eAAeA,YAAf,CAA4BC,WAA5B,EAAsDC,OAAtD,EAAsF;AAC3F,QAAMC,IAAI,GAAG,IAAIC,IAAJ,CAAS,CAACH,WAAD,CAAT,CAAb;;AACA,aAAW,MAAMI,KAAjB,IAA0BC,yBAAyB,CAACH,IAAD,EAAOD,OAAP,CAAnD,EAAoE;AAClE,WAAOG,KAAP;AACD;;AACD,SAAO,IAAP;AACD;;AAEM,gBAAgBC,yBAAhB,CAA0CH,IAA1C,EAAsDD,OAAtD,EAAsF;AAC3F,QAAMK,MAAM,GAAG,MAAMC,6BAAcC,QAAd,CAAuBN,IAAvB,CAArB;AACA,QAAMO,IAAa,GAAG,EAAtB;;AACA,MAAI;AACF,UAAMC,MAAM,GAAGJ,MAAM,CAACK,SAAP,EAAf;AACA,QAAIC,MAAJ;;AACA,WAAQA,MAAM,GAAG,MAAMF,MAAM,CAACG,IAAP,EAAvB,EAAuC;AACrCJ,MAAAA,IAAI,CAACK,IAAL,CAAUF,MAAV;AACD;AACF,GAND,SAMU;AACR,UAAMN,MAAM,CAACS,KAAP,EAAN;AACD;;AACD,QAAMN,IAAN;AACD","sourcesContent":["// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../parquet-loader';\n\nimport {ParquetReader} from '../parquetjs/parser/parquet-reader';\n\nexport async function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions) {\n const reader = await ParquetReader.openBlob(blob);\n const rows: any[][] = [];\n try {\n const cursor = reader.getCursor();\n let record: any[] | null;\n while ((record = await cursor.next())) {\n rows.push(record);\n }\n } finally {\n await reader.close();\n }\n yield rows;\n}\n"],"file":"parse-parquet.js"}
package/dist/es5/lib/read-array-buffer.js
@@ -1,53 +1,16 @@
 "use strict";

-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.readArrayBuffer = readArrayBuffer;

-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-
-function readArrayBuffer(_x, _x2, _x3) {
-  return _readArrayBuffer.apply(this, arguments);
-}
-
-function _readArrayBuffer() {
-  _readArrayBuffer = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee(file, start, length) {
-    var slice;
-    return _regenerator.default.wrap(function _callee$(_context) {
-      while (1) {
-        switch (_context.prev = _context.next) {
-          case 0:
-            if (!(file instanceof Blob)) {
-              _context.next = 5;
-              break;
-            }
-
-            slice = file.slice(start, start + length);
-            _context.next = 4;
-            return slice.arrayBuffer();
-
-          case 4:
-            return _context.abrupt("return", _context.sent);
-
-          case 5:
-            _context.next = 7;
-            return file.read(start, start + length);
-
-          case 7:
-            return _context.abrupt("return", _context.sent);
+async function readArrayBuffer(file, start, length) {
+  if (file instanceof Blob) {
+    const slice = file.slice(start, start + length);
+    return await slice.arrayBuffer();
+  }

-          case 8:
-          case "end":
-            return _context.stop();
-        }
-      }
-    }, _callee);
-  }));
-  return _readArrayBuffer.apply(this, arguments);
+  return await file.read(start, start + length);
 }
 //# sourceMappingURL=read-array-buffer.js.map
package/dist/es5/lib/read-array-buffer.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/lib/read-array-buffer.ts"],"names":["readArrayBuffer","file","start","length","Blob","slice","arrayBuffer","read"],"mappings":";;;;;;;;;;;;;SAEsBA,e;;;;;+EAAf,iBACLC,IADK,EAELC,KAFK,EAGLC,MAHK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAKDF,IAAI,YAAYG,IALf;AAAA;AAAA;AAAA;;AAMGC,YAAAA,KANH,GAMWJ,IAAI,CAACI,KAAL,CAAWH,KAAX,EAAkBA,KAAK,GAAGC,MAA1B,CANX;AAAA;AAAA,mBAOUE,KAAK,CAACC,WAAN,EAPV;;AAAA;AAAA;;AAAA;AAAA;AAAA,mBASQL,IAAI,CAACM,IAAL,CAAUL,KAAV,EAAiBA,KAAK,GAAGC,MAAzB,CATR;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,G","sourcesContent":["// Random-Access read\n\nexport async function readArrayBuffer(\n file: Blob | ArrayBuffer | any,\n start: number,\n length: number\n): Promise<ArrayBuffer> {\n if (file instanceof Blob) {\n const slice = file.slice(start, start + length);\n return await slice.arrayBuffer();\n }\n return await file.read(start, start + length);\n}\n\n/**\n * Read a slice of a Blob or File, without loading the entire file into memory\n * The trick when reading File objects is to read successive \"slices\" of the File\n * Per spec https://w3c.github.io/FileAPI/, slicing a File only updates the start and end fields\n * Actually reading from file happens in `readAsArrayBuffer`\n * @param blob to read\n export async function readBlob(blob: Blob): Promise<ArrayBuffer> {\n return await new Promise((resolve, reject) => {\n const fileReader = new FileReader();\n fileReader.onload = (event: ProgressEvent<FileReader>) =>\n resolve(event?.target?.result as ArrayBuffer);\n // TODO - reject with a proper Error\n fileReader.onerror = (error: ProgressEvent<FileReader>) => reject(error);\n fileReader.readAsArrayBuffer(blob);\n });\n}\n*/\n"],"file":"read-array-buffer.js"}
+ {"version":3,"sources":["../../../src/lib/read-array-buffer.ts"],"names":["readArrayBuffer","file","start","length","Blob","slice","arrayBuffer","read"],"mappings":";;;;;;;AAEO,eAAeA,eAAf,CACLC,IADK,EAELC,KAFK,EAGLC,MAHK,EAIiB;AACtB,MAAIF,IAAI,YAAYG,IAApB,EAA0B;AACxB,UAAMC,KAAK,GAAGJ,IAAI,CAACI,KAAL,CAAWH,KAAX,EAAkBA,KAAK,GAAGC,MAA1B,CAAd;AACA,WAAO,MAAME,KAAK,CAACC,WAAN,EAAb;AACD;;AACD,SAAO,MAAML,IAAI,CAACM,IAAL,CAAUL,KAAV,EAAiBA,KAAK,GAAGC,MAAzB,CAAb;AACD","sourcesContent":["// Random-Access read\n\nexport async function readArrayBuffer(\n file: Blob | ArrayBuffer | any,\n start: number,\n length: number\n): Promise<ArrayBuffer> {\n if (file instanceof Blob) {\n const slice = file.slice(start, start + length);\n return await slice.arrayBuffer();\n }\n return await file.read(start, start + length);\n}\n\n/**\n * Read a slice of a Blob or File, without loading the entire file into memory\n * The trick when reading File objects is to read successive \"slices\" of the File\n * Per spec https://w3c.github.io/FileAPI/, slicing a File only updates the start and end fields\n * Actually reading from file happens in `readAsArrayBuffer`\n * @param blob to read\n export async function readBlob(blob: Blob): Promise<ArrayBuffer> {\n return await new Promise((resolve, reject) => {\n const fileReader = new FileReader();\n fileReader.onload = (event: ProgressEvent<FileReader>) =>\n resolve(event?.target?.result as ArrayBuffer);\n // TODO - reject with a proper Error\n fileReader.onerror = (error: ProgressEvent<FileReader>) => reject(error);\n fileReader.readAsArrayBuffer(blob);\n });\n}\n*/\n"],"file":"read-array-buffer.js"}
package/dist/es5/parquet-loader.js
@@ -4,14 +4,14 @@ Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports._typecheckParquetLoader = exports.ParquetLoader = void 0;
-var VERSION = typeof "3.1.0-alpha.4" !== 'undefined' ? "3.1.0-alpha.4" : 'latest';
-var DEFAULT_PARQUET_LOADER_OPTIONS = {
+const VERSION = typeof "3.1.0-beta.3" !== 'undefined' ? "3.1.0-beta.3" : 'latest';
+const DEFAULT_PARQUET_LOADER_OPTIONS = {
   parquet: {
     type: 'object-row-table',
     url: undefined
   }
 };
-var ParquetLoader = {
+const ParquetLoader = {
   name: 'Apache Parquet',
   id: 'parquet',
   module: 'parquet',
@@ -25,6 +25,6 @@ var ParquetLoader = {
   options: DEFAULT_PARQUET_LOADER_OPTIONS
 };
 exports.ParquetLoader = ParquetLoader;
-var _typecheckParquetLoader = ParquetLoader;
+const _typecheckParquetLoader = ParquetLoader;
 exports._typecheckParquetLoader = _typecheckParquetLoader;
 //# sourceMappingURL=parquet-loader.js.map
package/dist/es5/parquet-loader.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../src/parquet-loader.ts"],"names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","url","undefined","ParquetLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","_typecheckParquetLoader"],"mappings":";;;;;;AAIA,IAAMA,OAAO,GAAG,2BAAuB,WAAvB,qBAAmD,QAAnE;AASA,IAAMC,8BAAoD,GAAG;AAC3DC,EAAAA,OAAO,EAAE;AACPC,IAAAA,IAAI,EAAE,kBADC;AAEPC,IAAAA,GAAG,EAAEC;AAFE;AADkD,CAA7D;AAQO,IAAMC,aAAa,GAAG;AAC3BC,EAAAA,IAAI,EAAE,gBADqB;AAE3BC,EAAAA,EAAE,EAAE,SAFuB;AAG3BC,EAAAA,MAAM,EAAE,SAHmB;AAI3BC,EAAAA,OAAO,EAAEV,OAJkB;AAK3BW,EAAAA,MAAM,EAAE,IALmB;AAM3BC,EAAAA,QAAQ,EAAE,OANiB;AAO3BC,EAAAA,UAAU,EAAE,CAAC,SAAD,CAPe;AAQ3BC,EAAAA,SAAS,EAAE,CAAC,0BAAD,CARgB;AAS3BC,EAAAA,MAAM,EAAE,IATmB;AAU3BC,EAAAA,KAAK,EAAE,CAAC,MAAD,EAAS,MAAT,CAVoB;AAW3BC,EAAAA,OAAO,EAAEhB;AAXkB,CAAtB;;AAcA,IAAMiB,uBAA+B,GAAGZ,aAAxC","sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'object-row-table';\n url?: string;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'object-row-table',\n url: undefined\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetLoader;\n"],"file":"parquet-loader.js"}
+ {"version":3,"sources":["../../src/parquet-loader.ts"],"names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","url","undefined","ParquetLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","_typecheckParquetLoader"],"mappings":";;;;;;AAIA,MAAMA,OAAO,GAAG,0BAAuB,WAAvB,oBAAmD,QAAnE;AASA,MAAMC,8BAAoD,GAAG;AAC3DC,EAAAA,OAAO,EAAE;AACPC,IAAAA,IAAI,EAAE,kBADC;AAEPC,IAAAA,GAAG,EAAEC;AAFE;AADkD,CAA7D;AAQO,MAAMC,aAAa,GAAG;AAC3BC,EAAAA,IAAI,EAAE,gBADqB;AAE3BC,EAAAA,EAAE,EAAE,SAFuB;AAG3BC,EAAAA,MAAM,EAAE,SAHmB;AAI3BC,EAAAA,OAAO,EAAEV,OAJkB;AAK3BW,EAAAA,MAAM,EAAE,IALmB;AAM3BC,EAAAA,QAAQ,EAAE,OANiB;AAO3BC,EAAAA,UAAU,EAAE,CAAC,SAAD,CAPe;AAQ3BC,EAAAA,SAAS,EAAE,CAAC,0BAAD,CARgB;AAS3BC,EAAAA,MAAM,EAAE,IATmB;AAU3BC,EAAAA,KAAK,EAAE,CAAC,MAAD,EAAS,MAAT,CAVoB;AAW3BC,EAAAA,OAAO,EAAEhB;AAXkB,CAAtB;;AAcA,MAAMiB,uBAA+B,GAAGZ,aAAxC","sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'object-row-table';\n url?: string;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'object-row-table',\n url: undefined\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetLoader;\n"],"file":"parquet-loader.js"}
package/dist/es5/parquet-writer.js
@@ -4,16 +4,16 @@ Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.ParquetWriter = void 0;
-var VERSION = typeof "3.1.0-alpha.4" !== 'undefined' ? "3.1.0-alpha.4" : 'latest';
-var DEFAULT_PARQUET_LOADER_OPTIONS = {};
-var ParquetWriter = {
+const VERSION = typeof "3.1.0-beta.3" !== 'undefined' ? "3.1.0-beta.3" : 'latest';
+const DEFAULT_PARQUET_LOADER_OPTIONS = {};
+const ParquetWriter = {
   name: 'Apache Parquet',
   id: 'parquet',
   module: 'parquet',
   version: VERSION,
   extensions: ['parquet'],
   mimeTypes: ['application/octet-stream'],
-  encodeSync: encodeSync,
+  encodeSync,
   binary: true,
   options: DEFAULT_PARQUET_LOADER_OPTIONS
 };
package/dist/es5/parquet-writer.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../src/parquet-writer.ts"],"names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","ParquetWriter","name","id","module","version","extensions","mimeTypes","encodeSync","binary","options","data","ArrayBuffer"],"mappings":";;;;;;AAIA,IAAMA,OAAO,GAAG,2BAAuB,WAAvB,qBAAmD,QAAnE;AAIA,IAAMC,8BAA8B,GAAG,EAAvC;AAEO,IAAMC,aAAqB,GAAG;AACnCC,EAAAA,IAAI,EAAE,gBAD6B;AAEnCC,EAAAA,EAAE,EAAE,SAF+B;AAGnCC,EAAAA,MAAM,EAAE,SAH2B;AAInCC,EAAAA,OAAO,EAAEN,OAJ0B;AAKnCO,EAAAA,UAAU,EAAE,CAAC,SAAD,CALuB;AAMnCC,EAAAA,SAAS,EAAE,CAAC,0BAAD,CANwB;AAOnCC,EAAAA,UAAU,EAAVA,UAPmC;AAQnCC,EAAAA,MAAM,EAAE,IAR2B;AASnCC,EAAAA,OAAO,EAAEV;AAT0B,CAA9B;;;AAYP,SAASQ,UAAT,CAAoBG,IAApB,EAA0BD,OAA1B,EAA0D;AACxD,SAAO,IAAIE,WAAJ,CAAgB,CAAhB,CAAP;AACD","sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetWriterOptions = {};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS = {};\n\nexport const ParquetWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encodeSync,\n binary: true,\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nfunction encodeSync(data, options?: ParquetWriterOptions) {\n return new ArrayBuffer(0);\n}\n"],"file":"parquet-writer.js"}
+ {"version":3,"sources":["../../src/parquet-writer.ts"],"names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","ParquetWriter","name","id","module","version","extensions","mimeTypes","encodeSync","binary","options","data","ArrayBuffer"],"mappings":";;;;;;AAIA,MAAMA,OAAO,GAAG,0BAAuB,WAAvB,oBAAmD,QAAnE;AAIA,MAAMC,8BAA8B,GAAG,EAAvC;AAEO,MAAMC,aAAqB,GAAG;AACnCC,EAAAA,IAAI,EAAE,gBAD6B;AAEnCC,EAAAA,EAAE,EAAE,SAF+B;AAGnCC,EAAAA,MAAM,EAAE,SAH2B;AAInCC,EAAAA,OAAO,EAAEN,OAJ0B;AAKnCO,EAAAA,UAAU,EAAE,CAAC,SAAD,CALuB;AAMnCC,EAAAA,SAAS,EAAE,CAAC,0BAAD,CANwB;AAOnCC,EAAAA,UAPmC;AAQnCC,EAAAA,MAAM,EAAE,IAR2B;AASnCC,EAAAA,OAAO,EAAEV;AAT0B,CAA9B;;;AAYP,SAASQ,UAAT,CAAoBG,IAApB,EAA0BD,OAA1B,EAA0D;AACxD,SAAO,IAAIE,WAAJ,CAAgB,CAAhB,CAAP;AACD","sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetWriterOptions = {};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS = {};\n\nexport const ParquetWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encodeSync,\n binary: true,\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nfunction encodeSync(data, options?: ParquetWriterOptions) {\n return new ArrayBuffer(0);\n}\n"],"file":"parquet-writer.js"}