@loaders.gl/parquet 3.1.0-alpha.5 → 3.1.0-beta.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (264)
  1. package/dist/bundle.d.ts +1 -0
  2. package/dist/bundle.d.ts.map +1 -0
  3. package/dist/bundle.js +5 -0
  4. package/dist/constants.d.ts +1 -0
  5. package/dist/constants.d.ts.map +1 -0
  6. package/dist/constants.js +18 -0
  7. package/dist/dist.min.js +27 -13
  8. package/dist/dist.min.js.map +7 -1
  9. package/dist/es5/index.js +6 -26
  10. package/dist/es5/index.js.map +1 -1
  11. package/dist/es5/parquet-loader.js +1 -1
  12. package/dist/es5/parquet-loader.js.map +1 -1
  13. package/dist/es5/parquet-writer.js +1 -1
  14. package/dist/es5/parquet-writer.js.map +1 -1
  15. package/dist/es5/parquetjs/codecs/rle.js +1 -1
  16. package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
  17. package/dist/es5/parquetjs/compression.js +1 -12
  18. package/dist/es5/parquetjs/compression.js.map +1 -1
  19. package/dist/es5/parquetjs/parser/decoders.js +1 -1
  20. package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
  21. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -13
  22. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +1 -1
  23. package/dist/es5/parquetjs/parser/parquet-reader.js +0 -13
  24. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  25. package/dist/es5/parquetjs/utils/file-utils.js +0 -53
  26. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  27. package/dist/esm/index.js +2 -3
  28. package/dist/esm/index.js.map +1 -1
  29. package/dist/esm/parquet-loader.js +1 -1
  30. package/dist/esm/parquet-loader.js.map +1 -1
  31. package/dist/esm/parquet-writer.js +1 -1
  32. package/dist/esm/parquet-writer.js.map +1 -1
  33. package/dist/esm/parquetjs/codecs/rle.js +1 -1
  34. package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
  35. package/dist/esm/parquetjs/compression.js +1 -10
  36. package/dist/esm/parquetjs/compression.js.map +1 -1
  37. package/dist/esm/parquetjs/parser/decoders.js +1 -1
  38. package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
  39. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -9
  40. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +1 -1
  41. package/dist/esm/parquetjs/parser/parquet-reader.js +0 -13
  42. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  43. package/dist/esm/parquetjs/utils/file-utils.js +0 -45
  44. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
  45. package/dist/index.d.ts +3 -3
  46. package/dist/index.d.ts.map +1 -0
  47. package/dist/index.js +30 -0
  48. package/dist/lib/convert-schema.d.ts +1 -0
  49. package/dist/lib/convert-schema.d.ts.map +1 -0
  50. package/dist/lib/convert-schema.js +70 -0
  51. package/dist/lib/parse-parquet.d.ts +1 -0
  52. package/dist/lib/parse-parquet.d.ts.map +1 -0
  53. package/dist/lib/parse-parquet.js +28 -0
  54. package/dist/lib/read-array-buffer.d.ts +1 -0
  55. package/dist/lib/read-array-buffer.d.ts.map +1 -0
  56. package/dist/lib/read-array-buffer.js +29 -0
  57. package/dist/parquet-loader.d.ts +1 -0
  58. package/dist/parquet-loader.d.ts.map +1 -0
  59. package/dist/parquet-loader.js +27 -0
  60. package/dist/parquet-worker.js +27 -13
  61. package/dist/parquet-worker.js.map +7 -1
  62. package/dist/parquet-writer.d.ts +1 -0
  63. package/dist/parquet-writer.d.ts.map +1 -0
  64. package/dist/parquet-writer.js +21 -0
  65. package/dist/parquetjs/codecs/declare.d.ts +1 -0
  66. package/dist/parquetjs/codecs/declare.d.ts.map +1 -0
  67. package/dist/parquetjs/codecs/declare.js +2 -0
  68. package/dist/parquetjs/codecs/dictionary.d.ts +1 -0
  69. package/dist/parquetjs/codecs/dictionary.d.ts.map +1 -0
  70. package/dist/parquetjs/codecs/dictionary.js +14 -0
  71. package/dist/parquetjs/codecs/index.d.ts +1 -0
  72. package/dist/parquetjs/codecs/index.d.ts.map +1 -0
  73. package/dist/parquetjs/codecs/index.js +51 -0
  74. package/dist/parquetjs/codecs/plain.d.ts +1 -0
  75. package/dist/parquetjs/codecs/plain.d.ts.map +1 -0
  76. package/dist/parquetjs/codecs/plain.js +211 -0
  77. package/dist/parquetjs/codecs/rle.d.ts +1 -0
  78. package/dist/parquetjs/codecs/rle.d.ts.map +1 -0
  79. package/dist/parquetjs/codecs/rle.js +145 -0
  80. package/dist/parquetjs/compression.d.ts +1 -0
  81. package/dist/parquetjs/compression.d.ts.map +1 -0
  82. package/dist/parquetjs/compression.js +168 -0
  83. package/dist/parquetjs/encoder/writer.d.ts +1 -0
  84. package/dist/parquetjs/encoder/writer.d.ts.map +1 -0
  85. package/dist/parquetjs/encoder/writer.js +478 -0
  86. package/dist/parquetjs/file.d.ts +1 -0
  87. package/dist/parquetjs/file.d.ts.map +1 -0
  88. package/dist/parquetjs/file.js +99 -0
  89. package/dist/parquetjs/parquet-thrift/BoundaryOrder.d.ts +1 -0
  90. package/dist/parquetjs/parquet-thrift/BoundaryOrder.d.ts.map +1 -0
  91. package/dist/parquetjs/parquet-thrift/BoundaryOrder.js +15 -0
  92. package/dist/parquetjs/parquet-thrift/BsonType.d.ts +1 -0
  93. package/dist/parquetjs/parquet-thrift/BsonType.d.ts.map +1 -0
  94. package/dist/parquetjs/parquet-thrift/BsonType.js +58 -0
  95. package/dist/parquetjs/parquet-thrift/ColumnChunk.d.ts +1 -0
  96. package/dist/parquetjs/parquet-thrift/ColumnChunk.d.ts.map +1 -0
  97. package/dist/parquetjs/parquet-thrift/ColumnChunk.js +207 -0
  98. package/dist/parquetjs/parquet-thrift/ColumnIndex.d.ts +1 -0
  99. package/dist/parquetjs/parquet-thrift/ColumnIndex.d.ts.map +1 -0
  100. package/dist/parquetjs/parquet-thrift/ColumnIndex.js +213 -0
  101. package/dist/parquetjs/parquet-thrift/ColumnMetaData.d.ts +1 -0
  102. package/dist/parquetjs/parquet-thrift/ColumnMetaData.d.ts.map +1 -0
  103. package/dist/parquetjs/parquet-thrift/ColumnMetaData.js +398 -0
  104. package/dist/parquetjs/parquet-thrift/ColumnOrder.d.ts +1 -0
  105. package/dist/parquetjs/parquet-thrift/ColumnOrder.d.ts.map +1 -0
  106. package/dist/parquetjs/parquet-thrift/ColumnOrder.js +104 -0
  107. package/dist/parquetjs/parquet-thrift/CompressionCodec.d.ts +1 -0
  108. package/dist/parquetjs/parquet-thrift/CompressionCodec.d.ts.map +1 -0
  109. package/dist/parquetjs/parquet-thrift/CompressionCodec.js +20 -0
  110. package/dist/parquetjs/parquet-thrift/ConvertedType.d.ts +1 -0
  111. package/dist/parquetjs/parquet-thrift/ConvertedType.d.ts.map +1 -0
  112. package/dist/parquetjs/parquet-thrift/ConvertedType.js +34 -0
  113. package/dist/parquetjs/parquet-thrift/DataPageHeader.d.ts +1 -0
  114. package/dist/parquetjs/parquet-thrift/DataPageHeader.d.ts.map +1 -0
  115. package/dist/parquetjs/parquet-thrift/DataPageHeader.js +166 -0
  116. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts +1 -0
  117. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts.map +1 -0
  118. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js +226 -0
  119. package/dist/parquetjs/parquet-thrift/DateType.d.ts +1 -0
  120. package/dist/parquetjs/parquet-thrift/DateType.d.ts.map +1 -0
  121. package/dist/parquetjs/parquet-thrift/DateType.js +58 -0
  122. package/dist/parquetjs/parquet-thrift/DecimalType.d.ts +1 -0
  123. package/dist/parquetjs/parquet-thrift/DecimalType.d.ts.map +1 -0
  124. package/dist/parquetjs/parquet-thrift/DecimalType.js +105 -0
  125. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.d.ts +1 -0
  126. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.d.ts.map +1 -0
  127. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js +122 -0
  128. package/dist/parquetjs/parquet-thrift/Encoding.d.ts +1 -0
  129. package/dist/parquetjs/parquet-thrift/Encoding.d.ts.map +1 -0
  130. package/dist/parquetjs/parquet-thrift/Encoding.js +20 -0
  131. package/dist/parquetjs/parquet-thrift/EnumType.d.ts +1 -0
  132. package/dist/parquetjs/parquet-thrift/EnumType.d.ts.map +1 -0
  133. package/dist/parquetjs/parquet-thrift/EnumType.js +58 -0
  134. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.d.ts +1 -0
  135. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.d.ts.map +1 -0
  136. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js +15 -0
  137. package/dist/parquetjs/parquet-thrift/FileMetaData.d.ts +1 -0
  138. package/dist/parquetjs/parquet-thrift/FileMetaData.d.ts.map +1 -0
  139. package/dist/parquetjs/parquet-thrift/FileMetaData.js +256 -0
  140. package/dist/parquetjs/parquet-thrift/IndexPageHeader.d.ts +1 -0
  141. package/dist/parquetjs/parquet-thrift/IndexPageHeader.d.ts.map +1 -0
  142. package/dist/parquetjs/parquet-thrift/IndexPageHeader.js +58 -0
  143. package/dist/parquetjs/parquet-thrift/IntType.d.ts +1 -0
  144. package/dist/parquetjs/parquet-thrift/IntType.d.ts.map +1 -0
  145. package/dist/parquetjs/parquet-thrift/IntType.js +105 -0
  146. package/dist/parquetjs/parquet-thrift/JsonType.d.ts +1 -0
  147. package/dist/parquetjs/parquet-thrift/JsonType.d.ts.map +1 -0
  148. package/dist/parquetjs/parquet-thrift/JsonType.js +58 -0
  149. package/dist/parquetjs/parquet-thrift/KeyValue.d.ts +1 -0
  150. package/dist/parquetjs/parquet-thrift/KeyValue.d.ts.map +1 -0
  151. package/dist/parquetjs/parquet-thrift/KeyValue.js +102 -0
  152. package/dist/parquetjs/parquet-thrift/ListType.d.ts +1 -0
  153. package/dist/parquetjs/parquet-thrift/ListType.d.ts.map +1 -0
  154. package/dist/parquetjs/parquet-thrift/ListType.js +58 -0
  155. package/dist/parquetjs/parquet-thrift/LogicalType.d.ts +1 -0
  156. package/dist/parquetjs/parquet-thrift/LogicalType.d.ts.map +1 -0
  157. package/dist/parquetjs/parquet-thrift/LogicalType.js +380 -0
  158. package/dist/parquetjs/parquet-thrift/MapType.d.ts +1 -0
  159. package/dist/parquetjs/parquet-thrift/MapType.d.ts.map +1 -0
  160. package/dist/parquetjs/parquet-thrift/MapType.js +58 -0
  161. package/dist/parquetjs/parquet-thrift/MicroSeconds.d.ts +1 -0
  162. package/dist/parquetjs/parquet-thrift/MicroSeconds.d.ts.map +1 -0
  163. package/dist/parquetjs/parquet-thrift/MicroSeconds.js +58 -0
  164. package/dist/parquetjs/parquet-thrift/MilliSeconds.d.ts +1 -0
  165. package/dist/parquetjs/parquet-thrift/MilliSeconds.d.ts.map +1 -0
  166. package/dist/parquetjs/parquet-thrift/MilliSeconds.js +58 -0
  167. package/dist/parquetjs/parquet-thrift/NullType.d.ts +1 -0
  168. package/dist/parquetjs/parquet-thrift/NullType.d.ts.map +1 -0
  169. package/dist/parquetjs/parquet-thrift/NullType.js +58 -0
  170. package/dist/parquetjs/parquet-thrift/OffsetIndex.d.ts +1 -0
  171. package/dist/parquetjs/parquet-thrift/OffsetIndex.d.ts.map +1 -0
  172. package/dist/parquetjs/parquet-thrift/OffsetIndex.js +97 -0
  173. package/dist/parquetjs/parquet-thrift/PageEncodingStats.d.ts +1 -0
  174. package/dist/parquetjs/parquet-thrift/PageEncodingStats.d.ts.map +1 -0
  175. package/dist/parquetjs/parquet-thrift/PageEncodingStats.js +127 -0
  176. package/dist/parquetjs/parquet-thrift/PageHeader.d.ts +1 -0
  177. package/dist/parquetjs/parquet-thrift/PageHeader.d.ts.map +1 -0
  178. package/dist/parquetjs/parquet-thrift/PageHeader.js +216 -0
  179. package/dist/parquetjs/parquet-thrift/PageLocation.d.ts +1 -0
  180. package/dist/parquetjs/parquet-thrift/PageLocation.d.ts.map +1 -0
  181. package/dist/parquetjs/parquet-thrift/PageLocation.js +141 -0
  182. package/dist/parquetjs/parquet-thrift/PageType.d.ts +1 -0
  183. package/dist/parquetjs/parquet-thrift/PageType.d.ts.map +1 -0
  184. package/dist/parquetjs/parquet-thrift/PageType.js +16 -0
  185. package/dist/parquetjs/parquet-thrift/RowGroup.d.ts +1 -0
  186. package/dist/parquetjs/parquet-thrift/RowGroup.d.ts.map +1 -0
  187. package/dist/parquetjs/parquet-thrift/RowGroup.js +182 -0
  188. package/dist/parquetjs/parquet-thrift/SchemaElement.d.ts +1 -0
  189. package/dist/parquetjs/parquet-thrift/SchemaElement.d.ts.map +1 -0
  190. package/dist/parquetjs/parquet-thrift/SchemaElement.js +239 -0
  191. package/dist/parquetjs/parquet-thrift/SortingColumn.d.ts +1 -0
  192. package/dist/parquetjs/parquet-thrift/SortingColumn.d.ts.map +1 -0
  193. package/dist/parquetjs/parquet-thrift/SortingColumn.js +127 -0
  194. package/dist/parquetjs/parquet-thrift/Statistics.d.ts +1 -0
  195. package/dist/parquetjs/parquet-thrift/Statistics.d.ts.map +1 -0
  196. package/dist/parquetjs/parquet-thrift/Statistics.js +176 -0
  197. package/dist/parquetjs/parquet-thrift/StringType.d.ts +1 -0
  198. package/dist/parquetjs/parquet-thrift/StringType.d.ts.map +1 -0
  199. package/dist/parquetjs/parquet-thrift/StringType.js +58 -0
  200. package/dist/parquetjs/parquet-thrift/TimeType.d.ts +1 -0
  201. package/dist/parquetjs/parquet-thrift/TimeType.d.ts.map +1 -0
  202. package/dist/parquetjs/parquet-thrift/TimeType.js +106 -0
  203. package/dist/parquetjs/parquet-thrift/TimeUnit.d.ts +1 -0
  204. package/dist/parquetjs/parquet-thrift/TimeUnit.d.ts.map +1 -0
  205. package/dist/parquetjs/parquet-thrift/TimeUnit.js +127 -0
  206. package/dist/parquetjs/parquet-thrift/TimestampType.d.ts +1 -0
  207. package/dist/parquetjs/parquet-thrift/TimestampType.d.ts.map +1 -0
  208. package/dist/parquetjs/parquet-thrift/TimestampType.js +106 -0
  209. package/dist/parquetjs/parquet-thrift/Type.d.ts +1 -0
  210. package/dist/parquetjs/parquet-thrift/Type.d.ts.map +1 -0
  211. package/dist/parquetjs/parquet-thrift/Type.js +20 -0
  212. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts +1 -0
  213. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts.map +1 -0
  214. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js +58 -0
  215. package/dist/parquetjs/parquet-thrift/UUIDType.d.ts +1 -0
  216. package/dist/parquetjs/parquet-thrift/UUIDType.d.ts.map +1 -0
  217. package/dist/parquetjs/parquet-thrift/UUIDType.js +58 -0
  218. package/dist/parquetjs/parquet-thrift/index.d.ts +1 -0
  219. package/dist/parquetjs/parquet-thrift/index.d.ts.map +1 -0
  220. package/dist/parquetjs/parquet-thrift/index.js +61 -0
  221. package/dist/parquetjs/parser/decoders.d.ts +1 -0
  222. package/dist/parquetjs/parser/decoders.d.ts.map +1 -0
  223. package/dist/parquetjs/parser/decoders.js +318 -0
  224. package/dist/parquetjs/parser/parquet-cursor.d.ts +1 -0
  225. package/dist/parquetjs/parser/parquet-cursor.d.ts.map +1 -0
  226. package/dist/parquetjs/parser/parquet-cursor.js +74 -0
  227. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +1 -1
  228. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +1 -0
  229. package/dist/parquetjs/parser/parquet-envelope-reader.js +136 -0
  230. package/dist/parquetjs/parser/parquet-reader.d.ts +1 -5
  231. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -0
  232. package/dist/parquetjs/parser/parquet-reader.js +134 -0
  233. package/dist/parquetjs/schema/declare.d.ts +1 -0
  234. package/dist/parquetjs/schema/declare.d.ts.map +1 -0
  235. package/dist/parquetjs/schema/declare.js +10 -0
  236. package/dist/parquetjs/schema/schema.d.ts +1 -0
  237. package/dist/parquetjs/schema/schema.d.ts.map +1 -0
  238. package/dist/parquetjs/schema/schema.js +162 -0
  239. package/dist/parquetjs/schema/shred.d.ts +1 -0
  240. package/dist/parquetjs/schema/shred.d.ts.map +1 -0
  241. package/dist/parquetjs/schema/shred.js +225 -0
  242. package/dist/parquetjs/schema/types.d.ts +1 -0
  243. package/dist/parquetjs/schema/types.d.ts.map +1 -0
  244. package/dist/parquetjs/schema/types.js +418 -0
  245. package/dist/parquetjs/utils/buffer-utils.d.ts +1 -0
  246. package/dist/parquetjs/utils/buffer-utils.d.ts.map +1 -0
  247. package/dist/parquetjs/utils/buffer-utils.js +22 -0
  248. package/dist/parquetjs/utils/file-utils.d.ts +1 -4
  249. package/dist/parquetjs/utils/file-utils.d.ts.map +1 -0
  250. package/dist/parquetjs/utils/file-utils.js +46 -0
  251. package/dist/parquetjs/utils/read-utils.d.ts +1 -0
  252. package/dist/parquetjs/utils/read-utils.d.ts.map +1 -0
  253. package/dist/parquetjs/utils/read-utils.js +109 -0
  254. package/dist/workers/parquet-worker.d.ts +1 -0
  255. package/dist/workers/parquet-worker.d.ts.map +1 -0
  256. package/dist/workers/parquet-worker.js +5 -0
  257. package/package.json +7 -8
  258. package/src/index.ts +3 -3
  259. package/src/parquetjs/codecs/rle.ts +1 -1
  260. package/src/parquetjs/compression.ts +10 -10
  261. package/src/parquetjs/parser/decoders.ts +1 -1
  262. package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -11
  263. package/src/parquetjs/parser/parquet-reader.ts +0 -16
  264. package/src/parquetjs/utils/file-utils.ts +0 -49
package/dist/parquetjs/parser/parquet-envelope-reader.js
@@ -0,0 +1,136 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ParquetEnvelopeReader = void 0;
+const constants_1 = require("../../constants");
+const parquet_thrift_1 = require("../parquet-thrift");
+const read_utils_1 = require("../utils/read-utils");
+const decoders_1 = require("./decoders");
+const DEFAULT_DICTIONARY_SIZE = 1e6;
+/**
+ * The parquet envelope reader allows direct, unbuffered access to the individual
+ * sections of the parquet file, namely the header, footer and the row groups.
+ * This class is intended for advanced/internal users; if you just want to retrieve
+ * rows from a parquet file use the ParquetReader instead
+ */
+class ParquetEnvelopeReader {
+  constructor(read, close, fileSize, options) {
+    this.read = read;
+    this.close = close;
+    this.fileSize = fileSize;
+    this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;
+  }
+  static async openBuffer(buffer) {
+    const readFn = (position, length) => Promise.resolve(buffer.slice(position, position + length));
+    const closeFn = () => Promise.resolve();
+    return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);
+  }
+  async readHeader() {
+    const buffer = await this.read(0, constants_1.PARQUET_MAGIC.length);
+    const magic = buffer.toString();
+    switch (magic) {
+      case constants_1.PARQUET_MAGIC:
+        break;
+      case constants_1.PARQUET_MAGIC_ENCRYPTED:
+        throw new Error('Encrypted parquet file not supported');
+      default:
+        throw new Error(`Invalid parquet file (magic=${magic})`);
+    }
+  }
+  async readRowGroup(schema, rowGroup, columnList) {
+    const buffer = {
+      rowCount: Number(rowGroup.num_rows),
+      columnData: {}
+    };
+    for (const colChunk of rowGroup.columns) {
+      const colMetadata = colChunk.meta_data;
+      const colKey = colMetadata?.path_in_schema;
+      if (columnList.length > 0 && (0, read_utils_1.fieldIndexOf)(columnList, colKey) < 0) {
+        continue; // eslint-disable-line no-continue
+      }
+      buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
+    }
+    return buffer;
+  }
+  /**
+   * Do reading of parquet file's column chunk
+   * @param schema
+   * @param colChunk
+   */
+  async readColumnChunk(schema, colChunk) {
+    if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
+      throw new Error('external references are not supported');
+    }
+    const field = schema.findField(colChunk.meta_data?.path_in_schema);
+    const type = (0, read_utils_1.getThriftEnum)(parquet_thrift_1.Type, colChunk.meta_data?.type);
+    if (type !== field.primitiveType) {
+      throw new Error(`chunk type not matching schema: ${type}`);
+    }
+    const compression = (0, read_utils_1.getThriftEnum)(parquet_thrift_1.CompressionCodec, colChunk.meta_data?.codec);
+    const pagesOffset = Number(colChunk.meta_data?.data_page_offset);
+    let pagesSize = Number(colChunk.meta_data?.total_compressed_size);
+    if (!colChunk.file_path) {
+      pagesSize = Math.min(this.fileSize - pagesOffset, Number(colChunk.meta_data?.total_compressed_size));
+    }
+    const options = {
+      type,
+      rLevelMax: field.rLevelMax,
+      dLevelMax: field.dLevelMax,
+      compression,
+      column: field,
+      numValues: colChunk.meta_data?.num_values,
+      dictionary: []
+    };
+    let dictionary;
+    const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;
+    if (dictionaryPageOffset) {
+      const dictionaryOffset = Number(dictionaryPageOffset);
+      // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.
+      dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
+    }
+    dictionary = options.dictionary?.length ? options.dictionary : dictionary;
+    const pagesBuf = await this.read(pagesOffset, pagesSize);
+    return await (0, decoders_1.decodeDataPages)(pagesBuf, { ...options, dictionary });
+  }
+  /**
+   * Getting dictionary for allows to flatten values by indices.
+   * @param dictionaryPageOffset
+   * @param options
+   * @param pagesOffset
+   * @returns
+   */
+  async getDictionary(dictionaryPageOffset, options, pagesOffset) {
+    if (dictionaryPageOffset === 0) {
+      // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);
+      // pagesBuf = await this.read(pagesOffset, dictionarySize);
+      // In this case we are working with parquet-mr files format. Problem is described below:
+      // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding
+      // We need to get dictionary page from column chunk if it exists.
+      // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.
+      return [];
+    }
+    const dictionarySize = Math.min(this.fileSize - dictionaryPageOffset, this.defaultDictionarySize);
+    const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);
+    const cursor = { buffer: pagesBuf, offset: 0, size: pagesBuf.length };
+    const decodedPage = await (0, decoders_1.decodePage)(cursor, options);
+    return decodedPage.dictionary;
+  }
+  async readFooter() {
+    const trailerLen = constants_1.PARQUET_MAGIC.length + 4;
+    const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);
+    const magic = trailerBuf.slice(4).toString();
+    if (magic !== constants_1.PARQUET_MAGIC) {
+      throw new Error(`Not a valid parquet file (magic="${magic})`);
+    }
+    const metadataSize = trailerBuf.readUInt32LE(0);
+    const metadataOffset = this.fileSize - metadataSize - trailerLen;
+    if (metadataOffset < constants_1.PARQUET_MAGIC.length) {
+      throw new Error(`Invalid metadata size ${metadataOffset}`);
+    }
+    const metadataBuf = await this.read(metadataOffset, metadataSize);
+    // let metadata = new parquet_thrift.FileMetaData();
+    // parquet_util.decodeThrift(metadata, metadataBuf);
+    const { metadata } = (0, read_utils_1.decodeFileMetadata)(metadataBuf);
+    return metadata;
+  }
+}
+exports.ParquetEnvelopeReader = ParquetEnvelopeReader;
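For orientation, a minimal usage sketch of the ParquetEnvelopeReader added above. The sketch is not part of the published diff; the require path and the `parquetBuffer` variable are illustrative assumptions.

// Hypothetical example, assuming a Node.js environment and a Buffer holding the
// complete parquet file contents; the import path below is illustrative only.
const {ParquetEnvelopeReader} = require('@loaders.gl/parquet/dist/parquetjs/parser/parquet-envelope-reader');

async function inspectParquetFooter(parquetBuffer) {
  const envelopeReader = await ParquetEnvelopeReader.openBuffer(parquetBuffer);
  await envelopeReader.readHeader(); // validates the parquet magic bytes
  const metadata = await envelopeReader.readFooter(); // decodes the thrift FileMetaData from the trailer
  await envelopeReader.close();
  return metadata;
}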
package/dist/parquetjs/parser/parquet-reader.d.ts
@@ -19,11 +19,6 @@ export declare class ParquetReader<T> implements AsyncIterable<T> {
    * return a new parquet reader initialized with a read function
    */
   static openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>>;
-  /**
-   * Open the parquet file pointed to by the specified path and return a new
-   * parquet reader
-   */
-  static openFile<T>(filePath: string): Promise<ParquetReader<T>>;
   static openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>>;
   metadata: FileMetaData;
   envelopeReader: ParquetEnvelopeReader;
@@ -70,3 +65,4 @@ export declare class ParquetReader<T> implements AsyncIterable<T> {
    */
   [Symbol.asyncIterator](): AsyncIterator<T>;
 }
+//# sourceMappingURL=parquet-reader.d.ts.map
package/dist/parquetjs/parser/parquet-reader.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"parquet-reader.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/parquet-reader.ts"],"names":[],"mappings":";AACA,OAAO,EAAC,qBAAqB,EAAC,MAAM,2BAA2B,CAAC;AAChE,OAAO,EAAC,YAAY,EAAC,MAAM,mBAAmB,CAAC;AAC/C,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAI/C;;;;;;GAMG;AACH,qBAAa,aAAa,CAAC,CAAC,CAAE,YAAW,aAAa,CAAC,CAAC,CAAC;IACvD;;OAEG;WACU,QAAQ,CAAC,CAAC,EAAE,IAAI,EAAE,IAAI,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;IAkB/D;;OAEG;WACU,eAAe,CAAC,CAAC,EAAE,WAAW,EAAE,WAAW,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;WAevE,UAAU,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;IAY9D,QAAQ,EAAE,YAAY,CAAC;IACvB,cAAc,EAAE,qBAAqB,CAAC;IACtC,MAAM,EAAE,aAAa,CAAC;IAE7B;;;;;OAKG;gBACS,QAAQ,EAAE,YAAY,EAAE,cAAc,EAAE,qBAAqB;IAYzE;;;OAGG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAM5B;;;;;;;;OAQG;IACH,SAAS,IAAI,aAAa,CAAC,CAAC,CAAC;IAE7B,SAAS,CAAC,CAAC,SAAS,MAAM,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAChF,SAAS,CAAC,UAAU,EAAE,CAAC,MAAM,GAAG,MAAM,EAAE,CAAC,EAAE,GAAG,aAAa,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;IAkBvE;;;OAGG;IACH,WAAW,IAAI,MAAM;IAIrB;;OAEG;IACH,SAAS,IAAI,aAAa;IAI1B;;OAEG;IACH,WAAW,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;IAQrC;;OAEG;IAEH,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,CAAC,CAAC;CAG3C"}
package/dist/parquetjs/parser/parquet-reader.js
@@ -0,0 +1,134 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ParquetReader = void 0;
+// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
+const parquet_envelope_reader_1 = require("./parquet-envelope-reader");
+const schema_1 = require("../schema/schema");
+const parquet_cursor_1 = require("./parquet-cursor");
+const constants_1 = require("../../constants");
+const decoders_1 = require("./decoders");
+/**
+ * A parquet reader allows retrieving the rows from a parquet file in order.
+ * The basic usage is to create a reader and then retrieve a cursor/iterator
+ * which allows you to consume row after row until all rows have been read. It is
+ * important that you call close() after you are finished reading the file to
+ * avoid leaking file descriptors.
+ */
+class ParquetReader {
+  /**
+   * Create a new parquet reader from the file metadata and an envelope reader.
+   * It is not recommended to call this constructor directly except for advanced
+   * and internal use cases. Consider using one of the open{File,Buffer} methods
+   * instead
+   */
+  constructor(metadata, envelopeReader) {
+    if (metadata.version !== constants_1.PARQUET_VERSION) {
+      throw new Error('invalid parquet version');
+    }
+    this.metadata = metadata;
+    this.envelopeReader = envelopeReader;
+    const root = this.metadata.schema[0];
+    const { schema } = (0, decoders_1.decodeSchema)(this.metadata.schema, 1, root.num_children);
+    this.schema = new schema_1.ParquetSchema(schema);
+  }
+  /**
+   * return a new parquet reader initialized with a read function
+   */
+  static async openBlob(blob) {
+    const readFn = async (start, length) => {
+      const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();
+      return Buffer.from(arrayBuffer);
+    };
+    const closeFn = async () => { };
+    const size = blob.size;
+    const envelopeReader = new parquet_envelope_reader_1.ParquetEnvelopeReader(readFn, closeFn, size);
+    try {
+      await envelopeReader.readHeader();
+      const metadata = await envelopeReader.readFooter();
+      return new ParquetReader(metadata, envelopeReader);
+    }
+    catch (err) {
+      await envelopeReader.close();
+      throw err;
+    }
+  }
+  /**
+   * return a new parquet reader initialized with a read function
+   */
+  static async openArrayBuffer(arrayBuffer) {
+    const readFn = async (start, length) => Buffer.from(arrayBuffer, start, length);
+    const closeFn = async () => { };
+    const size = arrayBuffer.byteLength;
+    const envelopeReader = new parquet_envelope_reader_1.ParquetEnvelopeReader(readFn, closeFn, size);
+    try {
+      await envelopeReader.readHeader();
+      const metadata = await envelopeReader.readFooter();
+      return new ParquetReader(metadata, envelopeReader);
+    }
+    catch (err) {
+      await envelopeReader.close();
+      throw err;
+    }
+  }
+  static async openBuffer(buffer) {
+    const envelopeReader = await parquet_envelope_reader_1.ParquetEnvelopeReader.openBuffer(buffer);
+    try {
+      await envelopeReader.readHeader();
+      const metadata = await envelopeReader.readFooter();
+      return new ParquetReader(metadata, envelopeReader);
+    }
+    catch (err) {
+      await envelopeReader.close();
+      throw err;
+    }
+  }
+  /**
+   * Close this parquet reader. You MUST call this method once you're finished
+   * reading rows
+   */
+  async close() {
+    await this.envelopeReader.close();
+    // this.envelopeReader = null;
+    // this.metadata = null;
+  }
+  getCursor(columnList) {
+    if (!columnList) {
+      // tslint:disable-next-line:no-parameter-reassignment
+      columnList = [];
+    }
+    // tslint:disable-next-line:no-parameter-reassignment
+    columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));
+    return new parquet_cursor_1.ParquetCursor(this.metadata, this.envelopeReader, this.schema, columnList);
+  }
+  /**
+   * Return the number of rows in this file. Note that the number of rows is
+   * not neccessarily equal to the number of rows in each column.
+   */
+  getRowCount() {
+    return Number(this.metadata.num_rows);
+  }
+  /**
+   * Returns the ParquetSchema for this file
+   */
+  getSchema() {
+    return this.schema;
+  }
+  /**
+   * Returns the user (key/value) metadata for this file
+   */
+  getMetadata() {
+    const md = {};
+    for (const kv of this.metadata.key_value_metadata) {
+      md[kv.key] = kv.value;
+    }
+    return md;
+  }
+  /**
+   * Implement AsyncIterable
+   */
+  // tslint:disable-next-line:function-name
+  [Symbol.asyncIterator]() {
+    return this.getCursor()[Symbol.asyncIterator]();
+  }
+}
+exports.ParquetReader = ParquetReader;
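A minimal, hypothetical usage sketch of the ParquetReader API shown above (not part of the diff; the require path is an assumption, the rest follows the openArrayBuffer and async-iterator members declared in parquet-reader.d.ts).

// Hypothetical example of opening a parquet file from an ArrayBuffer and iterating rows.
const {ParquetReader} = require('@loaders.gl/parquet/dist/parquetjs/parser/parquet-reader');

async function readAllRows(arrayBuffer) {
  const reader = await ParquetReader.openArrayBuffer(arrayBuffer);
  try {
    const rows = [];
    for await (const row of reader) { // delegates to getCursor()[Symbol.asyncIterator]()
      rows.push(row);
    }
    return rows;
  } finally {
    await reader.close(); // the class documentation requires close() to avoid leaks
  }
}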
package/dist/parquetjs/schema/declare.d.ts
@@ -77,3 +77,4 @@ export declare class ParquetBuffer {
   columnData: Record<string, ParquetData>;
   constructor(rowCount?: number, columnData?: Record<string, ParquetData>);
 }
+//# sourceMappingURL=declare.d.ts.map
package/dist/parquetjs/schema/declare.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"declare.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/declare.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,MAAM,YAAY,CAAC;AAC/B,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAElD,oBAAY,YAAY,GAAG,OAAO,GAAG,KAAK,GAAG,kBAAkB,CAAC;AAChE,oBAAY,kBAAkB,GAC1B,cAAc,GACd,MAAM,GACN,QAAQ,GACR,KAAK,GACL,QAAQ,GACR,KAAK,GACL,SAAS,GACT,MAAM,CAAC;AACX,oBAAY,cAAc,GAAG,UAAU,GAAG,UAAU,GAAG,UAAU,CAAC;AAClE,oBAAY,WAAW,GAAG,aAAa,GAAG,YAAY,CAAC;AAEvD;;GAEG;AACH,oBAAY,aAAa,GAErB,SAAS,GACT,OAAO,GACP,OAAO,GACP,OAAO,GACP,OAAO,GACP,QAAQ,GACR,YAAY,GACZ,sBAAsB,CAAC;AAE3B;;GAEG;AACH,oBAAY,YAAY,GAEpB,MAAM,GAMN,eAAe,GACf,eAAe,GACf,oBAAoB,GACpB,8BAA8B,GAC9B,MAAM,GACN,aAAa,GACb,aAAa,GACb,kBAAkB,GAClB,kBAAkB,GAClB,QAAQ,GACR,SAAS,GACT,SAAS,GACT,SAAS,GACT,OAAO,GACP,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,MAAM,GACN,MAAM,GACN,UAAU,CAAC;AAEf,oBAAY,iBAAiB,GAAG,MAAM,EAAE,CAAC;AAEzC,MAAM,WAAW,gBAAgB;IAC/B,CAAC,MAAM,EAAE,MAAM,GAAG,eAAe,CAAC;CACnC;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,CAAC,EAAE,WAAW,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,MAAM,CAAC,EAAE,gBAAgB,CAAC;CAC3B;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,GAAG,EAAE,MAAM,CAAC;IACZ,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,cAAc,EAAE,cAAc,CAAC;IAC/B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;CACvC;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,WAAW,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,kBAAkB,CAAC;IAChC,MAAM,EAAE,YAAY,CAAC;IACrB,SAAS,CAAC,EAAE,KAAK,CAAC;IAClB,UAAU,CAAC,EAAE,iBAAiB,CAAC;CAChC;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,MAAM,EAAE,GAAG,EAAE,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,UAAU,EAAE,CAAC;CAC3B;AAED,MAAM,WAAW,eAAe;IAC9B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,MAAM,EAAE,GAAG,EAAE,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,iBAAiB,CAAC;IAC/B,UAAU,EAAE,UAAU,CAAC;CACxB;AAED,MAAM,WAAW,aAAa;IAC5B,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED,qBAAa,aAAa;IACxB,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;gBAC5B,QAAQ,GAAE,MAAU,EAAE,UAAU,GAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAM;CAI/E"}
package/dist/parquetjs/schema/declare.js
@@ -0,0 +1,10 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ParquetBuffer = void 0;
+class ParquetBuffer {
+  constructor(rowCount = 0, columnData = {}) {
+    this.rowCount = rowCount;
+    this.columnData = columnData;
+  }
+}
+exports.ParquetBuffer = ParquetBuffer;
package/dist/parquetjs/schema/schema.d.ts
@@ -23,3 +23,4 @@ export declare class ParquetSchema {
   compress(type: ParquetCompression): this;
   buffer(): ParquetBuffer;
 }
+//# sourceMappingURL=schema.d.ts.map
package/dist/parquetjs/schema/schema.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/schema.ts"],"names":[],"mappings":"AAIA,OAAO,EACL,eAAe,EACf,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,aAAa,EAEb,gBAAgB,EACjB,MAAM,WAAW,CAAC;AAInB;;GAEG;AACH,qBAAa,aAAa;IACjB,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IACrC,SAAS,EAAE,YAAY,EAAE,CAAC;IAEjC;;OAEG;gBACS,MAAM,EAAE,gBAAgB;IAMpC;;OAEG;IACH,SAAS,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,YAAY;IAiBhD;;OAEG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,YAAY,EAAE;IAgBxD,WAAW,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,aAAa,GAAG,IAAI;IAI/D,kBAAkB,CAAC,MAAM,EAAE,aAAa,GAAG,aAAa,EAAE;IAI1D,QAAQ,CAAC,IAAI,EAAE,kBAAkB,GAAG,IAAI;IAMxC,MAAM,IAAI,aAAa;CAGxB"}
package/dist/parquetjs/schema/schema.js
@@ -0,0 +1,162 @@
+"use strict";
+// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ParquetSchema = void 0;
+const codecs_1 = require("../codecs");
+const compression_1 = require("../compression");
+const shred_1 = require("./shred");
+const types_1 = require("./types");
+/**
+ * A parquet file schema
+ */
+class ParquetSchema {
+  /**
+   * Create a new schema from a JSON schema definition
+   */
+  constructor(schema) {
+    this.schema = schema;
+    this.fields = buildFields(schema, 0, 0, []);
+    this.fieldList = listFields(this.fields);
+  }
+  /**
+   * Retrieve a field definition
+   */
+  findField(path) {
+    if (typeof path === 'string') {
+      // tslint:disable-next-line:no-parameter-reassignment
+      path = path.split(',');
+    }
+    else {
+      // tslint:disable-next-line:no-parameter-reassignment
+      path = path.slice(0); // clone array
+    }
+    let n = this.fields;
+    for (; path.length > 1; path.shift()) {
+      n = n[path[0]].fields;
+    }
+    return n[path[0]];
+  }
+  /**
+   * Retrieve a field definition and all the field's ancestors
+   */
+  findFieldBranch(path) {
+    if (typeof path === 'string') {
+      // tslint:disable-next-line:no-parameter-reassignment
+      path = path.split(',');
+    }
+    const branch = [];
+    let n = this.fields;
+    for (; path.length > 0; path.shift()) {
+      branch.push(n[path[0]]);
+      if (path.length > 1) {
+        n = n[path[0]].fields;
+      }
+    }
+    return branch;
+  }
+  shredRecord(record, buffer) {
+    (0, shred_1.shredRecord)(this, record, buffer);
+  }
+  materializeRecords(buffer) {
+    return (0, shred_1.materializeRecords)(this, buffer);
+  }
+  compress(type) {
+    setCompress(this.schema, type);
+    setCompress(this.fields, type);
+    return this;
+  }
+  buffer() {
+    return (0, shred_1.shredBuffer)(this);
+  }
+}
+exports.ParquetSchema = ParquetSchema;
+function setCompress(schema, type) {
+  for (const name in schema) {
+    const node = schema[name];
+    if (node.fields) {
+      setCompress(node.fields, type);
+    }
+    else {
+      node.compression = type;
+    }
+  }
+}
+// eslint-disable-next-line max-statements, complexity
+function buildFields(schema, rLevelParentMax, dLevelParentMax, path) {
+  const fieldList = {};
+  for (const name in schema) {
+    const opts = schema[name];
+    /* field repetition type */
+    const required = !opts.optional;
+    const repeated = Boolean(opts.repeated);
+    let rLevelMax = rLevelParentMax;
+    let dLevelMax = dLevelParentMax;
+    let repetitionType = 'REQUIRED';
+    if (!required) {
+      repetitionType = 'OPTIONAL';
+      dLevelMax++;
+    }
+    if (repeated) {
+      repetitionType = 'REPEATED';
+      rLevelMax++;
+      if (required)
+        dLevelMax++;
+    }
+    /* nested field */
+    if (opts.fields) {
+      const cpath = path.concat([name]);
+      fieldList[name] = {
+        name,
+        path: cpath,
+        key: cpath.join(),
+        repetitionType,
+        rLevelMax,
+        dLevelMax,
+        isNested: true,
+        fieldCount: Object.keys(opts.fields).length,
+        fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)
+      };
+      continue; // eslint-disable-line no-continue
+    }
+    const typeDef = types_1.PARQUET_LOGICAL_TYPES[opts.type];
+    if (!typeDef) {
+      throw new Error(`invalid parquet type: ${opts.type}`);
+    }
+    opts.encoding = opts.encoding || 'PLAIN';
+    if (!(opts.encoding in codecs_1.PARQUET_CODECS)) {
+      throw new Error(`unsupported parquet encoding: ${opts.encoding}`);
+    }
+    opts.compression = opts.compression || 'UNCOMPRESSED';
+    if (!(opts.compression in compression_1.PARQUET_COMPRESSION_METHODS)) {
+      throw new Error(`unsupported compression method: ${opts.compression}`);
+    }
+    /* add to schema */
+    const cpath = path.concat([name]);
+    fieldList[name] = {
+      name,
+      primitiveType: typeDef.primitiveType,
+      originalType: typeDef.originalType,
+      path: cpath,
+      key: cpath.join(),
+      repetitionType,
+      encoding: opts.encoding,
+      compression: opts.compression,
+      typeLength: opts.typeLength || typeDef.typeLength,
+      presision: opts.presision,
+      scale: opts.scale,
+      rLevelMax,
+      dLevelMax
+    };
+  }
+  return fieldList;
+}
+function listFields(fields) {
+  let list = [];
+  for (const k in fields) {
+    list.push(fields[k]);
+    if (fields[k].isNested) {
+      list = list.concat(listFields(fields[k].fields));
+    }
+  }
+  return list;
+}
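As a rough illustration of the schema-definition objects that buildFields above consumes, a hypothetical sketch (not part of the diff; the require path and the specific type names such as 'UTF8' and 'INT64' are assumptions carried over from the upstream parquets library this module is forked from).

// Hypothetical schema definition; `optional` and `repeated` map onto the
// OPTIONAL/REPEATED repetition-type handling in buildFields() above, and
// encoding/compression default to 'PLAIN'/'UNCOMPRESSED' when omitted.
const {ParquetSchema} = require('@loaders.gl/parquet/dist/parquetjs/schema/schema');

const schema = new ParquetSchema({
  name: {type: 'UTF8'},
  quantity: {type: 'INT64', optional: true},
  tags: {type: 'UTF8', repeated: true}
});

// listFields() flattens the field tree; each entry carries its rLevelMax/dLevelMax.
console.log(schema.fieldList.map((field) => field.key));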
package/dist/parquetjs/schema/shred.d.ts
@@ -45,3 +45,4 @@ export declare function shredRecord(schema: ParquetSchema, record: any, buffer:
  * }
  */
 export declare function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[];
+//# sourceMappingURL=shred.d.ts.map
package/dist/parquetjs/schema/shred.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"shred.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/shred.ts"],"names":[],"mappings":"AAEA,OAAO,EAAC,aAAa,EAA6B,aAAa,EAAC,MAAM,WAAW,CAAC;AAClF,OAAO,EAAC,aAAa,EAAC,MAAM,UAAU,CAAC;AAGvC,OAAO,EAAC,aAAa,EAAC,CAAC;AAEvB,wBAAgB,WAAW,CAAC,MAAM,EAAE,aAAa,GAAG,aAAa,CAYhE;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAgB,WAAW,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,aAAa,GAAG,IAAI,CAmB3F;AAgED;;;;;;;;;;;;;;;;;;GAkBG;AACH,wBAAgB,kBAAkB,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,aAAa,GAAG,aAAa,EAAE,CAOhG"}