@loaders.gl/parquet 3.1.0-beta.7 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. package/dist/es5/bundle.js +1 -1
  2. package/dist/es5/bundle.js.map +1 -1
  3. package/dist/es5/constants.js +5 -5
  4. package/dist/es5/constants.js.map +1 -1
  5. package/dist/es5/index.js +19 -10
  6. package/dist/es5/index.js.map +1 -1
  7. package/dist/es5/lib/convert-schema.js +13 -13
  8. package/dist/es5/lib/convert-schema.js.map +1 -1
  9. package/dist/es5/lib/parse-parquet.js +154 -19
  10. package/dist/es5/lib/parse-parquet.js.map +1 -1
  11. package/dist/es5/lib/read-array-buffer.js +43 -6
  12. package/dist/es5/lib/read-array-buffer.js.map +1 -1
  13. package/dist/es5/parquet-loader.js +4 -4
  14. package/dist/es5/parquet-loader.js.map +1 -1
  15. package/dist/es5/parquet-writer.js +4 -4
  16. package/dist/es5/parquet-writer.js.map +1 -1
  17. package/dist/es5/parquetjs/codecs/dictionary.js +10 -2
  18. package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
  19. package/dist/es5/parquetjs/codecs/index.js +6 -4
  20. package/dist/es5/parquetjs/codecs/index.js.map +1 -1
  21. package/dist/es5/parquetjs/codecs/plain.js +43 -41
  22. package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
  23. package/dist/es5/parquetjs/codecs/rle.js +35 -25
  24. package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
  25. package/dist/es5/parquetjs/compression.js +110 -27
  26. package/dist/es5/parquetjs/compression.js.map +1 -1
  27. package/dist/es5/parquetjs/encoder/writer.js +737 -301
  28. package/dist/es5/parquetjs/encoder/writer.js.map +1 -1
  29. package/dist/es5/parquetjs/file.js +15 -15
  30. package/dist/es5/parquetjs/file.js.map +1 -1
  31. package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +1 -1
  32. package/dist/es5/parquetjs/parquet-thrift/BsonType.js +45 -31
  33. package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
  34. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js +152 -141
  35. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
  36. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js +160 -147
  37. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
  38. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js +259 -248
  39. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
  40. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js +79 -67
  41. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
  42. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +1 -1
  43. package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +1 -1
  44. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js +124 -113
  45. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
  46. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js +169 -158
  47. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
  48. package/dist/es5/parquetjs/parquet-thrift/DateType.js +45 -31
  49. package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
  50. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js +79 -68
  51. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
  52. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js +94 -83
  53. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
  54. package/dist/es5/parquetjs/parquet-thrift/Encoding.js +1 -1
  55. package/dist/es5/parquetjs/parquet-thrift/EnumType.js +45 -31
  56. package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
  57. package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +1 -1
  58. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js +182 -170
  59. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
  60. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js +45 -31
  61. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
  62. package/dist/es5/parquetjs/parquet-thrift/IntType.js +79 -68
  63. package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
  64. package/dist/es5/parquetjs/parquet-thrift/JsonType.js +45 -31
  65. package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
  66. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js +79 -68
  67. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
  68. package/dist/es5/parquetjs/parquet-thrift/ListType.js +45 -31
  69. package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
  70. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js +343 -319
  71. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
  72. package/dist/es5/parquetjs/parquet-thrift/MapType.js +45 -31
  73. package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
  74. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js +45 -31
  75. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
  76. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js +45 -31
  77. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
  78. package/dist/es5/parquetjs/parquet-thrift/NullType.js +45 -31
  79. package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
  80. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js +75 -64
  81. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
  82. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js +94 -83
  83. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
  84. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js +169 -158
  85. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
  86. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js +94 -83
  87. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
  88. package/dist/es5/parquetjs/parquet-thrift/PageType.js +1 -1
  89. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js +124 -113
  90. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
  91. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js +199 -188
  92. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
  93. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js +94 -83
  94. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
  95. package/dist/es5/parquetjs/parquet-thrift/Statistics.js +135 -124
  96. package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
  97. package/dist/es5/parquetjs/parquet-thrift/StringType.js +45 -31
  98. package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
  99. package/dist/es5/parquetjs/parquet-thrift/TimeType.js +79 -68
  100. package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
  101. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js +101 -88
  102. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
  103. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js +79 -68
  104. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
  105. package/dist/es5/parquetjs/parquet-thrift/Type.js +1 -1
  106. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js +45 -31
  107. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
  108. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js +45 -31
  109. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
  110. package/dist/es5/parquetjs/parquet-thrift/index.js +43 -43
  111. package/dist/es5/parquetjs/parser/decoders.js +391 -218
  112. package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
  113. package/dist/es5/parquetjs/parser/parquet-cursor.js +180 -62
  114. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +1 -1
  115. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +370 -125
  116. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +1 -1
  117. package/dist/es5/parquetjs/parser/parquet-reader.js +320 -91
  118. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  119. package/dist/es5/parquetjs/schema/declare.js +11 -9
  120. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  121. package/dist/es5/parquetjs/schema/schema.js +87 -73
  122. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  123. package/dist/es5/parquetjs/schema/shred.js +96 -56
  124. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  125. package/dist/es5/parquetjs/schema/types.js +40 -39
  126. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  127. package/dist/es5/parquetjs/utils/buffer-utils.js +1 -1
  128. package/dist/es5/parquetjs/utils/buffer-utils.js.map +1 -1
  129. package/dist/es5/parquetjs/utils/file-utils.js +12 -8
  130. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  131. package/dist/es5/parquetjs/utils/read-utils.js +50 -22
  132. package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
  133. package/dist/esm/parquet-loader.js +1 -1
  134. package/dist/esm/parquet-loader.js.map +1 -1
  135. package/dist/esm/parquet-writer.js +1 -1
  136. package/dist/esm/parquet-writer.js.map +1 -1
  137. package/package.json +5 -5
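Note on the shape of this diff: nearly all of the churn in the dist/es5 files above is build-output churn rather than behavioral change. The 3.1.0 es5 bundle downlevels async/await and for...of through @babel/runtime helpers (regenerator, asyncToGenerator, classCallCheck, createClass) instead of emitting native ES2017 class methods, which is what inflates files such as parquetjs/encoder/writer.js (+737 -301) and parquetjs/parser/parquet-envelope-reader.js (+370 -125). The diff shown below for parquet-envelope-reader.js is representative. As orientation, here is a minimal re-implementation sketch of what the asyncToGenerator pattern does; this is illustrative TypeScript, not the actual @babel/runtime source:

```ts
// Sketch: drive a generator that yields promises, resuming it with each
// resolved value, so `await` can be expressed as `yield` in pre-ES2017 output.
type GenFn<T> = (this: unknown, ...args: unknown[]) => Generator<unknown, T, unknown>;

function asyncToGenerator<T>(genFn: GenFn<T>) {
  return function (this: unknown, ...args: unknown[]): Promise<T> {
    const gen = genFn.apply(this, args);
    return new Promise<T>((resolve, reject) => {
      function step(advance: () => IteratorResult<unknown, T>): void {
        let result: IteratorResult<unknown, T>;
        try {
          result = advance(); // run the generator until its next yield/return
        } catch (err) {
          reject(err); // a throw inside the generator rejects the promise
          return;
        }
        if (result.done) {
          resolve(result.value);
          return;
        }
        // treat every yielded value as an awaited promise
        Promise.resolve(result.value).then(
          (value) => step(() => gen.next(value)), // resume with the resolved value
          (error) => step(() => gen.throw(error)) // or rethrow at the yield site
        );
      }
      step(() => gen.next());
    });
  };
}
```

With that helper in mind, each transpiled method below is an asyncToGenerator-wrapped generator whose yield points correspond to the original await expressions.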
package/dist/es5/parquetjs/parser/parquet-envelope-reader.js
@@ -7,6 +7,14 @@ Object.defineProperty(exports, "__esModule", {
  });
  exports.ParquetEnvelopeReader = void 0;

+ var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
+
+ var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
+
+ var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
+
+ var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
+
  var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));

  var _constants = require("../../constants");
@@ -17,18 +25,21 @@ var _readUtils = require("../utils/read-utils");

  var _decoders = require("./decoders");

- const DEFAULT_DICTIONARY_SIZE = 1e6;
+ function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }

- class ParquetEnvelopeReader {
- static async openBuffer(buffer) {
- const readFn = (position, length) => Promise.resolve(buffer.slice(position, position + length));
+ function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }

- const closeFn = () => Promise.resolve();
+ function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }

- return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);
- }
+ function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
+
+ function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
+
+ var DEFAULT_DICTIONARY_SIZE = 1e6;

- constructor(read, close, fileSize, options) {
+ var ParquetEnvelopeReader = function () {
+ function ParquetEnvelopeReader(read, close, fileSize, options) {
+ (0, _classCallCheck2.default)(this, ParquetEnvelopeReader);
  (0, _defineProperty2.default)(this, "read", void 0);
  (0, _defineProperty2.default)(this, "close", void 0);
  (0, _defineProperty2.default)(this, "fileSize", void 0);
@@ -39,130 +50,364 @@ class ParquetEnvelopeReader {
  this.defaultDictionarySize = (options === null || options === void 0 ? void 0 : options.defaultDictionarySize) || DEFAULT_DICTIONARY_SIZE;
  }

- async readHeader() {
- const buffer = await this.read(0, _constants.PARQUET_MAGIC.length);
- const magic = buffer.toString();
-
- switch (magic) {
- case _constants.PARQUET_MAGIC:
- break;
-
- case _constants.PARQUET_MAGIC_ENCRYPTED:
- throw new Error('Encrypted parquet file not supported');
-
- default:
- throw new Error("Invalid parquet file (magic=".concat(magic, ")"));
- }
- }
-
- async readRowGroup(schema, rowGroup, columnList) {
- const buffer = {
- rowCount: Number(rowGroup.num_rows),
- columnData: {}
- };
-
- for (const colChunk of rowGroup.columns) {
- const colMetadata = colChunk.meta_data;
- const colKey = colMetadata === null || colMetadata === void 0 ? void 0 : colMetadata.path_in_schema;
-
- if (columnList.length > 0 && (0, _readUtils.fieldIndexOf)(columnList, colKey) < 0) {
- continue;
+ (0, _createClass2.default)(ParquetEnvelopeReader, [{
+ key: "readHeader",
+ value: function () {
+ var _readHeader = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee() {
+ var buffer, magic;
+ return _regenerator.default.wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ _context.next = 2;
+ return this.read(0, _constants.PARQUET_MAGIC.length);
+
+ case 2:
+ buffer = _context.sent;
+ magic = buffer.toString();
+ _context.t0 = magic;
+ _context.next = _context.t0 === _constants.PARQUET_MAGIC ? 7 : _context.t0 === _constants.PARQUET_MAGIC_ENCRYPTED ? 8 : 9;
+ break;
+
+ case 7:
+ return _context.abrupt("break", 10);
+
+ case 8:
+ throw new Error('Encrypted parquet file not supported');
+
+ case 9:
+ throw new Error("Invalid parquet file (magic=".concat(magic, ")"));
+
+ case 10:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, this);
+ }));
+
+ function readHeader() {
+ return _readHeader.apply(this, arguments);
  }

- buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
- }
-
- return buffer;
- }
-
- async readColumnChunk(schema, colChunk) {
- var _colChunk$meta_data, _colChunk$meta_data2, _colChunk$meta_data3, _colChunk$meta_data4, _colChunk$meta_data5, _colChunk$meta_data7, _colChunk$meta_data8, _options$dictionary;
-
- if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
- throw new Error('external references are not supported');
- }
-
- const field = schema.findField((_colChunk$meta_data = colChunk.meta_data) === null || _colChunk$meta_data === void 0 ? void 0 : _colChunk$meta_data.path_in_schema);
- const type = (0, _readUtils.getThriftEnum)(_parquetThrift.Type, (_colChunk$meta_data2 = colChunk.meta_data) === null || _colChunk$meta_data2 === void 0 ? void 0 : _colChunk$meta_data2.type);
-
- if (type !== field.primitiveType) {
- throw new Error("chunk type not matching schema: ".concat(type));
- }
-
- const compression = (0, _readUtils.getThriftEnum)(_parquetThrift.CompressionCodec, (_colChunk$meta_data3 = colChunk.meta_data) === null || _colChunk$meta_data3 === void 0 ? void 0 : _colChunk$meta_data3.codec);
- const pagesOffset = Number((_colChunk$meta_data4 = colChunk.meta_data) === null || _colChunk$meta_data4 === void 0 ? void 0 : _colChunk$meta_data4.data_page_offset);
- let pagesSize = Number((_colChunk$meta_data5 = colChunk.meta_data) === null || _colChunk$meta_data5 === void 0 ? void 0 : _colChunk$meta_data5.total_compressed_size);
-
- if (!colChunk.file_path) {
- var _colChunk$meta_data6;
-
- pagesSize = Math.min(this.fileSize - pagesOffset, Number((_colChunk$meta_data6 = colChunk.meta_data) === null || _colChunk$meta_data6 === void 0 ? void 0 : _colChunk$meta_data6.total_compressed_size));
- }
-
- const options = {
- type,
- rLevelMax: field.rLevelMax,
- dLevelMax: field.dLevelMax,
- compression,
- column: field,
- numValues: (_colChunk$meta_data7 = colChunk.meta_data) === null || _colChunk$meta_data7 === void 0 ? void 0 : _colChunk$meta_data7.num_values,
- dictionary: []
- };
- let dictionary;
- const dictionaryPageOffset = colChunk === null || colChunk === void 0 ? void 0 : (_colChunk$meta_data8 = colChunk.meta_data) === null || _colChunk$meta_data8 === void 0 ? void 0 : _colChunk$meta_data8.dictionary_page_offset;
-
- if (dictionaryPageOffset) {
- const dictionaryOffset = Number(dictionaryPageOffset);
- dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
- }
-
- dictionary = (_options$dictionary = options.dictionary) !== null && _options$dictionary !== void 0 && _options$dictionary.length ? options.dictionary : dictionary;
- const pagesBuf = await this.read(pagesOffset, pagesSize);
- return await (0, _decoders.decodeDataPages)(pagesBuf, { ...options,
- dictionary
- });
- }
-
- async getDictionary(dictionaryPageOffset, options, pagesOffset) {
- if (dictionaryPageOffset === 0) {
- return [];
- }
-
- const dictionarySize = Math.min(this.fileSize - dictionaryPageOffset, this.defaultDictionarySize);
- const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);
- const cursor = {
- buffer: pagesBuf,
- offset: 0,
- size: pagesBuf.length
- };
- const decodedPage = await (0, _decoders.decodePage)(cursor, options);
- return decodedPage.dictionary;
- }
-
- async readFooter() {
- const trailerLen = _constants.PARQUET_MAGIC.length + 4;
- const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);
- const magic = trailerBuf.slice(4).toString();
+ return readHeader;
+ }()
+ }, {
+ key: "readRowGroup",
+ value: function () {
+ var _readRowGroup = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee2(schema, rowGroup, columnList) {
+ var buffer, _iterator, _step, colChunk, colMetadata, colKey;
+
+ return _regenerator.default.wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ buffer = {
+ rowCount: Number(rowGroup.num_rows),
+ columnData: {}
+ };
+ _iterator = _createForOfIteratorHelper(rowGroup.columns);
+ _context2.prev = 2;
+
+ _iterator.s();
+
+ case 4:
+ if ((_step = _iterator.n()).done) {
+ _context2.next = 15;
+ break;
+ }
+
+ colChunk = _step.value;
+ colMetadata = colChunk.meta_data;
+ colKey = colMetadata === null || colMetadata === void 0 ? void 0 : colMetadata.path_in_schema;
+
+ if (!(columnList.length > 0 && (0, _readUtils.fieldIndexOf)(columnList, colKey) < 0)) {
+ _context2.next = 10;
+ break;
+ }
+
+ return _context2.abrupt("continue", 13);
+
+ case 10:
+ _context2.next = 12;
+ return this.readColumnChunk(schema, colChunk);
+
+ case 12:
+ buffer.columnData[colKey.join()] = _context2.sent;
+
+ case 13:
+ _context2.next = 4;
+ break;
+
+ case 15:
+ _context2.next = 20;
+ break;
+
+ case 17:
+ _context2.prev = 17;
+ _context2.t0 = _context2["catch"](2);
+
+ _iterator.e(_context2.t0);
+
+ case 20:
+ _context2.prev = 20;
+
+ _iterator.f();
+
+ return _context2.finish(20);
+
+ case 23:
+ return _context2.abrupt("return", buffer);
+
+ case 24:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, this, [[2, 17, 20, 23]]);
+ }));
+
+ function readRowGroup(_x, _x2, _x3) {
+ return _readRowGroup.apply(this, arguments);
+ }

- if (magic !== _constants.PARQUET_MAGIC) {
- throw new Error("Not a valid parquet file (magic=\"".concat(magic, ")"));
- }
+ return readRowGroup;
+ }()
+ }, {
+ key: "readColumnChunk",
+ value: function () {
+ var _readColumnChunk = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee3(schema, colChunk) {
+ var _colChunk$meta_data, _colChunk$meta_data2, _colChunk$meta_data3, _colChunk$meta_data4, _colChunk$meta_data5, _colChunk$meta_data7, _colChunk$meta_data8, _options$dictionary;
+
+ var field, type, compression, pagesOffset, pagesSize, _colChunk$meta_data6, options, dictionary, dictionaryPageOffset, dictionaryOffset, pagesBuf;
+
+ return _regenerator.default.wrap(function _callee3$(_context3) {
+ while (1) {
+ switch (_context3.prev = _context3.next) {
+ case 0:
+ if (!(colChunk.file_path !== undefined && colChunk.file_path !== null)) {
+ _context3.next = 2;
+ break;
+ }
+
+ throw new Error('external references are not supported');
+
+ case 2:
+ field = schema.findField((_colChunk$meta_data = colChunk.meta_data) === null || _colChunk$meta_data === void 0 ? void 0 : _colChunk$meta_data.path_in_schema);
+ type = (0, _readUtils.getThriftEnum)(_parquetThrift.Type, (_colChunk$meta_data2 = colChunk.meta_data) === null || _colChunk$meta_data2 === void 0 ? void 0 : _colChunk$meta_data2.type);
+
+ if (!(type !== field.primitiveType)) {
+ _context3.next = 6;
+ break;
+ }
+
+ throw new Error("chunk type not matching schema: ".concat(type));
+
+ case 6:
+ compression = (0, _readUtils.getThriftEnum)(_parquetThrift.CompressionCodec, (_colChunk$meta_data3 = colChunk.meta_data) === null || _colChunk$meta_data3 === void 0 ? void 0 : _colChunk$meta_data3.codec);
+ pagesOffset = Number((_colChunk$meta_data4 = colChunk.meta_data) === null || _colChunk$meta_data4 === void 0 ? void 0 : _colChunk$meta_data4.data_page_offset);
+ pagesSize = Number((_colChunk$meta_data5 = colChunk.meta_data) === null || _colChunk$meta_data5 === void 0 ? void 0 : _colChunk$meta_data5.total_compressed_size);
+
+ if (!colChunk.file_path) {
+ pagesSize = Math.min(this.fileSize - pagesOffset, Number((_colChunk$meta_data6 = colChunk.meta_data) === null || _colChunk$meta_data6 === void 0 ? void 0 : _colChunk$meta_data6.total_compressed_size));
+ }
+
+ options = {
+ type: type,
+ rLevelMax: field.rLevelMax,
+ dLevelMax: field.dLevelMax,
+ compression: compression,
+ column: field,
+ numValues: (_colChunk$meta_data7 = colChunk.meta_data) === null || _colChunk$meta_data7 === void 0 ? void 0 : _colChunk$meta_data7.num_values,
+ dictionary: []
+ };
+ dictionaryPageOffset = colChunk === null || colChunk === void 0 ? void 0 : (_colChunk$meta_data8 = colChunk.meta_data) === null || _colChunk$meta_data8 === void 0 ? void 0 : _colChunk$meta_data8.dictionary_page_offset;
+
+ if (!dictionaryPageOffset) {
+ _context3.next = 17;
+ break;
+ }
+
+ dictionaryOffset = Number(dictionaryPageOffset);
+ _context3.next = 16;
+ return this.getDictionary(dictionaryOffset, options, pagesOffset);
+
+ case 16:
+ dictionary = _context3.sent;
+
+ case 17:
+ dictionary = (_options$dictionary = options.dictionary) !== null && _options$dictionary !== void 0 && _options$dictionary.length ? options.dictionary : dictionary;
+ _context3.next = 20;
+ return this.read(pagesOffset, pagesSize);
+
+ case 20:
+ pagesBuf = _context3.sent;
+ _context3.next = 23;
+ return (0, _decoders.decodeDataPages)(pagesBuf, _objectSpread(_objectSpread({}, options), {}, {
+ dictionary: dictionary
+ }));
+
+ case 23:
+ return _context3.abrupt("return", _context3.sent);
+
+ case 24:
+ case "end":
+ return _context3.stop();
+ }
+ }
+ }, _callee3, this);
+ }));
+
+ function readColumnChunk(_x4, _x5) {
+ return _readColumnChunk.apply(this, arguments);
+ }

- const metadataSize = trailerBuf.readUInt32LE(0);
- const metadataOffset = this.fileSize - metadataSize - trailerLen;
+ return readColumnChunk;
+ }()
+ }, {
+ key: "getDictionary",
+ value: function () {
+ var _getDictionary = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee4(dictionaryPageOffset, options, pagesOffset) {
+ var dictionarySize, pagesBuf, cursor, decodedPage;
+ return _regenerator.default.wrap(function _callee4$(_context4) {
+ while (1) {
+ switch (_context4.prev = _context4.next) {
+ case 0:
+ if (!(dictionaryPageOffset === 0)) {
+ _context4.next = 2;
+ break;
+ }
+
+ return _context4.abrupt("return", []);
+
+ case 2:
+ dictionarySize = Math.min(this.fileSize - dictionaryPageOffset, this.defaultDictionarySize);
+ _context4.next = 5;
+ return this.read(dictionaryPageOffset, dictionarySize);
+
+ case 5:
+ pagesBuf = _context4.sent;
+ cursor = {
+ buffer: pagesBuf,
+ offset: 0,
+ size: pagesBuf.length
+ };
+ _context4.next = 9;
+ return (0, _decoders.decodePage)(cursor, options);
+
+ case 9:
+ decodedPage = _context4.sent;
+ return _context4.abrupt("return", decodedPage.dictionary);
+
+ case 11:
+ case "end":
+ return _context4.stop();
+ }
+ }
+ }, _callee4, this);
+ }));
+
+ function getDictionary(_x6, _x7, _x8) {
+ return _getDictionary.apply(this, arguments);
+ }

- if (metadataOffset < _constants.PARQUET_MAGIC.length) {
- throw new Error("Invalid metadata size ".concat(metadataOffset));
- }
+ return getDictionary;
+ }()
+ }, {
+ key: "readFooter",
+ value: function () {
+ var _readFooter = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee5() {
+ var trailerLen, trailerBuf, magic, metadataSize, metadataOffset, metadataBuf, _decodeFileMetadata, metadata;
+
+ return _regenerator.default.wrap(function _callee5$(_context5) {
+ while (1) {
+ switch (_context5.prev = _context5.next) {
+ case 0:
+ trailerLen = _constants.PARQUET_MAGIC.length + 4;
+ _context5.next = 3;
+ return this.read(this.fileSize - trailerLen, trailerLen);
+
+ case 3:
+ trailerBuf = _context5.sent;
+ magic = trailerBuf.slice(4).toString();
+
+ if (!(magic !== _constants.PARQUET_MAGIC)) {
+ _context5.next = 7;
+ break;
+ }
+
+ throw new Error("Not a valid parquet file (magic=\"".concat(magic, ")"));
+
+ case 7:
+ metadataSize = trailerBuf.readUInt32LE(0);
+ metadataOffset = this.fileSize - metadataSize - trailerLen;
+
+ if (!(metadataOffset < _constants.PARQUET_MAGIC.length)) {
+ _context5.next = 11;
+ break;
+ }
+
+ throw new Error("Invalid metadata size ".concat(metadataOffset));
+
+ case 11:
+ _context5.next = 13;
+ return this.read(metadataOffset, metadataSize);
+
+ case 13:
+ metadataBuf = _context5.sent;
+ _decodeFileMetadata = (0, _readUtils.decodeFileMetadata)(metadataBuf), metadata = _decodeFileMetadata.metadata;
+ return _context5.abrupt("return", metadata);
+
+ case 16:
+ case "end":
+ return _context5.stop();
+ }
+ }
+ }, _callee5, this);
+ }));
+
+ function readFooter() {
+ return _readFooter.apply(this, arguments);
+ }

- const metadataBuf = await this.read(metadataOffset, metadataSize);
- const {
- metadata
- } = (0, _readUtils.decodeFileMetadata)(metadataBuf);
- return metadata;
- }
+ return readFooter;
+ }()
+ }], [{
+ key: "openBuffer",
+ value: function () {
+ var _openBuffer = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee6(buffer) {
+ var readFn, closeFn;
+ return _regenerator.default.wrap(function _callee6$(_context6) {
+ while (1) {
+ switch (_context6.prev = _context6.next) {
+ case 0:
+ readFn = function readFn(position, length) {
+ return Promise.resolve(buffer.slice(position, position + length));
+ };
+
+ closeFn = function closeFn() {
+ return Promise.resolve();
+ };
+
+ return _context6.abrupt("return", new ParquetEnvelopeReader(readFn, closeFn, buffer.length));
+
+ case 3:
+ case "end":
+ return _context6.stop();
+ }
+ }
+ }, _callee6);
+ }));
+
+ function openBuffer(_x9) {
+ return _openBuffer.apply(this, arguments);
+ }

- }
+ return openBuffer;
+ }()
+ }]);
+ return ParquetEnvelopeReader;
+ }();

  exports.ParquetEnvelopeReader = ParquetEnvelopeReader;
  //# sourceMappingURL=parquet-envelope-reader.js.map
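Reading aid for the + side above (an informal sketch of regenerator's output, not its documented contract): _regenerator.default.wrap turns each function body into a resumable switch-based state machine. _context.next holds the case label to resume at, returning a promise from the wrapped function suspends at an await point, _context.sent carries the resolved value back in, and abrupt/finish encode return, break, continue, and try/finally exits (tables like [[2, 17, 20, 23]] mark try/catch/finally boundaries). Hand-expanding readHeader in that style looks roughly like this, assuming the standard parquet magic strings 'PAR1' and 'PARE' behind the PARQUET_MAGIC constants:

```ts
// Illustrative only: the shape of the state machine regenerator emits for
// readHeader(). Each await point becomes a pair of cases; `sent` plays the
// role of _context.sent.
function readHeaderStateMachine(
  read: (position: number, length: number) => Promise<Buffer>
): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    let next = 0;
    const step = (sent?: Buffer): void => {
      try {
        switch (next) {
          case 0: // run up to the first await, then suspend
            next = 2;
            read(0, 'PAR1'.length).then(step, reject);
            return;
          case 2: { // resume here with the awaited value in `sent`
            const magic = sent!.toString();
            if (magic === 'PAR1') return resolve();
            if (magic === 'PARE') throw new Error('Encrypted parquet file not supported');
            throw new Error(`Invalid parquet file (magic=${magic})`);
          }
        }
      } catch (err) {
        reject(err); // any throw rejects the wrapping promise
      }
    };
    step();
  });
}
```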
package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/parquetjs/parser/parquet-envelope-reader.ts"],"names":["DEFAULT_DICTIONARY_SIZE","ParquetEnvelopeReader","openBuffer","buffer","readFn","position","length","Promise","resolve","slice","closeFn","constructor","read","close","fileSize","options","defaultDictionarySize","readHeader","PARQUET_MAGIC","magic","toString","PARQUET_MAGIC_ENCRYPTED","Error","readRowGroup","schema","rowGroup","columnList","rowCount","Number","num_rows","columnData","colChunk","columns","colMetadata","meta_data","colKey","path_in_schema","join","readColumnChunk","file_path","undefined","field","findField","type","Type","primitiveType","compression","CompressionCodec","codec","pagesOffset","data_page_offset","pagesSize","total_compressed_size","Math","min","rLevelMax","dLevelMax","column","numValues","num_values","dictionary","dictionaryPageOffset","dictionary_page_offset","dictionaryOffset","getDictionary","pagesBuf","dictionarySize","cursor","offset","size","decodedPage","readFooter","trailerLen","trailerBuf","metadataSize","readUInt32LE","metadataOffset","metadataBuf","metadata"],"mappings":";;;;;;;;;;;AAEA;;AACA;;AAQA;;AACA;;AAEA,MAAMA,uBAAuB,GAAG,GAAhC;;AAQO,MAAMC,qBAAN,CAA4B;AAUV,eAAVC,UAAU,CAACC,MAAD,EAAiD;AACtE,UAAMC,MAAM,GAAG,CAACC,QAAD,EAAmBC,MAAnB,KACbC,OAAO,CAACC,OAAR,CAAgBL,MAAM,CAACM,KAAP,CAAaJ,QAAb,EAAuBA,QAAQ,GAAGC,MAAlC,CAAhB,CADF;;AAEA,UAAMI,OAAO,GAAG,MAAMH,OAAO,CAACC,OAAR,EAAtB;;AACA,WAAO,IAAIP,qBAAJ,CAA0BG,MAA1B,EAAkCM,OAAlC,EAA2CP,MAAM,CAACG,MAAlD,CAAP;AACD;;AAEDK,EAAAA,WAAW,CACTC,IADS,EAETC,KAFS,EAGTC,QAHS,EAITC,OAJS,EAKT;AAAA;AAAA;AAAA;AAAA;AACA,SAAKH,IAAL,GAAYA,IAAZ;AACA,SAAKC,KAAL,GAAaA,KAAb;AACA,SAAKC,QAAL,GAAgBA,QAAhB;AACA,SAAKE,qBAAL,GAA6B,CAAAD,OAAO,SAAP,IAAAA,OAAO,WAAP,YAAAA,OAAO,CAAEC,qBAAT,KAAkChB,uBAA/D;AACD;;AAEe,QAAViB,UAAU,GAAkB;AAChC,UAAMd,MAAM,GAAG,MAAM,KAAKS,IAAL,CAAU,CAAV,EAAaM,yBAAcZ,MAA3B,CAArB;AAEA,UAAMa,KAAK,GAAGhB,MAAM,CAACiB,QAAP,EAAd;;AACA,YAAQD,KAAR;AACE,WAAKD,wBAAL;AACE;;AACF,WAAKG,kCAAL;AACE,cAAM,IAAIC,KAAJ,CAAU,sCAAV,CAAN;;AACF;AACE,cAAM,IAAIA,KAAJ,uCAAyCH,KAAzC,OAAN;AANJ;AAQD;;AAEiB,QAAZI,YAAY,CAChBC,MADgB,EAEhBC,QAFgB,EAGhBC,UAHgB,EAIQ;AACxB,UAAMvB,MAAqB,GAAG;AAC5BwB,MAAAA,QAAQ,EAAEC,MAAM,CAACH,QAAQ,CAACI,QAAV,CADY;AAE5BC,MAAAA,UAAU,EAAE;AAFgB,KAA9B;;AAIA,SAAK,MAAMC,QAAX,IAAuBN,QAAQ,CAACO,OAAhC,EAAyC;AACvC,YAAMC,WAAW,GAAGF,QAAQ,CAACG,SAA7B;AACA,YAAMC,MAAM,GAAGF,WAAH,aAAGA,WAAH,uBAAGA,WAAW,CAAEG,cAA5B;;AACA,UAAIV,UAAU,CAACpB,MAAX,GAAoB,CAApB,IAAyB,6BAAaoB,UAAb,EAAyBS,MAAzB,IAAoC,CAAjE,EAAoE;AAClE;AACD;;AACDhC,MAAAA,MAAM,CAAC2B,UAAP,CAAkBK,MAAM,CAAEE,IAAR,EAAlB,IAAoC,MAAM,KAAKC,eAAL,CAAqBd,MAArB,EAA6BO,QAA7B,CAA1C;AACD;;AACD,WAAO5B,MAAP;AACD;;AAOoB,QAAfmC,eAAe,CAACd,MAAD,EAAwBO,QAAxB,EAAqE;AAAA;;AACxF,QAAIA,QAAQ,CAACQ,SAAT,KAAuBC,SAAvB,IAAoCT,QAAQ,CAACQ,SAAT,KAAuB,IAA/D,EAAqE;AACnE,YAAM,IAAIjB,KAAJ,CAAU,uCAAV,CAAN;AACD;;AAED,UAAMmB,KAAK,GAAGjB,MAAM,CAACkB,SAAP,wBAAiBX,QAAQ,CAACG,SAA1B,wDAAiB,oBAAoBE,cAArC,CAAd;AACA,UAAMO,IAAmB,GAAG,8BAAcC,mBAAd,0BAAoBb,QAAQ,CAACG,SAA7B,yDAAoB,qBAAoBS,IAAxC,CAA5B;;AAEA,QAAIA,IAAI,KAAKF,KAAK,CAACI,aAAnB,EAAkC;AAChC,YAAM,IAAIvB,KAAJ,2CAA6CqB,IAA7C,EAAN;AACD;;AAED,UAAMG,WAA+B,GAAG,8BACtCC,+BADsC,0BAEtChB,QAAQ,CAACG,SAF6B,yDAEtC,qBAAoBc,KAFkB,CAAxC;AAKA,UAAMC,WAAW,GAAGrB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBgB,gBAArB,CAA1B;AACA,QAAIC,SAAS,GAAGvB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBkB,qBAArB,CAAtB;;AAEA,QAAI,CAACrB,QAAQ,CAACQ,SAAd,EAAyB;AAAA;;AACvBY,MAAAA,SAAS,GAAGE,IAAI,CAACC,GAAL,CACV,KAAKxC,QAAL,GAAgBmC,WADN,EAEVrB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBkB,qBAArB,CAFI,CAAZ;AAID;;AAED,UAAMr
C,OAAuB,GAAG;AAC9B4B,MAAAA,IAD8B;AAE9BY,MAAAA,SAAS,EAAEd,KAAK,CAACc,SAFa;AAG9BC,MAAAA,SAAS,EAAEf,KAAK,CAACe,SAHa;AAI9BV,MAAAA,WAJ8B;AAK9BW,MAAAA,MAAM,EAAEhB,KALsB;AAM9BiB,MAAAA,SAAS,0BAAE3B,QAAQ,CAACG,SAAX,yDAAE,qBAAoByB,UAND;AAO9BC,MAAAA,UAAU,EAAE;AAPkB,KAAhC;AAUA,QAAIA,UAAJ;AAEA,UAAMC,oBAAoB,GAAG9B,QAAH,aAAGA,QAAH,+CAAGA,QAAQ,CAAEG,SAAb,yDAAG,qBAAqB4B,sBAAlD;;AAEA,QAAID,oBAAJ,EAA0B;AACxB,YAAME,gBAAgB,GAAGnC,MAAM,CAACiC,oBAAD,CAA/B;AAEAD,MAAAA,UAAU,GAAG,MAAM,KAAKI,aAAL,CAAmBD,gBAAnB,EAAqChD,OAArC,EAA8CkC,WAA9C,CAAnB;AACD;;AAEDW,IAAAA,UAAU,GAAG,uBAAA7C,OAAO,CAAC6C,UAAR,oEAAoBtD,MAApB,GAA6BS,OAAO,CAAC6C,UAArC,GAAkDA,UAA/D;AACA,UAAMK,QAAQ,GAAG,MAAM,KAAKrD,IAAL,CAAUqC,WAAV,EAAuBE,SAAvB,CAAvB;AACA,WAAO,MAAM,+BAAgBc,QAAhB,EAA0B,EAAC,GAAGlD,OAAJ;AAAa6C,MAAAA;AAAb,KAA1B,CAAb;AACD;;AASkB,QAAbI,aAAa,CACjBH,oBADiB,EAEjB9C,OAFiB,EAGjBkC,WAHiB,EAIE;AACnB,QAAIY,oBAAoB,KAAK,CAA7B,EAAgC;AAQ9B,aAAO,EAAP;AACD;;AAED,UAAMK,cAAc,GAAGb,IAAI,CAACC,GAAL,CACrB,KAAKxC,QAAL,GAAgB+C,oBADK,EAErB,KAAK7C,qBAFgB,CAAvB;AAIA,UAAMiD,QAAQ,GAAG,MAAM,KAAKrD,IAAL,CAAUiD,oBAAV,EAAgCK,cAAhC,CAAvB;AAEA,UAAMC,MAAM,GAAG;AAAChE,MAAAA,MAAM,EAAE8D,QAAT;AAAmBG,MAAAA,MAAM,EAAE,CAA3B;AAA8BC,MAAAA,IAAI,EAAEJ,QAAQ,CAAC3D;AAA7C,KAAf;AACA,UAAMgE,WAAW,GAAG,MAAM,0BAAWH,MAAX,EAAmBpD,OAAnB,CAA1B;AAEA,WAAOuD,WAAW,CAACV,UAAnB;AACD;;AAEe,QAAVW,UAAU,GAA0B;AACxC,UAAMC,UAAU,GAAGtD,yBAAcZ,MAAd,GAAuB,CAA1C;AACA,UAAMmE,UAAU,GAAG,MAAM,KAAK7D,IAAL,CAAU,KAAKE,QAAL,GAAgB0D,UAA1B,EAAsCA,UAAtC,CAAzB;AAEA,UAAMrD,KAAK,GAAGsD,UAAU,CAAChE,KAAX,CAAiB,CAAjB,EAAoBW,QAApB,EAAd;;AACA,QAAID,KAAK,KAAKD,wBAAd,EAA6B;AAC3B,YAAM,IAAII,KAAJ,6CAA8CH,KAA9C,OAAN;AACD;;AAED,UAAMuD,YAAY,GAAGD,UAAU,CAACE,YAAX,CAAwB,CAAxB,CAArB;AACA,UAAMC,cAAc,GAAG,KAAK9D,QAAL,GAAgB4D,YAAhB,GAA+BF,UAAtD;;AACA,QAAII,cAAc,GAAG1D,yBAAcZ,MAAnC,EAA2C;AACzC,YAAM,IAAIgB,KAAJ,iCAAmCsD,cAAnC,EAAN;AACD;;AAED,UAAMC,WAAW,GAAG,MAAM,KAAKjE,IAAL,CAAUgE,cAAV,EAA0BF,YAA1B,CAA1B;AAGA,UAAM;AAACI,MAAAA;AAAD,QAAa,mCAAmBD,WAAnB,CAAnB;AACA,WAAOC,QAAP;AACD;;AA/KgC","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetSchema} from '../schema/schema';\nimport {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';\nimport {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';\nimport {\n ParquetBuffer,\n ParquetCompression,\n ParquetData,\n PrimitiveType,\n ParquetOptions\n} from '../schema/declare';\nimport {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';\nimport {decodeDataPages, decodePage} from './decoders';\n\nconst DEFAULT_DICTIONARY_SIZE = 1e6;\n\n/**\n * The parquet envelope reader allows direct, unbuffered access to the individual\n * sections of the parquet file, namely the header, footer and the row groups.\n * This class is intended for advanced/internal users; if you just want to retrieve\n * rows from a parquet file use the ParquetReader instead\n */\nexport class ParquetEnvelopeReader {\n public read: (position: number, length: number) => Promise<Buffer>;\n /**\n * Close this parquet reader. 
You MUST call this method once you're finished\n * reading rows\n */\n public close: () => Promise<void>;\n public fileSize: number;\n public defaultDictionarySize: number;\n\n static async openBuffer(buffer: Buffer): Promise<ParquetEnvelopeReader> {\n const readFn = (position: number, length: number) =>\n Promise.resolve(buffer.slice(position, position + length));\n const closeFn = () => Promise.resolve();\n return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);\n }\n\n constructor(\n read: (position: number, length: number) => Promise<Buffer>,\n close: () => Promise<void>,\n fileSize: number,\n options?: any\n ) {\n this.read = read;\n this.close = close;\n this.fileSize = fileSize;\n this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;\n }\n\n async readHeader(): Promise<void> {\n const buffer = await this.read(0, PARQUET_MAGIC.length);\n\n const magic = buffer.toString();\n switch (magic) {\n case PARQUET_MAGIC:\n break;\n case PARQUET_MAGIC_ENCRYPTED:\n throw new Error('Encrypted parquet file not supported');\n default:\n throw new Error(`Invalid parquet file (magic=${magic})`);\n }\n }\n\n async readRowGroup(\n schema: ParquetSchema,\n rowGroup: RowGroup,\n columnList: string[][]\n ): Promise<ParquetBuffer> {\n const buffer: ParquetBuffer = {\n rowCount: Number(rowGroup.num_rows),\n columnData: {}\n };\n for (const colChunk of rowGroup.columns) {\n const colMetadata = colChunk.meta_data;\n const colKey = colMetadata?.path_in_schema;\n if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {\n continue; // eslint-disable-line no-continue\n }\n buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);\n }\n return buffer;\n }\n\n /**\n * Do reading of parquet file's column chunk\n * @param schema\n * @param colChunk\n */\n async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {\n if (colChunk.file_path !== undefined && colChunk.file_path !== null) {\n throw new Error('external references are not supported');\n }\n\n const field = schema.findField(colChunk.meta_data?.path_in_schema!);\n const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) as any;\n\n if (type !== field.primitiveType) {\n throw new Error(`chunk type not matching schema: ${type}`);\n }\n\n const compression: ParquetCompression = getThriftEnum(\n CompressionCodec,\n colChunk.meta_data?.codec!\n ) as any;\n\n const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);\n let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);\n\n if (!colChunk.file_path) {\n pagesSize = Math.min(\n this.fileSize - pagesOffset,\n Number(colChunk.meta_data?.total_compressed_size)\n );\n }\n\n const options: ParquetOptions = {\n type,\n rLevelMax: field.rLevelMax,\n dLevelMax: field.dLevelMax,\n compression,\n column: field,\n numValues: colChunk.meta_data?.num_values,\n dictionary: []\n };\n\n let dictionary;\n\n const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;\n\n if (dictionaryPageOffset) {\n const dictionaryOffset = Number(dictionaryPageOffset);\n // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.\n dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);\n }\n\n dictionary = options.dictionary?.length ? 
options.dictionary : dictionary;\n const pagesBuf = await this.read(pagesOffset, pagesSize);\n return await decodeDataPages(pagesBuf, {...options, dictionary});\n }\n\n /**\n * Getting dictionary for allows to flatten values by indices.\n * @param dictionaryPageOffset\n * @param options\n * @param pagesOffset\n * @returns\n */\n async getDictionary(\n dictionaryPageOffset: number,\n options: ParquetOptions,\n pagesOffset: number\n ): Promise<string[]> {\n if (dictionaryPageOffset === 0) {\n // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);\n // pagesBuf = await this.read(pagesOffset, dictionarySize);\n\n // In this case we are working with parquet-mr files format. Problem is described below:\n // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding\n // We need to get dictionary page from column chunk if it exists.\n // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.\n return [];\n }\n\n const dictionarySize = Math.min(\n this.fileSize - dictionaryPageOffset,\n this.defaultDictionarySize\n );\n const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);\n\n const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};\n const decodedPage = await decodePage(cursor, options);\n\n return decodedPage.dictionary!;\n }\n\n async readFooter(): Promise<FileMetaData> {\n const trailerLen = PARQUET_MAGIC.length + 4;\n const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);\n\n const magic = trailerBuf.slice(4).toString();\n if (magic !== PARQUET_MAGIC) {\n throw new Error(`Not a valid parquet file (magic=\"${magic})`);\n }\n\n const metadataSize = trailerBuf.readUInt32LE(0);\n const metadataOffset = this.fileSize - metadataSize - trailerLen;\n if (metadataOffset < PARQUET_MAGIC.length) {\n throw new Error(`Invalid metadata size ${metadataOffset}`);\n }\n\n const metadataBuf = await this.read(metadataOffset, metadataSize);\n // let metadata = new parquet_thrift.FileMetaData();\n // parquet_util.decodeThrift(metadata, metadataBuf);\n const {metadata} = decodeFileMetadata(metadataBuf);\n return metadata;\n }\n}\n"],"file":"parquet-envelope-reader.js"}
+ {"version":3,"sources":["../../../../src/parquetjs/parser/parquet-envelope-reader.ts"],"names":["DEFAULT_DICTIONARY_SIZE","ParquetEnvelopeReader","read","close","fileSize","options","defaultDictionarySize","PARQUET_MAGIC","length","buffer","magic","toString","PARQUET_MAGIC_ENCRYPTED","Error","schema","rowGroup","columnList","rowCount","Number","num_rows","columnData","columns","colChunk","colMetadata","meta_data","colKey","path_in_schema","readColumnChunk","join","file_path","undefined","field","findField","type","Type","primitiveType","compression","CompressionCodec","codec","pagesOffset","data_page_offset","pagesSize","total_compressed_size","Math","min","rLevelMax","dLevelMax","column","numValues","num_values","dictionary","dictionaryPageOffset","dictionary_page_offset","dictionaryOffset","getDictionary","pagesBuf","dictionarySize","cursor","offset","size","decodedPage","trailerLen","trailerBuf","slice","metadataSize","readUInt32LE","metadataOffset","metadataBuf","metadata","readFn","position","Promise","resolve","closeFn"],"mappings":";;;;;;;;;;;;;;;;;;;AAEA;;AACA;;AAQA;;AACA;;;;;;;;;;;;AAEA,IAAMA,uBAAuB,GAAG,GAAhC;;IAQaC,qB;AAiBX,iCACEC,IADF,EAEEC,KAFF,EAGEC,QAHF,EAIEC,OAJF,EAKE;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,SAAKH,IAAL,GAAYA,IAAZ;AACA,SAAKC,KAAL,GAAaA,KAAb;AACA,SAAKC,QAAL,GAAgBA,QAAhB;AACA,SAAKE,qBAAL,GAA6B,CAAAD,OAAO,SAAP,IAAAA,OAAO,WAAP,YAAAA,OAAO,CAAEC,qBAAT,KAAkCN,uBAA/D;AACD;;;;;kFAED;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBACuB,KAAKE,IAAL,CAAU,CAAV,EAAaK,yBAAcC,MAA3B,CADvB;;AAAA;AACQC,gBAAAA,MADR;AAGQC,gBAAAA,KAHR,GAGgBD,MAAM,CAACE,QAAP,EAHhB;AAAA,8BAIUD,KAJV;AAAA,gDAKSH,wBALT,uBAOSK,kCAPT;AAAA;;AAAA;AAAA;;AAAA;AAAA,sBAQY,IAAIC,KAAJ,CAAU,sCAAV,CARZ;;AAAA;AAAA,sBAUY,IAAIA,KAAJ,uCAAyCH,KAAzC,OAVZ;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;oFAcA,kBACEI,MADF,EAEEC,QAFF,EAGEC,UAHF;AAAA;;AAAA;AAAA;AAAA;AAAA;AAKQP,gBAAAA,MALR,GAKgC;AAC5BQ,kBAAAA,QAAQ,EAAEC,MAAM,CAACH,QAAQ,CAACI,QAAV,CADY;AAE5BC,kBAAAA,UAAU,EAAE;AAFgB,iBALhC;AAAA,uDASyBL,QAAQ,CAACM,OATlC;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AASaC,gBAAAA,QATb;AAUUC,gBAAAA,WAVV,GAUwBD,QAAQ,CAACE,SAVjC;AAWUC,gBAAAA,MAXV,GAWmBF,WAXnB,aAWmBA,WAXnB,uBAWmBA,WAAW,CAAEG,cAXhC;;AAAA,sBAYQV,UAAU,CAACR,MAAX,GAAoB,CAApB,IAAyB,6BAAaQ,UAAb,EAAyBS,MAAzB,IAAoC,CAZrE;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA,uBAe8C,KAAKE,eAAL,CAAqBb,MAArB,EAA6BQ,QAA7B,CAf9C;;AAAA;AAeIb,gBAAAA,MAAM,CAACW,UAAP,CAAkBK,MAAM,CAAEG,IAAR,EAAlB,CAfJ;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;;AAAA;;AAAA;;AAAA;AAAA,kDAiBSnB,MAjBT;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;uFAyBA,kBAAsBK,MAAtB,EAA6CQ,QAA7C;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA,sBACMA,QAAQ,CAACO,SAAT,KAAuBC,SAAvB,IAAoCR,QAAQ,CAACO,SAAT,KAAuB,IADjE;AAAA;AAAA;AAAA;;AAAA,sBAEU,IAAIhB,KAAJ,CAAU,uCAAV,CAFV;;AAAA;AAKQkB,gBAAAA,KALR,GAKgBjB,MAAM,CAACkB,SAAP,wBAAiBV,QAAQ,CAACE,SAA1B,wDAAiB,oBAAoBE,cAArC,CALhB;AAMQO,gBAAAA,IANR,GAM8B,8BAAcC,mBAAd,0BAAoBZ,QAAQ,CAACE,SAA7B,yDAAoB,qBAAoBS,IAAxC,CAN9B;;AAAA,sBAQMA,IAAI,KAAKF,KAAK,CAACI,aARrB;AAAA;AAAA;AAAA;;AAAA,sBASU,IAAItB,KAAJ,2CAA6CoB,IAA7C,EATV;;AAAA;AAYQG,gBAAAA,WAZR,GAY0C,8BACtCC,+BADsC,0BAEtCf,QAAQ,CAACE,SAF6B,yDAEtC,qBAAoBc,KAFkB,CAZ1C;AAiBQC,gBAAAA,WAjBR,GAiBsBrB,MAAM,yBAACI,QAAQ,CAACE,SAAV,yDAAC,qBAAoBgB,gBAArB,CAjB5B;AAkBMC,gBAAAA,SAlBN,GAkBkBvB,MAAM,yBAACI,QAAQ,CAACE,SAAV,yDAAC,qBAAoBkB,qBAArB,CAlBxB;;AAoBE,oBAAI,CAACpB,QAAQ,CAACO,SAAd,EAAyB;AACvBY,kBAAAA,SAAS,GAAGE,IAAI,CAACC,GAAL,CACV,KAAKxC,QAAL,GAAgBmC,WADN,EAEVrB,MAAM,yBAACI,QAAQ,CAACE,SAAV,yDAAC,qBAAoBkB,qBAArB,CAFI,CAAZ;AAID;;AAEKrC,gBAAAA,OA3BR,GA2BkC;AAC9B4B,kBAAAA,IAAI,EAAJA,IAD8B
;AAE9BY,kBAAAA,SAAS,EAAEd,KAAK,CAACc,SAFa;AAG9BC,kBAAAA,SAAS,EAAEf,KAAK,CAACe,SAHa;AAI9BV,kBAAAA,WAAW,EAAXA,WAJ8B;AAK9BW,kBAAAA,MAAM,EAAEhB,KALsB;AAM9BiB,kBAAAA,SAAS,0BAAE1B,QAAQ,CAACE,SAAX,yDAAE,qBAAoByB,UAND;AAO9BC,kBAAAA,UAAU,EAAE;AAPkB,iBA3BlC;AAuCQC,gBAAAA,oBAvCR,GAuC+B7B,QAvC/B,aAuC+BA,QAvC/B,+CAuC+BA,QAAQ,CAAEE,SAvCzC,yDAuC+B,qBAAqB4B,sBAvCpD;;AAAA,qBAyCMD,oBAzCN;AAAA;AAAA;AAAA;;AA0CUE,gBAAAA,gBA1CV,GA0C6BnC,MAAM,CAACiC,oBAAD,CA1CnC;AAAA;AAAA,uBA4CuB,KAAKG,aAAL,CAAmBD,gBAAnB,EAAqChD,OAArC,EAA8CkC,WAA9C,CA5CvB;;AAAA;AA4CIW,gBAAAA,UA5CJ;;AAAA;AA+CEA,gBAAAA,UAAU,GAAG,uBAAA7C,OAAO,CAAC6C,UAAR,oEAAoB1C,MAApB,GAA6BH,OAAO,CAAC6C,UAArC,GAAkDA,UAA/D;AA/CF;AAAA,uBAgDyB,KAAKhD,IAAL,CAAUqC,WAAV,EAAuBE,SAAvB,CAhDzB;;AAAA;AAgDQc,gBAAAA,QAhDR;AAAA;AAAA,uBAiDe,+BAAgBA,QAAhB,kCAA8BlD,OAA9B;AAAuC6C,kBAAAA,UAAU,EAAVA;AAAvC,mBAjDf;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;qFA2DA,kBACEC,oBADF,EAEE9C,OAFF,EAGEkC,WAHF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAKMY,oBAAoB,KAAK,CAL/B;AAAA;AAAA;AAAA;;AAAA,kDAaW,EAbX;;AAAA;AAgBQK,gBAAAA,cAhBR,GAgByBb,IAAI,CAACC,GAAL,CACrB,KAAKxC,QAAL,GAAgB+C,oBADK,EAErB,KAAK7C,qBAFgB,CAhBzB;AAAA;AAAA,uBAoByB,KAAKJ,IAAL,CAAUiD,oBAAV,EAAgCK,cAAhC,CApBzB;;AAAA;AAoBQD,gBAAAA,QApBR;AAsBQE,gBAAAA,MAtBR,GAsBiB;AAAChD,kBAAAA,MAAM,EAAE8C,QAAT;AAAmBG,kBAAAA,MAAM,EAAE,CAA3B;AAA8BC,kBAAAA,IAAI,EAAEJ,QAAQ,CAAC/C;AAA7C,iBAtBjB;AAAA;AAAA,uBAuB4B,0BAAWiD,MAAX,EAAmBpD,OAAnB,CAvB5B;;AAAA;AAuBQuD,gBAAAA,WAvBR;AAAA,kDAyBSA,WAAW,CAACV,UAzBrB;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;kFA4BA;AAAA;;AAAA;AAAA;AAAA;AAAA;AACQW,gBAAAA,UADR,GACqBtD,yBAAcC,MAAd,GAAuB,CAD5C;AAAA;AAAA,uBAE2B,KAAKN,IAAL,CAAU,KAAKE,QAAL,GAAgByD,UAA1B,EAAsCA,UAAtC,CAF3B;;AAAA;AAEQC,gBAAAA,UAFR;AAIQpD,gBAAAA,KAJR,GAIgBoD,UAAU,CAACC,KAAX,CAAiB,CAAjB,EAAoBpD,QAApB,EAJhB;;AAAA,sBAKMD,KAAK,KAAKH,wBALhB;AAAA;AAAA;AAAA;;AAAA,sBAMU,IAAIM,KAAJ,6CAA8CH,KAA9C,OANV;;AAAA;AASQsD,gBAAAA,YATR,GASuBF,UAAU,CAACG,YAAX,CAAwB,CAAxB,CATvB;AAUQC,gBAAAA,cAVR,GAUyB,KAAK9D,QAAL,GAAgB4D,YAAhB,GAA+BH,UAVxD;;AAAA,sBAWMK,cAAc,GAAG3D,yBAAcC,MAXrC;AAAA;AAAA;AAAA;;AAAA,sBAYU,IAAIK,KAAJ,iCAAmCqD,cAAnC,EAZV;;AAAA;AAAA;AAAA,uBAe4B,KAAKhE,IAAL,CAAUgE,cAAV,EAA0BF,YAA1B,CAf5B;;AAAA;AAeQG,gBAAAA,WAfR;AAAA,sCAkBqB,mCAAmBA,WAAnB,CAlBrB,EAkBSC,QAlBT,uBAkBSA,QAlBT;AAAA,kDAmBSA,QAnBT;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;kFAjJA,kBAAwB3D,MAAxB;AAAA;AAAA;AAAA;AAAA;AAAA;AACQ4D,gBAAAA,MADR,GACiB,SAATA,MAAS,CAACC,QAAD,EAAmB9D,MAAnB;AAAA,yBACb+D,OAAO,CAACC,OAAR,CAAgB/D,MAAM,CAACsD,KAAP,CAAaO,QAAb,EAAuBA,QAAQ,GAAG9D,MAAlC,CAAhB,CADa;AAAA,iBADjB;;AAGQiE,gBAAAA,OAHR,GAGkB,SAAVA,OAAU;AAAA,yBAAMF,OAAO,CAACC,OAAR,EAAN;AAAA,iBAHlB;;AAAA,kDAIS,IAAIvE,qBAAJ,CAA0BoE,MAA1B,EAAkCI,OAAlC,EAA2ChE,MAAM,CAACD,MAAlD,CAJT;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetSchema} from '../schema/schema';\nimport {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';\nimport {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';\nimport {\n ParquetBuffer,\n ParquetCompression,\n ParquetData,\n PrimitiveType,\n ParquetOptions\n} from '../schema/declare';\nimport {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';\nimport {decodeDataPages, decodePage} from './decoders';\n\nconst DEFAULT_DICTIONARY_SIZE = 1e6;\n\n/**\n * The parquet envelope reader allows direct, unbuffered access to the individual\n * sections of the parquet file, namely the header, footer and the row groups.\n * 
This class is intended for advanced/internal users; if you just want to retrieve\n * rows from a parquet file use the ParquetReader instead\n */\nexport class ParquetEnvelopeReader {\n public read: (position: number, length: number) => Promise<Buffer>;\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n public close: () => Promise<void>;\n public fileSize: number;\n public defaultDictionarySize: number;\n\n static async openBuffer(buffer: Buffer): Promise<ParquetEnvelopeReader> {\n const readFn = (position: number, length: number) =>\n Promise.resolve(buffer.slice(position, position + length));\n const closeFn = () => Promise.resolve();\n return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);\n }\n\n constructor(\n read: (position: number, length: number) => Promise<Buffer>,\n close: () => Promise<void>,\n fileSize: number,\n options?: any\n ) {\n this.read = read;\n this.close = close;\n this.fileSize = fileSize;\n this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;\n }\n\n async readHeader(): Promise<void> {\n const buffer = await this.read(0, PARQUET_MAGIC.length);\n\n const magic = buffer.toString();\n switch (magic) {\n case PARQUET_MAGIC:\n break;\n case PARQUET_MAGIC_ENCRYPTED:\n throw new Error('Encrypted parquet file not supported');\n default:\n throw new Error(`Invalid parquet file (magic=${magic})`);\n }\n }\n\n async readRowGroup(\n schema: ParquetSchema,\n rowGroup: RowGroup,\n columnList: string[][]\n ): Promise<ParquetBuffer> {\n const buffer: ParquetBuffer = {\n rowCount: Number(rowGroup.num_rows),\n columnData: {}\n };\n for (const colChunk of rowGroup.columns) {\n const colMetadata = colChunk.meta_data;\n const colKey = colMetadata?.path_in_schema;\n if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {\n continue; // eslint-disable-line no-continue\n }\n buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);\n }\n return buffer;\n }\n\n /**\n * Do reading of parquet file's column chunk\n * @param schema\n * @param colChunk\n */\n async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {\n if (colChunk.file_path !== undefined && colChunk.file_path !== null) {\n throw new Error('external references are not supported');\n }\n\n const field = schema.findField(colChunk.meta_data?.path_in_schema!);\n const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) 
as any;\n\n if (type !== field.primitiveType) {\n throw new Error(`chunk type not matching schema: ${type}`);\n }\n\n const compression: ParquetCompression = getThriftEnum(\n CompressionCodec,\n colChunk.meta_data?.codec!\n ) as any;\n\n const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);\n let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);\n\n if (!colChunk.file_path) {\n pagesSize = Math.min(\n this.fileSize - pagesOffset,\n Number(colChunk.meta_data?.total_compressed_size)\n );\n }\n\n const options: ParquetOptions = {\n type,\n rLevelMax: field.rLevelMax,\n dLevelMax: field.dLevelMax,\n compression,\n column: field,\n numValues: colChunk.meta_data?.num_values,\n dictionary: []\n };\n\n let dictionary;\n\n const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;\n\n if (dictionaryPageOffset) {\n const dictionaryOffset = Number(dictionaryPageOffset);\n // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.\n dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);\n }\n\n dictionary = options.dictionary?.length ? options.dictionary : dictionary;\n const pagesBuf = await this.read(pagesOffset, pagesSize);\n return await decodeDataPages(pagesBuf, {...options, dictionary});\n }\n\n /**\n * Getting dictionary for allows to flatten values by indices.\n * @param dictionaryPageOffset\n * @param options\n * @param pagesOffset\n * @returns\n */\n async getDictionary(\n dictionaryPageOffset: number,\n options: ParquetOptions,\n pagesOffset: number\n ): Promise<string[]> {\n if (dictionaryPageOffset === 0) {\n // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);\n // pagesBuf = await this.read(pagesOffset, dictionarySize);\n\n // In this case we are working with parquet-mr files format. Problem is described below:\n // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding\n // We need to get dictionary page from column chunk if it exists.\n // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.\n return [];\n }\n\n const dictionarySize = Math.min(\n this.fileSize - dictionaryPageOffset,\n this.defaultDictionarySize\n );\n const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);\n\n const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};\n const decodedPage = await decodePage(cursor, options);\n\n return decodedPage.dictionary!;\n }\n\n async readFooter(): Promise<FileMetaData> {\n const trailerLen = PARQUET_MAGIC.length + 4;\n const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);\n\n const magic = trailerBuf.slice(4).toString();\n if (magic !== PARQUET_MAGIC) {\n throw new Error(`Not a valid parquet file (magic=\"${magic})`);\n }\n\n const metadataSize = trailerBuf.readUInt32LE(0);\n const metadataOffset = this.fileSize - metadataSize - trailerLen;\n if (metadataOffset < PARQUET_MAGIC.length) {\n throw new Error(`Invalid metadata size ${metadataOffset}`);\n }\n\n const metadataBuf = await this.read(metadataOffset, metadataSize);\n // let metadata = new parquet_thrift.FileMetaData();\n // parquet_util.decodeThrift(metadata, metadataBuf);\n const {metadata} = decodeFileMetadata(metadataBuf);\n return metadata;\n }\n}\n"],"file":"parquet-envelope-reader.js"}