@loaders.gl/parquet 3.3.0-alpha.8 → 3.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. package/dist/dist.min.js +17 -26
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/index.js +3 -3
  4. package/dist/es5/index.js.map +1 -1
  5. package/dist/es5/lib/convert-schema-deep.ts.disabled +910 -0
  6. package/dist/es5/lib/parse-parquet.js +49 -25
  7. package/dist/es5/lib/parse-parquet.js.map +1 -1
  8. package/dist/es5/parquet-loader.js +3 -2
  9. package/dist/es5/parquet-loader.js.map +1 -1
  10. package/dist/es5/parquet-wasm-loader.js +1 -1
  11. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  12. package/dist/es5/parquet-wasm-writer.js +1 -1
  13. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  14. package/dist/es5/parquet-writer.js +1 -1
  15. package/dist/es5/parquet-writer.js.map +1 -1
  16. package/dist/es5/parquetjs/compression.js +15 -5
  17. package/dist/es5/parquetjs/compression.js.map +1 -1
  18. package/dist/es5/parquetjs/encoder/{writer.js → parquet-encoder.js} +70 -158
  19. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
  20. package/dist/es5/parquetjs/parser/parquet-reader.js +553 -222
  21. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  22. package/dist/es5/parquetjs/schema/declare.js +3 -1
  23. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  24. package/dist/es5/parquetjs/schema/shred.js +39 -33
  25. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  26. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  27. package/dist/es5/parquetjs/utils/file-utils.js +2 -3
  28. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  29. package/dist/esm/index.js +1 -1
  30. package/dist/esm/index.js.map +1 -1
  31. package/dist/esm/lib/convert-schema-deep.ts.disabled +910 -0
  32. package/dist/esm/lib/parse-parquet.js +6 -12
  33. package/dist/esm/lib/parse-parquet.js.map +1 -1
  34. package/dist/esm/parquet-loader.js +3 -2
  35. package/dist/esm/parquet-loader.js.map +1 -1
  36. package/dist/esm/parquet-wasm-loader.js +1 -1
  37. package/dist/esm/parquet-wasm-loader.js.map +1 -1
  38. package/dist/esm/parquet-wasm-writer.js +1 -1
  39. package/dist/esm/parquet-wasm-writer.js.map +1 -1
  40. package/dist/esm/parquet-writer.js +1 -1
  41. package/dist/esm/parquet-writer.js.map +1 -1
  42. package/dist/esm/parquetjs/compression.js +10 -1
  43. package/dist/esm/parquetjs/compression.js.map +1 -1
  44. package/dist/esm/parquetjs/encoder/{writer.js → parquet-encoder.js} +7 -37
  45. package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
  46. package/dist/esm/parquetjs/parser/parquet-reader.js +158 -72
  47. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  48. package/dist/esm/parquetjs/schema/declare.js +1 -0
  49. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  50. package/dist/esm/parquetjs/schema/shred.js +42 -34
  51. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  52. package/dist/esm/parquetjs/schema/types.js.map +1 -1
  53. package/dist/esm/parquetjs/utils/file-utils.js +1 -1
  54. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
  55. package/dist/index.d.ts +1 -1
  56. package/dist/index.d.ts.map +1 -1
  57. package/dist/index.js +3 -4
  58. package/dist/lib/parse-parquet.d.ts +2 -2
  59. package/dist/lib/parse-parquet.d.ts.map +1 -1
  60. package/dist/lib/parse-parquet.js +24 -12
  61. package/dist/parquet-loader.d.ts +1 -0
  62. package/dist/parquet-loader.d.ts.map +1 -1
  63. package/dist/parquet-loader.js +2 -1
  64. package/dist/parquet-worker.js +15 -24
  65. package/dist/parquet-worker.js.map +3 -3
  66. package/dist/parquetjs/compression.d.ts.map +1 -1
  67. package/dist/parquetjs/compression.js +16 -5
  68. package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +10 -19
  69. package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
  70. package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js} +39 -37
  71. package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
  72. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
  73. package/dist/parquetjs/parser/parquet-reader.js +168 -102
  74. package/dist/parquetjs/schema/declare.d.ts +14 -7
  75. package/dist/parquetjs/schema/declare.d.ts.map +1 -1
  76. package/dist/parquetjs/schema/declare.js +2 -0
  77. package/dist/parquetjs/schema/shred.d.ts +115 -0
  78. package/dist/parquetjs/schema/shred.d.ts.map +1 -1
  79. package/dist/parquetjs/schema/shred.js +161 -43
  80. package/dist/parquetjs/schema/types.d.ts +2 -2
  81. package/dist/parquetjs/schema/types.d.ts.map +1 -1
  82. package/dist/parquetjs/utils/file-utils.d.ts +3 -4
  83. package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
  84. package/dist/parquetjs/utils/file-utils.js +2 -5
  85. package/package.json +7 -5
  86. package/src/index.ts +2 -2
  87. package/src/lib/convert-schema-deep.ts.disabled +910 -0
  88. package/src/lib/parse-parquet.ts +25 -12
  89. package/src/parquet-loader.ts +3 -1
  90. package/src/parquetjs/compression.ts +14 -1
  91. package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +22 -28
  92. package/src/parquetjs/parser/parquet-reader.ts +239 -122
  93. package/src/parquetjs/schema/declare.ts +17 -9
  94. package/src/parquetjs/schema/shred.ts +157 -28
  95. package/src/parquetjs/schema/types.ts +21 -27
  96. package/src/parquetjs/utils/file-utils.ts +3 -4
  97. package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
  98. package/dist/es5/parquetjs/file.js +0 -94
  99. package/dist/es5/parquetjs/file.js.map +0 -1
  100. package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -183
  101. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
  102. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -327
  103. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
  104. package/dist/es5/parquetjs/utils/buffer-utils.js +0 -19
  105. package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
  106. package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
  107. package/dist/esm/parquetjs/file.js +0 -81
  108. package/dist/esm/parquetjs/file.js.map +0 -1
  109. package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -78
  110. package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
  111. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -129
  112. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
  113. package/dist/esm/parquetjs/utils/buffer-utils.js +0 -13
  114. package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
  115. package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
  116. package/dist/parquetjs/file.d.ts +0 -10
  117. package/dist/parquetjs/file.d.ts.map +0 -1
  118. package/dist/parquetjs/file.js +0 -99
  119. package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
  120. package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
  121. package/dist/parquetjs/parser/parquet-cursor.js +0 -74
  122. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
  123. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
  124. package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -136
  125. package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
  126. package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
  127. package/dist/parquetjs/utils/buffer-utils.js +0 -22
  128. package/src/parquetjs/file.ts +0 -90
  129. package/src/parquetjs/parser/parquet-cursor.ts +0 -94
  130. package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
  131. package/src/parquetjs/utils/buffer-utils.ts +0 -18
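The headline change visible in this file list is the rename of the parquetjs encoder module (encoder/writer.* → encoder/parquet-encoder.*) and of its exported class from ParquetWriter to ParquetEncoder, together with the removal of the stream-based ParquetTransformer and of the file, parquet-cursor, parquet-envelope-reader and buffer-utils modules. As a rough, non-authoritative sketch of the renamed API (method names taken from the parquet-encoder.ts source embedded in the source map further down; the import paths, stream type and schema definition below are assumptions, not package documentation):

import type {Writable} from 'stream';
// Hypothetical import paths -- the diff shows the module files, not the package's public exports.
import {ParquetSchema} from '@loaders.gl/parquet/src/parquetjs/schema/schema';
import {ParquetEncoder} from '@loaders.gl/parquet/src/parquetjs/encoder/parquet-encoder';

async function writeRows(rows: Record<string, unknown>[], out: Writable): Promise<void> {
  // Schema definition shape is assumed; see schema/declare.ts in this diff for the field types.
  const schema = new ParquetSchema({id: {type: 'INT64'}, name: {type: 'UTF8'}});
  // 3.3.0 renames ParquetWriter.openStream to ParquetEncoder.openStream; opts now defaults to {}.
  // The source types outputStream as stream.Writable from @loaders.gl/loader-utils; a Node
  // Writable is used here purely for illustration.
  const encoder = await ParquetEncoder.openStream(schema, out);
  for (const row of rows) {
    await encoder.appendRow(row); // rows are buffered until rowGroupSize rows accumulate
  }
  await encoder.close(); // flushes buffered rows and writes the parquet footer
}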
@@ -5,17 +5,12 @@ var _typeof = require("@babel/runtime/helpers/typeof");
5
5
  Object.defineProperty(exports, "__esModule", {
6
6
  value: true
7
7
  });
8
- exports.ParquetWriter = exports.ParquetTransformer = exports.ParquetEnvelopeWriter = void 0;
8
+ exports.ParquetEnvelopeWriter = exports.ParquetEncoder = void 0;
9
9
  var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
10
- var _assertThisInitialized2 = _interopRequireDefault(require("@babel/runtime/helpers/assertThisInitialized"));
11
- var _inherits2 = _interopRequireDefault(require("@babel/runtime/helpers/inherits"));
12
- var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime/helpers/possibleConstructorReturn"));
13
- var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime/helpers/getPrototypeOf"));
14
10
  var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
15
11
  var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
16
12
  var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
17
13
  var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
18
- var _stream = require("stream");
19
14
  var _codecs = require("../codecs");
20
15
  var Compression = _interopRequireWildcard(require("../compression"));
21
16
  var Shred = _interopRequireWildcard(require("../schema/shred"));
@@ -28,8 +23,6 @@ function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj &&
28
23
  function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
29
24
  function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
30
25
  function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
31
- function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = (0, _getPrototypeOf2.default)(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = (0, _getPrototypeOf2.default)(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return (0, _possibleConstructorReturn2.default)(this, result); }; }
32
- function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
33
26
  var PARQUET_MAGIC = 'PAR1';
34
27
 
35
28
  var PARQUET_VERSION = 1;
@@ -39,9 +32,9 @@ var PARQUET_DEFAULT_ROW_GROUP_SIZE = 4096;
39
32
 
40
33
  var PARQUET_RDLVL_TYPE = 'INT32';
41
34
  var PARQUET_RDLVL_ENCODING = 'RLE';
42
- var ParquetWriter = function () {
43
- function ParquetWriter(schema, envelopeWriter, opts) {
44
- (0, _classCallCheck2.default)(this, ParquetWriter);
35
+ var ParquetEncoder = function () {
36
+ function ParquetEncoder(schema, envelopeWriter, opts) {
37
+ (0, _classCallCheck2.default)(this, ParquetEncoder);
45
38
  (0, _defineProperty2.default)(this, "schema", void 0);
46
39
  (0, _defineProperty2.default)(this, "envelopeWriter", void 0);
47
40
  (0, _defineProperty2.default)(this, "rowBuffer", void 0);
@@ -57,7 +50,7 @@ var ParquetWriter = function () {
57
50
 
58
51
  this.writeHeader();
59
52
  }
60
- (0, _createClass2.default)(ParquetWriter, [{
53
+ (0, _createClass2.default)(ParquetEncoder, [{
61
54
  key: "writeHeader",
62
55
  value: function () {
63
56
  var _writeHeader = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee() {
@@ -193,7 +186,7 @@ var ParquetWriter = function () {
193
186
  return (0, _fileUtils.osopen)(path, opts);
194
187
  case 2:
195
188
  outputStream = _context4.sent;
196
- return _context4.abrupt("return", ParquetWriter.openStream(schema, outputStream, opts));
189
+ return _context4.abrupt("return", ParquetEncoder.openStream(schema, outputStream, opts));
197
190
  case 4:
198
191
  case "end":
199
192
  return _context4.stop();
@@ -209,20 +202,20 @@ var ParquetWriter = function () {
209
202
  }, {
210
203
  key: "openStream",
211
204
  value: function () {
212
- var _openStream = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee5(schema, outputStream, opts) {
213
- var envelopeWriter;
205
+ var _openStream = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee5(schema, outputStream) {
206
+ var opts,
207
+ envelopeWriter,
208
+ _args5 = arguments;
214
209
  return _regenerator.default.wrap(function _callee5$(_context5) {
215
210
  while (1) {
216
211
  switch (_context5.prev = _context5.next) {
217
212
  case 0:
218
- if (!opts) {
219
- opts = {};
220
- }
213
+ opts = _args5.length > 2 && _args5[2] !== undefined ? _args5[2] : {};
221
214
  _context5.next = 3;
222
215
  return ParquetEnvelopeWriter.openStream(schema, outputStream, opts);
223
216
  case 3:
224
217
  envelopeWriter = _context5.sent;
225
- return _context5.abrupt("return", new ParquetWriter(schema, envelopeWriter, opts));
218
+ return _context5.abrupt("return", new ParquetEncoder(schema, envelopeWriter, opts));
226
219
  case 5:
227
220
  case "end":
228
221
  return _context5.stop();
@@ -230,15 +223,15 @@ var ParquetWriter = function () {
230
223
  }
231
224
  }, _callee5);
232
225
  }));
233
- function openStream(_x6, _x7, _x8) {
226
+ function openStream(_x6, _x7) {
234
227
  return _openStream.apply(this, arguments);
235
228
  }
236
229
  return openStream;
237
230
  }()
238
231
  }]);
239
- return ParquetWriter;
232
+ return ParquetEncoder;
240
233
  }();
241
- exports.ParquetWriter = ParquetWriter;
234
+ exports.ParquetEncoder = ParquetEncoder;
242
235
  var ParquetEnvelopeWriter = function () {
243
236
  function ParquetEnvelopeWriter(schema, writeFn, closeFn, fileOffset, opts) {
244
237
  (0, _classCallCheck2.default)(this, ParquetEnvelopeWriter);
@@ -303,7 +296,7 @@ var ParquetEnvelopeWriter = function () {
303
296
  }
304
297
  }, _callee6, this);
305
298
  }));
306
- function writeRowGroup(_x9) {
299
+ function writeRowGroup(_x8) {
307
300
  return _writeRowGroup.apply(this, arguments);
308
301
  }
309
302
  return writeRowGroup;
@@ -343,7 +336,7 @@ var ParquetEnvelopeWriter = function () {
343
336
  }
344
337
  }, _callee7);
345
338
  }));
346
- function openStream(_x10, _x11, _x12) {
339
+ function openStream(_x9, _x10, _x11) {
347
340
  return _openStream2.apply(this, arguments);
348
341
  }
349
342
  return openStream;
@@ -352,88 +345,7 @@ var ParquetEnvelopeWriter = function () {
352
345
  return ParquetEnvelopeWriter;
353
346
  }();
354
347
  exports.ParquetEnvelopeWriter = ParquetEnvelopeWriter;
355
- var ParquetTransformer = function (_Transform) {
356
- (0, _inherits2.default)(ParquetTransformer, _Transform);
357
- var _super = _createSuper(ParquetTransformer);
358
- function ParquetTransformer(schema) {
359
- var _this;
360
- var opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
361
- (0, _classCallCheck2.default)(this, ParquetTransformer);
362
- _this = _super.call(this, {
363
- objectMode: true
364
- });
365
- (0, _defineProperty2.default)((0, _assertThisInitialized2.default)(_this), "writer", void 0);
366
- var writeProxy = function (t) {
367
- return function () {
368
- var _ref = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee8(b) {
369
- return _regenerator.default.wrap(function _callee8$(_context8) {
370
- while (1) {
371
- switch (_context8.prev = _context8.next) {
372
- case 0:
373
- t.push(b);
374
- case 1:
375
- case "end":
376
- return _context8.stop();
377
- }
378
- }
379
- }, _callee8);
380
- }));
381
- return function (_x13) {
382
- return _ref.apply(this, arguments);
383
- };
384
- }();
385
- }((0, _assertThisInitialized2.default)(_this));
386
- _this.writer = new ParquetWriter(schema, new ParquetEnvelopeWriter(schema, writeProxy, (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee9() {
387
- return _regenerator.default.wrap(function _callee9$(_context9) {
388
- while (1) {
389
- switch (_context9.prev = _context9.next) {
390
- case 0:
391
- case "end":
392
- return _context9.stop();
393
- }
394
- }
395
- }, _callee9);
396
- })), 0, opts), opts);
397
- return _this;
398
- }
399
348
 
400
- (0, _createClass2.default)(ParquetTransformer, [{
401
- key: "_transform",
402
- value:
403
- function _transform(row, encoding, callback) {
404
- if (row) {
405
- return this.writer.appendRow(row).then(callback);
406
- }
407
- callback();
408
- return Promise.resolve();
409
- }
410
-
411
- }, {
412
- key: "_flush",
413
- value: function () {
414
- var _flush2 = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee10(callback) {
415
- return _regenerator.default.wrap(function _callee10$(_context10) {
416
- while (1) {
417
- switch (_context10.prev = _context10.next) {
418
- case 0:
419
- _context10.next = 2;
420
- return this.writer.close(callback);
421
- case 2:
422
- case "end":
423
- return _context10.stop();
424
- }
425
- }
426
- }, _callee10, this);
427
- }));
428
- function _flush(_x14) {
429
- return _flush2.apply(this, arguments);
430
- }
431
- return _flush;
432
- }()
433
- }]);
434
- return ParquetTransformer;
435
- }(_stream.Transform);
436
- exports.ParquetTransformer = ParquetTransformer;
437
349
  function encodeValues(type, encoding, values, opts) {
438
350
  if (!(encoding in _codecs.PARQUET_CODECS)) {
439
351
  throw new Error("invalid encoding: ".concat(encoding));
@@ -441,15 +353,15 @@ function encodeValues(type, encoding, values, opts) {
441
353
  return _codecs.PARQUET_CODECS[encoding].encodeValues(type, values, opts);
442
354
  }
443
355
 
444
- function encodeDataPage(_x15, _x16) {
356
+ function encodeDataPage(_x12, _x13) {
445
357
  return _encodeDataPage.apply(this, arguments);
446
358
  }
447
359
  function _encodeDataPage() {
448
- _encodeDataPage = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee11(column, data) {
360
+ _encodeDataPage = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee8(column, data) {
449
361
  var rLevelsBuf, dLevelsBuf, valuesBuf, dataBuf, compressedBuf, header, headerBuf, page;
450
- return _regenerator.default.wrap(function _callee11$(_context11) {
362
+ return _regenerator.default.wrap(function _callee8$(_context8) {
451
363
  while (1) {
452
- switch (_context11.prev = _context11.next) {
364
+ switch (_context8.prev = _context8.next) {
453
365
  case 0:
454
366
  rLevelsBuf = Buffer.alloc(0);
455
367
  if (column.rLevelMax > 0) {
@@ -469,10 +381,10 @@ function _encodeDataPage() {
469
381
  bitWidth: column.typeLength
470
382
  });
471
383
  dataBuf = Buffer.concat([rLevelsBuf, dLevelsBuf, valuesBuf]);
472
- _context11.next = 8;
384
+ _context8.next = 8;
473
385
  return Compression.deflate(column.compression, dataBuf);
474
386
  case 8:
475
- compressedBuf = _context11.sent;
387
+ compressedBuf = _context8.sent;
476
388
  header = new _parquetThrift.PageHeader({
477
389
  type: _parquetThrift.PageType.DATA_PAGE,
478
390
  data_page_header: new _parquetThrift.DataPageHeader({
@@ -487,38 +399,38 @@ function _encodeDataPage() {
487
399
  });
488
400
  headerBuf = (0, _readUtils.serializeThrift)(header);
489
401
  page = Buffer.concat([headerBuf, compressedBuf]);
490
- return _context11.abrupt("return", {
402
+ return _context8.abrupt("return", {
491
403
  header: header,
492
404
  headerSize: headerBuf.length,
493
405
  page: page
494
406
  });
495
407
  case 13:
496
408
  case "end":
497
- return _context11.stop();
409
+ return _context8.stop();
498
410
  }
499
411
  }
500
- }, _callee11);
412
+ }, _callee8);
501
413
  }));
502
414
  return _encodeDataPage.apply(this, arguments);
503
415
  }
504
- function encodeDataPageV2(_x17, _x18, _x19) {
416
+ function encodeDataPageV2(_x14, _x15, _x16) {
505
417
  return _encodeDataPageV.apply(this, arguments);
506
418
  }
507
419
  function _encodeDataPageV() {
508
- _encodeDataPageV = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee12(column, data, rowCount) {
420
+ _encodeDataPageV = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee9(column, data, rowCount) {
509
421
  var valuesBuf, compressedBuf, rLevelsBuf, dLevelsBuf, header, headerBuf, page;
510
- return _regenerator.default.wrap(function _callee12$(_context12) {
422
+ return _regenerator.default.wrap(function _callee9$(_context9) {
511
423
  while (1) {
512
- switch (_context12.prev = _context12.next) {
424
+ switch (_context9.prev = _context9.next) {
513
425
  case 0:
514
426
  valuesBuf = encodeValues(column.primitiveType, column.encoding, data.values, {
515
427
  typeLength: column.typeLength,
516
428
  bitWidth: column.typeLength
517
429
  });
518
- _context12.next = 3;
430
+ _context9.next = 3;
519
431
  return Compression.deflate(column.compression, valuesBuf);
520
432
  case 3:
521
- compressedBuf = _context12.sent;
433
+ compressedBuf = _context9.sent;
522
434
  rLevelsBuf = Buffer.alloc(0);
523
435
  if (column.rLevelMax > 0) {
524
436
  rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {
@@ -550,51 +462,51 @@ function _encodeDataPageV() {
550
462
  });
551
463
  headerBuf = (0, _readUtils.serializeThrift)(header);
552
464
  page = Buffer.concat([headerBuf, rLevelsBuf, dLevelsBuf, compressedBuf]);
553
- return _context12.abrupt("return", {
465
+ return _context9.abrupt("return", {
554
466
  header: header,
555
467
  headerSize: headerBuf.length,
556
468
  page: page
557
469
  });
558
470
  case 12:
559
471
  case "end":
560
- return _context12.stop();
472
+ return _context9.stop();
561
473
  }
562
474
  }
563
- }, _callee12);
475
+ }, _callee9);
564
476
  }));
565
477
  return _encodeDataPageV.apply(this, arguments);
566
478
  }
567
- function encodeColumnChunk(_x20, _x21, _x22, _x23) {
479
+ function encodeColumnChunk(_x17, _x18, _x19, _x20) {
568
480
  return _encodeColumnChunk.apply(this, arguments);
569
481
  }
570
482
  function _encodeColumnChunk() {
571
- _encodeColumnChunk = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee13(column, buffer, offset, opts) {
483
+ _encodeColumnChunk = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee10(column, buffer, offset, opts) {
572
484
  var data, baseOffset, pageBuf, total_uncompressed_size, total_compressed_size, result, metadata, metadataOffset, body;
573
- return _regenerator.default.wrap(function _callee13$(_context13) {
485
+ return _regenerator.default.wrap(function _callee10$(_context10) {
574
486
  while (1) {
575
- switch (_context13.prev = _context13.next) {
487
+ switch (_context10.prev = _context10.next) {
576
488
  case 0:
577
489
  data = buffer.columnData[column.path.join()];
578
490
  baseOffset = (opts.baseOffset || 0) + offset;
579
491
  total_uncompressed_size = 0;
580
492
  total_compressed_size = 0;
581
493
  if (!opts.useDataPageV2) {
582
- _context13.next = 10;
494
+ _context10.next = 10;
583
495
  break;
584
496
  }
585
- _context13.next = 7;
497
+ _context10.next = 7;
586
498
  return encodeDataPageV2(column, data, buffer.rowCount);
587
499
  case 7:
588
- _context13.t0 = _context13.sent;
589
- _context13.next = 13;
500
+ _context10.t0 = _context10.sent;
501
+ _context10.next = 13;
590
502
  break;
591
503
  case 10:
592
- _context13.next = 12;
504
+ _context10.next = 12;
593
505
  return encodeDataPage(column, data);
594
506
  case 12:
595
- _context13.t0 = _context13.sent;
507
+ _context10.t0 = _context10.sent;
596
508
  case 13:
597
- result = _context13.t0;
509
+ result = _context10.t0;
598
510
  pageBuf = result.page;
599
511
  total_uncompressed_size += result.header.uncompressed_page_size + result.headerSize;
600
512
  total_compressed_size += result.header.compressed_page_size + result.headerSize;
@@ -613,29 +525,29 @@ function _encodeColumnChunk() {
613
525
 
614
526
  metadataOffset = baseOffset + pageBuf.length;
615
527
  body = Buffer.concat([pageBuf, (0, _readUtils.serializeThrift)(metadata)]);
616
- return _context13.abrupt("return", {
528
+ return _context10.abrupt("return", {
617
529
  body: body,
618
530
  metadata: metadata,
619
531
  metadataOffset: metadataOffset
620
532
  });
621
533
  case 23:
622
534
  case "end":
623
- return _context13.stop();
535
+ return _context10.stop();
624
536
  }
625
537
  }
626
- }, _callee13);
538
+ }, _callee10);
627
539
  }));
628
540
  return _encodeColumnChunk.apply(this, arguments);
629
541
  }
630
- function encodeRowGroup(_x24, _x25, _x26) {
542
+ function encodeRowGroup(_x21, _x22, _x23) {
631
543
  return _encodeRowGroup.apply(this, arguments);
632
544
  }
633
545
  function _encodeRowGroup() {
634
- _encodeRowGroup = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee14(schema, data, opts) {
546
+ _encodeRowGroup = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee11(schema, data, opts) {
635
547
  var metadata, body, _iterator2, _step2, field, cchunkData, cchunk;
636
- return _regenerator.default.wrap(function _callee14$(_context14) {
548
+ return _regenerator.default.wrap(function _callee11$(_context11) {
637
549
  while (1) {
638
- switch (_context14.prev = _context14.next) {
550
+ switch (_context11.prev = _context11.next) {
639
551
  case 0:
640
552
  metadata = new _parquetThrift.RowGroup({
641
553
  num_rows: data.rowCount,
@@ -644,24 +556,24 @@ function _encodeRowGroup() {
644
556
  });
645
557
  body = Buffer.alloc(0);
646
558
  _iterator2 = _createForOfIteratorHelper(schema.fieldList);
647
- _context14.prev = 3;
559
+ _context11.prev = 3;
648
560
  _iterator2.s();
649
561
  case 5:
650
562
  if ((_step2 = _iterator2.n()).done) {
651
- _context14.next = 18;
563
+ _context11.next = 18;
652
564
  break;
653
565
  }
654
566
  field = _step2.value;
655
567
  if (!field.isNested) {
656
- _context14.next = 9;
568
+ _context11.next = 9;
657
569
  break;
658
570
  }
659
- return _context14.abrupt("continue", 16);
571
+ return _context11.abrupt("continue", 16);
660
572
  case 9:
661
- _context14.next = 11;
573
+ _context11.next = 11;
662
574
  return encodeColumnChunk(field, data, body.length, opts);
663
575
  case 11:
664
- cchunkData = _context14.sent;
576
+ cchunkData = _context11.sent;
665
577
  cchunk = new _parquetThrift.ColumnChunk({
666
578
  file_offset: cchunkData.metadataOffset,
667
579
  meta_data: cchunkData.metadata
@@ -670,30 +582,30 @@ function _encodeRowGroup() {
670
582
  metadata.total_byte_size = new _nodeInt.default(Number(metadata.total_byte_size) + cchunkData.body.length);
671
583
  body = Buffer.concat([body, cchunkData.body]);
672
584
  case 16:
673
- _context14.next = 5;
585
+ _context11.next = 5;
674
586
  break;
675
587
  case 18:
676
- _context14.next = 23;
588
+ _context11.next = 23;
677
589
  break;
678
590
  case 20:
679
- _context14.prev = 20;
680
- _context14.t0 = _context14["catch"](3);
681
- _iterator2.e(_context14.t0);
591
+ _context11.prev = 20;
592
+ _context11.t0 = _context11["catch"](3);
593
+ _iterator2.e(_context11.t0);
682
594
  case 23:
683
- _context14.prev = 23;
595
+ _context11.prev = 23;
684
596
  _iterator2.f();
685
- return _context14.finish(23);
597
+ return _context11.finish(23);
686
598
  case 26:
687
- return _context14.abrupt("return", {
599
+ return _context11.abrupt("return", {
688
600
  body: body,
689
601
  metadata: metadata
690
602
  });
691
603
  case 27:
692
604
  case "end":
693
- return _context14.stop();
605
+ return _context11.stop();
694
606
  }
695
607
  }
696
- }, _callee14, null, [[3, 20, 23, 26]]);
608
+ }, _callee11, null, [[3, 20, 23, 26]]);
697
609
  }));
698
610
  return _encodeRowGroup.apply(this, arguments);
699
611
  }
@@ -754,4 +666,4 @@ function encodeFooter(schema, rowCount, rowGroups, userMetadata) {
754
666
  footerEncoded.write(PARQUET_MAGIC, metadataEncoded.length + 4);
755
667
  return footerEncoded;
756
668
  }
757
- //# sourceMappingURL=writer.js.map
669
+ //# sourceMappingURL=parquet-encoder.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"parquet-encoder.js","names":["PARQUET_MAGIC","PARQUET_VERSION","PARQUET_DEFAULT_PAGE_SIZE","PARQUET_DEFAULT_ROW_GROUP_SIZE","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING","ParquetEncoder","schema","envelopeWriter","opts","rowBuffer","rowGroupSize","closed","userMetadata","writeHeader","close","row","Error","Shred","shredRecord","rowCount","callback","writeFooter","key","value","String","cnt","setPageSize","path","osopen","outputStream","openStream","ParquetEnvelopeWriter","writeFn","closeFn","fileOffset","write","offset","rowGroups","pageSize","useDataPageV2","Boolean","buf","length","writeSection","Buffer","from","records","encodeRowGroup","baseOffset","rgroup","push","metadata","body","encodeFooter","oswrite","bind","undefined","osclose","encodeValues","type","encoding","values","PARQUET_CODECS","encodeDataPage","column","data","rLevelsBuf","alloc","rLevelMax","rlevels","bitWidth","getBitWidth","dLevelsBuf","dLevelMax","dlevels","valuesBuf","primitiveType","typeLength","dataBuf","concat","Compression","deflate","compression","compressedBuf","header","PageHeader","PageType","DATA_PAGE","data_page_header","DataPageHeader","num_values","count","Encoding","definition_level_encoding","repetition_level_encoding","uncompressed_page_size","compressed_page_size","headerBuf","serializeThrift","page","headerSize","encodeDataPageV2","disableEnvelope","DATA_PAGE_V2","data_page_header_v2","DataPageHeaderV2","num_nulls","num_rows","definition_levels_byte_length","repetition_levels_byte_length","is_compressed","encodeColumnChunk","buffer","columnData","join","total_uncompressed_size","total_compressed_size","result","pageBuf","ColumnMetaData","path_in_schema","data_page_offset","encodings","Type","codec","CompressionCodec","metadataOffset","RowGroup","columns","total_byte_size","fieldList","field","isNested","cchunkData","cchunk","ColumnChunk","file_offset","meta_data","Int64","Number","FileMetaData","version","created_by","row_groups","key_value_metadata","kv","KeyValue","schemaRoot","SchemaElement","name","num_children","Object","keys","fields","relt","FieldRepetitionType","repetitionType","schemaElem","repetition_type","fieldCount","originalType","converted_type","ConvertedType","type_length","metadataEncoded","footerEncoded","copy","writeUInt32LE"],"sources":["../../../../src/parquetjs/encoder/parquet-encoder.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\nimport {stream} from '@loaders.gl/loader-utils';\nimport {ParquetCodecOptions, PARQUET_CODECS} from '../codecs';\nimport * as Compression from '../compression';\nimport {\n ParquetBuffer,\n ParquetCodec,\n ParquetData,\n ParquetField,\n PrimitiveType\n} from '../schema/declare';\nimport {ParquetSchema} from '../schema/schema';\nimport * as Shred from '../schema/shred';\nimport {\n ColumnChunk,\n ColumnMetaData,\n CompressionCodec,\n ConvertedType,\n DataPageHeader,\n DataPageHeaderV2,\n Encoding,\n FieldRepetitionType,\n FileMetaData,\n KeyValue,\n PageHeader,\n PageType,\n RowGroup,\n SchemaElement,\n Type\n} from '../parquet-thrift';\nimport {osopen, oswrite, osclose} from '../utils/file-utils';\nimport {getBitWidth, serializeThrift} from '../utils/read-utils';\nimport Int64 from 'node-int64';\n\n/**\n * Parquet File Magic String\n */\nconst PARQUET_MAGIC = 'PAR1';\n\n/**\n * Parquet File Format Version\n */\nconst PARQUET_VERSION = 1;\n\n/**\n * Default Page and Row Group sizes\n */\nconst 
PARQUET_DEFAULT_PAGE_SIZE = 8192;\nconst PARQUET_DEFAULT_ROW_GROUP_SIZE = 4096;\n\n/**\n * Repetition and Definition Level Encoding\n */\nconst PARQUET_RDLVL_TYPE = 'INT32';\nconst PARQUET_RDLVL_ENCODING = 'RLE';\n\nexport interface ParquetEncoderOptions {\n baseOffset?: number;\n rowGroupSize?: number;\n pageSize?: number;\n useDataPageV2?: boolean;\n\n // Write Stream Options\n flags?: string;\n encoding?: string;\n fd?: number;\n mode?: number;\n autoClose?: boolean;\n start?: number;\n}\n\n/**\n * Write a parquet file to an output stream. The ParquetEncoder will perform\n * buffering/batching for performance, so close() must be called after all rows\n * are written.\n */\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nexport class ParquetEncoder<T> {\n /**\n * Convenience method to create a new buffered parquet writer that writes to\n * the specified file\n */\n static async openFile<T>(\n schema: ParquetSchema,\n path: string,\n opts?: ParquetEncoderOptions\n ): Promise<ParquetEncoder<T>> {\n const outputStream = await osopen(path, opts);\n return ParquetEncoder.openStream(schema, outputStream, opts);\n }\n\n /**\n * Convenience method to create a new buffered parquet writer that writes to\n * the specified stream\n */\n static async openStream<T>(\n schema: ParquetSchema,\n outputStream: stream.Writable,\n opts: ParquetEncoderOptions = {}\n ): Promise<ParquetEncoder<T>> {\n const envelopeWriter = await ParquetEnvelopeWriter.openStream(schema, outputStream, opts);\n return new ParquetEncoder(schema, envelopeWriter, opts);\n }\n\n public schema: ParquetSchema;\n public envelopeWriter: ParquetEnvelopeWriter;\n public rowBuffer: ParquetBuffer;\n public rowGroupSize: number;\n public closed: boolean;\n public userMetadata: Record<string, string>;\n\n /**\n * Create a new buffered parquet writer for a given envelope writer\n */\n constructor(\n schema: ParquetSchema,\n envelopeWriter: ParquetEnvelopeWriter,\n opts: ParquetEncoderOptions\n ) {\n this.schema = schema;\n this.envelopeWriter = envelopeWriter;\n // @ts-ignore Row buffer typings...\n this.rowBuffer = {};\n this.rowGroupSize = opts.rowGroupSize || PARQUET_DEFAULT_ROW_GROUP_SIZE;\n this.closed = false;\n this.userMetadata = {};\n\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.writeHeader();\n }\n\n async writeHeader(): Promise<void> {\n // TODO - better not mess with promises in the constructor\n try {\n await this.envelopeWriter.writeHeader();\n } catch (err) {\n await this.envelopeWriter.close();\n throw err;\n }\n }\n\n /**\n * Append a single row to the parquet file. Rows are buffered in memory until\n * rowGroupSize rows are in the buffer or close() is called\n */\n async appendRow<T>(row: T): Promise<void> {\n if (this.closed) {\n throw new Error('writer was closed');\n }\n Shred.shredRecord(this.schema, row, this.rowBuffer);\n if (this.rowBuffer.rowCount >= this.rowGroupSize) {\n // @ts-ignore\n this.rowBuffer = {};\n }\n }\n\n /**\n * Finish writing the parquet file and commit the footer to disk. This method\n * MUST be called after you are finished adding rows. 
You must not call this\n * method twice on the same object or add any rows after the close() method has\n * been called\n */\n async close(callback?: () => void): Promise<void> {\n if (this.closed) {\n throw new Error('writer was closed');\n }\n\n this.closed = true;\n\n if (this.rowBuffer.rowCount > 0 || this.rowBuffer.rowCount >= this.rowGroupSize) {\n // @ts-ignore\n this.rowBuffer = {};\n }\n\n await this.envelopeWriter.writeFooter(this.userMetadata);\n await this.envelopeWriter.close();\n // this.envelopeWriter = null;\n\n if (callback) {\n callback();\n }\n }\n\n /**\n * Add key<>value metadata to the file\n */\n setMetadata(key: string, value: string): void {\n // TODO: value to be any, obj -> JSON\n this.userMetadata[String(key)] = String(value);\n }\n\n /**\n * Set the parquet row group size. This values controls the maximum number\n * of rows that are buffered in memory at any given time as well as the number\n * of rows that are co-located on disk. A higher value is generally better for\n * read-time I/O performance at the tradeoff of write-time memory usage.\n */\n setRowGroupSize(cnt: number): void {\n this.rowGroupSize = cnt;\n }\n\n /**\n * Set the parquet data page size. The data page size controls the maximum\n * number of column values that are written to disk as a consecutive array\n */\n setPageSize(cnt: number): void {\n this.envelopeWriter.setPageSize(cnt);\n }\n}\n\n/**\n * Create a parquet file from a schema and a number of row groups. This class\n * performs direct, unbuffered writes to the underlying output stream and is\n * intendend for advanced and internal users; the writeXXX methods must be\n * called in the correct order to produce a valid file.\n */\nexport class ParquetEnvelopeWriter {\n /**\n * Create a new parquet envelope writer that writes to the specified stream\n */\n static async openStream(\n schema: ParquetSchema,\n outputStream: stream.Writable,\n opts: ParquetEncoderOptions\n ): Promise<ParquetEnvelopeWriter> {\n const writeFn = oswrite.bind(undefined, outputStream);\n const closeFn = osclose.bind(undefined, outputStream);\n return new ParquetEnvelopeWriter(schema, writeFn, closeFn, 0, opts);\n }\n\n public schema: ParquetSchema;\n public write: (buf: Buffer) => Promise<void>;\n public close: () => Promise<void>;\n public offset: number;\n public rowCount: number;\n public rowGroups: RowGroup[];\n public pageSize: number;\n public useDataPageV2: boolean;\n\n constructor(\n schema: ParquetSchema,\n writeFn: (buf: Buffer) => Promise<void>,\n closeFn: () => Promise<void>,\n fileOffset: number,\n opts: ParquetEncoderOptions\n ) {\n this.schema = schema;\n this.write = writeFn;\n this.close = closeFn;\n this.offset = fileOffset;\n this.rowCount = 0;\n this.rowGroups = [];\n this.pageSize = opts.pageSize || PARQUET_DEFAULT_PAGE_SIZE;\n this.useDataPageV2 = 'useDataPageV2' in opts ? Boolean(opts.useDataPageV2) : false;\n }\n\n writeSection(buf: Buffer): Promise<void> {\n this.offset += buf.length;\n return this.write(buf);\n }\n\n /**\n * Encode the parquet file header\n */\n writeHeader(): Promise<void> {\n return this.writeSection(Buffer.from(PARQUET_MAGIC));\n }\n\n /**\n * Encode a parquet row group. 
The records object should be created using the\n * shredRecord method\n */\n async writeRowGroup(records: ParquetBuffer): Promise<void> {\n const rgroup = await encodeRowGroup(this.schema, records, {\n baseOffset: this.offset,\n pageSize: this.pageSize,\n useDataPageV2: this.useDataPageV2\n });\n\n this.rowCount += records.rowCount;\n this.rowGroups.push(rgroup.metadata);\n return await this.writeSection(rgroup.body);\n }\n\n /**\n * Write the parquet file footer\n */\n writeFooter(userMetadata: Record<string, string>): Promise<void> {\n if (!userMetadata) {\n // tslint:disable-next-line:no-parameter-reassignment\n userMetadata = {};\n }\n\n return this.writeSection(\n encodeFooter(this.schema, this.rowCount, this.rowGroups, userMetadata)\n );\n }\n\n /**\n * Set the parquet data page size. The data page size controls the maximum\n * number of column values that are written to disk as a consecutive array\n */\n setPageSize(cnt: number): void {\n this.pageSize = cnt;\n }\n}\n\n/**\n * Create a parquet transform stream\nexport class ParquetTransformer<T> extends stream.Transform {\n public writer: ParquetEncoder<T>;\n\n constructor(schema: ParquetSchema, opts: ParquetEncoderOptions = {}) {\n super({objectMode: true});\n\n const writeProxy = (function (t: ParquetTransformer<any>) {\n return async function (b: any): Promise<void> {\n t.push(b);\n };\n })(this);\n\n this.writer = new ParquetEncoder(\n schema,\n new ParquetEnvelopeWriter(schema, writeProxy, async () => {}, 0, opts),\n opts\n );\n }\n\n // tslint:disable-next-line:function-name\n _transform(row: any, encoding: string, callback: (val?: any) => void): Promise<void> {\n if (row) {\n return this.writer.appendRow(row).then(callback);\n }\n callback();\n return Promise.resolve();\n }\n\n // tslint:disable-next-line:function-name\n async _flush(callback: (val?: any) => void) {\n await this.writer.close(callback);\n }\n}\n */\n\n/**\n * Encode a consecutive array of data using one of the parquet encodings\n */\nfunction encodeValues(\n type: PrimitiveType,\n encoding: ParquetCodec,\n values: any[],\n opts: ParquetCodecOptions\n) {\n if (!(encoding in PARQUET_CODECS)) {\n throw new Error(`invalid encoding: ${encoding}`);\n }\n return PARQUET_CODECS[encoding].encodeValues(type, values, opts);\n}\n\n/**\n * Encode a parquet data page\n */\nasync function encodeDataPage(\n column: ParquetField,\n data: ParquetData\n): Promise<{\n header: PageHeader;\n headerSize: number;\n page: Buffer;\n}> {\n /* encode repetition and definition levels */\n let rLevelsBuf = Buffer.alloc(0);\n if (column.rLevelMax > 0) {\n rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {\n bitWidth: getBitWidth(column.rLevelMax)\n // disableEnvelope: false\n });\n }\n\n let dLevelsBuf = Buffer.alloc(0);\n if (column.dLevelMax > 0) {\n dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {\n bitWidth: getBitWidth(column.dLevelMax)\n // disableEnvelope: false\n });\n }\n\n /* encode values */\n const valuesBuf = encodeValues(column.primitiveType!, column.encoding!, data.values, {\n typeLength: column.typeLength,\n bitWidth: column.typeLength\n });\n\n const dataBuf = Buffer.concat([rLevelsBuf, dLevelsBuf, valuesBuf]);\n\n // compression = column.compression === 'UNCOMPRESSED' ? 
(compression || 'UNCOMPRESSED') : column.compression;\n const compressedBuf = await Compression.deflate(column.compression!, dataBuf);\n\n /* build page header */\n const header = new PageHeader({\n type: PageType.DATA_PAGE,\n data_page_header: new DataPageHeader({\n num_values: data.count,\n encoding: Encoding[column.encoding!] as any,\n definition_level_encoding: Encoding[PARQUET_RDLVL_ENCODING], // [PARQUET_RDLVL_ENCODING],\n repetition_level_encoding: Encoding[PARQUET_RDLVL_ENCODING] // [PARQUET_RDLVL_ENCODING]\n }),\n uncompressed_page_size: dataBuf.length,\n compressed_page_size: compressedBuf.length\n });\n\n /* concat page header, repetition and definition levels and values */\n const headerBuf = serializeThrift(header);\n const page = Buffer.concat([headerBuf, compressedBuf]);\n\n return {header, headerSize: headerBuf.length, page};\n}\n\n/**\n * Encode a parquet data page (v2)\n */\nasync function encodeDataPageV2(\n column: ParquetField,\n data: ParquetData,\n rowCount: number\n): Promise<{\n header: PageHeader;\n headerSize: number;\n page: Buffer;\n}> {\n /* encode values */\n const valuesBuf = encodeValues(column.primitiveType!, column.encoding!, data.values, {\n typeLength: column.typeLength,\n bitWidth: column.typeLength\n });\n\n // compression = column.compression === 'UNCOMPRESSED' ? (compression || 'UNCOMPRESSED') : column.compression;\n const compressedBuf = await Compression.deflate(column.compression!, valuesBuf);\n\n /* encode repetition and definition levels */\n let rLevelsBuf = Buffer.alloc(0);\n if (column.rLevelMax > 0) {\n rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {\n bitWidth: getBitWidth(column.rLevelMax),\n disableEnvelope: true\n });\n }\n\n let dLevelsBuf = Buffer.alloc(0);\n if (column.dLevelMax > 0) {\n dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {\n bitWidth: getBitWidth(column.dLevelMax),\n disableEnvelope: true\n });\n }\n\n /* build page header */\n const header = new PageHeader({\n type: PageType.DATA_PAGE_V2,\n data_page_header_v2: new DataPageHeaderV2({\n num_values: data.count,\n num_nulls: data.count - data.values.length,\n num_rows: rowCount,\n encoding: Encoding[column.encoding!] as any,\n definition_levels_byte_length: dLevelsBuf.length,\n repetition_levels_byte_length: rLevelsBuf.length,\n is_compressed: column.compression !== 'UNCOMPRESSED'\n }),\n uncompressed_page_size: rLevelsBuf.length + dLevelsBuf.length + valuesBuf.length,\n compressed_page_size: rLevelsBuf.length + dLevelsBuf.length + compressedBuf.length\n });\n\n /* concat page header, repetition and definition levels and values */\n const headerBuf = serializeThrift(header);\n const page = Buffer.concat([headerBuf, rLevelsBuf, dLevelsBuf, compressedBuf]);\n return {header, headerSize: headerBuf.length, page};\n}\n\n/**\n * Encode an array of values into a parquet column chunk\n */\nasync function encodeColumnChunk(\n column: ParquetField,\n buffer: ParquetBuffer,\n offset: number,\n opts: ParquetEncoderOptions\n): Promise<{\n body: Buffer;\n metadata: ColumnMetaData;\n metadataOffset: number;\n}> {\n const data = buffer.columnData[column.path.join()];\n const baseOffset = (opts.baseOffset || 0) + offset;\n /* encode data page(s) */\n // const pages: Buffer[] = [];\n let pageBuf: Buffer;\n // tslint:disable-next-line:variable-name\n let total_uncompressed_size = 0;\n // tslint:disable-next-line:variable-name\n let total_compressed_size = 0;\n {\n const result = opts.useDataPageV2\n ? 
await encodeDataPageV2(column, data, buffer.rowCount)\n : await encodeDataPage(column, data);\n // pages.push(result.page);\n pageBuf = result.page;\n total_uncompressed_size += result.header.uncompressed_page_size + result.headerSize;\n total_compressed_size += result.header.compressed_page_size + result.headerSize;\n }\n\n // const pagesBuf = Buffer.concat(pages);\n // const compression = column.compression === 'UNCOMPRESSED' ? (opts.compression || 'UNCOMPRESSED') : column.compression;\n\n /* prepare metadata header */\n const metadata = new ColumnMetaData({\n path_in_schema: column.path,\n num_values: data.count,\n data_page_offset: baseOffset,\n encodings: [],\n total_uncompressed_size, // : pagesBuf.length,\n total_compressed_size,\n type: Type[column.primitiveType!],\n codec: CompressionCodec[column.compression!]\n });\n\n /* list encodings */\n metadata.encodings.push(Encoding[PARQUET_RDLVL_ENCODING]);\n metadata.encodings.push(Encoding[column.encoding!]);\n\n /* concat metadata header and data pages */\n const metadataOffset = baseOffset + pageBuf.length;\n const body = Buffer.concat([pageBuf, serializeThrift(metadata)]);\n return {body, metadata, metadataOffset};\n}\n\n/**\n * Encode a list of column values into a parquet row group\n */\nasync function encodeRowGroup(\n schema: ParquetSchema,\n data: ParquetBuffer,\n opts: ParquetEncoderOptions\n): Promise<{\n body: Buffer;\n metadata: RowGroup;\n}> {\n const metadata = new RowGroup({\n num_rows: data.rowCount,\n columns: [],\n total_byte_size: 0\n });\n\n let body = Buffer.alloc(0);\n for (const field of schema.fieldList) {\n if (field.isNested) {\n continue; // eslint-disable-line no-continue\n }\n\n const cchunkData = await encodeColumnChunk(field, data, body.length, opts);\n\n const cchunk = new ColumnChunk({\n file_offset: cchunkData.metadataOffset,\n meta_data: cchunkData.metadata\n });\n\n metadata.columns.push(cchunk);\n metadata.total_byte_size = new Int64(Number(metadata.total_byte_size) + cchunkData.body.length);\n\n body = Buffer.concat([body, cchunkData.body]);\n }\n\n return {body, metadata};\n}\n\n/**\n * Encode a parquet file metadata footer\n */\nfunction encodeFooter(\n schema: ParquetSchema,\n rowCount: number,\n rowGroups: RowGroup[],\n userMetadata: Record<string, string>\n): Buffer {\n const metadata = new FileMetaData({\n version: PARQUET_VERSION,\n created_by: 'parquets',\n num_rows: rowCount,\n row_groups: rowGroups,\n schema: [],\n key_value_metadata: []\n });\n\n for (const key in userMetadata) {\n const kv = new KeyValue({\n key,\n value: userMetadata[key]\n });\n metadata.key_value_metadata?.push?.(kv);\n }\n\n {\n const schemaRoot = new SchemaElement({\n name: 'root',\n num_children: Object.keys(schema.fields).length\n });\n metadata.schema.push(schemaRoot);\n }\n\n for (const field of schema.fieldList) {\n const relt = FieldRepetitionType[field.repetitionType];\n const schemaElem = new SchemaElement({\n name: field.name,\n repetition_type: relt as any\n });\n\n if (field.isNested) {\n schemaElem.num_children = field.fieldCount;\n } else {\n schemaElem.type = Type[field.primitiveType!] 
as Type;\n }\n\n if (field.originalType) {\n schemaElem.converted_type = ConvertedType[field.originalType] as ConvertedType;\n }\n\n schemaElem.type_length = field.typeLength;\n\n metadata.schema.push(schemaElem);\n }\n\n const metadataEncoded = serializeThrift(metadata);\n const footerEncoded = Buffer.alloc(metadataEncoded.length + 8);\n metadataEncoded.copy(footerEncoded);\n footerEncoded.writeUInt32LE(metadataEncoded.length, metadataEncoded.length);\n footerEncoded.write(PARQUET_MAGIC, metadataEncoded.length + 4);\n return footerEncoded;\n}\n"],"mappings":";;;;;;;;;;;;;AAGA;AACA;AASA;AACA;AAiBA;AACA;AACA;AAA+B;AAAA;AAAA;AAAA;AAAA;AAK/B,IAAMA,aAAa,GAAG,MAAM;;AAK5B,IAAMC,eAAe,GAAG,CAAC;;AAKzB,IAAMC,yBAAyB,GAAG,IAAI;AACtC,IAAMC,8BAA8B,GAAG,IAAI;;AAK3C,IAAMC,kBAAkB,GAAG,OAAO;AAClC,IAAMC,sBAAsB,GAAG,KAAK;AAAC,IAuBxBC,cAAc;EAqCzB,wBACEC,MAAqB,EACrBC,cAAqC,EACrCC,IAA2B,EAC3B;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IACA,IAAI,CAACF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,cAAc,GAAGA,cAAc;IAEpC,IAAI,CAACE,SAAS,GAAG,CAAC,CAAC;IACnB,IAAI,CAACC,YAAY,GAAGF,IAAI,CAACE,YAAY,IAAIR,8BAA8B;IACvE,IAAI,CAACS,MAAM,GAAG,KAAK;IACnB,IAAI,CAACC,YAAY,GAAG,CAAC,CAAC;;IAGtB,IAAI,CAACC,WAAW,EAAE;EACpB;EAAC;IAAA;IAAA;MAAA,6EAED;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA;gBAAA,OAGU,IAAI,CAACN,cAAc,CAACM,WAAW,EAAE;cAAA;gBAAA;gBAAA;cAAA;gBAAA;gBAAA;gBAAA;gBAAA,OAEjC,IAAI,CAACN,cAAc,CAACO,KAAK,EAAE;cAAA;gBAAA;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAGpC;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,2EAMD,kBAAmBC,GAAM;QAAA;UAAA;YAAA;cAAA;gBAAA,KACnB,IAAI,CAACJ,MAAM;kBAAA;kBAAA;gBAAA;gBAAA,MACP,IAAIK,KAAK,CAAC,mBAAmB,CAAC;cAAA;gBAEtCC,KAAK,CAACC,WAAW,CAAC,IAAI,CAACZ,MAAM,EAAES,GAAG,EAAE,IAAI,CAACN,SAAS,CAAC;gBACnD,IAAI,IAAI,CAACA,SAAS,CAACU,QAAQ,IAAI,IAAI,CAACT,YAAY,EAAE;kBAEhD,IAAI,CAACD,SAAS,GAAG,CAAC,CAAC;gBACrB;cAAC;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACF;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,uEAQD,kBAAYW,QAAqB;QAAA;UAAA;YAAA;cAAA;gBAAA,KAC3B,IAAI,CAACT,MAAM;kBAAA;kBAAA;gBAAA;gBAAA,MACP,IAAIK,KAAK,CAAC,mBAAmB,CAAC;cAAA;gBAGtC,IAAI,CAACL,MAAM,GAAG,IAAI;gBAElB,IAAI,IAAI,CAACF,SAAS,CAACU,QAAQ,GAAG,CAAC,IAAI,IAAI,CAACV,SAAS,CAACU,QAAQ,IAAI,IAAI,CAACT,YAAY,EAAE;kBAE/E,IAAI,CAACD,SAAS,GAAG,CAAC,CAAC;gBACrB;gBAAC;gBAAA,OAEK,IAAI,CAACF,cAAc,CAACc,WAAW,CAAC,IAAI,CAACT,YAAY,CAAC;cAAA;gBAAA;gBAAA,OAClD,IAAI,CAACL,cAAc,CAACO,KAAK,EAAE;cAAA;;gBAGjC,IAAIM,QAAQ,EAAE;kBACZA,QAAQ,EAAE;gBACZ;cAAC;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACF;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;IAKD,qBAAYE,GAAW,EAAEC,KAAa,EAAQ;MAE5C,IAAI,CAACX,YAAY,CAACY,MAAM,CAACF,GAAG,CAAC,CAAC,GAAGE,MAAM,CAACD,KAAK,CAAC;IAChD;;EAAC;IAAA;IAAA;IAQD,yBAAgBE,GAAW,EAAQ;MACjC,IAAI,CAACf,YAAY,GAAGe,GAAG;IACzB;;EAAC;IAAA;IAAA;IAMD,qBAAYA,GAAW,EAAQ;MAC7B,IAAI,CAAClB,cAAc,CAACmB,WAAW,CAACD,GAAG,CAAC;IACtC;EAAC;IAAA;IAAA;MAAA,0EA7HD,kBACEnB,MAAqB,EACrBqB,IAAY,EACZnB,IAA4B;QAAA;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA,OAED,IAAAoB,iBAAM,EAACD,IAAI,EAAEnB,IAAI,CAAC;cAAA;gBAAvCqB,YAAY;gBAAA,kCACXxB,cAAc,CAACyB,UAAU,CAACxB,MAAM,EAAEuB,YAAY,EAAErB,IAAI,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAC7D;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,4EAMD,kBACEF,MAAqB,EACrBuB,YAA6B;QAAA;UAAA;UAAA;QAAA;UAAA;YAAA;cAAA;gBAC7BrB,IAA2B,8DAAG,CAAC,CAAC;gBAAA;gBAAA,OAEHuB,qBAAqB,CAACD,UAAU,CAACxB,MAAM,EAAEuB,YAAY,EAAErB,IAAI,CAAC;cAAA;gBAAnFD,cAAc;gBAAA,kCACb,IAAIF,cAAc,CAACC,MAAM,EAAEC,cAAc,EAAEC,IAAI,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACxD;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;EAAA;AAAA;AAAA;AAAA,IAkHUuB,qBAAqB;EAuBhC,+BACEzB,MAAqB,EACrB0B,OAAuC,EACvCC,OAA4B,EAC5BC,UAAkB,EAClB1B,IAA2B,EAC3B;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IA
CA,IAAI,CAACF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAAC6B,KAAK,GAAGH,OAAO;IACpB,IAAI,CAAClB,KAAK,GAAGmB,OAAO;IACpB,IAAI,CAACG,MAAM,GAAGF,UAAU;IACxB,IAAI,CAACf,QAAQ,GAAG,CAAC;IACjB,IAAI,CAACkB,SAAS,GAAG,EAAE;IACnB,IAAI,CAACC,QAAQ,GAAG9B,IAAI,CAAC8B,QAAQ,IAAIrC,yBAAyB;IAC1D,IAAI,CAACsC,aAAa,GAAG,eAAe,IAAI/B,IAAI,GAAGgC,OAAO,CAAChC,IAAI,CAAC+B,aAAa,CAAC,GAAG,KAAK;EACpF;EAAC;IAAA;IAAA,OAED,sBAAaE,GAAW,EAAiB;MACvC,IAAI,CAACL,MAAM,IAAIK,GAAG,CAACC,MAAM;MACzB,OAAO,IAAI,CAACP,KAAK,CAACM,GAAG,CAAC;IACxB;;EAAC;IAAA;IAAA;IAKD,uBAA6B;MAC3B,OAAO,IAAI,CAACE,YAAY,CAACC,MAAM,CAACC,IAAI,CAAC9C,aAAa,CAAC,CAAC;IACtD;;EAAC;IAAA;IAAA;MAAA,+EAMD,kBAAoB+C,OAAsB;QAAA;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA,OACnBC,cAAc,CAAC,IAAI,CAACzC,MAAM,EAAEwC,OAAO,EAAE;kBACxDE,UAAU,EAAE,IAAI,CAACZ,MAAM;kBACvBE,QAAQ,EAAE,IAAI,CAACA,QAAQ;kBACvBC,aAAa,EAAE,IAAI,CAACA;gBACtB,CAAC,CAAC;cAAA;gBAJIU,MAAM;gBAMZ,IAAI,CAAC9B,QAAQ,IAAI2B,OAAO,CAAC3B,QAAQ;gBACjC,IAAI,CAACkB,SAAS,CAACa,IAAI,CAACD,MAAM,CAACE,QAAQ,CAAC;gBAAC;gBAAA,OACxB,IAAI,CAACR,YAAY,CAACM,MAAM,CAACG,IAAI,CAAC;cAAA;gBAAA;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAC5C;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;IAKD,qBAAYxC,YAAoC,EAAiB;MAC/D,IAAI,CAACA,YAAY,EAAE;QAEjBA,YAAY,GAAG,CAAC,CAAC;MACnB;MAEA,OAAO,IAAI,CAAC+B,YAAY,CACtBU,YAAY,CAAC,IAAI,CAAC/C,MAAM,EAAE,IAAI,CAACa,QAAQ,EAAE,IAAI,CAACkB,SAAS,EAAEzB,YAAY,CAAC,CACvE;IACH;;EAAC;IAAA;IAAA;IAMD,qBAAYa,GAAW,EAAQ;MAC7B,IAAI,CAACa,QAAQ,GAAGb,GAAG;IACrB;EAAC;IAAA;IAAA;MAAA,6EApFD,kBACEnB,MAAqB,EACrBuB,YAA6B,EAC7BrB,IAA2B;QAAA;QAAA;UAAA;YAAA;cAAA;gBAErBwB,OAAO,GAAGsB,kBAAO,CAACC,IAAI,CAACC,SAAS,EAAE3B,YAAY,CAAC;gBAC/CI,OAAO,GAAGwB,kBAAO,CAACF,IAAI,CAACC,SAAS,EAAE3B,YAAY,CAAC;gBAAA,kCAC9C,IAAIE,qBAAqB,CAACzB,MAAM,EAAE0B,OAAO,EAAEC,OAAO,EAAE,CAAC,EAAEzB,IAAI,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACpE;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;EAAA;AAAA;AAAA;;AAuHH,SAASkD,YAAY,CACnBC,IAAmB,EACnBC,QAAsB,EACtBC,MAAa,EACbrD,IAAyB,EACzB;EACA,IAAI,EAAEoD,QAAQ,IAAIE,sBAAc,CAAC,EAAE;IACjC,MAAM,IAAI9C,KAAK,6BAAsB4C,QAAQ,EAAG;EAClD;EACA,OAAOE,sBAAc,CAACF,QAAQ,CAAC,CAACF,YAAY,CAACC,IAAI,EAAEE,MAAM,EAAErD,IAAI,CAAC;AAClE;;AAAC,SAKcuD,cAAc;EAAA;AAAA;AAAA;EAAA,4EAA7B,kBACEC,MAAoB,EACpBC,IAAiB;IAAA;IAAA;MAAA;QAAA;UAAA;YAObC,UAAU,GAAGtB,MAAM,CAACuB,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACI,SAAS,GAAG,CAAC,EAAE;cACxBF,UAAU,GAAGR,YAAY,CAACvD,kBAAkB,EAAEC,sBAAsB,EAAE6D,IAAI,CAACI,OAAO,EAAE;gBAClFC,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACI,SAAS;cAExC,CAAC,CAAC;YACJ;YAEII,UAAU,GAAG5B,MAAM,CAACuB,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACS,SAAS,GAAG,CAAC,EAAE;cACxBD,UAAU,GAAGd,YAAY,CAACvD,kBAAkB,EAAEC,sBAAsB,EAAE6D,IAAI,CAACS,OAAO,EAAE;gBAClFJ,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACS,SAAS;cAExC,CAAC,CAAC;YACJ;;YAGME,SAAS,GAAGjB,YAAY,CAACM,MAAM,CAACY,aAAa,EAAGZ,MAAM,CAACJ,QAAQ,EAAGK,IAAI,CAACJ,MAAM,EAAE;cACnFgB,UAAU,EAAEb,MAAM,CAACa,UAAU;cAC7BP,QAAQ,EAAEN,MAAM,CAACa;YACnB,CAAC,CAAC;YAEIC,OAAO,GAAGlC,MAAM,CAACmC,MAAM,CAAC,CAACb,UAAU,EAAEM,UAAU,EAAEG,SAAS,CAAC,CAAC;YAAA;YAAA,OAGtCK,WAAW,CAACC,OAAO,CAACjB,MAAM,CAACkB,WAAW,EAAGJ,OAAO,CAAC;UAAA;YAAvEK,aAAa;YAGbC,MAAM,GAAG,IAAIC,yBAAU,CAAC;cAC5B1B,IAAI,EAAE2B,uBAAQ,CAACC,SAAS;cACxBC,gBAAgB,EAAE,IAAIC,6BAAc,CAAC;gBACnCC,UAAU,EAAEzB,IAAI,CAAC0B,KAAK;gBACtB/B,QAAQ,EAAEgC,uBAAQ,CAAC5B,MAAM,CAACJ,QAAQ,CAAS;gBAC3CiC,yBAAyB,EAAED,uBAAQ,CAACxF,sBAAsB,CAAC;gBAC3D0F,yBAAyB,EAAEF,uBAAQ,CAACxF,sBAAsB;cAC5D,CAAC,CAAC;;cACF2F,sBAAsB,EAAEjB,OAAO,CAACpC,MAAM;cACtCsD,oBAAoB,EAAEb,aAAa,CAACzC;YACtC,CAAC,CAAC;YAGIuD,SAAS,GAAG,IAAAC,0BAAe,EAACd,MAAM,CAAC;YACnCe,IAAI,GAAGvD,MAAM,CAACmC,MAAM,CAAC,CAACkB,SAAS,EAAEd,aAAa,CAAC,CAAC;YAAA,kCAE/C;cAACC,MAAM,EAANA,MAAM;cAAEgB,UA
AU,EAAEH,SAAS,CAACvD,MAAM;cAAEyD,IAAI,EAAJA;YAAI,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACpD;EAAA;AAAA;AAAA,SAKcE,gBAAgB;EAAA;AAAA;AAAA;EAAA,6EAA/B,kBACErC,MAAoB,EACpBC,IAAiB,EACjB9C,QAAgB;IAAA;IAAA;MAAA;QAAA;UAAA;YAOVwD,SAAS,GAAGjB,YAAY,CAACM,MAAM,CAACY,aAAa,EAAGZ,MAAM,CAACJ,QAAQ,EAAGK,IAAI,CAACJ,MAAM,EAAE;cACnFgB,UAAU,EAAEb,MAAM,CAACa,UAAU;cAC7BP,QAAQ,EAAEN,MAAM,CAACa;YACnB,CAAC,CAAC;YAAA;YAAA,OAG0BG,WAAW,CAACC,OAAO,CAACjB,MAAM,CAACkB,WAAW,EAAGP,SAAS,CAAC;UAAA;YAAzEQ,aAAa;YAGfjB,UAAU,GAAGtB,MAAM,CAACuB,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACI,SAAS,GAAG,CAAC,EAAE;cACxBF,UAAU,GAAGR,YAAY,CAACvD,kBAAkB,EAAEC,sBAAsB,EAAE6D,IAAI,CAACI,OAAO,EAAE;gBAClFC,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACI,SAAS,CAAC;gBACvCkC,eAAe,EAAE;cACnB,CAAC,CAAC;YACJ;YAEI9B,UAAU,GAAG5B,MAAM,CAACuB,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACS,SAAS,GAAG,CAAC,EAAE;cACxBD,UAAU,GAAGd,YAAY,CAACvD,kBAAkB,EAAEC,sBAAsB,EAAE6D,IAAI,CAACS,OAAO,EAAE;gBAClFJ,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACS,SAAS,CAAC;gBACvC6B,eAAe,EAAE;cACnB,CAAC,CAAC;YACJ;;YAGMlB,MAAM,GAAG,IAAIC,yBAAU,CAAC;cAC5B1B,IAAI,EAAE2B,uBAAQ,CAACiB,YAAY;cAC3BC,mBAAmB,EAAE,IAAIC,+BAAgB,CAAC;gBACxCf,UAAU,EAAEzB,IAAI,CAAC0B,KAAK;gBACtBe,SAAS,EAAEzC,IAAI,CAAC0B,KAAK,GAAG1B,IAAI,CAACJ,MAAM,CAACnB,MAAM;gBAC1CiE,QAAQ,EAAExF,QAAQ;gBAClByC,QAAQ,EAAEgC,uBAAQ,CAAC5B,MAAM,CAACJ,QAAQ,CAAS;gBAC3CgD,6BAA6B,EAAEpC,UAAU,CAAC9B,MAAM;gBAChDmE,6BAA6B,EAAE3C,UAAU,CAACxB,MAAM;gBAChDoE,aAAa,EAAE9C,MAAM,CAACkB,WAAW,KAAK;cACxC,CAAC,CAAC;cACFa,sBAAsB,EAAE7B,UAAU,CAACxB,MAAM,GAAG8B,UAAU,CAAC9B,MAAM,GAAGiC,SAAS,CAACjC,MAAM;cAChFsD,oBAAoB,EAAE9B,UAAU,CAACxB,MAAM,GAAG8B,UAAU,CAAC9B,MAAM,GAAGyC,aAAa,CAACzC;YAC9E,CAAC,CAAC;YAGIuD,SAAS,GAAG,IAAAC,0BAAe,EAACd,MAAM,CAAC;YACnCe,IAAI,GAAGvD,MAAM,CAACmC,MAAM,CAAC,CAACkB,SAAS,EAAE/B,UAAU,EAAEM,UAAU,EAAEW,aAAa,CAAC,CAAC;YAAA,kCACvE;cAACC,MAAM,EAANA,MAAM;cAAEgB,UAAU,EAAEH,SAAS,CAACvD,MAAM;cAAEyD,IAAI,EAAJA;YAAI,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACpD;EAAA;AAAA;AAAA,SAKcY,iBAAiB;EAAA;AAAA;AAAA;EAAA,+EAAhC,mBACE/C,MAAoB,EACpBgD,MAAqB,EACrB5E,MAAc,EACd5B,IAA2B;IAAA;IAAA;MAAA;QAAA;UAAA;YAMrByD,IAAI,GAAG+C,MAAM,CAACC,UAAU,CAACjD,MAAM,CAACrC,IAAI,CAACuF,IAAI,EAAE,CAAC;YAC5ClE,UAAU,GAAG,CAACxC,IAAI,CAACwC,UAAU,IAAI,CAAC,IAAIZ,MAAM;YAK9C+E,uBAAuB,GAAG,CAAC;YAE3BC,qBAAqB,GAAG,CAAC;YAAA,KAEZ5G,IAAI,CAAC+B,aAAa;cAAA;cAAA;YAAA;YAAA;YAAA,OACvB8D,gBAAgB,CAACrC,MAAM,EAAEC,IAAI,EAAE+C,MAAM,CAAC7F,QAAQ,CAAC;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA,OAC/C4C,cAAc,CAACC,MAAM,EAAEC,IAAI,CAAC;UAAA;YAAA;UAAA;YAFhCoD,MAAM;YAIZC,OAAO,GAAGD,MAAM,CAAClB,IAAI;YACrBgB,uBAAuB,IAAIE,MAAM,CAACjC,MAAM,CAACW,sBAAsB,GAAGsB,MAAM,CAACjB,UAAU;YACnFgB,qBAAqB,IAAIC,MAAM,CAACjC,MAAM,CAACY,oBAAoB,GAAGqB,MAAM,CAACjB,UAAU;YAO3EjD,QAAQ,GAAG,IAAIoE,6BAAc,CAAC;cAClCC,cAAc,EAAExD,MAAM,CAACrC,IAAI;cAC3B+D,UAAU,EAAEzB,IAAI,CAAC0B,KAAK;cACtB8B,gBAAgB,EAAEzE,UAAU;cAC5B0E,SAAS,EAAE,EAAE;cACbP,uBAAuB,EAAvBA,uBAAuB;cACvBC,qBAAqB,EAArBA,qBAAqB;cACrBzD,IAAI,EAAEgE,mBAAI,CAAC3D,MAAM,CAACY,aAAa,CAAE;cACjCgD,KAAK,EAAEC,+BAAgB,CAAC7D,MAAM,CAACkB,WAAW;YAC5C,CAAC,CAAC;YAGF/B,QAAQ,CAACuE,SAAS,CAACxE,IAAI,CAAC0C,uBAAQ,CAACxF,sBAAsB,CAAC,CAAC;YACzD+C,QAAQ,CAACuE,SAAS,CAACxE,IAAI,CAAC0C,uBAAQ,CAAC5B,MAAM,CAACJ,QAAQ,CAAE,CAAC;;YAG7CkE,cAAc,GAAG9E,UAAU,GAAGsE,OAAO,CAAC5E,MAAM;YAC5CU,IAAI,GAAGR,MAAM,CAACmC,MAAM,CAAC,CAACuC,OAAO,EAAE,IAAApB,0BAAe,EAAC/C,QAAQ,CAAC,CAAC,CAAC;YAAA,mCACzD;cAACC,IAAI,EAAJA,IAAI;cAAED,QAAQ,EAARA,QAAQ;cAAE2E,cAAc,EAAdA;YAAc,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACxC;EAAA;AAAA;AAAA,SAKc/E,cAAc;EAAA;AAAA;AAAA;EAAA,4EAA7B,mBACEzC,MAAqB,EACrB2D,IAAmB,EACnBzD,IAA2B;IAAA;IAAA;MAAA;QAAA;UAAA;YAKrB2C,QAAQ,GA
AG,IAAI4E,uBAAQ,CAAC;cAC5BpB,QAAQ,EAAE1C,IAAI,CAAC9C,QAAQ;cACvB6G,OAAO,EAAE,EAAE;cACXC,eAAe,EAAE;YACnB,CAAC,CAAC;YAEE7E,IAAI,GAAGR,MAAM,CAACuB,KAAK,CAAC,CAAC,CAAC;YAAA,wCACN7D,MAAM,CAAC4H,SAAS;YAAA;YAAA;UAAA;YAAA;cAAA;cAAA;YAAA;YAAzBC,KAAK;YAAA,KACVA,KAAK,CAACC,QAAQ;cAAA;cAAA;YAAA;YAAA;UAAA;YAAA;YAAA,OAIOrB,iBAAiB,CAACoB,KAAK,EAAElE,IAAI,EAAEb,IAAI,CAACV,MAAM,EAAElC,IAAI,CAAC;UAAA;YAApE6H,UAAU;YAEVC,MAAM,GAAG,IAAIC,0BAAW,CAAC;cAC7BC,WAAW,EAAEH,UAAU,CAACP,cAAc;cACtCW,SAAS,EAAEJ,UAAU,CAAClF;YACxB,CAAC,CAAC;YAEFA,QAAQ,CAAC6E,OAAO,CAAC9E,IAAI,CAACoF,MAAM,CAAC;YAC7BnF,QAAQ,CAAC8E,eAAe,GAAG,IAAIS,gBAAK,CAACC,MAAM,CAACxF,QAAQ,CAAC8E,eAAe,CAAC,GAAGI,UAAU,CAACjF,IAAI,CAACV,MAAM,CAAC;YAE/FU,IAAI,GAAGR,MAAM,CAACmC,MAAM,CAAC,CAAC3B,IAAI,EAAEiF,UAAU,CAACjF,IAAI,CAAC,CAAC;UAAC;YAAA;YAAA;UAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA,mCAGzC;cAACA,IAAI,EAAJA,IAAI;cAAED,QAAQ,EAARA;YAAQ,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACxB;EAAA;AAAA;AAKD,SAASE,YAAY,CACnB/C,MAAqB,EACrBa,QAAgB,EAChBkB,SAAqB,EACrBzB,YAAoC,EAC5B;EACR,IAAMuC,QAAQ,GAAG,IAAIyF,2BAAY,CAAC;IAChCC,OAAO,EAAE7I,eAAe;IACxB8I,UAAU,EAAE,UAAU;IACtBnC,QAAQ,EAAExF,QAAQ;IAClB4H,UAAU,EAAE1G,SAAS;IACrB/B,MAAM,EAAE,EAAE;IACV0I,kBAAkB,EAAE;EACtB,CAAC,CAAC;EAEF,KAAK,IAAM1H,GAAG,IAAIV,YAAY,EAAE;IAAA;IAC9B,IAAMqI,EAAE,GAAG,IAAIC,uBAAQ,CAAC;MACtB5H,GAAG,EAAHA,GAAG;MACHC,KAAK,EAAEX,YAAY,CAACU,GAAG;IACzB,CAAC,CAAC;IACF,yBAAA6B,QAAQ,CAAC6F,kBAAkB,oFAA3B,iDAA6B9F,IAAI,2DAAjC,oDAAoC+F,EAAE,CAAC;EACzC;EAEA;IACE,IAAME,UAAU,GAAG,IAAIC,4BAAa,CAAC;MACnCC,IAAI,EAAE,MAAM;MACZC,YAAY,EAAEC,MAAM,CAACC,IAAI,CAAClJ,MAAM,CAACmJ,MAAM,CAAC,CAAC/G;IAC3C,CAAC,CAAC;IACFS,QAAQ,CAAC7C,MAAM,CAAC4C,IAAI,CAACiG,UAAU,CAAC;EAClC;EAAC,2CAEmB7I,MAAM,CAAC4H,SAAS;IAAA;EAAA;IAApC,oDAAsC;MAAA,IAA3BC,KAAK;MACd,IAAMuB,IAAI,GAAGC,kCAAmB,CAACxB,KAAK,CAACyB,cAAc,CAAC;MACtD,IAAMC,UAAU,GAAG,IAAIT,4BAAa,CAAC;QACnCC,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBS,eAAe,EAAEJ;MACnB,CAAC,CAAC;MAEF,IAAIvB,KAAK,CAACC,QAAQ,EAAE;QAClByB,UAAU,CAACP,YAAY,GAAGnB,KAAK,CAAC4B,UAAU;MAC5C,CAAC,MAAM;QACLF,UAAU,CAAClG,IAAI,GAAGgE,mBAAI,CAACQ,KAAK,CAACvD,aAAa,CAAU;MACtD;MAEA,IAAIuD,KAAK,CAAC6B,YAAY,EAAE;QACtBH,UAAU,CAACI,cAAc,GAAGC,4BAAa,CAAC/B,KAAK,CAAC6B,YAAY,CAAkB;MAChF;MAEAH,UAAU,CAACM,WAAW,GAAGhC,KAAK,CAACtD,UAAU;MAEzC1B,QAAQ,CAAC7C,MAAM,CAAC4C,IAAI,CAAC2G,UAAU,CAAC;IAClC;EAAC;IAAA;EAAA;IAAA;EAAA;EAED,IAAMO,eAAe,GAAG,IAAAlE,0BAAe,EAAC/C,QAAQ,CAAC;EACjD,IAAMkH,aAAa,GAAGzH,MAAM,CAACuB,KAAK,CAACiG,eAAe,CAAC1H,MAAM,GAAG,CAAC,CAAC;EAC9D0H,eAAe,CAACE,IAAI,CAACD,aAAa,CAAC;EACnCA,aAAa,CAACE,aAAa,CAACH,eAAe,CAAC1H,MAAM,EAAE0H,eAAe,CAAC1H,MAAM,CAAC;EAC3E2H,aAAa,CAAClI,KAAK,CAACpC,aAAa,EAAEqK,eAAe,CAAC1H,MAAM,GAAG,CAAC,CAAC;EAC9D,OAAO2H,aAAa;AACtB"}