@loaders.gl/parquet 3.3.0 → 3.4.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131) hide show
  1. package/dist/dist.min.js +26 -17
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/index.js +3 -3
  4. package/dist/es5/index.js.map +1 -1
  5. package/dist/es5/lib/parse-parquet.js +25 -49
  6. package/dist/es5/lib/parse-parquet.js.map +1 -1
  7. package/dist/es5/parquet-loader.js +2 -3
  8. package/dist/es5/parquet-loader.js.map +1 -1
  9. package/dist/es5/parquet-wasm-loader.js +1 -1
  10. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  11. package/dist/es5/parquet-wasm-writer.js +1 -1
  12. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  13. package/dist/es5/parquet-writer.js +1 -1
  14. package/dist/es5/parquet-writer.js.map +1 -1
  15. package/dist/es5/parquetjs/compression.js +5 -15
  16. package/dist/es5/parquetjs/compression.js.map +1 -1
  17. package/dist/es5/parquetjs/encoder/{parquet-encoder.js → writer.js} +158 -70
  18. package/dist/es5/parquetjs/encoder/writer.js.map +1 -0
  19. package/dist/es5/parquetjs/file.js +94 -0
  20. package/dist/es5/parquetjs/file.js.map +1 -0
  21. package/dist/es5/parquetjs/parser/parquet-cursor.js +183 -0
  22. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +1 -0
  23. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +327 -0
  24. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
  25. package/dist/es5/parquetjs/parser/parquet-reader.js +222 -553
  26. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  27. package/dist/es5/parquetjs/schema/declare.js +1 -3
  28. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  29. package/dist/es5/parquetjs/schema/shred.js +33 -39
  30. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  31. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  32. package/dist/es5/parquetjs/utils/buffer-utils.js +19 -0
  33. package/dist/es5/parquetjs/utils/buffer-utils.js.map +1 -0
  34. package/dist/es5/parquetjs/utils/file-utils.js +3 -2
  35. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  36. package/dist/esm/index.js +1 -1
  37. package/dist/esm/index.js.map +1 -1
  38. package/dist/esm/lib/parse-parquet.js +12 -6
  39. package/dist/esm/lib/parse-parquet.js.map +1 -1
  40. package/dist/esm/parquet-loader.js +2 -3
  41. package/dist/esm/parquet-loader.js.map +1 -1
  42. package/dist/esm/parquet-wasm-loader.js +1 -1
  43. package/dist/esm/parquet-wasm-loader.js.map +1 -1
  44. package/dist/esm/parquet-wasm-writer.js +1 -1
  45. package/dist/esm/parquet-wasm-writer.js.map +1 -1
  46. package/dist/esm/parquet-writer.js +1 -1
  47. package/dist/esm/parquet-writer.js.map +1 -1
  48. package/dist/esm/parquetjs/compression.js +1 -10
  49. package/dist/esm/parquetjs/compression.js.map +1 -1
  50. package/dist/esm/parquetjs/encoder/{parquet-encoder.js → writer.js} +37 -7
  51. package/dist/esm/parquetjs/encoder/writer.js.map +1 -0
  52. package/dist/esm/parquetjs/file.js +81 -0
  53. package/dist/esm/parquetjs/file.js.map +1 -0
  54. package/dist/esm/parquetjs/parser/parquet-cursor.js +78 -0
  55. package/dist/esm/parquetjs/parser/parquet-cursor.js.map +1 -0
  56. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +129 -0
  57. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
  58. package/dist/esm/parquetjs/parser/parquet-reader.js +72 -158
  59. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  60. package/dist/esm/parquetjs/schema/declare.js +0 -1
  61. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  62. package/dist/esm/parquetjs/schema/shred.js +34 -42
  63. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  64. package/dist/esm/parquetjs/schema/types.js.map +1 -1
  65. package/dist/esm/parquetjs/utils/buffer-utils.js +13 -0
  66. package/dist/esm/parquetjs/utils/buffer-utils.js.map +1 -0
  67. package/dist/esm/parquetjs/utils/file-utils.js +1 -1
  68. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
  69. package/dist/index.d.ts +1 -1
  70. package/dist/index.d.ts.map +1 -1
  71. package/dist/index.js +4 -3
  72. package/dist/lib/parse-parquet.d.ts +2 -2
  73. package/dist/lib/parse-parquet.d.ts.map +1 -1
  74. package/dist/lib/parse-parquet.js +12 -24
  75. package/dist/parquet-loader.d.ts +0 -1
  76. package/dist/parquet-loader.d.ts.map +1 -1
  77. package/dist/parquet-loader.js +1 -2
  78. package/dist/parquet-worker.js +24 -15
  79. package/dist/parquet-worker.js.map +3 -3
  80. package/dist/parquetjs/compression.d.ts.map +1 -1
  81. package/dist/parquetjs/compression.js +5 -16
  82. package/dist/parquetjs/encoder/{parquet-encoder.d.ts → writer.d.ts} +19 -10
  83. package/dist/parquetjs/encoder/writer.d.ts.map +1 -0
  84. package/dist/parquetjs/encoder/{parquet-encoder.js → writer.js} +37 -39
  85. package/dist/parquetjs/file.d.ts +10 -0
  86. package/dist/parquetjs/file.d.ts.map +1 -0
  87. package/dist/parquetjs/file.js +99 -0
  88. package/dist/parquetjs/parser/parquet-cursor.d.ts +36 -0
  89. package/dist/parquetjs/parser/parquet-cursor.d.ts.map +1 -0
  90. package/dist/parquetjs/parser/parquet-cursor.js +74 -0
  91. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +40 -0
  92. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +1 -0
  93. package/dist/parquetjs/parser/parquet-envelope-reader.js +136 -0
  94. package/dist/parquetjs/parser/parquet-reader.d.ts +57 -47
  95. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
  96. package/dist/parquetjs/parser/parquet-reader.js +102 -168
  97. package/dist/parquetjs/schema/declare.d.ts +7 -14
  98. package/dist/parquetjs/schema/declare.d.ts.map +1 -1
  99. package/dist/parquetjs/schema/declare.js +0 -2
  100. package/dist/parquetjs/schema/shred.d.ts +0 -115
  101. package/dist/parquetjs/schema/shred.d.ts.map +1 -1
  102. package/dist/parquetjs/schema/shred.js +43 -161
  103. package/dist/parquetjs/schema/types.d.ts +2 -2
  104. package/dist/parquetjs/schema/types.d.ts.map +1 -1
  105. package/dist/parquetjs/utils/buffer-utils.d.ts +10 -0
  106. package/dist/parquetjs/utils/buffer-utils.d.ts.map +1 -0
  107. package/dist/parquetjs/utils/buffer-utils.js +22 -0
  108. package/dist/parquetjs/utils/file-utils.d.ts +4 -3
  109. package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
  110. package/dist/parquetjs/utils/file-utils.js +5 -2
  111. package/package.json +5 -7
  112. package/src/index.ts +2 -2
  113. package/src/lib/parse-parquet.ts +12 -25
  114. package/src/parquet-loader.ts +1 -3
  115. package/src/parquetjs/compression.ts +1 -14
  116. package/src/parquetjs/encoder/{parquet-encoder.ts → writer.ts} +28 -22
  117. package/src/parquetjs/file.ts +90 -0
  118. package/src/parquetjs/parser/parquet-cursor.ts +94 -0
  119. package/src/parquetjs/parser/parquet-envelope-reader.ts +199 -0
  120. package/src/parquetjs/parser/parquet-reader.ts +122 -239
  121. package/src/parquetjs/schema/declare.ts +9 -17
  122. package/src/parquetjs/schema/shred.ts +28 -157
  123. package/src/parquetjs/schema/types.ts +27 -21
  124. package/src/parquetjs/utils/buffer-utils.ts +18 -0
  125. package/src/parquetjs/utils/file-utils.ts +4 -3
  126. package/dist/es5/lib/convert-schema-deep.ts.disabled +0 -910
  127. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +0 -1
  128. package/dist/esm/lib/convert-schema-deep.ts.disabled +0 -910
  129. package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +0 -1
  130. package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +0 -1
  131. package/src/lib/convert-schema-deep.ts.disabled +0 -910
@@ -5,12 +5,17 @@ var _typeof = require("@babel/runtime/helpers/typeof");
5
5
  Object.defineProperty(exports, "__esModule", {
6
6
  value: true
7
7
  });
8
- exports.ParquetEnvelopeWriter = exports.ParquetEncoder = void 0;
8
+ exports.ParquetWriter = exports.ParquetTransformer = exports.ParquetEnvelopeWriter = void 0;
9
9
  var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
10
+ var _assertThisInitialized2 = _interopRequireDefault(require("@babel/runtime/helpers/assertThisInitialized"));
11
+ var _inherits2 = _interopRequireDefault(require("@babel/runtime/helpers/inherits"));
12
+ var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime/helpers/possibleConstructorReturn"));
13
+ var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime/helpers/getPrototypeOf"));
10
14
  var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
11
15
  var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
12
16
  var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
13
17
  var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
18
+ var _stream = require("stream");
14
19
  var _codecs = require("../codecs");
15
20
  var Compression = _interopRequireWildcard(require("../compression"));
16
21
  var Shred = _interopRequireWildcard(require("../schema/shred"));
@@ -23,6 +28,8 @@ function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj &&
23
28
  function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
24
29
  function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
25
30
  function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
31
+ function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = (0, _getPrototypeOf2.default)(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = (0, _getPrototypeOf2.default)(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return (0, _possibleConstructorReturn2.default)(this, result); }; }
32
+ function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
26
33
  var PARQUET_MAGIC = 'PAR1';
27
34
 
28
35
  var PARQUET_VERSION = 1;
@@ -32,9 +39,9 @@ var PARQUET_DEFAULT_ROW_GROUP_SIZE = 4096;
32
39
 
33
40
  var PARQUET_RDLVL_TYPE = 'INT32';
34
41
  var PARQUET_RDLVL_ENCODING = 'RLE';
35
- var ParquetEncoder = function () {
36
- function ParquetEncoder(schema, envelopeWriter, opts) {
37
- (0, _classCallCheck2.default)(this, ParquetEncoder);
42
+ var ParquetWriter = function () {
43
+ function ParquetWriter(schema, envelopeWriter, opts) {
44
+ (0, _classCallCheck2.default)(this, ParquetWriter);
38
45
  (0, _defineProperty2.default)(this, "schema", void 0);
39
46
  (0, _defineProperty2.default)(this, "envelopeWriter", void 0);
40
47
  (0, _defineProperty2.default)(this, "rowBuffer", void 0);
@@ -50,7 +57,7 @@ var ParquetEncoder = function () {
50
57
 
51
58
  this.writeHeader();
52
59
  }
53
- (0, _createClass2.default)(ParquetEncoder, [{
60
+ (0, _createClass2.default)(ParquetWriter, [{
54
61
  key: "writeHeader",
55
62
  value: function () {
56
63
  var _writeHeader = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee() {
@@ -186,7 +193,7 @@ var ParquetEncoder = function () {
186
193
  return (0, _fileUtils.osopen)(path, opts);
187
194
  case 2:
188
195
  outputStream = _context4.sent;
189
- return _context4.abrupt("return", ParquetEncoder.openStream(schema, outputStream, opts));
196
+ return _context4.abrupt("return", ParquetWriter.openStream(schema, outputStream, opts));
190
197
  case 4:
191
198
  case "end":
192
199
  return _context4.stop();
@@ -202,20 +209,20 @@ var ParquetEncoder = function () {
202
209
  }, {
203
210
  key: "openStream",
204
211
  value: function () {
205
- var _openStream = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee5(schema, outputStream) {
206
- var opts,
207
- envelopeWriter,
208
- _args5 = arguments;
212
+ var _openStream = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee5(schema, outputStream, opts) {
213
+ var envelopeWriter;
209
214
  return _regenerator.default.wrap(function _callee5$(_context5) {
210
215
  while (1) {
211
216
  switch (_context5.prev = _context5.next) {
212
217
  case 0:
213
- opts = _args5.length > 2 && _args5[2] !== undefined ? _args5[2] : {};
218
+ if (!opts) {
219
+ opts = {};
220
+ }
214
221
  _context5.next = 3;
215
222
  return ParquetEnvelopeWriter.openStream(schema, outputStream, opts);
216
223
  case 3:
217
224
  envelopeWriter = _context5.sent;
218
- return _context5.abrupt("return", new ParquetEncoder(schema, envelopeWriter, opts));
225
+ return _context5.abrupt("return", new ParquetWriter(schema, envelopeWriter, opts));
219
226
  case 5:
220
227
  case "end":
221
228
  return _context5.stop();
@@ -223,15 +230,15 @@ var ParquetEncoder = function () {
223
230
  }
224
231
  }, _callee5);
225
232
  }));
226
- function openStream(_x6, _x7) {
233
+ function openStream(_x6, _x7, _x8) {
227
234
  return _openStream.apply(this, arguments);
228
235
  }
229
236
  return openStream;
230
237
  }()
231
238
  }]);
232
- return ParquetEncoder;
239
+ return ParquetWriter;
233
240
  }();
234
- exports.ParquetEncoder = ParquetEncoder;
241
+ exports.ParquetWriter = ParquetWriter;
235
242
  var ParquetEnvelopeWriter = function () {
236
243
  function ParquetEnvelopeWriter(schema, writeFn, closeFn, fileOffset, opts) {
237
244
  (0, _classCallCheck2.default)(this, ParquetEnvelopeWriter);
@@ -296,7 +303,7 @@ var ParquetEnvelopeWriter = function () {
296
303
  }
297
304
  }, _callee6, this);
298
305
  }));
299
- function writeRowGroup(_x8) {
306
+ function writeRowGroup(_x9) {
300
307
  return _writeRowGroup.apply(this, arguments);
301
308
  }
302
309
  return writeRowGroup;
@@ -336,7 +343,7 @@ var ParquetEnvelopeWriter = function () {
336
343
  }
337
344
  }, _callee7);
338
345
  }));
339
- function openStream(_x9, _x10, _x11) {
346
+ function openStream(_x10, _x11, _x12) {
340
347
  return _openStream2.apply(this, arguments);
341
348
  }
342
349
  return openStream;
@@ -345,7 +352,88 @@ var ParquetEnvelopeWriter = function () {
345
352
  return ParquetEnvelopeWriter;
346
353
  }();
347
354
  exports.ParquetEnvelopeWriter = ParquetEnvelopeWriter;
355
+ var ParquetTransformer = function (_Transform) {
356
+ (0, _inherits2.default)(ParquetTransformer, _Transform);
357
+ var _super = _createSuper(ParquetTransformer);
358
+ function ParquetTransformer(schema) {
359
+ var _this;
360
+ var opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
361
+ (0, _classCallCheck2.default)(this, ParquetTransformer);
362
+ _this = _super.call(this, {
363
+ objectMode: true
364
+ });
365
+ (0, _defineProperty2.default)((0, _assertThisInitialized2.default)(_this), "writer", void 0);
366
+ var writeProxy = function (t) {
367
+ return function () {
368
+ var _ref = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee8(b) {
369
+ return _regenerator.default.wrap(function _callee8$(_context8) {
370
+ while (1) {
371
+ switch (_context8.prev = _context8.next) {
372
+ case 0:
373
+ t.push(b);
374
+ case 1:
375
+ case "end":
376
+ return _context8.stop();
377
+ }
378
+ }
379
+ }, _callee8);
380
+ }));
381
+ return function (_x13) {
382
+ return _ref.apply(this, arguments);
383
+ };
384
+ }();
385
+ }((0, _assertThisInitialized2.default)(_this));
386
+ _this.writer = new ParquetWriter(schema, new ParquetEnvelopeWriter(schema, writeProxy, (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee9() {
387
+ return _regenerator.default.wrap(function _callee9$(_context9) {
388
+ while (1) {
389
+ switch (_context9.prev = _context9.next) {
390
+ case 0:
391
+ case "end":
392
+ return _context9.stop();
393
+ }
394
+ }
395
+ }, _callee9);
396
+ })), 0, opts), opts);
397
+ return _this;
398
+ }
348
399
 
400
+ (0, _createClass2.default)(ParquetTransformer, [{
401
+ key: "_transform",
402
+ value:
403
+ function _transform(row, encoding, callback) {
404
+ if (row) {
405
+ return this.writer.appendRow(row).then(callback);
406
+ }
407
+ callback();
408
+ return Promise.resolve();
409
+ }
410
+
411
+ }, {
412
+ key: "_flush",
413
+ value: function () {
414
+ var _flush2 = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee10(callback) {
415
+ return _regenerator.default.wrap(function _callee10$(_context10) {
416
+ while (1) {
417
+ switch (_context10.prev = _context10.next) {
418
+ case 0:
419
+ _context10.next = 2;
420
+ return this.writer.close(callback);
421
+ case 2:
422
+ case "end":
423
+ return _context10.stop();
424
+ }
425
+ }
426
+ }, _callee10, this);
427
+ }));
428
+ function _flush(_x14) {
429
+ return _flush2.apply(this, arguments);
430
+ }
431
+ return _flush;
432
+ }()
433
+ }]);
434
+ return ParquetTransformer;
435
+ }(_stream.Transform);
436
+ exports.ParquetTransformer = ParquetTransformer;
349
437
  function encodeValues(type, encoding, values, opts) {
350
438
  if (!(encoding in _codecs.PARQUET_CODECS)) {
351
439
  throw new Error("invalid encoding: ".concat(encoding));
@@ -353,15 +441,15 @@ function encodeValues(type, encoding, values, opts) {
353
441
  return _codecs.PARQUET_CODECS[encoding].encodeValues(type, values, opts);
354
442
  }
355
443
 
356
- function encodeDataPage(_x12, _x13) {
444
+ function encodeDataPage(_x15, _x16) {
357
445
  return _encodeDataPage.apply(this, arguments);
358
446
  }
359
447
  function _encodeDataPage() {
360
- _encodeDataPage = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee8(column, data) {
448
+ _encodeDataPage = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee11(column, data) {
361
449
  var rLevelsBuf, dLevelsBuf, valuesBuf, dataBuf, compressedBuf, header, headerBuf, page;
362
- return _regenerator.default.wrap(function _callee8$(_context8) {
450
+ return _regenerator.default.wrap(function _callee11$(_context11) {
363
451
  while (1) {
364
- switch (_context8.prev = _context8.next) {
452
+ switch (_context11.prev = _context11.next) {
365
453
  case 0:
366
454
  rLevelsBuf = Buffer.alloc(0);
367
455
  if (column.rLevelMax > 0) {
@@ -381,10 +469,10 @@ function _encodeDataPage() {
381
469
  bitWidth: column.typeLength
382
470
  });
383
471
  dataBuf = Buffer.concat([rLevelsBuf, dLevelsBuf, valuesBuf]);
384
- _context8.next = 8;
472
+ _context11.next = 8;
385
473
  return Compression.deflate(column.compression, dataBuf);
386
474
  case 8:
387
- compressedBuf = _context8.sent;
475
+ compressedBuf = _context11.sent;
388
476
  header = new _parquetThrift.PageHeader({
389
477
  type: _parquetThrift.PageType.DATA_PAGE,
390
478
  data_page_header: new _parquetThrift.DataPageHeader({
@@ -399,38 +487,38 @@ function _encodeDataPage() {
399
487
  });
400
488
  headerBuf = (0, _readUtils.serializeThrift)(header);
401
489
  page = Buffer.concat([headerBuf, compressedBuf]);
402
- return _context8.abrupt("return", {
490
+ return _context11.abrupt("return", {
403
491
  header: header,
404
492
  headerSize: headerBuf.length,
405
493
  page: page
406
494
  });
407
495
  case 13:
408
496
  case "end":
409
- return _context8.stop();
497
+ return _context11.stop();
410
498
  }
411
499
  }
412
- }, _callee8);
500
+ }, _callee11);
413
501
  }));
414
502
  return _encodeDataPage.apply(this, arguments);
415
503
  }
416
- function encodeDataPageV2(_x14, _x15, _x16) {
504
+ function encodeDataPageV2(_x17, _x18, _x19) {
417
505
  return _encodeDataPageV.apply(this, arguments);
418
506
  }
419
507
  function _encodeDataPageV() {
420
- _encodeDataPageV = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee9(column, data, rowCount) {
508
+ _encodeDataPageV = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee12(column, data, rowCount) {
421
509
  var valuesBuf, compressedBuf, rLevelsBuf, dLevelsBuf, header, headerBuf, page;
422
- return _regenerator.default.wrap(function _callee9$(_context9) {
510
+ return _regenerator.default.wrap(function _callee12$(_context12) {
423
511
  while (1) {
424
- switch (_context9.prev = _context9.next) {
512
+ switch (_context12.prev = _context12.next) {
425
513
  case 0:
426
514
  valuesBuf = encodeValues(column.primitiveType, column.encoding, data.values, {
427
515
  typeLength: column.typeLength,
428
516
  bitWidth: column.typeLength
429
517
  });
430
- _context9.next = 3;
518
+ _context12.next = 3;
431
519
  return Compression.deflate(column.compression, valuesBuf);
432
520
  case 3:
433
- compressedBuf = _context9.sent;
521
+ compressedBuf = _context12.sent;
434
522
  rLevelsBuf = Buffer.alloc(0);
435
523
  if (column.rLevelMax > 0) {
436
524
  rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {
@@ -462,51 +550,51 @@ function _encodeDataPageV() {
462
550
  });
463
551
  headerBuf = (0, _readUtils.serializeThrift)(header);
464
552
  page = Buffer.concat([headerBuf, rLevelsBuf, dLevelsBuf, compressedBuf]);
465
- return _context9.abrupt("return", {
553
+ return _context12.abrupt("return", {
466
554
  header: header,
467
555
  headerSize: headerBuf.length,
468
556
  page: page
469
557
  });
470
558
  case 12:
471
559
  case "end":
472
- return _context9.stop();
560
+ return _context12.stop();
473
561
  }
474
562
  }
475
- }, _callee9);
563
+ }, _callee12);
476
564
  }));
477
565
  return _encodeDataPageV.apply(this, arguments);
478
566
  }
479
- function encodeColumnChunk(_x17, _x18, _x19, _x20) {
567
+ function encodeColumnChunk(_x20, _x21, _x22, _x23) {
480
568
  return _encodeColumnChunk.apply(this, arguments);
481
569
  }
482
570
  function _encodeColumnChunk() {
483
- _encodeColumnChunk = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee10(column, buffer, offset, opts) {
571
+ _encodeColumnChunk = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee13(column, buffer, offset, opts) {
484
572
  var data, baseOffset, pageBuf, total_uncompressed_size, total_compressed_size, result, metadata, metadataOffset, body;
485
- return _regenerator.default.wrap(function _callee10$(_context10) {
573
+ return _regenerator.default.wrap(function _callee13$(_context13) {
486
574
  while (1) {
487
- switch (_context10.prev = _context10.next) {
575
+ switch (_context13.prev = _context13.next) {
488
576
  case 0:
489
577
  data = buffer.columnData[column.path.join()];
490
578
  baseOffset = (opts.baseOffset || 0) + offset;
491
579
  total_uncompressed_size = 0;
492
580
  total_compressed_size = 0;
493
581
  if (!opts.useDataPageV2) {
494
- _context10.next = 10;
582
+ _context13.next = 10;
495
583
  break;
496
584
  }
497
- _context10.next = 7;
585
+ _context13.next = 7;
498
586
  return encodeDataPageV2(column, data, buffer.rowCount);
499
587
  case 7:
500
- _context10.t0 = _context10.sent;
501
- _context10.next = 13;
588
+ _context13.t0 = _context13.sent;
589
+ _context13.next = 13;
502
590
  break;
503
591
  case 10:
504
- _context10.next = 12;
592
+ _context13.next = 12;
505
593
  return encodeDataPage(column, data);
506
594
  case 12:
507
- _context10.t0 = _context10.sent;
595
+ _context13.t0 = _context13.sent;
508
596
  case 13:
509
- result = _context10.t0;
597
+ result = _context13.t0;
510
598
  pageBuf = result.page;
511
599
  total_uncompressed_size += result.header.uncompressed_page_size + result.headerSize;
512
600
  total_compressed_size += result.header.compressed_page_size + result.headerSize;
@@ -525,29 +613,29 @@ function _encodeColumnChunk() {
525
613
 
526
614
  metadataOffset = baseOffset + pageBuf.length;
527
615
  body = Buffer.concat([pageBuf, (0, _readUtils.serializeThrift)(metadata)]);
528
- return _context10.abrupt("return", {
616
+ return _context13.abrupt("return", {
529
617
  body: body,
530
618
  metadata: metadata,
531
619
  metadataOffset: metadataOffset
532
620
  });
533
621
  case 23:
534
622
  case "end":
535
- return _context10.stop();
623
+ return _context13.stop();
536
624
  }
537
625
  }
538
- }, _callee10);
626
+ }, _callee13);
539
627
  }));
540
628
  return _encodeColumnChunk.apply(this, arguments);
541
629
  }
542
- function encodeRowGroup(_x21, _x22, _x23) {
630
+ function encodeRowGroup(_x24, _x25, _x26) {
543
631
  return _encodeRowGroup.apply(this, arguments);
544
632
  }
545
633
  function _encodeRowGroup() {
546
- _encodeRowGroup = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee11(schema, data, opts) {
634
+ _encodeRowGroup = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee14(schema, data, opts) {
547
635
  var metadata, body, _iterator2, _step2, field, cchunkData, cchunk;
548
- return _regenerator.default.wrap(function _callee11$(_context11) {
636
+ return _regenerator.default.wrap(function _callee14$(_context14) {
549
637
  while (1) {
550
- switch (_context11.prev = _context11.next) {
638
+ switch (_context14.prev = _context14.next) {
551
639
  case 0:
552
640
  metadata = new _parquetThrift.RowGroup({
553
641
  num_rows: data.rowCount,
@@ -556,24 +644,24 @@ function _encodeRowGroup() {
556
644
  });
557
645
  body = Buffer.alloc(0);
558
646
  _iterator2 = _createForOfIteratorHelper(schema.fieldList);
559
- _context11.prev = 3;
647
+ _context14.prev = 3;
560
648
  _iterator2.s();
561
649
  case 5:
562
650
  if ((_step2 = _iterator2.n()).done) {
563
- _context11.next = 18;
651
+ _context14.next = 18;
564
652
  break;
565
653
  }
566
654
  field = _step2.value;
567
655
  if (!field.isNested) {
568
- _context11.next = 9;
656
+ _context14.next = 9;
569
657
  break;
570
658
  }
571
- return _context11.abrupt("continue", 16);
659
+ return _context14.abrupt("continue", 16);
572
660
  case 9:
573
- _context11.next = 11;
661
+ _context14.next = 11;
574
662
  return encodeColumnChunk(field, data, body.length, opts);
575
663
  case 11:
576
- cchunkData = _context11.sent;
664
+ cchunkData = _context14.sent;
577
665
  cchunk = new _parquetThrift.ColumnChunk({
578
666
  file_offset: cchunkData.metadataOffset,
579
667
  meta_data: cchunkData.metadata
@@ -582,30 +670,30 @@ function _encodeRowGroup() {
582
670
  metadata.total_byte_size = new _nodeInt.default(Number(metadata.total_byte_size) + cchunkData.body.length);
583
671
  body = Buffer.concat([body, cchunkData.body]);
584
672
  case 16:
585
- _context11.next = 5;
673
+ _context14.next = 5;
586
674
  break;
587
675
  case 18:
588
- _context11.next = 23;
676
+ _context14.next = 23;
589
677
  break;
590
678
  case 20:
591
- _context11.prev = 20;
592
- _context11.t0 = _context11["catch"](3);
593
- _iterator2.e(_context11.t0);
679
+ _context14.prev = 20;
680
+ _context14.t0 = _context14["catch"](3);
681
+ _iterator2.e(_context14.t0);
594
682
  case 23:
595
- _context11.prev = 23;
683
+ _context14.prev = 23;
596
684
  _iterator2.f();
597
- return _context11.finish(23);
685
+ return _context14.finish(23);
598
686
  case 26:
599
- return _context11.abrupt("return", {
687
+ return _context14.abrupt("return", {
600
688
  body: body,
601
689
  metadata: metadata
602
690
  });
603
691
  case 27:
604
692
  case "end":
605
- return _context11.stop();
693
+ return _context14.stop();
606
694
  }
607
695
  }
608
- }, _callee11, null, [[3, 20, 23, 26]]);
696
+ }, _callee14, null, [[3, 20, 23, 26]]);
609
697
  }));
610
698
  return _encodeRowGroup.apply(this, arguments);
611
699
  }
@@ -666,4 +754,4 @@ function encodeFooter(schema, rowCount, rowGroups, userMetadata) {
666
754
  footerEncoded.write(PARQUET_MAGIC, metadataEncoded.length + 4);
667
755
  return footerEncoded;
668
756
  }
669
- //# sourceMappingURL=parquet-encoder.js.map
757
+ //# sourceMappingURL=writer.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"writer.js","names":["PARQUET_MAGIC","PARQUET_VERSION","PARQUET_DEFAULT_PAGE_SIZE","PARQUET_DEFAULT_ROW_GROUP_SIZE","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING","ParquetWriter","schema","envelopeWriter","opts","rowBuffer","rowGroupSize","closed","userMetadata","writeHeader","close","row","Error","Shred","shredRecord","rowCount","callback","writeFooter","key","value","String","cnt","setPageSize","path","osopen","outputStream","openStream","ParquetEnvelopeWriter","writeFn","closeFn","fileOffset","write","offset","rowGroups","pageSize","useDataPageV2","Boolean","buf","length","writeSection","Buffer","from","records","encodeRowGroup","baseOffset","rgroup","push","metadata","body","encodeFooter","oswrite","bind","undefined","osclose","ParquetTransformer","objectMode","writeProxy","t","b","writer","encoding","appendRow","then","Promise","resolve","Transform","encodeValues","type","values","PARQUET_CODECS","encodeDataPage","column","data","rLevelsBuf","alloc","rLevelMax","rlevels","bitWidth","getBitWidth","dLevelsBuf","dLevelMax","dlevels","valuesBuf","primitiveType","typeLength","dataBuf","concat","Compression","deflate","compression","compressedBuf","header","PageHeader","PageType","DATA_PAGE","data_page_header","DataPageHeader","num_values","count","Encoding","definition_level_encoding","repetition_level_encoding","uncompressed_page_size","compressed_page_size","headerBuf","serializeThrift","page","headerSize","encodeDataPageV2","disableEnvelope","DATA_PAGE_V2","data_page_header_v2","DataPageHeaderV2","num_nulls","num_rows","definition_levels_byte_length","repetition_levels_byte_length","is_compressed","encodeColumnChunk","buffer","columnData","join","total_uncompressed_size","total_compressed_size","result","pageBuf","ColumnMetaData","path_in_schema","data_page_offset","encodings","Type","codec","CompressionCodec","metadataOffset","RowGroup","columns","total_byte_size","fieldList","field","isNested","cchunkData","cchunk","ColumnChunk","file_offse
t","meta_data","Int64","Number","FileMetaData","version","created_by","row_groups","key_value_metadata","kv","KeyValue","schemaRoot","SchemaElement","name","num_children","Object","keys","fields","relt","FieldRepetitionType","repetitionType","schemaElem","repetition_type","fieldCount","originalType","converted_type","ConvertedType","type_length","metadataEncoded","footerEncoded","copy","writeUInt32LE"],"sources":["../../../../src/parquetjs/encoder/writer.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\nimport {Transform, Writable} from 'stream';\nimport {ParquetCodecOptions, PARQUET_CODECS} from '../codecs';\nimport * as Compression from '../compression';\nimport {\n ParquetBuffer,\n ParquetCodec,\n ParquetData,\n ParquetField,\n PrimitiveType\n} from '../schema/declare';\nimport {ParquetSchema} from '../schema/schema';\nimport * as Shred from '../schema/shred';\nimport {\n ColumnChunk,\n ColumnMetaData,\n CompressionCodec,\n ConvertedType,\n DataPageHeader,\n DataPageHeaderV2,\n Encoding,\n FieldRepetitionType,\n FileMetaData,\n KeyValue,\n PageHeader,\n PageType,\n RowGroup,\n SchemaElement,\n Type\n} from '../parquet-thrift';\nimport {osopen, oswrite, osclose} from '../utils/file-utils';\nimport {getBitWidth, serializeThrift} from '../utils/read-utils';\nimport Int64 from 'node-int64';\n\n/**\n * Parquet File Magic String\n */\nconst PARQUET_MAGIC = 'PAR1';\n\n/**\n * Parquet File Format Version\n */\nconst PARQUET_VERSION = 1;\n\n/**\n * Default Page and Row Group sizes\n */\nconst PARQUET_DEFAULT_PAGE_SIZE = 8192;\nconst PARQUET_DEFAULT_ROW_GROUP_SIZE = 4096;\n\n/**\n * Repetition and Definition Level Encoding\n */\nconst PARQUET_RDLVL_TYPE = 'INT32';\nconst PARQUET_RDLVL_ENCODING = 'RLE';\n\nexport interface ParquetWriterOptions {\n baseOffset?: number;\n rowGroupSize?: number;\n pageSize?: number;\n useDataPageV2?: boolean;\n\n // Write Stream 
Options\n flags?: string;\n encoding?: string;\n fd?: number;\n mode?: number;\n autoClose?: boolean;\n start?: number;\n}\n\n/**\n * Write a parquet file to an output stream. The ParquetWriter will perform\n * buffering/batching for performance, so close() must be called after all rows\n * are written.\n */\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nexport class ParquetWriter<T> {\n /**\n * Convenience method to create a new buffered parquet writer that writes to\n * the specified file\n */\n static async openFile<T>(\n schema: ParquetSchema,\n path: string,\n opts?: ParquetWriterOptions\n ): Promise<ParquetWriter<T>> {\n const outputStream = await osopen(path, opts);\n return ParquetWriter.openStream(schema, outputStream, opts);\n }\n\n /**\n * Convenience method to create a new buffered parquet writer that writes to\n * the specified stream\n */\n static async openStream<T>(\n schema: ParquetSchema,\n outputStream: Writable,\n opts?: ParquetWriterOptions\n ): Promise<ParquetWriter<T>> {\n if (!opts) {\n // tslint:disable-next-line:no-parameter-reassignment\n opts = {};\n }\n\n const envelopeWriter = await ParquetEnvelopeWriter.openStream(schema, outputStream, opts);\n\n return new ParquetWriter(schema, envelopeWriter, opts);\n }\n\n public schema: ParquetSchema;\n public envelopeWriter: ParquetEnvelopeWriter;\n public rowBuffer: ParquetBuffer;\n public rowGroupSize: number;\n public closed: boolean;\n public userMetadata: Record<string, string>;\n\n /**\n * Create a new buffered parquet writer for a given envelope writer\n */\n constructor(\n schema: ParquetSchema,\n envelopeWriter: ParquetEnvelopeWriter,\n opts: ParquetWriterOptions\n ) {\n this.schema = schema;\n this.envelopeWriter = envelopeWriter;\n // @ts-ignore Row buffer typings...\n this.rowBuffer = {};\n this.rowGroupSize = opts.rowGroupSize || PARQUET_DEFAULT_ROW_GROUP_SIZE;\n this.closed = false;\n this.userMetadata = {};\n\n // eslint-disable-next-line 
@typescript-eslint/no-floating-promises\n this.writeHeader();\n }\n\n async writeHeader(): Promise<void> {\n // TODO - better not mess with promises in the constructor\n try {\n await this.envelopeWriter.writeHeader();\n } catch (err) {\n await this.envelopeWriter.close();\n throw err;\n }\n }\n\n /**\n * Append a single row to the parquet file. Rows are buffered in memory until\n * rowGroupSize rows are in the buffer or close() is called\n */\n async appendRow<T>(row: T): Promise<void> {\n if (this.closed) {\n throw new Error('writer was closed');\n }\n Shred.shredRecord(this.schema, row, this.rowBuffer);\n if (this.rowBuffer.rowCount >= this.rowGroupSize) {\n // @ts-ignore\n this.rowBuffer = {};\n }\n }\n\n /**\n * Finish writing the parquet file and commit the footer to disk. This method\n * MUST be called after you are finished adding rows. You must not call this\n * method twice on the same object or add any rows after the close() method has\n * been called\n */\n async close(callback?: () => void): Promise<void> {\n if (this.closed) {\n throw new Error('writer was closed');\n }\n\n this.closed = true;\n\n if (this.rowBuffer.rowCount > 0 || this.rowBuffer.rowCount >= this.rowGroupSize) {\n // @ts-ignore\n this.rowBuffer = {};\n }\n\n await this.envelopeWriter.writeFooter(this.userMetadata);\n await this.envelopeWriter.close();\n // this.envelopeWriter = null;\n\n if (callback) {\n callback();\n }\n }\n\n /**\n * Add key<>value metadata to the file\n */\n setMetadata(key: string, value: string): void {\n // TODO: value to be any, obj -> JSON\n this.userMetadata[String(key)] = String(value);\n }\n\n /**\n * Set the parquet row group size. This values controls the maximum number\n * of rows that are buffered in memory at any given time as well as the number\n * of rows that are co-located on disk. 
A higher value is generally better for\n * read-time I/O performance at the tradeoff of write-time memory usage.\n */\n setRowGroupSize(cnt: number): void {\n this.rowGroupSize = cnt;\n }\n\n /**\n * Set the parquet data page size. The data page size controls the maximum\n * number of column values that are written to disk as a consecutive array\n */\n setPageSize(cnt: number): void {\n this.envelopeWriter.setPageSize(cnt);\n }\n}\n\n/**\n * Create a parquet file from a schema and a number of row groups. This class\n * performs direct, unbuffered writes to the underlying output stream and is\n * intendend for advanced and internal users; the writeXXX methods must be\n * called in the correct order to produce a valid file.\n */\nexport class ParquetEnvelopeWriter {\n /**\n * Create a new parquet envelope writer that writes to the specified stream\n */\n static async openStream(\n schema: ParquetSchema,\n outputStream: Writable,\n opts: ParquetWriterOptions\n ): Promise<ParquetEnvelopeWriter> {\n const writeFn = oswrite.bind(undefined, outputStream);\n const closeFn = osclose.bind(undefined, outputStream);\n return new ParquetEnvelopeWriter(schema, writeFn, closeFn, 0, opts);\n }\n\n public schema: ParquetSchema;\n public write: (buf: Buffer) => Promise<void>;\n public close: () => Promise<void>;\n public offset: number;\n public rowCount: number;\n public rowGroups: RowGroup[];\n public pageSize: number;\n public useDataPageV2: boolean;\n\n constructor(\n schema: ParquetSchema,\n writeFn: (buf: Buffer) => Promise<void>,\n closeFn: () => Promise<void>,\n fileOffset: number,\n opts: ParquetWriterOptions\n ) {\n this.schema = schema;\n this.write = writeFn;\n this.close = closeFn;\n this.offset = fileOffset;\n this.rowCount = 0;\n this.rowGroups = [];\n this.pageSize = opts.pageSize || PARQUET_DEFAULT_PAGE_SIZE;\n this.useDataPageV2 = 'useDataPageV2' in opts ? 
Boolean(opts.useDataPageV2) : false;\n }\n\n writeSection(buf: Buffer): Promise<void> {\n this.offset += buf.length;\n return this.write(buf);\n }\n\n /**\n * Encode the parquet file header\n */\n writeHeader(): Promise<void> {\n return this.writeSection(Buffer.from(PARQUET_MAGIC));\n }\n\n /**\n * Encode a parquet row group. The records object should be created using the\n * shredRecord method\n */\n async writeRowGroup(records: ParquetBuffer): Promise<void> {\n const rgroup = await encodeRowGroup(this.schema, records, {\n baseOffset: this.offset,\n pageSize: this.pageSize,\n useDataPageV2: this.useDataPageV2\n });\n\n this.rowCount += records.rowCount;\n this.rowGroups.push(rgroup.metadata);\n return await this.writeSection(rgroup.body);\n }\n\n /**\n * Write the parquet file footer\n */\n writeFooter(userMetadata: Record<string, string>): Promise<void> {\n if (!userMetadata) {\n // tslint:disable-next-line:no-parameter-reassignment\n userMetadata = {};\n }\n\n return this.writeSection(\n encodeFooter(this.schema, this.rowCount, this.rowGroups, userMetadata)\n );\n }\n\n /**\n * Set the parquet data page size. 
The data page size controls the maximum\n * number of column values that are written to disk as a consecutive array\n */\n setPageSize(cnt: number): void {\n this.pageSize = cnt;\n }\n}\n\n/**\n * Create a parquet transform stream\n */\nexport class ParquetTransformer<T> extends Transform {\n public writer: ParquetWriter<T>;\n\n constructor(schema: ParquetSchema, opts: ParquetWriterOptions = {}) {\n super({objectMode: true});\n\n const writeProxy = (function (t: ParquetTransformer<any>) {\n return async function (b: any): Promise<void> {\n t.push(b);\n };\n })(this);\n\n this.writer = new ParquetWriter(\n schema,\n new ParquetEnvelopeWriter(schema, writeProxy, async () => {}, 0, opts),\n opts\n );\n }\n\n // tslint:disable-next-line:function-name\n _transform(row: any, encoding: string, callback: (val?: any) => void): Promise<void> {\n if (row) {\n return this.writer.appendRow(row).then(callback);\n }\n callback();\n return Promise.resolve();\n }\n\n // tslint:disable-next-line:function-name\n async _flush(callback: (val?: any) => void) {\n await this.writer.close(callback);\n }\n}\n\n/**\n * Encode a consecutive array of data using one of the parquet encodings\n */\nfunction encodeValues(\n type: PrimitiveType,\n encoding: ParquetCodec,\n values: any[],\n opts: ParquetCodecOptions\n) {\n if (!(encoding in PARQUET_CODECS)) {\n throw new Error(`invalid encoding: ${encoding}`);\n }\n return PARQUET_CODECS[encoding].encodeValues(type, values, opts);\n}\n\n/**\n * Encode a parquet data page\n */\nasync function encodeDataPage(\n column: ParquetField,\n data: ParquetData\n): Promise<{\n header: PageHeader;\n headerSize: number;\n page: Buffer;\n}> {\n /* encode repetition and definition levels */\n let rLevelsBuf = Buffer.alloc(0);\n if (column.rLevelMax > 0) {\n rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {\n bitWidth: getBitWidth(column.rLevelMax)\n // disableEnvelope: false\n });\n }\n\n let dLevelsBuf = Buffer.alloc(0);\n if 
(column.dLevelMax > 0) {\n dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {\n bitWidth: getBitWidth(column.dLevelMax)\n // disableEnvelope: false\n });\n }\n\n /* encode values */\n const valuesBuf = encodeValues(column.primitiveType!, column.encoding!, data.values, {\n typeLength: column.typeLength,\n bitWidth: column.typeLength\n });\n\n const dataBuf = Buffer.concat([rLevelsBuf, dLevelsBuf, valuesBuf]);\n\n // compression = column.compression === 'UNCOMPRESSED' ? (compression || 'UNCOMPRESSED') : column.compression;\n const compressedBuf = await Compression.deflate(column.compression!, dataBuf);\n\n /* build page header */\n const header = new PageHeader({\n type: PageType.DATA_PAGE,\n data_page_header: new DataPageHeader({\n num_values: data.count,\n encoding: Encoding[column.encoding!] as any,\n definition_level_encoding: Encoding[PARQUET_RDLVL_ENCODING], // [PARQUET_RDLVL_ENCODING],\n repetition_level_encoding: Encoding[PARQUET_RDLVL_ENCODING] // [PARQUET_RDLVL_ENCODING]\n }),\n uncompressed_page_size: dataBuf.length,\n compressed_page_size: compressedBuf.length\n });\n\n /* concat page header, repetition and definition levels and values */\n const headerBuf = serializeThrift(header);\n const page = Buffer.concat([headerBuf, compressedBuf]);\n\n return {header, headerSize: headerBuf.length, page};\n}\n\n/**\n * Encode a parquet data page (v2)\n */\nasync function encodeDataPageV2(\n column: ParquetField,\n data: ParquetData,\n rowCount: number\n): Promise<{\n header: PageHeader;\n headerSize: number;\n page: Buffer;\n}> {\n /* encode values */\n const valuesBuf = encodeValues(column.primitiveType!, column.encoding!, data.values, {\n typeLength: column.typeLength,\n bitWidth: column.typeLength\n });\n\n // compression = column.compression === 'UNCOMPRESSED' ? 
(compression || 'UNCOMPRESSED') : column.compression;\n const compressedBuf = await Compression.deflate(column.compression!, valuesBuf);\n\n /* encode repetition and definition levels */\n let rLevelsBuf = Buffer.alloc(0);\n if (column.rLevelMax > 0) {\n rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {\n bitWidth: getBitWidth(column.rLevelMax),\n disableEnvelope: true\n });\n }\n\n let dLevelsBuf = Buffer.alloc(0);\n if (column.dLevelMax > 0) {\n dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {\n bitWidth: getBitWidth(column.dLevelMax),\n disableEnvelope: true\n });\n }\n\n /* build page header */\n const header = new PageHeader({\n type: PageType.DATA_PAGE_V2,\n data_page_header_v2: new DataPageHeaderV2({\n num_values: data.count,\n num_nulls: data.count - data.values.length,\n num_rows: rowCount,\n encoding: Encoding[column.encoding!] as any,\n definition_levels_byte_length: dLevelsBuf.length,\n repetition_levels_byte_length: rLevelsBuf.length,\n is_compressed: column.compression !== 'UNCOMPRESSED'\n }),\n uncompressed_page_size: rLevelsBuf.length + dLevelsBuf.length + valuesBuf.length,\n compressed_page_size: rLevelsBuf.length + dLevelsBuf.length + compressedBuf.length\n });\n\n /* concat page header, repetition and definition levels and values */\n const headerBuf = serializeThrift(header);\n const page = Buffer.concat([headerBuf, rLevelsBuf, dLevelsBuf, compressedBuf]);\n return {header, headerSize: headerBuf.length, page};\n}\n\n/**\n * Encode an array of values into a parquet column chunk\n */\nasync function encodeColumnChunk(\n column: ParquetField,\n buffer: ParquetBuffer,\n offset: number,\n opts: ParquetWriterOptions\n): Promise<{\n body: Buffer;\n metadata: ColumnMetaData;\n metadataOffset: number;\n}> {\n const data = buffer.columnData[column.path.join()];\n const baseOffset = (opts.baseOffset || 0) + offset;\n /* encode data page(s) */\n // const pages: Buffer[] = [];\n let 
pageBuf: Buffer;\n // tslint:disable-next-line:variable-name\n let total_uncompressed_size = 0;\n // tslint:disable-next-line:variable-name\n let total_compressed_size = 0;\n {\n const result = opts.useDataPageV2\n ? await encodeDataPageV2(column, data, buffer.rowCount)\n : await encodeDataPage(column, data);\n // pages.push(result.page);\n pageBuf = result.page;\n total_uncompressed_size += result.header.uncompressed_page_size + result.headerSize;\n total_compressed_size += result.header.compressed_page_size + result.headerSize;\n }\n\n // const pagesBuf = Buffer.concat(pages);\n // const compression = column.compression === 'UNCOMPRESSED' ? (opts.compression || 'UNCOMPRESSED') : column.compression;\n\n /* prepare metadata header */\n const metadata = new ColumnMetaData({\n path_in_schema: column.path,\n num_values: data.count,\n data_page_offset: baseOffset,\n encodings: [],\n total_uncompressed_size, // : pagesBuf.length,\n total_compressed_size,\n type: Type[column.primitiveType!],\n codec: CompressionCodec[column.compression!]\n });\n\n /* list encodings */\n metadata.encodings.push(Encoding[PARQUET_RDLVL_ENCODING]);\n metadata.encodings.push(Encoding[column.encoding!]);\n\n /* concat metadata header and data pages */\n const metadataOffset = baseOffset + pageBuf.length;\n const body = Buffer.concat([pageBuf, serializeThrift(metadata)]);\n return {body, metadata, metadataOffset};\n}\n\n/**\n * Encode a list of column values into a parquet row group\n */\nasync function encodeRowGroup(\n schema: ParquetSchema,\n data: ParquetBuffer,\n opts: ParquetWriterOptions\n): Promise<{\n body: Buffer;\n metadata: RowGroup;\n}> {\n const metadata = new RowGroup({\n num_rows: data.rowCount,\n columns: [],\n total_byte_size: 0\n });\n\n let body = Buffer.alloc(0);\n for (const field of schema.fieldList) {\n if (field.isNested) {\n continue; // eslint-disable-line no-continue\n }\n\n const cchunkData = await encodeColumnChunk(field, data, body.length, opts);\n\n const cchunk 
= new ColumnChunk({\n file_offset: cchunkData.metadataOffset,\n meta_data: cchunkData.metadata\n });\n\n metadata.columns.push(cchunk);\n metadata.total_byte_size = new Int64(Number(metadata.total_byte_size) + cchunkData.body.length);\n\n body = Buffer.concat([body, cchunkData.body]);\n }\n\n return {body, metadata};\n}\n\n/**\n * Encode a parquet file metadata footer\n */\nfunction encodeFooter(\n schema: ParquetSchema,\n rowCount: number,\n rowGroups: RowGroup[],\n userMetadata: Record<string, string>\n): Buffer {\n const metadata = new FileMetaData({\n version: PARQUET_VERSION,\n created_by: 'parquets',\n num_rows: rowCount,\n row_groups: rowGroups,\n schema: [],\n key_value_metadata: []\n });\n\n for (const key in userMetadata) {\n const kv = new KeyValue({\n key,\n value: userMetadata[key]\n });\n metadata.key_value_metadata?.push?.(kv);\n }\n\n {\n const schemaRoot = new SchemaElement({\n name: 'root',\n num_children: Object.keys(schema.fields).length\n });\n metadata.schema.push(schemaRoot);\n }\n\n for (const field of schema.fieldList) {\n const relt = FieldRepetitionType[field.repetitionType];\n const schemaElem = new SchemaElement({\n name: field.name,\n repetition_type: relt as any\n });\n\n if (field.isNested) {\n schemaElem.num_children = field.fieldCount;\n } else {\n schemaElem.type = Type[field.primitiveType!] 
as Type;\n }\n\n if (field.originalType) {\n schemaElem.converted_type = ConvertedType[field.originalType] as ConvertedType;\n }\n\n schemaElem.type_length = field.typeLength;\n\n metadata.schema.push(schemaElem);\n }\n\n const metadataEncoded = serializeThrift(metadata);\n const footerEncoded = Buffer.alloc(metadataEncoded.length + 8);\n metadataEncoded.copy(footerEncoded);\n footerEncoded.writeUInt32LE(metadataEncoded.length, metadataEncoded.length);\n footerEncoded.write(PARQUET_MAGIC, metadataEncoded.length + 4);\n return footerEncoded;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAEA;AACA;AACA;AASA;AACA;AAiBA;AACA;AACA;AAA+B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAK/B,IAAMA,aAAa,GAAG,MAAM;;AAK5B,IAAMC,eAAe,GAAG,CAAC;;AAKzB,IAAMC,yBAAyB,GAAG,IAAI;AACtC,IAAMC,8BAA8B,GAAG,IAAI;;AAK3C,IAAMC,kBAAkB,GAAG,OAAO;AAClC,IAAMC,sBAAsB,GAAG,KAAK;AAAC,IAuBxBC,aAAa;EA2CxB,uBACEC,MAAqB,EACrBC,cAAqC,EACrCC,IAA0B,EAC1B;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IACA,IAAI,CAACF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,cAAc,GAAGA,cAAc;IAEpC,IAAI,CAACE,SAAS,GAAG,CAAC,CAAC;IACnB,IAAI,CAACC,YAAY,GAAGF,IAAI,CAACE,YAAY,IAAIR,8BAA8B;IACvE,IAAI,CAACS,MAAM,GAAG,KAAK;IACnB,IAAI,CAACC,YAAY,GAAG,CAAC,CAAC;;IAGtB,IAAI,CAACC,WAAW,EAAE;EACpB;EAAC;IAAA;IAAA;MAAA,6EAED;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA;gBAAA,OAGU,IAAI,CAACN,cAAc,CAACM,WAAW,EAAE;cAAA;gBAAA;gBAAA;cAAA;gBAAA;gBAAA;gBAAA;gBAAA,OAEjC,IAAI,CAACN,cAAc,CAACO,KAAK,EAAE;cAAA;gBAAA;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAGpC;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,2EAMD,kBAAmBC,GAAM;QAAA;UAAA;YAAA;cAAA;gBAAA,KACnB,IAAI,CAACJ,MAAM;kBAAA;kBAAA;gBAAA;gBAAA,MACP,IAAIK,KAAK,CAAC,mBAAmB,CAAC;cAAA;gBAEtCC,KAAK,CAACC,WAAW,CAAC,IAAI,CAACZ,MAAM,EAAES,GAAG,EAAE,IAAI,CAACN,SAAS,CAAC;gBACnD,IAAI,IAAI,CAACA,SAAS,CAACU,QAAQ,IAAI,IAAI,CAACT,YAAY,EAAE;kBAEhD,IAAI,CAACD,SAAS,GAAG,CAAC,CAAC;gBACrB;cAAC;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACF;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,uEAQD,kBAAYW,QAAqB;QAAA;UAAA;YAAA;cAAA;gBAAA,KAC3B,IAAI,CAACT,MAAM;kBAAA;kBAAA;gBAAA;gBAAA,MACP,IAAIK,KAAK,CAA
C,mBAAmB,CAAC;cAAA;gBAGtC,IAAI,CAACL,MAAM,GAAG,IAAI;gBAElB,IAAI,IAAI,CAACF,SAAS,CAACU,QAAQ,GAAG,CAAC,IAAI,IAAI,CAACV,SAAS,CAACU,QAAQ,IAAI,IAAI,CAACT,YAAY,EAAE;kBAE/E,IAAI,CAACD,SAAS,GAAG,CAAC,CAAC;gBACrB;gBAAC;gBAAA,OAEK,IAAI,CAACF,cAAc,CAACc,WAAW,CAAC,IAAI,CAACT,YAAY,CAAC;cAAA;gBAAA;gBAAA,OAClD,IAAI,CAACL,cAAc,CAACO,KAAK,EAAE;cAAA;;gBAGjC,IAAIM,QAAQ,EAAE;kBACZA,QAAQ,EAAE;gBACZ;cAAC;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACF;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;IAKD,qBAAYE,GAAW,EAAEC,KAAa,EAAQ;MAE5C,IAAI,CAACX,YAAY,CAACY,MAAM,CAACF,GAAG,CAAC,CAAC,GAAGE,MAAM,CAACD,KAAK,CAAC;IAChD;;EAAC;IAAA;IAAA;IAQD,yBAAgBE,GAAW,EAAQ;MACjC,IAAI,CAACf,YAAY,GAAGe,GAAG;IACzB;;EAAC;IAAA;IAAA;IAMD,qBAAYA,GAAW,EAAQ;MAC7B,IAAI,CAAClB,cAAc,CAACmB,WAAW,CAACD,GAAG,CAAC;IACtC;EAAC;IAAA;IAAA;MAAA,0EAnID,kBACEnB,MAAqB,EACrBqB,IAAY,EACZnB,IAA2B;QAAA;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA,OAEA,IAAAoB,iBAAM,EAACD,IAAI,EAAEnB,IAAI,CAAC;cAAA;gBAAvCqB,YAAY;gBAAA,kCACXxB,aAAa,CAACyB,UAAU,CAACxB,MAAM,EAAEuB,YAAY,EAAErB,IAAI,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAC5D;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,4EAMD,kBACEF,MAAqB,EACrBuB,YAAsB,EACtBrB,IAA2B;QAAA;QAAA;UAAA;YAAA;cAAA;gBAE3B,IAAI,CAACA,IAAI,EAAE;kBAETA,IAAI,GAAG,CAAC,CAAC;gBACX;gBAAC;gBAAA,OAE4BuB,qBAAqB,CAACD,UAAU,CAACxB,MAAM,EAAEuB,YAAY,EAAErB,IAAI,CAAC;cAAA;gBAAnFD,cAAc;gBAAA,kCAEb,IAAIF,aAAa,CAACC,MAAM,EAAEC,cAAc,EAAEC,IAAI,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACvD;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;EAAA;AAAA;AAAA;AAAA,IAkHUuB,qBAAqB;EAuBhC,+BACEzB,MAAqB,EACrB0B,OAAuC,EACvCC,OAA4B,EAC5BC,UAAkB,EAClB1B,IAA0B,EAC1B;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IACA,IAAI,CAACF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAAC6B,KAAK,GAAGH,OAAO;IACpB,IAAI,CAAClB,KAAK,GAAGmB,OAAO;IACpB,IAAI,CAACG,MAAM,GAAGF,UAAU;IACxB,IAAI,CAACf,QAAQ,GAAG,CAAC;IACjB,IAAI,CAACkB,SAAS,GAAG,EAAE;IACnB,IAAI,CAACC,QAAQ,GAAG9B,IAAI,CAAC8B,QAAQ,IAAIrC,yBAAyB;IAC1D,IAAI,CAACsC,aAAa,GAAG,eAAe,IAAI/B,IAAI,GAAGgC,OAAO,CAAChC,IAAI,CAAC+B,aAAa,CAAC,GAAG,KAAK;EACpF;EAAC;IAAA;IAAA,OAED,sBAAaE,GAAW,
EAAiB;MACvC,IAAI,CAACL,MAAM,IAAIK,GAAG,CAACC,MAAM;MACzB,OAAO,IAAI,CAACP,KAAK,CAACM,GAAG,CAAC;IACxB;;EAAC;IAAA;IAAA;IAKD,uBAA6B;MAC3B,OAAO,IAAI,CAACE,YAAY,CAACC,MAAM,CAACC,IAAI,CAAC9C,aAAa,CAAC,CAAC;IACtD;;EAAC;IAAA;IAAA;MAAA,+EAMD,kBAAoB+C,OAAsB;QAAA;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA,OACnBC,cAAc,CAAC,IAAI,CAACzC,MAAM,EAAEwC,OAAO,EAAE;kBACxDE,UAAU,EAAE,IAAI,CAACZ,MAAM;kBACvBE,QAAQ,EAAE,IAAI,CAACA,QAAQ;kBACvBC,aAAa,EAAE,IAAI,CAACA;gBACtB,CAAC,CAAC;cAAA;gBAJIU,MAAM;gBAMZ,IAAI,CAAC9B,QAAQ,IAAI2B,OAAO,CAAC3B,QAAQ;gBACjC,IAAI,CAACkB,SAAS,CAACa,IAAI,CAACD,MAAM,CAACE,QAAQ,CAAC;gBAAC;gBAAA,OACxB,IAAI,CAACR,YAAY,CAACM,MAAM,CAACG,IAAI,CAAC;cAAA;gBAAA;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAC5C;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;IAKD,qBAAYxC,YAAoC,EAAiB;MAC/D,IAAI,CAACA,YAAY,EAAE;QAEjBA,YAAY,GAAG,CAAC,CAAC;MACnB;MAEA,OAAO,IAAI,CAAC+B,YAAY,CACtBU,YAAY,CAAC,IAAI,CAAC/C,MAAM,EAAE,IAAI,CAACa,QAAQ,EAAE,IAAI,CAACkB,SAAS,EAAEzB,YAAY,CAAC,CACvE;IACH;;EAAC;IAAA;IAAA;IAMD,qBAAYa,GAAW,EAAQ;MAC7B,IAAI,CAACa,QAAQ,GAAGb,GAAG;IACrB;EAAC;IAAA;IAAA;MAAA,6EApFD,kBACEnB,MAAqB,EACrBuB,YAAsB,EACtBrB,IAA0B;QAAA;QAAA;UAAA;YAAA;cAAA;gBAEpBwB,OAAO,GAAGsB,kBAAO,CAACC,IAAI,CAACC,SAAS,EAAE3B,YAAY,CAAC;gBAC/CI,OAAO,GAAGwB,kBAAO,CAACF,IAAI,CAACC,SAAS,EAAE3B,YAAY,CAAC;gBAAA,kCAC9C,IAAIE,qBAAqB,CAACzB,MAAM,EAAE0B,OAAO,EAAEC,OAAO,EAAE,CAAC,EAAEzB,IAAI,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACpE;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;EAAA;AAAA;AAAA;AAAA,IAkFUkD,kBAAkB;EAAA;EAAA;EAG7B,4BAAYpD,MAAqB,EAAmC;IAAA;IAAA,IAAjCE,IAA0B,uEAAG,CAAC,CAAC;IAAA;IAChE,0BAAM;MAACmD,UAAU,EAAE;IAAI,CAAC;IAAE;IAE1B,IAAMC,UAAU,GAAI,UAAUC,CAA0B,EAAE;MACxD;QAAA,qEAAO,kBAAgBC,CAAM;UAAA;YAAA;cAAA;gBAAA;kBAC3BD,CAAC,CAACX,IAAI,CAACY,CAAC,CAAC;gBAAC;gBAAA;kBAAA;cAAA;YAAA;UAAA;QAAA,CACX;QAAA;UAAA;QAAA;MAAA;IACH,CAAC,6CAAO;IAER,MAAKC,MAAM,GAAG,IAAI1D,aAAa,CAC7BC,MAAM,EACN,IAAIyB,qBAAqB,CAACzB,MAAM,EAAEsD,UAAU,4DAAE;MAAA;QAAA;UAAA;YAAA;YAAA;cAAA;UAAA;QAAA;MAAA;IAAA,CAAc,IAAE,CAAC,EAAEpD,IAAI,CAAC,EACtEA,IAAI,CACL;IAAC;EACJ;;EAAC;IAAA;IAAA;IAGD,o
BAAWO,GAAQ,EAAEiD,QAAgB,EAAE5C,QAA6B,EAAiB;MACnF,IAAIL,GAAG,EAAE;QACP,OAAO,IAAI,CAACgD,MAAM,CAACE,SAAS,CAAClD,GAAG,CAAC,CAACmD,IAAI,CAAC9C,QAAQ,CAAC;MAClD;MACAA,QAAQ,EAAE;MACV,OAAO+C,OAAO,CAACC,OAAO,EAAE;IAC1B;;EAAC;IAAA;IAAA;MAAA,wEAGD,mBAAahD,QAA6B;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA,OAClC,IAAI,CAAC2C,MAAM,CAACjD,KAAK,CAACM,QAAQ,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAClC;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;EAAA;AAAA,EA/BwCiD,iBAAS;AAAA;AAqCpD,SAASC,YAAY,CACnBC,IAAmB,EACnBP,QAAsB,EACtBQ,MAAa,EACbhE,IAAyB,EACzB;EACA,IAAI,EAAEwD,QAAQ,IAAIS,sBAAc,CAAC,EAAE;IACjC,MAAM,IAAIzD,KAAK,6BAAsBgD,QAAQ,EAAG;EAClD;EACA,OAAOS,sBAAc,CAACT,QAAQ,CAAC,CAACM,YAAY,CAACC,IAAI,EAAEC,MAAM,EAAEhE,IAAI,CAAC;AAClE;;AAAC,SAKckE,cAAc;EAAA;AAAA;AAAA;EAAA,4EAA7B,mBACEC,MAAoB,EACpBC,IAAiB;IAAA;IAAA;MAAA;QAAA;UAAA;YAObC,UAAU,GAAGjC,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACI,SAAS,GAAG,CAAC,EAAE;cACxBF,UAAU,GAAGP,YAAY,CAACnE,kBAAkB,EAAEC,sBAAsB,EAAEwE,IAAI,CAACI,OAAO,EAAE;gBAClFC,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACI,SAAS;cAExC,CAAC,CAAC;YACJ;YAEII,UAAU,GAAGvC,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACS,SAAS,GAAG,CAAC,EAAE;cACxBD,UAAU,GAAGb,YAAY,CAACnE,kBAAkB,EAAEC,sBAAsB,EAAEwE,IAAI,CAACS,OAAO,EAAE;gBAClFJ,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACS,SAAS;cAExC,CAAC,CAAC;YACJ;;YAGME,SAAS,GAAGhB,YAAY,CAACK,MAAM,CAACY,aAAa,EAAGZ,MAAM,CAACX,QAAQ,EAAGY,IAAI,CAACJ,MAAM,EAAE;cACnFgB,UAAU,EAAEb,MAAM,CAACa,UAAU;cAC7BP,QAAQ,EAAEN,MAAM,CAACa;YACnB,CAAC,CAAC;YAEIC,OAAO,GAAG7C,MAAM,CAAC8C,MAAM,CAAC,CAACb,UAAU,EAAEM,UAAU,EAAEG,SAAS,CAAC,CAAC;YAAA;YAAA,OAGtCK,WAAW,CAACC,OAAO,CAACjB,MAAM,CAACkB,WAAW,EAAGJ,OAAO,CAAC;UAAA;YAAvEK,aAAa;YAGbC,MAAM,GAAG,IAAIC,yBAAU,CAAC;cAC5BzB,IAAI,EAAE0B,uBAAQ,CAACC,SAAS;cACxBC,gBAAgB,EAAE,IAAIC,6BAAc,CAAC;gBACnCC,UAAU,EAAEzB,IAAI,CAAC0B,KAAK;gBACtBtC,QAAQ,EAAEuC,uBAAQ,CAAC5B,MAAM,CAACX,QAAQ,CAAS;gBAC3CwC,yBAAyB,EAAED,uBAAQ,CAACnG,sBAAsB,CAAC;gBAC3DqG,yBAAyB,EAAEF,uBAAQ,CAACnG,sBAAsB;cAC5D,CAAC,CAAC;;cACFsG,sBAAsB,EAAEjB,OAAO,CAAC/C,MAAM;cACtCiE,oBAAoB,EAAEb,aAAa,CAACpD;Y
ACtC,CAAC,CAAC;YAGIkE,SAAS,GAAG,IAAAC,0BAAe,EAACd,MAAM,CAAC;YACnCe,IAAI,GAAGlE,MAAM,CAAC8C,MAAM,CAAC,CAACkB,SAAS,EAAEd,aAAa,CAAC,CAAC;YAAA,mCAE/C;cAACC,MAAM,EAANA,MAAM;cAAEgB,UAAU,EAAEH,SAAS,CAAClE,MAAM;cAAEoE,IAAI,EAAJA;YAAI,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACpD;EAAA;AAAA;AAAA,SAKcE,gBAAgB;EAAA;AAAA;AAAA;EAAA,6EAA/B,mBACErC,MAAoB,EACpBC,IAAiB,EACjBzD,QAAgB;IAAA;IAAA;MAAA;QAAA;UAAA;YAOVmE,SAAS,GAAGhB,YAAY,CAACK,MAAM,CAACY,aAAa,EAAGZ,MAAM,CAACX,QAAQ,EAAGY,IAAI,CAACJ,MAAM,EAAE;cACnFgB,UAAU,EAAEb,MAAM,CAACa,UAAU;cAC7BP,QAAQ,EAAEN,MAAM,CAACa;YACnB,CAAC,CAAC;YAAA;YAAA,OAG0BG,WAAW,CAACC,OAAO,CAACjB,MAAM,CAACkB,WAAW,EAAGP,SAAS,CAAC;UAAA;YAAzEQ,aAAa;YAGfjB,UAAU,GAAGjC,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACI,SAAS,GAAG,CAAC,EAAE;cACxBF,UAAU,GAAGP,YAAY,CAACnE,kBAAkB,EAAEC,sBAAsB,EAAEwE,IAAI,CAACI,OAAO,EAAE;gBAClFC,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACI,SAAS,CAAC;gBACvCkC,eAAe,EAAE;cACnB,CAAC,CAAC;YACJ;YAEI9B,UAAU,GAAGvC,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACS,SAAS,GAAG,CAAC,EAAE;cACxBD,UAAU,GAAGb,YAAY,CAACnE,kBAAkB,EAAEC,sBAAsB,EAAEwE,IAAI,CAACS,OAAO,EAAE;gBAClFJ,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACS,SAAS,CAAC;gBACvC6B,eAAe,EAAE;cACnB,CAAC,CAAC;YACJ;;YAGMlB,MAAM,GAAG,IAAIC,yBAAU,CAAC;cAC5BzB,IAAI,EAAE0B,uBAAQ,CAACiB,YAAY;cAC3BC,mBAAmB,EAAE,IAAIC,+BAAgB,CAAC;gBACxCf,UAAU,EAAEzB,IAAI,CAAC0B,KAAK;gBACtBe,SAAS,EAAEzC,IAAI,CAAC0B,KAAK,GAAG1B,IAAI,CAACJ,MAAM,CAAC9B,MAAM;gBAC1C4E,QAAQ,EAAEnG,QAAQ;gBAClB6C,QAAQ,EAAEuC,uBAAQ,CAAC5B,MAAM,CAACX,QAAQ,CAAS;gBAC3CuD,6BAA6B,EAAEpC,UAAU,CAACzC,MAAM;gBAChD8E,6BAA6B,EAAE3C,UAAU,CAACnC,MAAM;gBAChD+E,aAAa,EAAE9C,MAAM,CAACkB,WAAW,KAAK;cACxC,CAAC,CAAC;cACFa,sBAAsB,EAAE7B,UAAU,CAACnC,MAAM,GAAGyC,UAAU,CAACzC,MAAM,GAAG4C,SAAS,CAAC5C,MAAM;cAChFiE,oBAAoB,EAAE9B,UAAU,CAACnC,MAAM,GAAGyC,UAAU,CAACzC,MAAM,GAAGoD,aAAa,CAACpD;YAC9E,CAAC,CAAC;YAGIkE,SAAS,GAAG,IAAAC,0BAAe,EAACd,MAAM,CAAC;YACnCe,IAAI,GAAGlE,MAAM,CAAC8C,MAAM,CAAC,CAACkB,SAAS,EAAE/B,UAAU,EAAEM,UAAU,EAAEW,aAAa,CAAC,CAAC;YAAA,mCACvE;cAACC,MAAM,EAANA,MAAM;cAAEgB
,UAAU,EAAEH,SAAS,CAAClE,MAAM;cAAEoE,IAAI,EAAJA;YAAI,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACpD;EAAA;AAAA;AAAA,SAKcY,iBAAiB;EAAA;AAAA;AAAA;EAAA,+EAAhC,mBACE/C,MAAoB,EACpBgD,MAAqB,EACrBvF,MAAc,EACd5B,IAA0B;IAAA;IAAA;MAAA;QAAA;UAAA;YAMpBoE,IAAI,GAAG+C,MAAM,CAACC,UAAU,CAACjD,MAAM,CAAChD,IAAI,CAACkG,IAAI,EAAE,CAAC;YAC5C7E,UAAU,GAAG,CAACxC,IAAI,CAACwC,UAAU,IAAI,CAAC,IAAIZ,MAAM;YAK9C0F,uBAAuB,GAAG,CAAC;YAE3BC,qBAAqB,GAAG,CAAC;YAAA,KAEZvH,IAAI,CAAC+B,aAAa;cAAA;cAAA;YAAA;YAAA;YAAA,OACvByE,gBAAgB,CAACrC,MAAM,EAAEC,IAAI,EAAE+C,MAAM,CAACxG,QAAQ,CAAC;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA,OAC/CuD,cAAc,CAACC,MAAM,EAAEC,IAAI,CAAC;UAAA;YAAA;UAAA;YAFhCoD,MAAM;YAIZC,OAAO,GAAGD,MAAM,CAAClB,IAAI;YACrBgB,uBAAuB,IAAIE,MAAM,CAACjC,MAAM,CAACW,sBAAsB,GAAGsB,MAAM,CAACjB,UAAU;YACnFgB,qBAAqB,IAAIC,MAAM,CAACjC,MAAM,CAACY,oBAAoB,GAAGqB,MAAM,CAACjB,UAAU;YAO3E5D,QAAQ,GAAG,IAAI+E,6BAAc,CAAC;cAClCC,cAAc,EAAExD,MAAM,CAAChD,IAAI;cAC3B0E,UAAU,EAAEzB,IAAI,CAAC0B,KAAK;cACtB8B,gBAAgB,EAAEpF,UAAU;cAC5BqF,SAAS,EAAE,EAAE;cACbP,uBAAuB,EAAvBA,uBAAuB;cACvBC,qBAAqB,EAArBA,qBAAqB;cACrBxD,IAAI,EAAE+D,mBAAI,CAAC3D,MAAM,CAACY,aAAa,CAAE;cACjCgD,KAAK,EAAEC,+BAAgB,CAAC7D,MAAM,CAACkB,WAAW;YAC5C,CAAC,CAAC;YAGF1C,QAAQ,CAACkF,SAAS,CAACnF,IAAI,CAACqD,uBAAQ,CAACnG,sBAAsB,CAAC,CAAC;YACzD+C,QAAQ,CAACkF,SAAS,CAACnF,IAAI,CAACqD,uBAAQ,CAAC5B,MAAM,CAACX,QAAQ,CAAE,CAAC;;YAG7CyE,cAAc,GAAGzF,UAAU,GAAGiF,OAAO,CAACvF,MAAM;YAC5CU,IAAI,GAAGR,MAAM,CAAC8C,MAAM,CAAC,CAACuC,OAAO,EAAE,IAAApB,0BAAe,EAAC1D,QAAQ,CAAC,CAAC,CAAC;YAAA,mCACzD;cAACC,IAAI,EAAJA,IAAI;cAAED,QAAQ,EAARA,QAAQ;cAAEsF,cAAc,EAAdA;YAAc,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACxC;EAAA;AAAA;AAAA,SAKc1F,cAAc;EAAA;AAAA;AAAA;EAAA,4EAA7B,mBACEzC,MAAqB,EACrBsE,IAAmB,EACnBpE,IAA0B;IAAA;IAAA;MAAA;QAAA;UAAA;YAKpB2C,QAAQ,GAAG,IAAIuF,uBAAQ,CAAC;cAC5BpB,QAAQ,EAAE1C,IAAI,CAACzD,QAAQ;cACvBwH,OAAO,EAAE,EAAE;cACXC,eAAe,EAAE;YACnB,CAAC,CAAC;YAEExF,IAAI,GAAGR,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;YAAA,wCACNxE,MAAM,CAACuI,SAAS;YAAA;YAAA;UAAA;YAAA;cAAA;cAAA;YAAA;YAAzBC,KAAK;YAAA,KACVA,KAAK,CAACC,QAAQ;c
AAA;cAAA;YAAA;YAAA;UAAA;YAAA;YAAA,OAIOrB,iBAAiB,CAACoB,KAAK,EAAElE,IAAI,EAAExB,IAAI,CAACV,MAAM,EAAElC,IAAI,CAAC;UAAA;YAApEwI,UAAU;YAEVC,MAAM,GAAG,IAAIC,0BAAW,CAAC;cAC7BC,WAAW,EAAEH,UAAU,CAACP,cAAc;cACtCW,SAAS,EAAEJ,UAAU,CAAC7F;YACxB,CAAC,CAAC;YAEFA,QAAQ,CAACwF,OAAO,CAACzF,IAAI,CAAC+F,MAAM,CAAC;YAC7B9F,QAAQ,CAACyF,eAAe,GAAG,IAAIS,gBAAK,CAACC,MAAM,CAACnG,QAAQ,CAACyF,eAAe,CAAC,GAAGI,UAAU,CAAC5F,IAAI,CAACV,MAAM,CAAC;YAE/FU,IAAI,GAAGR,MAAM,CAAC8C,MAAM,CAAC,CAACtC,IAAI,EAAE4F,UAAU,CAAC5F,IAAI,CAAC,CAAC;UAAC;YAAA;YAAA;UAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA,mCAGzC;cAACA,IAAI,EAAJA,IAAI;cAAED,QAAQ,EAARA;YAAQ,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACxB;EAAA;AAAA;AAKD,SAASE,YAAY,CACnB/C,MAAqB,EACrBa,QAAgB,EAChBkB,SAAqB,EACrBzB,YAAoC,EAC5B;EACR,IAAMuC,QAAQ,GAAG,IAAIoG,2BAAY,CAAC;IAChCC,OAAO,EAAExJ,eAAe;IACxByJ,UAAU,EAAE,UAAU;IACtBnC,QAAQ,EAAEnG,QAAQ;IAClBuI,UAAU,EAAErH,SAAS;IACrB/B,MAAM,EAAE,EAAE;IACVqJ,kBAAkB,EAAE;EACtB,CAAC,CAAC;EAEF,KAAK,IAAMrI,GAAG,IAAIV,YAAY,EAAE;IAAA;IAC9B,IAAMgJ,EAAE,GAAG,IAAIC,uBAAQ,CAAC;MACtBvI,GAAG,EAAHA,GAAG;MACHC,KAAK,EAAEX,YAAY,CAACU,GAAG;IACzB,CAAC,CAAC;IACF,yBAAA6B,QAAQ,CAACwG,kBAAkB,oFAA3B,iDAA6BzG,IAAI,2DAAjC,oDAAoC0G,EAAE,CAAC;EACzC;EAEA;IACE,IAAME,UAAU,GAAG,IAAIC,4BAAa,CAAC;MACnCC,IAAI,EAAE,MAAM;MACZC,YAAY,EAAEC,MAAM,CAACC,IAAI,CAAC7J,MAAM,CAAC8J,MAAM,CAAC,CAAC1H;IAC3C,CAAC,CAAC;IACFS,QAAQ,CAAC7C,MAAM,CAAC4C,IAAI,CAAC4G,UAAU,CAAC;EAClC;EAAC,2CAEmBxJ,MAAM,CAACuI,SAAS;IAAA;EAAA;IAApC,oDAAsC;MAAA,IAA3BC,KAAK;MACd,IAAMuB,IAAI,GAAGC,kCAAmB,CAACxB,KAAK,CAACyB,cAAc,CAAC;MACtD,IAAMC,UAAU,GAAG,IAAIT,4BAAa,CAAC;QACnCC,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBS,eAAe,EAAEJ;MACnB,CAAC,CAAC;MAEF,IAAIvB,KAAK,CAACC,QAAQ,EAAE;QAClByB,UAAU,CAACP,YAAY,GAAGnB,KAAK,CAAC4B,UAAU;MAC5C,CAAC,MAAM;QACLF,UAAU,CAACjG,IAAI,GAAG+D,mBAAI,CAACQ,KAAK,CAACvD,aAAa,CAAU;MACtD;MAEA,IAAIuD,KAAK,CAAC6B,YAAY,EAAE;QACtBH,UAAU,CAACI,cAAc,GAAGC,4BAAa,CAAC/B,KAAK,CAAC6B,YAAY,CAAkB;MAChF;MAEAH,UAAU,CAACM,WAAW,GAAGhC,KAAK,CAACtD,UAAU;MAEzCrC,QAAQ,CAAC7C,MAAM,CAAC4C,I
AAI,CAACsH,UAAU,CAAC;IAClC;EAAC;IAAA;EAAA;IAAA;EAAA;EAED,IAAMO,eAAe,GAAG,IAAAlE,0BAAe,EAAC1D,QAAQ,CAAC;EACjD,IAAM6H,aAAa,GAAGpI,MAAM,CAACkC,KAAK,CAACiG,eAAe,CAACrI,MAAM,GAAG,CAAC,CAAC;EAC9DqI,eAAe,CAACE,IAAI,CAACD,aAAa,CAAC;EACnCA,aAAa,CAACE,aAAa,CAACH,eAAe,CAACrI,MAAM,EAAEqI,eAAe,CAACrI,MAAM,CAAC;EAC3EsI,aAAa,CAAC7I,KAAK,CAACpC,aAAa,EAAEgL,eAAe,CAACrI,MAAM,GAAG,CAAC,CAAC;EAC9D,OAAOsI,aAAa;AACtB"}
@@ -0,0 +1,94 @@
1
+ "use strict";
2
+
3
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
4
+ Object.defineProperty(exports, "__esModule", {
5
+ value: true
6
+ });
7
+ exports.fclose = fclose;
8
+ exports.fopen = fopen;
9
+ exports.fread = fread;
10
+ exports.fstat = fstat;
11
+ exports.osclose = osclose;
12
+ exports.osopen = osopen;
13
+ exports.oswrite = oswrite;
14
+ var _fs = _interopRequireDefault(require("fs"));
15
+
16
/**
 * Open the file at `filePath` for reading.
 * Resolves with the numeric file descriptor; rejects with the fs error.
 */
function fopen(filePath) {
  return new Promise(function (resolve, reject) {
    _fs.default.open(filePath, 'r', function (err, fd) {
      // Single-expression dispatch: forward the error or the descriptor.
      return err ? reject(err) : resolve(fd);
    });
  });
}
27
/**
 * Stat the file at `filePath`.
 * Resolves with the fs.Stats object; rejects with the fs error.
 */
function fstat(filePath) {
  return new Promise(function (resolve, reject) {
    _fs.default.stat(filePath, function (err, stat) {
      // Forward either the error or the stats object to the promise.
      return err ? reject(err) : resolve(stat);
    });
  });
}
38
/**
 * Read exactly `length` bytes from file descriptor `fd` starting at byte
 * offset `position`.
 * Resolves with a Buffer containing the bytes read; rejects on any fs error,
 * or on a short read (fewer than `length` bytes were available), since
 * parquet envelope parsing requires complete reads.
 */
function fread(fd, position, length) {
  var buffer = Buffer.alloc(length);
  return new Promise(function (resolve, reject) {
    _fs.default.read(fd, buffer, 0, length, position, function (err, bytesRead, buf) {
      if (err || bytesRead !== length) {
        // A short read carries no fs error of its own, so construct a proper
        // Error instance (the original called `Error(...)` without `new`).
        reject(err || new Error('read failed'));
      } else {
        resolve(buf);
      }
    });
  });
}
50
/**
 * Close the file descriptor `fd`.
 * Resolves with no value once the descriptor is closed; rejects with the fs
 * error on failure.
 */
function fclose(fd) {
  return new Promise(function (resolve, reject) {
    _fs.default.close(fd, function (err) {
      if (err) {
        reject(err);
      } else {
        // Resolve with no value: on success `err` is null/undefined, so the
        // previous `resolve(err)` leaked a meaningless null to callers; this
        // also matches oswrite/osclose in this module.
        resolve();
      }
    });
  });
}
61
/**
 * Write the chunk `buf` to the writable stream `os`.
 * Resolves with no value once the stream accepts the chunk; rejects with the
 * stream's write error on failure.
 */
function oswrite(os, buf) {
  return new Promise(function (resolve, reject) {
    os.write(buf, function (err) {
      // Settle the promise according to the write callback's outcome.
      return err ? reject(err) : resolve();
    });
  });
}
72
/**
 * Close the writable stream `os`.
 * Resolves with no value once the stream's close callback fires; rejects
 * with the stream's error on failure.
 */
function osclose(os) {
  return new Promise(function (resolve, reject) {
    os.close(function (err) {
      // Settle the promise according to the close callback's outcome.
      return err ? reject(err) : resolve();
    });
  });
}
83
/**
 * Create a write stream at `path` (opts are fs.createWriteStream options).
 * Resolves with the stream once its underlying file is open; rejects with
 * the stream's error if one is emitted first.
 */
function osopen(path, opts) {
  return new Promise(function (resolve, reject) {
    var outputStream = _fs.default.createWriteStream(path, opts);
    // The 'open' event's fd argument is not needed; resolve with the stream.
    outputStream.on('open', function () {
      return resolve(outputStream);
    });
    // 'error' delivers the Error as the first argument, matching reject.
    outputStream.on('error', reject);
  });
}
94
+ //# sourceMappingURL=file.js.map