@loaders.gl/parquet 3.4.13 → 3.4.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (153)
  1. package/dist/dist.min.js +15 -24
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/bundle.js +1 -1
  4. package/dist/es5/bundle.js.map +1 -1
  5. package/dist/es5/constants.js +5 -5
  6. package/dist/es5/constants.js.map +1 -1
  7. package/dist/es5/index.js +24 -24
  8. package/dist/es5/index.js.map +1 -1
  9. package/dist/es5/lib/arrow/convert-row-group-to-columns.js +2 -7
  10. package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -1
  11. package/dist/es5/lib/arrow/convert-schema-from-parquet.js +22 -33
  12. package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -1
  13. package/dist/es5/lib/arrow/convert-schema-to-parquet.js +2 -2
  14. package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -1
  15. package/dist/es5/lib/geo/decode-geo-metadata.js +16 -27
  16. package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -1
  17. package/dist/es5/lib/parsers/parse-parquet-to-columns.js +20 -151
  18. package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -1
  19. package/dist/es5/lib/parsers/parse-parquet-to-rows.js +13 -138
  20. package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -1
  21. package/dist/es5/lib/wasm/encode-parquet-wasm.js +8 -29
  22. package/dist/es5/lib/wasm/encode-parquet-wasm.js.map +1 -1
  23. package/dist/es5/lib/wasm/load-wasm/index.js +1 -1
  24. package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js +10 -33
  25. package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
  26. package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js +4 -22
  27. package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
  28. package/dist/es5/lib/wasm/parse-parquet-wasm.js +13 -46
  29. package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
  30. package/dist/es5/parquet-loader.js +4 -4
  31. package/dist/es5/parquet-loader.js.map +1 -1
  32. package/dist/es5/parquet-wasm-loader.js +4 -4
  33. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  34. package/dist/es5/parquet-wasm-writer.js +3 -3
  35. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  36. package/dist/es5/parquet-writer.js +4 -4
  37. package/dist/es5/parquet-writer.js.map +1 -1
  38. package/dist/es5/parquetjs/codecs/dictionary.js +3 -6
  39. package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
  40. package/dist/es5/parquetjs/codecs/index.js +4 -5
  41. package/dist/es5/parquetjs/codecs/index.js.map +1 -1
  42. package/dist/es5/parquetjs/codecs/plain.js +41 -41
  43. package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
  44. package/dist/es5/parquetjs/codecs/rle.js +25 -30
  45. package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
  46. package/dist/es5/parquetjs/compression.js +26 -90
  47. package/dist/es5/parquetjs/compression.js.map +1 -1
  48. package/dist/es5/parquetjs/encoder/parquet-encoder.js +245 -536
  49. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -1
  50. package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +1 -1
  51. package/dist/es5/parquetjs/parquet-thrift/BsonType.js +28 -40
  52. package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
  53. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js +123 -133
  54. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
  55. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js +138 -150
  56. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
  57. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js +241 -251
  58. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
  59. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js +58 -70
  60. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
  61. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +1 -1
  62. package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +1 -1
  63. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js +97 -107
  64. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
  65. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js +136 -146
  66. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
  67. package/dist/es5/parquetjs/parquet-thrift/DateType.js +28 -40
  68. package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
  69. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js +58 -68
  70. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
  71. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js +71 -81
  72. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
  73. package/dist/es5/parquetjs/parquet-thrift/Encoding.js +1 -1
  74. package/dist/es5/parquetjs/parquet-thrift/EnumType.js +28 -40
  75. package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
  76. package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +1 -1
  77. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js +164 -174
  78. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
  79. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js +28 -40
  80. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
  81. package/dist/es5/parquetjs/parquet-thrift/IntType.js +58 -68
  82. package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
  83. package/dist/es5/parquetjs/parquet-thrift/JsonType.js +28 -40
  84. package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
  85. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js +58 -68
  86. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
  87. package/dist/es5/parquetjs/parquet-thrift/ListType.js +28 -40
  88. package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
  89. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js +274 -310
  90. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
  91. package/dist/es5/parquetjs/parquet-thrift/MapType.js +28 -40
  92. package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
  93. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js +28 -40
  94. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
  95. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js +28 -40
  96. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
  97. package/dist/es5/parquetjs/parquet-thrift/NullType.js +28 -40
  98. package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
  99. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js +56 -66
  100. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
  101. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js +71 -81
  102. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
  103. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js +136 -146
  104. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
  105. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js +71 -81
  106. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
  107. package/dist/es5/parquetjs/parquet-thrift/PageType.js +1 -1
  108. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js +105 -115
  109. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
  110. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js +162 -172
  111. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
  112. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js +71 -81
  113. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
  114. package/dist/es5/parquetjs/parquet-thrift/Statistics.js +106 -116
  115. package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
  116. package/dist/es5/parquetjs/parquet-thrift/StringType.js +28 -40
  117. package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
  118. package/dist/es5/parquetjs/parquet-thrift/TimeType.js +58 -68
  119. package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
  120. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js +76 -90
  121. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
  122. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js +58 -68
  123. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
  124. package/dist/es5/parquetjs/parquet-thrift/Type.js +1 -1
  125. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js +28 -40
  126. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
  127. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js +28 -40
  128. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
  129. package/dist/es5/parquetjs/parquet-thrift/index.js +43 -43
  130. package/dist/es5/parquetjs/parquet-thrift/index.js.map +1 -1
  131. package/dist/es5/parquetjs/parser/decoders.js +195 -327
  132. package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
  133. package/dist/es5/parquetjs/parser/parquet-reader.js +155 -582
  134. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  135. package/dist/es5/parquetjs/schema/declare.js +10 -11
  136. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  137. package/dist/es5/parquetjs/schema/schema.js +65 -82
  138. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  139. package/dist/es5/parquetjs/schema/shred.js +56 -87
  140. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  141. package/dist/es5/parquetjs/schema/types.js +40 -40
  142. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  143. package/dist/es5/parquetjs/utils/file-utils.js +8 -12
  144. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  145. package/dist/es5/parquetjs/utils/read-utils.js +22 -39
  146. package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
  147. package/dist/esm/parquet-loader.js +1 -1
  148. package/dist/esm/parquet-wasm-loader.js +1 -1
  149. package/dist/esm/parquet-wasm-writer.js +1 -1
  150. package/dist/esm/parquet-writer.js +1 -1
  151. package/dist/parquet-worker.js +15 -24
  152. package/dist/parquet-worker.js.map +3 -3
  153. package/package.json +6 -6
package/dist/es5/parquetjs/schema/shred.js

@@ -1,12 +1,11 @@
 "use strict";
 
-var _typeof = require("@babel/runtime/helpers/typeof");
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 Object.defineProperty(exports, "ParquetBuffer", {
   enumerable: true,
-  get: function get() {
+  get: function () {
     return _declare.ParquetBuffer;
   }
 });
@@ -15,38 +14,26 @@ exports.shredBuffer = shredBuffer;
 exports.shredRecord = shredRecord;
 var _declare = require("./declare");
 var Types = _interopRequireWildcard(require("./types"));
-function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
-function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || _typeof(obj) !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
-function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
-function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
+function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
+function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
 function shredBuffer(schema) {
-  var columnData = {};
-  var _iterator = _createForOfIteratorHelper(schema.fieldList),
-    _step;
-  try {
-    for (_iterator.s(); !(_step = _iterator.n()).done;) {
-      var field = _step.value;
-      columnData[field.key] = {
-        dlevels: [],
-        rlevels: [],
-        values: [],
-        pageHeaders: [],
-        count: 0
-      };
-    }
-  } catch (err) {
-    _iterator.e(err);
-  } finally {
-    _iterator.f();
+  const columnData = {};
+  for (const field of schema.fieldList) {
+    columnData[field.key] = {
+      dlevels: [],
+      rlevels: [],
+      values: [],
+      pageHeaders: [],
+      count: 0
+    };
   }
   return {
     rowCount: 0,
-    columnData: columnData
+    columnData
   };
 }
 function shredRecord(schema, record, buffer) {
-  var data = shredBuffer(schema).columnData;
+  const data = shredBuffer(schema).columnData;
   shredRecordFields(schema.fields, record, data, 0, 0);
   if (buffer.rowCount === 0) {
     buffer.rowCount = 1;
@@ -54,26 +41,17 @@ function shredRecord(schema, record, buffer) {
     return;
   }
   buffer.rowCount += 1;
-  var _iterator2 = _createForOfIteratorHelper(schema.fieldList),
-    _step2;
-  try {
-    for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
-      var field = _step2.value;
-      Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);
-      Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);
-      Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);
-      buffer.columnData[field.key].count += data[field.key].count;
-    }
-  } catch (err) {
-    _iterator2.e(err);
-  } finally {
-    _iterator2.f();
+  for (const field of schema.fieldList) {
+    Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);
+    Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);
+    Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);
+    buffer.columnData[field.key].count += data[field.key].count;
   }
 }
 function shredRecordFields(fields, record, data, rLevel, dLevel) {
-  for (var name in fields) {
-    var field = fields[name];
-    var values = [];
+  for (const name in fields) {
+    const field = fields[name];
+    let values = [];
     if (record && field.name in record && record[field.name] !== undefined && record[field.name] !== null) {
       if (record[field.name].constructor === Array) {
         values = record[field.name];
@@ -97,8 +75,8 @@ function shredRecordFields(fields, record, data, rLevel, dLevel) {
       }
       continue;
     }
-    for (var i = 0; i < values.length; i++) {
-      var rlvl = i === 0 ? rLevel : field.rLevelMax;
+    for (let i = 0; i < values.length; i++) {
+      const rlvl = i === 0 ? rLevel : field.rLevelMax;
       if (field.isNested) {
         shredRecordFields(field.fields, values[i], data, rlvl, field.dLevelMax);
       } else {
@@ -111,12 +89,12 @@ function shredRecordFields(fields, record, data, rLevel, dLevel) {
   }
 }
 function materializeRecords(schema, buffer) {
-  var records = [];
-  for (var i = 0; i < buffer.rowCount; i++) {
+  const records = [];
+  for (let i = 0; i < buffer.rowCount; i++) {
     records.push({});
   }
-  for (var key in buffer.columnData) {
-    var columnData = buffer.columnData[key];
+  for (const key in buffer.columnData) {
+    const columnData = buffer.columnData[key];
     if (columnData.count) {
       materializeColumn(schema, columnData, key, records);
     }
@@ -124,55 +102,46 @@ function materializeRecords(schema, buffer) {
   return records;
 }
 function materializeColumn(schema, columnData, key, records) {
-  var field = schema.findField(key);
-  var branch = schema.findFieldBranch(key);
-  var rLevels = new Array(field.rLevelMax + 1).fill(0);
-  var vIndex = 0;
-  for (var i = 0; i < columnData.count; i++) {
-    var dLevel = columnData.dlevels[i];
-    var rLevel = columnData.rlevels[i];
+  const field = schema.findField(key);
+  const branch = schema.findFieldBranch(key);
+  const rLevels = new Array(field.rLevelMax + 1).fill(0);
+  let vIndex = 0;
+  for (let i = 0; i < columnData.count; i++) {
+    const dLevel = columnData.dlevels[i];
+    const rLevel = columnData.rlevels[i];
     rLevels[rLevel]++;
     rLevels.fill(0, rLevel + 1);
-    var rIndex = 0;
-    var record = records[rLevels[rIndex++] - 1];
-    var _iterator3 = _createForOfIteratorHelper(branch),
-      _step3;
-    try {
-      for (_iterator3.s(); !(_step3 = _iterator3.n()).done;) {
-        var step = _step3.value;
-        if (step === field || dLevel < step.dLevelMax) {
+    let rIndex = 0;
+    let record = records[rLevels[rIndex++] - 1];
+    for (const step of branch) {
+      if (step === field || dLevel < step.dLevelMax) {
+        break;
+      }
+      switch (step.repetitionType) {
+        case 'REPEATED':
+          if (!(step.name in record)) {
+            record[step.name] = [];
+          }
+          const ix = rLevels[rIndex++];
+          while (record[step.name].length <= ix) {
+            record[step.name].push({});
+          }
+          record = record[step.name][ix];
           break;
-        }
-        switch (step.repetitionType) {
-          case 'REPEATED':
-            if (!(step.name in record)) {
-              record[step.name] = [];
-            }
-            var _ix = rLevels[rIndex++];
-            while (record[step.name].length <= _ix) {
-              record[step.name].push({});
-            }
-            record = record[step.name][_ix];
-            break;
-          default:
-            record[step.name] = record[step.name] || {};
-            record = record[step.name];
-        }
+        default:
+          record[step.name] = record[step.name] || {};
+          record = record[step.name];
       }
-    } catch (err) {
-      _iterator3.e(err);
-    } finally {
-      _iterator3.f();
     }
     if (dLevel === field.dLevelMax) {
-      var value = Types.fromPrimitive(field.originalType || field.primitiveType, columnData.values[vIndex], field);
+      const value = Types.fromPrimitive(field.originalType || field.primitiveType, columnData.values[vIndex], field);
       vIndex++;
       switch (field.repetitionType) {
         case 'REPEATED':
           if (!(field.name in record)) {
             record[field.name] = [];
           }
-          var ix = rLevels[rIndex];
+          const ix = rLevels[rIndex];
           while (record[field.name].length <= ix) {
             record[field.name].push(null);
           }
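Most of the churn in this file (and in the other dist/es5 modules listed above) comes from the build no longer down-leveling for...of: 3.4.13 wrapped every loop in Babel's _createForOfIteratorHelper machinery, while 3.4.15 emits the native loop with const/let bindings. A minimal runnable sketch of the two output shapes, using illustrative names rather than package code:

// Reduced sketch of what the 3.4.13 ES5 build emitted for a for...of loop:
// manual iterator stepping, with cleanup on early exit.
function es5Loop(list) {
  var out = [];
  var it = list[Symbol.iterator]();
  var step;
  try {
    while (!(step = it.next()).done) {
      out.push(step.value); // original loop body
    }
  } finally {
    if (it.return) it.return(); // close the iterator (no-op for arrays)
  }
  return out;
}

// What the 3.4.15 build emits: the loop survives as written in the source.
function modernLoop(list) {
  const out = [];
  for (const value of list) {
    out.push(value);
  }
  return out;
}

console.log(es5Loop(['a', 'b']), modernLoop(['a', 'b'])); // [ 'a', 'b' ] [ 'a', 'b' ]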
package/dist/es5/parquetjs/schema/shred.js.map

@@ -1 +1 @@
- {"version":3,"file":"shred.js","names":["_declare","require","Types","_interopRequireWildcard","_getRequireWildcardCache","nodeInterop","WeakMap","cacheBabelInterop","cacheNodeInterop","obj","__esModule","_typeof","default","cache","has","get","newObj","hasPropertyDescriptor","Object","defineProperty","getOwnPropertyDescriptor","key","prototype","hasOwnProperty","call","desc","set","_createForOfIteratorHelper","o","allowArrayLike","it","Symbol","iterator","Array","isArray","_unsupportedIterableToArray","length","i","F","s","n","done","value","e","_e","f","TypeError","normalCompletion","didErr","err","step","next","_e2","return","minLen","_arrayLikeToArray","toString","slice","constructor","name","from","test","arr","len","arr2","shredBuffer","schema","columnData","_iterator","fieldList","_step","field","dlevels","rlevels","values","pageHeaders","count","rowCount","shredRecord","record","buffer","data","shredRecordFields","fields","_iterator2","_step2","push","apply","rLevel","dLevel","undefined","Boolean","repetitionType","Error","concat","isNested","rlvl","rLevelMax","dLevelMax","toPrimitive","originalType","primitiveType","materializeRecords","records","materializeColumn","findField","branch","findFieldBranch","rLevels","fill","vIndex","rIndex","_iterator3","_step3","ix","fromPrimitive"],"sources":["../../../../src/parquetjs/schema/shred.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {ParquetBuffer, ParquetData, ParquetField, ParquetRecord} from './declare';\nimport {ParquetSchema} from './schema';\nimport * as Types from './types';\n\nexport {ParquetBuffer};\n\nexport function shredBuffer(schema: ParquetSchema): ParquetBuffer {\n const columnData: Record<string, ParquetData> = {};\n for (const field of schema.fieldList) {\n columnData[field.key] = {\n dlevels: [],\n rlevels: [],\n values: [],\n pageHeaders: [],\n count: 0\n };\n }\n return {rowCount: 0, columnData};\n}\n\n/**\n * 'Shred' a record into a list of <value, repetition_level, definition_level>\n * tuples per column using the Google Dremel Algorithm..\n *\n * The buffer argument must point to an object into which the shredded record\n * will be returned. You may re-use the buffer for repeated calls to this function\n * to append to an existing buffer, as long as the schema is unchanged.\n *\n * The format in which the shredded records will be stored in the buffer is as\n * follows:\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function shredRecord(schema: ParquetSchema, record: any, buffer: ParquetBuffer): void {\n /* shred the record, this may raise an exception */\n const data = shredBuffer(schema).columnData;\n\n shredRecordFields(schema.fields, record, data, 0, 0);\n\n /* if no error during shredding, add the shredded record to the buffer */\n if (buffer.rowCount === 0) {\n buffer.rowCount = 1;\n buffer.columnData = data;\n return;\n }\n buffer.rowCount += 1;\n for (const field of schema.fieldList) {\n Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);\n buffer.columnData[field.key].count += data[field.key].count;\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction shredRecordFields(\n fields: Record<string, ParquetField>,\n record: any,\n data: Record<string, ParquetData>,\n rLevel: number,\n dLevel: number\n) {\n for (const name in fields) {\n const field = fields[name];\n\n // fetch values\n let values: any[] = [];\n if (\n record &&\n field.name in record &&\n record[field.name] !== undefined &&\n record[field.name] !== null\n ) {\n if (record[field.name].constructor === Array) {\n values = record[field.name];\n } else {\n values.push(record[field.name]);\n }\n }\n // check values\n if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {\n throw new Error(`missing required field: ${field.name}`);\n }\n if (values.length > 1 && field.repetitionType !== 'REPEATED') {\n throw new Error(`too many values for field: ${field.name}`);\n }\n\n // push null\n if (values.length === 0) {\n if (field.isNested) {\n shredRecordFields(field.fields!, null, data, rLevel, dLevel);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rLevel);\n data[field.key].dlevels.push(dLevel);\n }\n continue; // eslint-disable-line no-continue\n }\n\n // push values\n for (let i = 0; i < values.length; i++) {\n const rlvl = i === 0 ? rLevel : field.rLevelMax;\n if (field.isNested) {\n shredRecordFields(field.fields!, values[i], data, rlvl, field.dLevelMax);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rlvl);\n data[field.key].dlevels.push(field.dLevelMax);\n data[field.key].values.push(\n Types.toPrimitive((field.originalType || field.primitiveType)!, values[i])\n );\n }\n }\n }\n}\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {\n const records: ParquetRecord[] = [];\n for (let i = 0; i < buffer.rowCount; i++) {\n records.push({});\n }\n for (const key in buffer.columnData) {\n const columnData = buffer.columnData[key];\n if (columnData.count) {\n materializeColumn(schema, columnData, key, records);\n }\n }\n return records;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction materializeColumn(\n schema: ParquetSchema,\n columnData: ParquetData,\n key: string,\n records: ParquetRecord[]\n): void {\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n for (let i = 0; i < columnData.count; i++) {\n const dLevel = columnData.dlevels[i];\n const rLevel = columnData.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes - Build a nested row object\n for (const step of branch) {\n if (step === field || dLevel < step.dLevelMax) {\n break;\n }\n\n switch (step.repetitionType) {\n case 'REPEATED':\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n break;\n\n default:\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node - Add the value\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n columnData.values[vIndex],\n field\n );\n vIndex++;\n\n switch (field.repetitionType) {\n case 'REPEATED':\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n break;\n\n default:\n record[field.name] = value;\n }\n }\n }\n}\n\n// Columnar export\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n *\nexport function extractColumns(schema: ParquetSchema, buffer: ParquetBuffer): Record<string, unknown> {\n const columns: ParquetRecord = {};\n for (const key in buffer.columnData) {\n const columnData = buffer.columnData[key];\n if (columnData.count) {\n extractColumn(schema, columnData, key, columns);\n }\n }\n return columns;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction extractColumn(\n schema: ParquetSchema,\n columnData: ParquetData,\n key: string,\n columns: Record<string, unknown> \n) {\n if (columnData.count <= 0) {\n return;\n }\n\n const record = columns;\n\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n\n let i = 0;\n const dLevel = columnData.dlevels[i];\n const rLevel = columnData.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes\n for (const step of branch) {\n if (step === field || dLevel < step.dLevelMax) {\n break;\n }\n\n switch (step.repetitionType) {\n case 'REPEATED':\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n break;\n\n default:\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n columnData.values[vIndex],\n field\n );\n vIndex++;\n\n switch (field.repetitionType) {\n case 'REPEATED':\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n break;\n\n default:\n record[field.name] = value;\n }\n 
}\n}\n*/\n"],"mappings":";;;;;;;;;;;;;;;AAEA,IAAAA,QAAA,GAAAC,OAAA;AAEA,IAAAC,KAAA,GAAAC,uBAAA,CAAAF,OAAA;AAAiC,SAAAG,yBAAAC,WAAA,eAAAC,OAAA,kCAAAC,iBAAA,OAAAD,OAAA,QAAAE,gBAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,yBAAAC,WAAA,WAAAA,WAAA,GAAAG,gBAAA,GAAAD,iBAAA,KAAAF,WAAA;AAAA,SAAAF,wBAAAM,GAAA,EAAAJ,WAAA,SAAAA,WAAA,IAAAI,GAAA,IAAAA,GAAA,CAAAC,UAAA,WAAAD,GAAA,QAAAA,GAAA,aAAAE,OAAA,CAAAF,GAAA,yBAAAA,GAAA,4BAAAG,OAAA,EAAAH,GAAA,UAAAI,KAAA,GAAAT,wBAAA,CAAAC,WAAA,OAAAQ,KAAA,IAAAA,KAAA,CAAAC,GAAA,CAAAL,GAAA,YAAAI,KAAA,CAAAE,GAAA,CAAAN,GAAA,SAAAO,MAAA,WAAAC,qBAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,GAAA,IAAAZ,GAAA,QAAAY,GAAA,kBAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAf,GAAA,EAAAY,GAAA,SAAAI,IAAA,GAAAR,qBAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAX,GAAA,EAAAY,GAAA,cAAAI,IAAA,KAAAA,IAAA,CAAAV,GAAA,IAAAU,IAAA,CAAAC,GAAA,KAAAR,MAAA,CAAAC,cAAA,CAAAH,MAAA,EAAAK,GAAA,EAAAI,IAAA,YAAAT,MAAA,CAAAK,GAAA,IAAAZ,GAAA,CAAAY,GAAA,SAAAL,MAAA,CAAAJ,OAAA,GAAAH,GAAA,MAAAI,KAAA,IAAAA,KAAA,CAAAa,GAAA,CAAAjB,GAAA,EAAAO,MAAA,YAAAA,MAAA;AAAA,SAAAW,2BAAAC,CAAA,EAAAC,cAAA,QAAAC,EAAA,UAAAC,MAAA,oBAAAH,CAAA,CAAAG,MAAA,CAAAC,QAAA,KAAAJ,CAAA,qBAAAE,EAAA,QAAAG,KAAA,CAAAC,OAAA,CAAAN,CAAA,MAAAE,EAAA,GAAAK,2BAAA,CAAAP,CAAA,MAAAC,cAAA,IAAAD,CAAA,WAAAA,CAAA,CAAAQ,MAAA,qBAAAN,EAAA,EAAAF,CAAA,GAAAE,EAAA,MAAAO,CAAA,UAAAC,CAAA,YAAAA,EAAA,eAAAC,CAAA,EAAAD,CAAA,EAAAE,CAAA,WAAAA,EAAA,QAAAH,CAAA,IAAAT,CAAA,CAAAQ,MAAA,WAAAK,IAAA,mBAAAA,IAAA,SAAAC,KAAA,EAAAd,CAAA,CAAAS,CAAA,UAAAM,CAAA,WAAAA,EAAAC,EAAA,UAAAA,EAAA,KAAAC,CAAA,EAAAP,CAAA,gBAAAQ,SAAA,iJAAAC,gBAAA,SAAAC,MAAA,UAAAC,GAAA,WAAAV,CAAA,WAAAA,EAAA,IAAAT,EAAA,GAAAA,EAAA,CAAAN,IAAA,CAAAI,CAAA,MAAAY,CAAA,WAAAA,EAAA,QAAAU,IAAA,GAAApB,EAAA,CAAAqB,IAAA,IAAAJ,gBAAA,GAAAG,IAAA,CAAAT,IAAA,SAAAS,IAAA,KAAAP,CAAA,WAAAA,EAAAS,GAAA,IAAAJ,MAAA,SAAAC,GAAA,GAAAG,GAAA,KAAAP,CAAA,WAAAA,EAAA,eAAAE,gBAAA,IAAAjB,EAAA,CAAAuB,MAAA,UAAAvB,EAAA,CAAAuB,MAAA,oBAAAL,MAAA,QAAAC,GAAA;AAAA,SAAAd,4BAAAP,CAAA,EAAA0B,MAAA,SAAA1B,CAAA,qBAAAA,CAAA,sBAAA2B,iBAAA,CAAA3B,CAAA,EAAA0B,MAAA,OAAAd,CAAA,GAAAtB,MAAA,CAAAI,SAAA,CAAAkC,QAAA,CAAAhC,IAAA,CAAAI,CAAA,EAAA6B,KAAA,aAAAjB,CAAA,iBAAAZ,CAAA,CAAA8B,WAAA,EAAAlB,CAAA,GAAAZ,CAAA,CAAA8B,WAAA,CAAAC,IAAA,MAAAnB,CAAA,cAAAA,CAAA,mBAAAP,KAAA,CAAA2B,IAAA,CAAAhC,CAAA,OAAAY,CAAA,+DAAAqB,IAAA,CAAArB,CAAA,UAAAe,iBAAA,CAAA3B,CAAA,EAAA0B,MAAA;AAAA,SAAAC,kBAAAO,GAAA,EAAAC,GAAA,QAAAA,GAAA,YAAAA,GAAA,GAAAD,GAAA,CAAA1B,MAAA,EAAA2B,GAAA,GAAAD,GAAA,CAAA1B,MAAA,WAAAC,CAAA,MAAA2B,IAAA,OAAA/B,KAAA,CAAA8B,GAAA,GAAA1B,CAAA,GAAA0B,GAAA,EAAA1B,CAAA,IAAA2B,IAAA,CAAA3B,CAAA,IAAAyB,GAAA,CAAAzB,CAAA,UAAA2B,IAAA;AAI1B,SAASC,WAAWA,CAACC,MAAqB,EAAiB;EAChE,IAAMC,UAAuC,GAAG,CAAC,CAAC;EAAC,IAAAC,SAAA,GAAAzC,0BAAA,CAC/BuC,MAAM,CAACG,SAAS;IAAAC,KAAA;EAAA;IAApC,KAAAF,SAAA,CAAA7B,CAAA,MAAA+B,KAAA,GAAAF,SAAA,CAAA5B,CAAA,IAAAC,IAAA,GAAsC;MAAA,IAA3B8B,KAAK,GAAAD,KAAA,CAAA5B,KAAA;MACdyB,UAAU,CAACI,KAAK,CAAClD,GAAG,CAAC,GAAG;QACtBmD,OAAO,EAAE,EAAE;QACXC,OAAO,EAAE,EAAE;QACXC,MAAM,EAAE,EAAE;QACVC,WAAW,EAAE,EAAE;QACfC,KAAK,EAAE;MACT,CAAC;IACH;EAAC,SAAA3B,GAAA;IAAAmB,SAAA,CAAAzB,CAAA,CAAAM,GAAA;EAAA;IAAAmB,SAAA,CAAAvB,CAAA;EAAA;EACD,OAAO;IAACgC,QAAQ,EAAE,CAAC;IAAEV,UAAU,EAAVA;EAAU,CAAC;AAClC;AAwBO,SAASW,WAAWA,CAACZ,MAAqB,EAAEa,MAAW,EAAEC,MAAqB,EAAQ;EAE3F,IAAMC,IAAI,GAAGhB,WAAW,CAACC,MAAM,CAAC,CAACC,UAAU;EAE3Ce,iBAAiB,CAAChB,MAAM,CAACiB,MAAM,EAAEJ,MAAM,EAAEE,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC;EAGpD,IAAID,MAAM,CAACH,QAAQ,KAAK,CAAC,EAAE;IACzBG,MAAM,CAACH,QAAQ,GAAG,CAAC;IACnBG,MAAM,CAACb,UAAU,GAAGc,IAAI;IACxB;EACF;EACAD,MAAM,CAACH,QAAQ,IAAI,CAAC;EAAC,IAAAO,UAAA,GAAAzD,0BAAA,CACDuC,MAAM,CAACG,SAAS;IAAAgB,MAAA;EAAA;IAApC,KAAAD,UAAA,CAAA7C,C
AAA,MAAA8C,MAAA,GAAAD,UAAA,CAAA5C,CAAA,IAAAC,IAAA,GAAsC;MAAA,IAA3B8B,KAAK,GAAAc,MAAA,CAAA3C,KAAA;MACdT,KAAK,CAACX,SAAS,CAACgE,IAAI,CAACC,KAAK,CAACP,MAAM,CAACb,UAAU,CAACI,KAAK,CAAClD,GAAG,CAAC,CAACoD,OAAO,EAAEQ,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACoD,OAAO,CAAC;MACzFxC,KAAK,CAACX,SAAS,CAACgE,IAAI,CAACC,KAAK,CAACP,MAAM,CAACb,UAAU,CAACI,KAAK,CAAClD,GAAG,CAAC,CAACmD,OAAO,EAAES,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACmD,OAAO,CAAC;MACzFvC,KAAK,CAACX,SAAS,CAACgE,IAAI,CAACC,KAAK,CAACP,MAAM,CAACb,UAAU,CAACI,KAAK,CAAClD,GAAG,CAAC,CAACqD,MAAM,EAAEO,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACqD,MAAM,CAAC;MACvFM,MAAM,CAACb,UAAU,CAACI,KAAK,CAAClD,GAAG,CAAC,CAACuD,KAAK,IAAIK,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACuD,KAAK;IAC7D;EAAC,SAAA3B,GAAA;IAAAmC,UAAA,CAAAzC,CAAA,CAAAM,GAAA;EAAA;IAAAmC,UAAA,CAAAvC,CAAA;EAAA;AACH;AAGA,SAASqC,iBAAiBA,CACxBC,MAAoC,EACpCJ,MAAW,EACXE,IAAiC,EACjCO,MAAc,EACdC,MAAc,EACd;EACA,KAAK,IAAM9B,IAAI,IAAIwB,MAAM,EAAE;IACzB,IAAMZ,KAAK,GAAGY,MAAM,CAACxB,IAAI,CAAC;IAG1B,IAAIe,MAAa,GAAG,EAAE;IACtB,IACEK,MAAM,IACNR,KAAK,CAACZ,IAAI,IAAIoB,MAAM,IACpBA,MAAM,CAACR,KAAK,CAACZ,IAAI,CAAC,KAAK+B,SAAS,IAChCX,MAAM,CAACR,KAAK,CAACZ,IAAI,CAAC,KAAK,IAAI,EAC3B;MACA,IAAIoB,MAAM,CAACR,KAAK,CAACZ,IAAI,CAAC,CAACD,WAAW,KAAKzB,KAAK,EAAE;QAC5CyC,MAAM,GAAGK,MAAM,CAACR,KAAK,CAACZ,IAAI,CAAC;MAC7B,CAAC,MAAM;QACLe,MAAM,CAACY,IAAI,CAACP,MAAM,CAACR,KAAK,CAACZ,IAAI,CAAC,CAAC;MACjC;IACF;IAEA,IAAIe,MAAM,CAACtC,MAAM,KAAK,CAAC,IAAIuD,OAAO,CAACZ,MAAM,CAAC,IAAIR,KAAK,CAACqB,cAAc,KAAK,UAAU,EAAE;MACjF,MAAM,IAAIC,KAAK,4BAAAC,MAAA,CAA4BvB,KAAK,CAACZ,IAAI,CAAE,CAAC;IAC1D;IACA,IAAIe,MAAM,CAACtC,MAAM,GAAG,CAAC,IAAImC,KAAK,CAACqB,cAAc,KAAK,UAAU,EAAE;MAC5D,MAAM,IAAIC,KAAK,+BAAAC,MAAA,CAA+BvB,KAAK,CAACZ,IAAI,CAAE,CAAC;IAC7D;IAGA,IAAIe,MAAM,CAACtC,MAAM,KAAK,CAAC,EAAE;MACvB,IAAImC,KAAK,CAACwB,QAAQ,EAAE;QAClBb,iBAAiB,CAACX,KAAK,CAACY,MAAM,EAAG,IAAI,EAAEF,IAAI,EAAEO,MAAM,EAAEC,MAAM,CAAC;MAC9D,CAAC,MAAM;QACLR,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACuD,KAAK,IAAI,CAAC;QAC1BK,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACoD,OAAO,CAACa,IAAI,CAACE,MAAM,CAAC;QACpCP,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACmD,OAAO,CAACc,IAAI,CAACG,MAAM,CAAC;MACtC;MACA;IACF;IAGA,KAAK,IAAIpD,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGqC,MAAM,CAACtC,MAAM,EAAEC,CAAC,EAAE,EAAE;MACtC,IAAM2D,IAAI,GAAG3D,CAAC,KAAK,CAAC,GAAGmD,MAAM,GAAGjB,KAAK,CAAC0B,SAAS;MAC/C,IAAI1B,KAAK,CAACwB,QAAQ,EAAE;QAClBb,iBAAiB,CAACX,KAAK,CAACY,MAAM,EAAGT,MAAM,CAACrC,CAAC,CAAC,EAAE4C,IAAI,EAAEe,IAAI,EAAEzB,KAAK,CAAC2B,SAAS,CAAC;MAC1E,CAAC,MAAM;QACLjB,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACuD,KAAK,IAAI,CAAC;QAC1BK,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACoD,OAAO,CAACa,IAAI,CAACU,IAAI,CAAC;QAClCf,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACmD,OAAO,CAACc,IAAI,CAACf,KAAK,CAAC2B,SAAS,CAAC;QAC7CjB,IAAI,CAACV,KAAK,CAAClD,GAAG,CAAC,CAACqD,MAAM,CAACY,IAAI,CACzBpF,KAAK,CAACiG,WAAW,CAAE5B,KAAK,CAAC6B,YAAY,IAAI7B,KAAK,CAAC8B,aAAa,EAAI3B,MAAM,CAACrC,CAAC,CAAC,CAC3E,CAAC;MACH;IACF;EACF;AACF;AAqBO,SAASiE,kBAAkBA,CAACpC,MAAqB,EAAEc,MAAqB,EAAmB;EAChG,IAAMuB,OAAwB,GAAG,EAAE;EACnC,KAAK,IAAIlE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG2C,MAAM,CAACH,QAAQ,EAAExC,CAAC,EAAE,EAAE;IACxCkE,OAAO,CAACjB,IAAI,CAAC,CAAC,CAAC,CAAC;EAClB;EACA,KAAK,IAAMjE,GAAG,IAAI2D,MAAM,CAACb,UAAU,EAAE;IACnC,IAAMA,UAAU,GAAGa,MAAM,CAACb,UAAU,CAAC9C,GAAG,CAAC;IACzC,IAAI8C,UAAU,CAACS,KAAK,EAAE;MACpB4B,iBAAiB,CAACtC,MAAM,EAAEC,UAAU,EAAE9C,GAAG,EAAEkF,OAAO,CAAC;IACrD;EACF;EACA,OAAOA,OAAO;AAChB;AAGA,SAASC,iBAAiBA,CACxBtC,MAAqB,EACrBC,UAAuB,EACvB9C,GAAW,EACXkF,OAAwB,EAClB;EACN,IAAMhC,KAAK,GAAGL,MAAM,CAACuC,SAAS,CAACpF,GAAG,CAAC;EACnC,IAAMqF,MAAM,GAAGxC,MAAM,CAACyC,eAAe,CAACtF,GAAG,CAAC;EAG1C,IAAMuF,OAAiB,GAAG,IAAI3
E,KAAK,CAACsC,KAAK,CAAC0B,SAAS,GAAG,CAAC,CAAC,CAACY,IAAI,CAAC,CAAC,CAAC;EAChE,IAAIC,MAAM,GAAG,CAAC;EACd,KAAK,IAAIzE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG8B,UAAU,CAACS,KAAK,EAAEvC,CAAC,EAAE,EAAE;IACzC,IAAMoD,MAAM,GAAGtB,UAAU,CAACK,OAAO,CAACnC,CAAC,CAAC;IACpC,IAAMmD,MAAM,GAAGrB,UAAU,CAACM,OAAO,CAACpC,CAAC,CAAC;IACpCuE,OAAO,CAACpB,MAAM,CAAC,EAAE;IACjBoB,OAAO,CAACC,IAAI,CAAC,CAAC,EAAErB,MAAM,GAAG,CAAC,CAAC;IAE3B,IAAIuB,MAAM,GAAG,CAAC;IACd,IAAIhC,MAAM,GAAGwB,OAAO,CAACK,OAAO,CAACG,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC;IAAC,IAAAC,UAAA,GAAArF,0BAAA,CAGzB+E,MAAM;MAAAO,MAAA;IAAA;MAAzB,KAAAD,UAAA,CAAAzE,CAAA,MAAA0E,MAAA,GAAAD,UAAA,CAAAxE,CAAA,IAAAC,IAAA,GAA2B;QAAA,IAAhBS,IAAI,GAAA+D,MAAA,CAAAvE,KAAA;QACb,IAAIQ,IAAI,KAAKqB,KAAK,IAAIkB,MAAM,GAAGvC,IAAI,CAACgD,SAAS,EAAE;UAC7C;QACF;QAEA,QAAQhD,IAAI,CAAC0C,cAAc;UACzB,KAAK,UAAU;YACb,IAAI,EAAE1C,IAAI,CAACS,IAAI,IAAIoB,MAAM,CAAC,EAAE;cAE1BA,MAAM,CAAC7B,IAAI,CAACS,IAAI,CAAC,GAAG,EAAE;YACxB;YACA,IAAMuD,GAAE,GAAGN,OAAO,CAACG,MAAM,EAAE,CAAC;YAC5B,OAAOhC,MAAM,CAAC7B,IAAI,CAACS,IAAI,CAAC,CAACvB,MAAM,IAAI8E,GAAE,EAAE;cAErCnC,MAAM,CAAC7B,IAAI,CAACS,IAAI,CAAC,CAAC2B,IAAI,CAAC,CAAC,CAAC,CAAC;YAC5B;YACAP,MAAM,GAAGA,MAAM,CAAC7B,IAAI,CAACS,IAAI,CAAC,CAACuD,GAAE,CAAC;YAC9B;UAEF;YACEnC,MAAM,CAAC7B,IAAI,CAACS,IAAI,CAAC,GAAGoB,MAAM,CAAC7B,IAAI,CAACS,IAAI,CAAC,IAAI,CAAC,CAAC;YAC3CoB,MAAM,GAAGA,MAAM,CAAC7B,IAAI,CAACS,IAAI,CAAC;QAC9B;MACF;IAAC,SAAAV,GAAA;MAAA+D,UAAA,CAAArE,CAAA,CAAAM,GAAA;IAAA;MAAA+D,UAAA,CAAAnE,CAAA;IAAA;IAGD,IAAI4C,MAAM,KAAKlB,KAAK,CAAC2B,SAAS,EAAE;MAC9B,IAAMxD,KAAK,GAAGxC,KAAK,CAACiH,aAAa,CAE/B5C,KAAK,CAAC6B,YAAY,IAAI7B,KAAK,CAAC8B,aAAa,EACzClC,UAAU,CAACO,MAAM,CAACoC,MAAM,CAAC,EACzBvC,KACF,CAAC;MACDuC,MAAM,EAAE;MAER,QAAQvC,KAAK,CAACqB,cAAc;QAC1B,KAAK,UAAU;UACb,IAAI,EAAErB,KAAK,CAACZ,IAAI,IAAIoB,MAAM,CAAC,EAAE;YAE3BA,MAAM,CAACR,KAAK,CAACZ,IAAI,CAAC,GAAG,EAAE;UACzB;UACA,IAAMuD,EAAE,GAAGN,OAAO,CAACG,MAAM,CAAC;UAC1B,OAAOhC,MAAM,CAACR,KAAK,CAACZ,IAAI,CAAC,CAACvB,MAAM,IAAI8E,EAAE,EAAE;YAEtCnC,MAAM,CAACR,KAAK,CAACZ,IAAI,CAAC,CAAC2B,IAAI,CAAC,IAAI,CAAC;UAC/B;UACAP,MAAM,CAACR,KAAK,CAACZ,IAAI,CAAC,CAACuD,EAAE,CAAC,GAAGxE,KAAK;UAC9B;QAEF;UACEqC,MAAM,CAACR,KAAK,CAACZ,IAAI,CAAC,GAAGjB,KAAK;MAC9B;IACF;EACF;AACF"}
+ {"version":3,"file":"shred.js","names":["_declare","require","Types","_interopRequireWildcard","_getRequireWildcardCache","nodeInterop","WeakMap","cacheBabelInterop","cacheNodeInterop","obj","__esModule","default","cache","has","get","newObj","hasPropertyDescriptor","Object","defineProperty","getOwnPropertyDescriptor","key","prototype","hasOwnProperty","call","desc","set","shredBuffer","schema","columnData","field","fieldList","dlevels","rlevels","values","pageHeaders","count","rowCount","shredRecord","record","buffer","data","shredRecordFields","fields","Array","push","apply","rLevel","dLevel","name","undefined","constructor","length","Boolean","repetitionType","Error","concat","isNested","i","rlvl","rLevelMax","dLevelMax","toPrimitive","originalType","primitiveType","materializeRecords","records","materializeColumn","findField","branch","findFieldBranch","rLevels","fill","vIndex","rIndex","step","ix","value","fromPrimitive"],"sources":["../../../../src/parquetjs/schema/shred.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {ParquetBuffer, ParquetData, ParquetField, ParquetRecord} from './declare';\nimport {ParquetSchema} from './schema';\nimport * as Types from './types';\n\nexport {ParquetBuffer};\n\nexport function shredBuffer(schema: ParquetSchema): ParquetBuffer {\n const columnData: Record<string, ParquetData> = {};\n for (const field of schema.fieldList) {\n columnData[field.key] = {\n dlevels: [],\n rlevels: [],\n values: [],\n pageHeaders: [],\n count: 0\n };\n }\n return {rowCount: 0, columnData};\n}\n\n/**\n * 'Shred' a record into a list of <value, repetition_level, definition_level>\n * tuples per column using the Google Dremel Algorithm..\n *\n * The buffer argument must point to an object into which the shredded record\n * will be returned. You may re-use the buffer for repeated calls to this function\n * to append to an existing buffer, as long as the schema is unchanged.\n *\n * The format in which the shredded records will be stored in the buffer is as\n * follows:\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function shredRecord(schema: ParquetSchema, record: any, buffer: ParquetBuffer): void {\n /* shred the record, this may raise an exception */\n const data = shredBuffer(schema).columnData;\n\n shredRecordFields(schema.fields, record, data, 0, 0);\n\n /* if no error during shredding, add the shredded record to the buffer */\n if (buffer.rowCount === 0) {\n buffer.rowCount = 1;\n buffer.columnData = data;\n return;\n }\n buffer.rowCount += 1;\n for (const field of schema.fieldList) {\n Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);\n buffer.columnData[field.key].count += data[field.key].count;\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction shredRecordFields(\n fields: Record<string, ParquetField>,\n record: any,\n data: Record<string, ParquetData>,\n rLevel: number,\n dLevel: number\n) {\n for (const name in fields) {\n const field = fields[name];\n\n // fetch values\n let values: any[] = [];\n if (\n record &&\n field.name in record &&\n record[field.name] !== undefined &&\n record[field.name] !== null\n ) {\n if (record[field.name].constructor === Array) {\n values = record[field.name];\n } else {\n values.push(record[field.name]);\n }\n }\n // check values\n if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {\n throw new Error(`missing required field: ${field.name}`);\n }\n if (values.length > 1 && field.repetitionType !== 'REPEATED') {\n throw new Error(`too many values for field: ${field.name}`);\n }\n\n // push null\n if (values.length === 0) {\n if (field.isNested) {\n shredRecordFields(field.fields!, null, data, rLevel, dLevel);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rLevel);\n data[field.key].dlevels.push(dLevel);\n }\n continue; // eslint-disable-line no-continue\n }\n\n // push values\n for (let i = 0; i < values.length; i++) {\n const rlvl = i === 0 ? rLevel : field.rLevelMax;\n if (field.isNested) {\n shredRecordFields(field.fields!, values[i], data, rlvl, field.dLevelMax);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rlvl);\n data[field.key].dlevels.push(field.dLevelMax);\n data[field.key].values.push(\n Types.toPrimitive((field.originalType || field.primitiveType)!, values[i])\n );\n }\n }\n }\n}\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {\n const records: ParquetRecord[] = [];\n for (let i = 0; i < buffer.rowCount; i++) {\n records.push({});\n }\n for (const key in buffer.columnData) {\n const columnData = buffer.columnData[key];\n if (columnData.count) {\n materializeColumn(schema, columnData, key, records);\n }\n }\n return records;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction materializeColumn(\n schema: ParquetSchema,\n columnData: ParquetData,\n key: string,\n records: ParquetRecord[]\n): void {\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n for (let i = 0; i < columnData.count; i++) {\n const dLevel = columnData.dlevels[i];\n const rLevel = columnData.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes - Build a nested row object\n for (const step of branch) {\n if (step === field || dLevel < step.dLevelMax) {\n break;\n }\n\n switch (step.repetitionType) {\n case 'REPEATED':\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n break;\n\n default:\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node - Add the value\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n columnData.values[vIndex],\n field\n );\n vIndex++;\n\n switch (field.repetitionType) {\n case 'REPEATED':\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n break;\n\n default:\n record[field.name] = value;\n }\n }\n }\n}\n\n// Columnar export\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n *\nexport function extractColumns(schema: ParquetSchema, buffer: ParquetBuffer): Record<string, unknown> {\n const columns: ParquetRecord = {};\n for (const key in buffer.columnData) {\n const columnData = buffer.columnData[key];\n if (columnData.count) {\n extractColumn(schema, columnData, key, columns);\n }\n }\n return columns;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction extractColumn(\n schema: ParquetSchema,\n columnData: ParquetData,\n key: string,\n columns: Record<string, unknown> \n) {\n if (columnData.count <= 0) {\n return;\n }\n\n const record = columns;\n\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n\n let i = 0;\n const dLevel = columnData.dlevels[i];\n const rLevel = columnData.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes\n for (const step of branch) {\n if (step === field || dLevel < step.dLevelMax) {\n break;\n }\n\n switch (step.repetitionType) {\n case 'REPEATED':\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n break;\n\n default:\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n columnData.values[vIndex],\n field\n );\n vIndex++;\n\n switch (field.repetitionType) {\n case 'REPEATED':\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n break;\n\n default:\n record[field.name] = value;\n }\n 
}\n}\n*/\n"],"mappings":";;;;;;;;;;;;;;AAEA,IAAAA,QAAA,GAAAC,OAAA;AAEA,IAAAC,KAAA,GAAAC,uBAAA,CAAAF,OAAA;AAAiC,SAAAG,yBAAAC,WAAA,eAAAC,OAAA,kCAAAC,iBAAA,OAAAD,OAAA,QAAAE,gBAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,WAAA,WAAAA,WAAA,GAAAG,gBAAA,GAAAD,iBAAA,KAAAF,WAAA;AAAA,SAAAF,wBAAAM,GAAA,EAAAJ,WAAA,SAAAA,WAAA,IAAAI,GAAA,IAAAA,GAAA,CAAAC,UAAA,WAAAD,GAAA,QAAAA,GAAA,oBAAAA,GAAA,wBAAAA,GAAA,4BAAAE,OAAA,EAAAF,GAAA,UAAAG,KAAA,GAAAR,wBAAA,CAAAC,WAAA,OAAAO,KAAA,IAAAA,KAAA,CAAAC,GAAA,CAAAJ,GAAA,YAAAG,KAAA,CAAAE,GAAA,CAAAL,GAAA,SAAAM,MAAA,WAAAC,qBAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,GAAA,IAAAX,GAAA,QAAAW,GAAA,kBAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAd,GAAA,EAAAW,GAAA,SAAAI,IAAA,GAAAR,qBAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAV,GAAA,EAAAW,GAAA,cAAAI,IAAA,KAAAA,IAAA,CAAAV,GAAA,IAAAU,IAAA,CAAAC,GAAA,KAAAR,MAAA,CAAAC,cAAA,CAAAH,MAAA,EAAAK,GAAA,EAAAI,IAAA,YAAAT,MAAA,CAAAK,GAAA,IAAAX,GAAA,CAAAW,GAAA,SAAAL,MAAA,CAAAJ,OAAA,GAAAF,GAAA,MAAAG,KAAA,IAAAA,KAAA,CAAAa,GAAA,CAAAhB,GAAA,EAAAM,MAAA,YAAAA,MAAA;AAI1B,SAASW,WAAWA,CAACC,MAAqB,EAAiB;EAChE,MAAMC,UAAuC,GAAG,CAAC,CAAC;EAClD,KAAK,MAAMC,KAAK,IAAIF,MAAM,CAACG,SAAS,EAAE;IACpCF,UAAU,CAACC,KAAK,CAACT,GAAG,CAAC,GAAG;MACtBW,OAAO,EAAE,EAAE;MACXC,OAAO,EAAE,EAAE;MACXC,MAAM,EAAE,EAAE;MACVC,WAAW,EAAE,EAAE;MACfC,KAAK,EAAE;IACT,CAAC;EACH;EACA,OAAO;IAACC,QAAQ,EAAE,CAAC;IAAER;EAAU,CAAC;AAClC;AAwBO,SAASS,WAAWA,CAACV,MAAqB,EAAEW,MAAW,EAAEC,MAAqB,EAAQ;EAE3F,MAAMC,IAAI,GAAGd,WAAW,CAACC,MAAM,CAAC,CAACC,UAAU;EAE3Ca,iBAAiB,CAACd,MAAM,CAACe,MAAM,EAAEJ,MAAM,EAAEE,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC;EAGpD,IAAID,MAAM,CAACH,QAAQ,KAAK,CAAC,EAAE;IACzBG,MAAM,CAACH,QAAQ,GAAG,CAAC;IACnBG,MAAM,CAACX,UAAU,GAAGY,IAAI;IACxB;EACF;EACAD,MAAM,CAACH,QAAQ,IAAI,CAAC;EACpB,KAAK,MAAMP,KAAK,IAAIF,MAAM,CAACG,SAAS,EAAE;IACpCa,KAAK,CAACtB,SAAS,CAACuB,IAAI,CAACC,KAAK,CAACN,MAAM,CAACX,UAAU,CAACC,KAAK,CAACT,GAAG,CAAC,CAACY,OAAO,EAAEQ,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACY,OAAO,CAAC;IACzFW,KAAK,CAACtB,SAAS,CAACuB,IAAI,CAACC,KAAK,CAACN,MAAM,CAACX,UAAU,CAACC,KAAK,CAACT,GAAG,CAAC,CAACW,OAAO,EAAES,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACW,OAAO,CAAC;IACzFY,KAAK,CAACtB,SAAS,CAACuB,IAAI,CAACC,KAAK,CAACN,MAAM,CAACX,UAAU,CAACC,KAAK,CAACT,GAAG,CAAC,CAACa,MAAM,EAAEO,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACa,MAAM,CAAC;IACvFM,MAAM,CAACX,UAAU,CAACC,KAAK,CAACT,GAAG,CAAC,CAACe,KAAK,IAAIK,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACe,KAAK;EAC7D;AACF;AAGA,SAASM,iBAAiBA,CACxBC,MAAoC,EACpCJ,MAAW,EACXE,IAAiC,EACjCM,MAAc,EACdC,MAAc,EACd;EACA,KAAK,MAAMC,IAAI,IAAIN,MAAM,EAAE;IACzB,MAAMb,KAAK,GAAGa,MAAM,CAACM,IAAI,CAAC;IAG1B,IAAIf,MAAa,GAAG,EAAE;IACtB,IACEK,MAAM,IACNT,KAAK,CAACmB,IAAI,IAAIV,MAAM,IACpBA,MAAM,CAACT,KAAK,CAACmB,IAAI,CAAC,KAAKC,SAAS,IAChCX,MAAM,CAACT,KAAK,CAACmB,IAAI,CAAC,KAAK,IAAI,EAC3B;MACA,IAAIV,MAAM,CAACT,KAAK,CAACmB,IAAI,CAAC,CAACE,WAAW,KAAKP,KAAK,EAAE;QAC5CV,MAAM,GAAGK,MAAM,CAACT,KAAK,CAACmB,IAAI,CAAC;MAC7B,CAAC,MAAM;QACLf,MAAM,CAACW,IAAI,CAACN,MAAM,CAACT,KAAK,CAACmB,IAAI,CAAC,CAAC;MACjC;IACF;IAEA,IAAIf,MAAM,CAACkB,MAAM,KAAK,CAAC,IAAIC,OAAO,CAACd,MAAM,CAAC,IAAIT,KAAK,CAACwB,cAAc,KAAK,UAAU,EAAE;MACjF,MAAM,IAAIC,KAAK,4BAAAC,MAAA,CAA4B1B,KAAK,CAACmB,IAAI,CAAE,CAAC;IAC1D;IACA,IAAIf,MAAM,CAACkB,MAAM,GAAG,CAAC,IAAItB,KAAK,CAACwB,cAAc,KAAK,UAAU,EAAE;MAC5D,MAAM,IAAIC,KAAK,+BAAAC,MAAA,CAA+B1B,KAAK,CAACmB,IAAI,CAAE,CAAC;IAC7D;IAGA,IAAIf,MAAM,CAACkB,MAAM,KAAK,CAAC,EAAE;MACvB,IAAItB,KAAK,CAAC2B,QAAQ,EAAE;QAClBf,iBAAiB,CAACZ,KAAK,CAACa,MAAM,EAAG,IAAI,EAAEF,IAAI,EAAEM,MAAM,EAAEC,MAAM,CAAC;MAC9D,CAAC,MAAM;QACLP,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACe,KAAK,IAAI,CAAC;QAC1BK,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACY,OAAO,CAACY,IAAI,CAACE,MAAM,CAAC;QAC
pCN,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACW,OAAO,CAACa,IAAI,CAACG,MAAM,CAAC;MACtC;MACA;IACF;IAGA,KAAK,IAAIU,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGxB,MAAM,CAACkB,MAAM,EAAEM,CAAC,EAAE,EAAE;MACtC,MAAMC,IAAI,GAAGD,CAAC,KAAK,CAAC,GAAGX,MAAM,GAAGjB,KAAK,CAAC8B,SAAS;MAC/C,IAAI9B,KAAK,CAAC2B,QAAQ,EAAE;QAClBf,iBAAiB,CAACZ,KAAK,CAACa,MAAM,EAAGT,MAAM,CAACwB,CAAC,CAAC,EAAEjB,IAAI,EAAEkB,IAAI,EAAE7B,KAAK,CAAC+B,SAAS,CAAC;MAC1E,CAAC,MAAM;QACLpB,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACe,KAAK,IAAI,CAAC;QAC1BK,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACY,OAAO,CAACY,IAAI,CAACc,IAAI,CAAC;QAClClB,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACW,OAAO,CAACa,IAAI,CAACf,KAAK,CAAC+B,SAAS,CAAC;QAC7CpB,IAAI,CAACX,KAAK,CAACT,GAAG,CAAC,CAACa,MAAM,CAACW,IAAI,CACzB1C,KAAK,CAAC2D,WAAW,CAAEhC,KAAK,CAACiC,YAAY,IAAIjC,KAAK,CAACkC,aAAa,EAAI9B,MAAM,CAACwB,CAAC,CAAC,CAC3E,CAAC;MACH;IACF;EACF;AACF;AAqBO,SAASO,kBAAkBA,CAACrC,MAAqB,EAAEY,MAAqB,EAAmB;EAChG,MAAM0B,OAAwB,GAAG,EAAE;EACnC,KAAK,IAAIR,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGlB,MAAM,CAACH,QAAQ,EAAEqB,CAAC,EAAE,EAAE;IACxCQ,OAAO,CAACrB,IAAI,CAAC,CAAC,CAAC,CAAC;EAClB;EACA,KAAK,MAAMxB,GAAG,IAAImB,MAAM,CAACX,UAAU,EAAE;IACnC,MAAMA,UAAU,GAAGW,MAAM,CAACX,UAAU,CAACR,GAAG,CAAC;IACzC,IAAIQ,UAAU,CAACO,KAAK,EAAE;MACpB+B,iBAAiB,CAACvC,MAAM,EAAEC,UAAU,EAAER,GAAG,EAAE6C,OAAO,CAAC;IACrD;EACF;EACA,OAAOA,OAAO;AAChB;AAGA,SAASC,iBAAiBA,CACxBvC,MAAqB,EACrBC,UAAuB,EACvBR,GAAW,EACX6C,OAAwB,EAClB;EACN,MAAMpC,KAAK,GAAGF,MAAM,CAACwC,SAAS,CAAC/C,GAAG,CAAC;EACnC,MAAMgD,MAAM,GAAGzC,MAAM,CAAC0C,eAAe,CAACjD,GAAG,CAAC;EAG1C,MAAMkD,OAAiB,GAAG,IAAI3B,KAAK,CAACd,KAAK,CAAC8B,SAAS,GAAG,CAAC,CAAC,CAACY,IAAI,CAAC,CAAC,CAAC;EAChE,IAAIC,MAAM,GAAG,CAAC;EACd,KAAK,IAAIf,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG7B,UAAU,CAACO,KAAK,EAAEsB,CAAC,EAAE,EAAE;IACzC,MAAMV,MAAM,GAAGnB,UAAU,CAACG,OAAO,CAAC0B,CAAC,CAAC;IACpC,MAAMX,MAAM,GAAGlB,UAAU,CAACI,OAAO,CAACyB,CAAC,CAAC;IACpCa,OAAO,CAACxB,MAAM,CAAC,EAAE;IACjBwB,OAAO,CAACC,IAAI,CAAC,CAAC,EAAEzB,MAAM,GAAG,CAAC,CAAC;IAE3B,IAAI2B,MAAM,GAAG,CAAC;IACd,IAAInC,MAAM,GAAG2B,OAAO,CAACK,OAAO,CAACG,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC;IAG3C,KAAK,MAAMC,IAAI,IAAIN,MAAM,EAAE;MACzB,IAAIM,IAAI,KAAK7C,KAAK,IAAIkB,MAAM,GAAG2B,IAAI,CAACd,SAAS,EAAE;QAC7C;MACF;MAEA,QAAQc,IAAI,CAACrB,cAAc;QACzB,KAAK,UAAU;UACb,IAAI,EAAEqB,IAAI,CAAC1B,IAAI,IAAIV,MAAM,CAAC,EAAE;YAE1BA,MAAM,CAACoC,IAAI,CAAC1B,IAAI,CAAC,GAAG,EAAE;UACxB;UACA,MAAM2B,EAAE,GAAGL,OAAO,CAACG,MAAM,EAAE,CAAC;UAC5B,OAAOnC,MAAM,CAACoC,IAAI,CAAC1B,IAAI,CAAC,CAACG,MAAM,IAAIwB,EAAE,EAAE;YAErCrC,MAAM,CAACoC,IAAI,CAAC1B,IAAI,CAAC,CAACJ,IAAI,CAAC,CAAC,CAAC,CAAC;UAC5B;UACAN,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAAC1B,IAAI,CAAC,CAAC2B,EAAE,CAAC;UAC9B;QAEF;UACErC,MAAM,CAACoC,IAAI,CAAC1B,IAAI,CAAC,GAAGV,MAAM,CAACoC,IAAI,CAAC1B,IAAI,CAAC,IAAI,CAAC,CAAC;UAC3CV,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAAC1B,IAAI,CAAC;MAC9B;IACF;IAGA,IAAID,MAAM,KAAKlB,KAAK,CAAC+B,SAAS,EAAE;MAC9B,MAAMgB,KAAK,GAAG1E,KAAK,CAAC2E,aAAa,CAE/BhD,KAAK,CAACiC,YAAY,IAAIjC,KAAK,CAACkC,aAAa,EACzCnC,UAAU,CAACK,MAAM,CAACuC,MAAM,CAAC,EACzB3C,KACF,CAAC;MACD2C,MAAM,EAAE;MAER,QAAQ3C,KAAK,CAACwB,cAAc;QAC1B,KAAK,UAAU;UACb,IAAI,EAAExB,KAAK,CAACmB,IAAI,IAAIV,MAAM,CAAC,EAAE;YAE3BA,MAAM,CAACT,KAAK,CAACmB,IAAI,CAAC,GAAG,EAAE;UACzB;UACA,MAAM2B,EAAE,GAAGL,OAAO,CAACG,MAAM,CAAC;UAC1B,OAAOnC,MAAM,CAACT,KAAK,CAACmB,IAAI,CAAC,CAACG,MAAM,IAAIwB,EAAE,EAAE;YAEtCrC,MAAM,CAACT,KAAK,CAACmB,IAAI,CAAC,CAACJ,IAAI,CAAC,IAAI,CAAC;UAC/B;UACAN,MAAM,CAACT,KAAK,CAACmB,IAAI,CAAC,CAAC2B,EAAE,CAAC,GAAGC,KAAK;UAC9B;QAEF;UACEtC,MAAM,CAACT,KAAK,CAACmB,IAAI,CAAC,GAAG4B,KAAK;MAC9B;IACF;EACF;AACF"}
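The sourcesContent embedded above carries the full shred.ts source, including the doc comments for the Dremel-style shred/materialize round trip. A hedged sketch of how the exported functions compose — shredBuffer, shredRecord and materializeRecords are the exports shown in the diff, while ParquetSchema, the field spec and the deep require paths are assumptions based on the parquets fork this module credits, not a documented public API:

// Hypothetical round trip through the shredder (internal module paths).
const {ParquetSchema} = require('@loaders.gl/parquet/dist/es5/parquetjs/schema/schema');
const {shredBuffer, shredRecord, materializeRecords} =
  require('@loaders.gl/parquet/dist/es5/parquetjs/schema/shred');

const schema = new ParquetSchema({name: {type: 'UTF8'}});

// An empty buffer may be reused across calls as long as the schema is
// unchanged (per the shredRecord doc comment above).
const buffer = shredBuffer(schema);
shredRecord(schema, {name: 'alice'}, buffer);
shredRecord(schema, {name: 'bob'}, buffer);
// buffer now holds per-column {dlevels, rlevels, values, pageHeaders, count}
// plus rowCount: 2.

// materializeRecords reverses the shredding back into row objects.
const records = materializeRecords(schema, buffer);
console.log(buffer.rowCount, records.length); // 2 2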
@@ -7,7 +7,7 @@ exports.PARQUET_LOGICAL_TYPES = void 0;
 exports.fromPrimitive = fromPrimitive;
 exports.toPrimitive = toPrimitive;
 var _bson = require("@loaders.gl/bson");
-var PARQUET_LOGICAL_TYPES = {
+const PARQUET_LOGICAL_TYPES = {
   BOOLEAN: {
     primitiveType: 'BOOLEAN',
     toPrimitive: toPrimitive_BOOLEAN,
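For context on the hunk above: PARQUET_LOGICAL_TYPES is a lookup table keyed by logical type name, each entry carrying the backing primitive type plus converter functions. A minimal standalone sketch of that dispatch pattern (all names here are illustrative, not the package's exported API):

```js
// Standalone sketch of a logical-type lookup table and its dispatch.
// Hypothetical names — not the @loaders.gl/parquet exports.
const LOGICAL_TYPES = {
  BOOLEAN: {
    primitiveType: 'BOOLEAN',
    toPrimitive: (value) => Boolean(value),
    fromPrimitive: (value) => Boolean(value)
  },
  DATE: {
    primitiveType: 'INT32',
    toPrimitive: (value) => Math.floor(value.getTime() / 86400000),
    fromPrimitive: (value) => new Date(value * 86400000)
  }
};

// Convert a JS value to its Parquet primitive representation,
// if a converter is registered for the given logical type.
function toPrimitive(type, value) {
  const entry = LOGICAL_TYPES[type];
  if (!entry) {
    throw new Error(`unsupported logical type: ${type}`);
  }
  return entry.toPrimitive ? entry.toPrimitive(value) : value;
}

console.log(toPrimitive('DATE', new Date(0))); // 0 (days since epoch)
```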
@@ -183,93 +183,93 @@ function fromPrimitive_BOOLEAN(value) {
   return Boolean(value);
 }
 function toPrimitive_FLOAT(value) {
-  var v = parseFloat(value);
+  const v = parseFloat(value);
   if (isNaN(v)) {
     throw new Error("invalid value for FLOAT: ".concat(value));
   }
   return v;
 }
 function toPrimitive_DOUBLE(value) {
-  var v = parseFloat(value);
+  const v = parseFloat(value);
   if (isNaN(v)) {
     throw new Error("invalid value for DOUBLE: ".concat(value));
   }
   return v;
 }
 function toPrimitive_INT8(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (v < -0x80 || v > 0x7f || isNaN(v)) {
     throw new Error("invalid value for INT8: ".concat(value));
   }
   return v;
 }
 function toPrimitive_UINT8(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (v < 0 || v > 0xff || isNaN(v)) {
     throw new Error("invalid value for UINT8: ".concat(value));
   }
   return v;
 }
 function toPrimitive_INT16(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (v < -0x8000 || v > 0x7fff || isNaN(v)) {
     throw new Error("invalid value for INT16: ".concat(value));
   }
   return v;
 }
 function toPrimitive_UINT16(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (v < 0 || v > 0xffff || isNaN(v)) {
     throw new Error("invalid value for UINT16: ".concat(value));
   }
   return v;
 }
 function toPrimitive_INT32(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (v < -0x80000000 || v > 0x7fffffff || isNaN(v)) {
     throw new Error("invalid value for INT32: ".concat(value));
   }
   return v;
 }
 function decimalToPrimitive_INT32(value, field) {
-  var primitiveValue = value * Math.pow(10, field.scale || 0);
-  var v = Math.round(primitiveValue * Math.pow(10, -field.presision) % 1 * Math.pow(10, field.presision));
+  const primitiveValue = value * 10 ** (field.scale || 0);
+  const v = Math.round(primitiveValue * 10 ** -field.presision % 1 * 10 ** field.presision);
   if (v < -0x80000000 || v > 0x7fffffff || isNaN(v)) {
     throw new Error("invalid value for INT32: ".concat(value));
   }
   return v;
 }
 function toPrimitive_UINT32(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (v < 0 || v > 0xffffffffffff || isNaN(v)) {
     throw new Error("invalid value for UINT32: ".concat(value));
   }
   return v;
 }
 function toPrimitive_INT64(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (isNaN(v)) {
     throw new Error("invalid value for INT64: ".concat(value));
   }
   return v;
 }
 function decimalToPrimitive_INT64(value, field) {
-  var primitiveValue = value * Math.pow(10, field.scale || 0);
-  var v = Math.round(primitiveValue * Math.pow(10, -field.presision) % 1 * Math.pow(10, field.presision));
+  const primitiveValue = value * 10 ** (field.scale || 0);
+  const v = Math.round(primitiveValue * 10 ** -field.presision % 1 * 10 ** field.presision);
   if (isNaN(v)) {
     throw new Error("invalid value for INT64: ".concat(value));
   }
   return v;
 }
 function toPrimitive_UINT64(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (v < 0 || isNaN(v)) {
     throw new Error("invalid value for UINT64: ".concat(value));
   }
   return v;
 }
 function toPrimitive_INT96(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (isNaN(v)) {
     throw new Error("invalid value for INT96: ".concat(value));
   }
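Most of this hunk is the mechanical `var` → `const` swap, but decimalToPrimitive_INT32/INT64 also trade `Math.pow(10, n)` for the exponentiation operator `10 ** n`. The two forms are equivalent for these arguments; a quick standalone check of the rewritten expression (example values chosen for illustration; `presision` is spelled as in the package's field metadata):

```js
// Verify that the 10 ** n rewrite matches the old Math.pow(10, n) form.
const field = {scale: 2, presision: 9}; // 'presision' matches the package's field key

function scaleOld(value) {
  const primitiveValue = value * Math.pow(10, field.scale || 0);
  return Math.round(primitiveValue * Math.pow(10, -field.presision) % 1 * Math.pow(10, field.presision));
}

function scaleNew(value) {
  const primitiveValue = value * 10 ** (field.scale || 0);
  return Math.round(primitiveValue * 10 ** -field.presision % 1 * 10 ** field.presision);
}

console.log(scaleOld(3.45), scaleNew(3.45)); // 345 345 — same integer either way
```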
@@ -295,7 +295,7 @@ function fromPrimitive_JSON(value) {
 }
 function toPrimitive_BSON(value) {
   var _BSONWriter$encodeSyn;
-  var arrayBuffer = (_BSONWriter$encodeSyn = _bson.BSONWriter.encodeSync) === null || _BSONWriter$encodeSyn === void 0 ? void 0 : _BSONWriter$encodeSyn.call(_bson.BSONWriter, value);
+  const arrayBuffer = (_BSONWriter$encodeSyn = _bson.BSONWriter.encodeSync) === null || _BSONWriter$encodeSyn === void 0 ? void 0 : _BSONWriter$encodeSyn.call(_bson.BSONWriter, value);
   return Buffer.from(arrayBuffer);
 }
 function fromPrimitive_BSON(value) {
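The changed line here is again just the `const` swap, but the surrounding pattern is worth decoding: `(_tmp = obj.method) === null || _tmp === void 0 ? void 0 : _tmp.call(obj, x)` is how transpilers lower an optional call to ES5. Assuming that is the origin of this line, the modern-syntax source would read roughly:

```js
// Approximate pre-transpilation equivalent of the line above (assumption:
// the ES5 output was produced from optional chaining):
const arrayBuffer = BSONWriter.encodeSync?.(value);
// i.e. call encodeSync only if it is defined; otherwise yield undefined.
```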
@@ -303,26 +303,26 @@ function fromPrimitive_BSON(value) {
   return (_BSONLoader$parseSync = _bson.BSONLoader.parseSync) === null || _BSONLoader$parseSync === void 0 ? void 0 : _BSONLoader$parseSync.call(_bson.BSONLoader, value);
 }
 function toPrimitive_TIME_MILLIS(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (v < 0 || v > 0xffffffffffffffff || isNaN(v)) {
     throw new Error("invalid value for TIME_MILLIS: ".concat(value));
   }
   return v;
 }
 function toPrimitive_TIME_MICROS(value) {
-  var v = parseInt(value, 10);
+  const v = parseInt(value, 10);
   if (v < 0 || isNaN(v)) {
     throw new Error("invalid value for TIME_MICROS: ".concat(value));
   }
   return v;
 }
-var kMillisPerDay = 86400000;
+const kMillisPerDay = 86400000;
 function toPrimitive_DATE(value) {
   if (value instanceof Date) {
     return value.getTime() / kMillisPerDay;
   }
   {
-    var v = parseInt(value, 10);
+    const v = parseInt(value, 10);
     if (v < 0 || isNaN(v)) {
       throw new Error("invalid value for DATE: ".concat(value));
     }
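The DATE path above divides the epoch-millisecond timestamp by kMillisPerDay (86 400 000 ms) to get the day count that the DATE logical type stores. A standalone round-trip sketch of that arithmetic:

```js
// DATE round trip: Date -> days since Unix epoch -> Date.
const kMillisPerDay = 86400000;

function dateToDays(date) {
  return date.getTime() / kMillisPerDay;
}

function daysToDate(days) {
  return new Date(days * kMillisPerDay);
}

const days = dateToDays(new Date(Date.UTC(1970, 0, 11))); // 10 days after epoch
console.log(days);             // 10
console.log(daysToDate(days)); // 1970-01-11T00:00:00.000Z
```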
@@ -337,7 +337,7 @@ function toPrimitive_TIMESTAMP_MILLIS(value) {
     return value.getTime();
   }
   {
-    var v = parseInt(value, 10);
+    const v = parseInt(value, 10);
     if (v < 0 || isNaN(v)) {
       throw new Error("invalid value for TIMESTAMP_MILLIS: ".concat(value));
     }
@@ -352,7 +352,7 @@ function toPrimitive_TIMESTAMP_MICROS(value) {
     return value.getTime() * 1000;
   }
   {
-    var v = parseInt(value, 10);
+    const v = parseInt(value, 10);
     if (v < 0 || isNaN(v)) {
       throw new Error("invalid value for TIMESTAMP_MICROS: ".concat(value));
     }
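As the two hunks above show, TIMESTAMP_MILLIS stores `getTime()` directly while TIMESTAMP_MICROS multiplies it by 1000 — so a JS Date, which only carries millisecond precision, always yields microsecond values whose last three digits are zero. A quick illustration:

```js
// Millisecond vs microsecond timestamps derived from the same JS Date.
const d = new Date('2023-01-01T00:00:00.123Z');
const millis = d.getTime();        // 1672531200123
const micros = d.getTime() * 1000; // 1672531200123000 — sub-ms digits always zero
console.log(millis, micros);
```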
@@ -366,41 +366,41 @@ function toPrimitive_INTERVAL(value) {
   if (!value.months || !value.days || !value.milliseconds) {
     throw new Error('value for INTERVAL must be object { months: ..., days: ..., milliseconds: ... }');
   }
-  var buf = Buffer.alloc(12);
+  const buf = Buffer.alloc(12);
   buf.writeUInt32LE(value.months, 0);
   buf.writeUInt32LE(value.days, 4);
   buf.writeUInt32LE(value.milliseconds, 8);
   return buf;
 }
 function fromPrimitive_INTERVAL(value) {
-  var buf = Buffer.from(value);
-  var months = buf.readUInt32LE(0);
-  var days = buf.readUInt32LE(4);
-  var millis = buf.readUInt32LE(8);
+  const buf = Buffer.from(value);
+  const months = buf.readUInt32LE(0);
+  const days = buf.readUInt32LE(4);
+  const millis = buf.readUInt32LE(8);
   return {
-    months: months,
-    days: days,
+    months,
+    days,
     milliseconds: millis
   };
 }
 function decimalFromPrimitive_INT(value, field) {
-  var presisionInt = Math.round(value * Math.pow(10, -field.presision) % 1 * Math.pow(10, field.presision));
-  return presisionInt * Math.pow(10, -(field.scale || 0));
+  const presisionInt = Math.round(value * 10 ** -field.presision % 1 * 10 ** field.presision);
+  return presisionInt * 10 ** -(field.scale || 0);
 }
 function decimalFromPrimitive_BYTE_ARRAY(value, field) {
-  var number = 0;
+  let number = 0;
   if (value.length <= 4) {
-    for (var i = 0; i < value.length; i++) {
-      var component = value[i] << 8 * (value.length - i - 1);
+    for (let i = 0; i < value.length; i++) {
+      const component = value[i] << 8 * (value.length - i - 1);
       number += component;
     }
   } else {
-    for (var _i = 0; _i < value.length; _i++) {
-      var _component = value[_i] * Math.pow(2, 8 * (value.length - 1 - _i));
-      number += _component;
+    for (let i = 0; i < value.length; i++) {
+      const component = value[i] * 2 ** (8 * (value.length - 1 - i));
+      number += component;
     }
   }
-  var presisionInt = Math.round(number * Math.pow(10, -field.presision) % 1 * Math.pow(10, field.presision));
-  return presisionInt * Math.pow(10, -(field.scale || 0));
+  const presisionInt = Math.round(number * 10 ** -field.presision % 1 * 10 ** field.presision);
+  return presisionInt * 10 ** -(field.scale || 0);
 }
 //# sourceMappingURL=types.js.map
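The INTERVAL converters in this final hunk fix the physical layout at 12 bytes: three little-endian uint32 fields for months, days, and milliseconds. A standalone round-trip sketch mirroring those functions (Node Buffer API; not the package's exports):

```js
// INTERVAL layout: | months (u32 LE) | days (u32 LE) | milliseconds (u32 LE) |
function encodeInterval({months, days, milliseconds}) {
  const buf = Buffer.alloc(12);
  buf.writeUInt32LE(months, 0);
  buf.writeUInt32LE(days, 4);
  buf.writeUInt32LE(milliseconds, 8);
  return buf;
}

function decodeInterval(buf) {
  return {
    months: buf.readUInt32LE(0),
    days: buf.readUInt32LE(4),
    milliseconds: buf.readUInt32LE(8)
  };
}

console.log(decodeInterval(encodeInterval({months: 1, days: 2, milliseconds: 3})));
// { months: 1, days: 2, milliseconds: 3 }
```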