@loaders.gl/parquet 3.1.0-beta.3 → 3.1.1

This diff shows the contents of publicly released package versions as they appear in their public registries, and is provided for informational purposes only.
Files changed (139)
  1. package/dist/dist.min.js +6 -6
  2. package/dist/dist.min.js.map +2 -2
  3. package/dist/es5/bundle.js +1 -1
  4. package/dist/es5/bundle.js.map +1 -1
  5. package/dist/es5/constants.js +5 -5
  6. package/dist/es5/constants.js.map +1 -1
  7. package/dist/es5/index.js +19 -10
  8. package/dist/es5/index.js.map +1 -1
  9. package/dist/es5/lib/convert-schema.js +13 -13
  10. package/dist/es5/lib/convert-schema.js.map +1 -1
  11. package/dist/es5/lib/parse-parquet.js +154 -19
  12. package/dist/es5/lib/parse-parquet.js.map +1 -1
  13. package/dist/es5/lib/read-array-buffer.js +43 -6
  14. package/dist/es5/lib/read-array-buffer.js.map +1 -1
  15. package/dist/es5/parquet-loader.js +4 -4
  16. package/dist/es5/parquet-loader.js.map +1 -1
  17. package/dist/es5/parquet-writer.js +4 -4
  18. package/dist/es5/parquet-writer.js.map +1 -1
  19. package/dist/es5/parquetjs/codecs/dictionary.js +10 -2
  20. package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
  21. package/dist/es5/parquetjs/codecs/index.js +6 -4
  22. package/dist/es5/parquetjs/codecs/index.js.map +1 -1
  23. package/dist/es5/parquetjs/codecs/plain.js +43 -41
  24. package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
  25. package/dist/es5/parquetjs/codecs/rle.js +35 -25
  26. package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
  27. package/dist/es5/parquetjs/compression.js +110 -27
  28. package/dist/es5/parquetjs/compression.js.map +1 -1
  29. package/dist/es5/parquetjs/encoder/writer.js +737 -301
  30. package/dist/es5/parquetjs/encoder/writer.js.map +1 -1
  31. package/dist/es5/parquetjs/file.js +15 -15
  32. package/dist/es5/parquetjs/file.js.map +1 -1
  33. package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +1 -1
  34. package/dist/es5/parquetjs/parquet-thrift/BsonType.js +45 -31
  35. package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
  36. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js +152 -141
  37. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
  38. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js +160 -147
  39. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
  40. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js +259 -248
  41. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
  42. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js +79 -67
  43. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
  44. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +1 -1
  45. package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +1 -1
  46. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js +124 -113
  47. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
  48. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js +169 -158
  49. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
  50. package/dist/es5/parquetjs/parquet-thrift/DateType.js +45 -31
  51. package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
  52. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js +79 -68
  53. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
  54. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js +94 -83
  55. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
  56. package/dist/es5/parquetjs/parquet-thrift/Encoding.js +1 -1
  57. package/dist/es5/parquetjs/parquet-thrift/EnumType.js +45 -31
  58. package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
  59. package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +1 -1
  60. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js +182 -170
  61. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
  62. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js +45 -31
  63. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
  64. package/dist/es5/parquetjs/parquet-thrift/IntType.js +79 -68
  65. package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
  66. package/dist/es5/parquetjs/parquet-thrift/JsonType.js +45 -31
  67. package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
  68. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js +79 -68
  69. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
  70. package/dist/es5/parquetjs/parquet-thrift/ListType.js +45 -31
  71. package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
  72. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js +343 -319
  73. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
  74. package/dist/es5/parquetjs/parquet-thrift/MapType.js +45 -31
  75. package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
  76. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js +45 -31
  77. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
  78. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js +45 -31
  79. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
  80. package/dist/es5/parquetjs/parquet-thrift/NullType.js +45 -31
  81. package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
  82. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js +75 -64
  83. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
  84. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js +94 -83
  85. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
  86. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js +169 -158
  87. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
  88. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js +94 -83
  89. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
  90. package/dist/es5/parquetjs/parquet-thrift/PageType.js +1 -1
  91. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js +124 -113
  92. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
  93. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js +199 -188
  94. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
  95. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js +94 -83
  96. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
  97. package/dist/es5/parquetjs/parquet-thrift/Statistics.js +135 -124
  98. package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
  99. package/dist/es5/parquetjs/parquet-thrift/StringType.js +45 -31
  100. package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
  101. package/dist/es5/parquetjs/parquet-thrift/TimeType.js +79 -68
  102. package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
  103. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js +101 -88
  104. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
  105. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js +79 -68
  106. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
  107. package/dist/es5/parquetjs/parquet-thrift/Type.js +1 -1
  108. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js +45 -31
  109. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
  110. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js +45 -31
  111. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
  112. package/dist/es5/parquetjs/parquet-thrift/index.js +43 -43
  113. package/dist/es5/parquetjs/parser/decoders.js +391 -218
  114. package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
  115. package/dist/es5/parquetjs/parser/parquet-cursor.js +180 -62
  116. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +1 -1
  117. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +370 -125
  118. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +1 -1
  119. package/dist/es5/parquetjs/parser/parquet-reader.js +320 -91
  120. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  121. package/dist/es5/parquetjs/schema/declare.js +11 -9
  122. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  123. package/dist/es5/parquetjs/schema/schema.js +87 -73
  124. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  125. package/dist/es5/parquetjs/schema/shred.js +96 -56
  126. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  127. package/dist/es5/parquetjs/schema/types.js +40 -39
  128. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  129. package/dist/es5/parquetjs/utils/buffer-utils.js +1 -1
  130. package/dist/es5/parquetjs/utils/buffer-utils.js.map +1 -1
  131. package/dist/es5/parquetjs/utils/file-utils.js +12 -8
  132. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  133. package/dist/es5/parquetjs/utils/read-utils.js +50 -22
  134. package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
  135. package/dist/esm/parquet-loader.js +1 -1
  136. package/dist/esm/parquet-loader.js.map +1 -1
  137. package/dist/esm/parquet-writer.js +1 -1
  138. package/dist/esm/parquet-writer.js.map +1 -1
  139. package/package.json +5 -5
package/dist/es5/bundle.js CHANGED
@@ -1,6 +1,6 @@
  "use strict";

- const moduleExports = require('./index');
+ var moduleExports = require('./index');

  globalThis.loaders = globalThis.loaders || {};
  module.exports = Object.assign(globalThis.loaders, moduleExports);
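
The bundle entry point above attaches every export to a shared `globalThis.loaders` namespace. A minimal consumption sketch in TypeScript, assuming the dist bundle has already been loaded via a script tag (the cast is only to satisfy the type checker; `ParquetLoader` is a real export of this package):

// Hedged sketch: read a loader back off the shared global namespace.
const {ParquetLoader} = (globalThis as any).loaders;
console.log(ParquetLoader.name); // 'Apache Parquet'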
package/dist/es5/bundle.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/bundle.ts"],"names":["moduleExports","require","globalThis","loaders","module","exports","Object","assign"],"mappings":";;AACA,MAAMA,aAAa,GAAGC,OAAO,CAAC,SAAD,CAA7B;;AACAC,UAAU,CAACC,OAAX,GAAqBD,UAAU,CAACC,OAAX,IAAsB,EAA3C;AACAC,MAAM,CAACC,OAAP,GAAiBC,MAAM,CAACC,MAAP,CAAcL,UAAU,CAACC,OAAzB,EAAkCH,aAAlC,CAAjB","sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nglobalThis.loaders = globalThis.loaders || {};\nmodule.exports = Object.assign(globalThis.loaders, moduleExports);\n"],"file":"bundle.js"}
+ {"version":3,"sources":["../../src/bundle.ts"],"names":["moduleExports","require","globalThis","loaders","module","exports","Object","assign"],"mappings":";;AACA,IAAMA,aAAa,GAAGC,OAAO,CAAC,SAAD,CAA7B;;AACAC,UAAU,CAACC,OAAX,GAAqBD,UAAU,CAACC,OAAX,IAAsB,EAA3C;AACAC,MAAM,CAACC,OAAP,GAAiBC,MAAM,CAACC,MAAP,CAAcL,UAAU,CAACC,OAAzB,EAAkCH,aAAlC,CAAjB","sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nglobalThis.loaders = globalThis.loaders || {};\nmodule.exports = Object.assign(globalThis.loaders, moduleExports);\n"],"file":"bundle.js"}
package/dist/es5/constants.js CHANGED
@@ -4,14 +4,14 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.PARQUET_RDLVL_ENCODING = exports.PARQUET_RDLVL_TYPE = exports.PARQUET_VERSION = exports.PARQUET_MAGIC_ENCRYPTED = exports.PARQUET_MAGIC = void 0;
- const PARQUET_MAGIC = 'PAR1';
+ var PARQUET_MAGIC = 'PAR1';
  exports.PARQUET_MAGIC = PARQUET_MAGIC;
- const PARQUET_MAGIC_ENCRYPTED = 'PARE';
+ var PARQUET_MAGIC_ENCRYPTED = 'PARE';
  exports.PARQUET_MAGIC_ENCRYPTED = PARQUET_MAGIC_ENCRYPTED;
- const PARQUET_VERSION = 1;
+ var PARQUET_VERSION = 1;
  exports.PARQUET_VERSION = PARQUET_VERSION;
- const PARQUET_RDLVL_TYPE = 'INT32';
+ var PARQUET_RDLVL_TYPE = 'INT32';
  exports.PARQUET_RDLVL_TYPE = PARQUET_RDLVL_TYPE;
- const PARQUET_RDLVL_ENCODING = 'RLE';
+ var PARQUET_RDLVL_ENCODING = 'RLE';
  exports.PARQUET_RDLVL_ENCODING = PARQUET_RDLVL_ENCODING;
  //# sourceMappingURL=constants.js.map
package/dist/es5/constants.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/constants.ts"],"names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","PARQUET_VERSION","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING"],"mappings":";;;;;;AAIO,MAAMA,aAAa,GAAG,MAAtB;;AACA,MAAMC,uBAAuB,GAAG,MAAhC;;AAKA,MAAMC,eAAe,GAAG,CAAxB;;AAKA,MAAMC,kBAAkB,GAAG,OAA3B;;AACA,MAAMC,sBAAsB,GAAG,KAA/B","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/**\n * Parquet File Magic String\n */\nexport const PARQUET_MAGIC = 'PAR1';\nexport const PARQUET_MAGIC_ENCRYPTED = 'PARE';\n\n/**\n * Parquet File Format Version\n */\nexport const PARQUET_VERSION = 1;\n\n/**\n * Internal type used for repetition/definition levels\n */\nexport const PARQUET_RDLVL_TYPE = 'INT32';\nexport const PARQUET_RDLVL_ENCODING = 'RLE';\n"],"file":"constants.js"}
+ {"version":3,"sources":["../../src/constants.ts"],"names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","PARQUET_VERSION","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING"],"mappings":";;;;;;AAIO,IAAMA,aAAa,GAAG,MAAtB;;AACA,IAAMC,uBAAuB,GAAG,MAAhC;;AAKA,IAAMC,eAAe,GAAG,CAAxB;;AAKA,IAAMC,kBAAkB,GAAG,OAA3B;;AACA,IAAMC,sBAAsB,GAAG,KAA/B","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/**\n * Parquet File Magic String\n */\nexport const PARQUET_MAGIC = 'PAR1';\nexport const PARQUET_MAGIC_ENCRYPTED = 'PARE';\n\n/**\n * Parquet File Format Version\n */\nexport const PARQUET_VERSION = 1;\n\n/**\n * Internal type used for repetition/definition levels\n */\nexport const PARQUET_RDLVL_TYPE = 'INT32';\nexport const PARQUET_RDLVL_ENCODING = 'RLE';\n"],"file":"constants.js"}
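
The constants above carry the Parquet magic strings ('PAR1' for plain files, 'PARE' for encrypted ones). A hedged sketch of how such a magic prefix can be checked against raw bytes; `hasParquetMagic` is a hypothetical helper, not part of the package, though the loader's `tests: ['PAR1', 'PARE']` field (see parquet-loader.js below) drives a comparable initial-bytes match:

// Hypothetical helper: compare the first four bytes of a buffer to the magic strings.
function hasParquetMagic(buf: ArrayBuffer): boolean {
  const magic = new TextDecoder().decode(new Uint8Array(buf, 0, 4));
  return magic === 'PAR1' || magic === 'PARE';
}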
package/dist/es5/index.js CHANGED
@@ -1,52 +1,56 @@
  "use strict";

+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  Object.defineProperty(exports, "ParquetWorkerLoader", {
  enumerable: true,
- get: function () {
+ get: function get() {
  return _parquetLoader.ParquetLoader;
  }
  });
  Object.defineProperty(exports, "_ParquetWriter", {
  enumerable: true,
- get: function () {
+ get: function get() {
  return _parquetWriter.ParquetWriter;
  }
  });
  Object.defineProperty(exports, "preloadCompressions", {
  enumerable: true,
- get: function () {
+ get: function get() {
  return _compression.preloadCompressions;
  }
  });
  Object.defineProperty(exports, "ParquetSchema", {
  enumerable: true,
- get: function () {
+ get: function get() {
  return _schema.ParquetSchema;
  }
  });
  Object.defineProperty(exports, "ParquetReader", {
  enumerable: true,
- get: function () {
+ get: function get() {
  return _parquetReader.ParquetReader;
  }
  });
  Object.defineProperty(exports, "ParquetEnvelopeReader", {
  enumerable: true,
- get: function () {
+ get: function get() {
  return _parquetEnvelopeReader.ParquetEnvelopeReader;
  }
  });
  Object.defineProperty(exports, "convertParquetToArrowSchema", {
  enumerable: true,
- get: function () {
+ get: function get() {
  return _convertSchema.convertParquetToArrowSchema;
  }
  });
  exports._typecheckParquetLoader = exports.ParquetLoader = void 0;

+ var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
+
  var _parquetLoader = require("./parquet-loader");

  var _parseParquet = require("./lib/parse-parquet");
@@ -63,11 +67,16 @@ var _parquetEnvelopeReader = require("./parquetjs/parser/parquet-envelope-reader

  var _convertSchema = require("./lib/convert-schema");

- const ParquetLoader = { ..._parquetLoader.ParquetLoader,
+ function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
+
+ function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
+
+ var ParquetLoader = _objectSpread(_objectSpread({}, _parquetLoader.ParquetLoader), {}, {
  parse: _parseParquet.parseParquet,
  parseFileInBatches: _parseParquet.parseParquetFileInBatches
- };
+ });
+
  exports.ParquetLoader = ParquetLoader;
- const _typecheckParquetLoader = ParquetLoader;
+ var _typecheckParquetLoader = ParquetLoader;
  exports._typecheckParquetLoader = _typecheckParquetLoader;
  //# sourceMappingURL=index.js.map
package/dist/es5/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/index.ts"],"names":["ParquetLoader","ParquetWorkerLoader","parse","parseParquet","parseFileInBatches","parseParquetFileInBatches","_typecheckParquetLoader"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA;;AACA;;AAaA;;AAIA;;AAEA;;AACA;;AACA;;AAEA;;AAlBO,MAAMA,aAAa,GAAG,EAC3B,GAAGC,4BADwB;AAE3BC,EAAAA,KAAK,EAAEC,0BAFoB;AAG3BC,EAAAA,kBAAkB,EAAEC;AAHO,CAAtB;;AAqBA,MAAMC,uBAAyC,GAAGN,aAAlD","sourcesContent":["import type {LoaderWithParser} from '@loaders.gl/loader-utils';\n\n// ParquetLoader\n\nimport {ParquetLoader as ParquetWorkerLoader} from './parquet-loader';\nimport {parseParquet, parseParquetFileInBatches} from './lib/parse-parquet';\n\nexport {ParquetWorkerLoader};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n ...ParquetWorkerLoader,\n parse: parseParquet,\n parseFileInBatches: parseParquetFileInBatches\n};\n\n// ParquetWriter\n\nexport {ParquetWriter as _ParquetWriter} from './parquet-writer';\n\n// EXPERIMENTAL - expose the internal parquetjs API\n\nexport {preloadCompressions} from './parquetjs/compression';\n\nexport {ParquetSchema} from './parquetjs/schema/schema';\nexport {ParquetReader} from './parquetjs/parser/parquet-reader';\nexport {ParquetEnvelopeReader} from './parquetjs/parser/parquet-envelope-reader';\n// export {ParquetWriter, ParquetEnvelopeWriter, ParquetTransformer} from './parquetjs/encoder/writer';\nexport {convertParquetToArrowSchema} from './lib/convert-schema';\n\n// TESTS\nexport const _typecheckParquetLoader: LoaderWithParser = ParquetLoader;\n"],"file":"index.js"}
+ {"version":3,"sources":["../../src/index.ts"],"names":["ParquetLoader","ParquetWorkerLoader","parse","parseParquet","parseFileInBatches","parseParquetFileInBatches","_typecheckParquetLoader"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA;;AACA;;AAaA;;AAIA;;AAEA;;AACA;;AACA;;AAEA;;;;;;AAlBO,IAAMA,aAAa,mCACrBC,4BADqB;AAExBC,EAAAA,KAAK,EAAEC,0BAFiB;AAGxBC,EAAAA,kBAAkB,EAAEC;AAHI,EAAnB;;;AAqBA,IAAMC,uBAAyC,GAAGN,aAAlD","sourcesContent":["import type {LoaderWithParser} from '@loaders.gl/loader-utils';\n\n// ParquetLoader\n\nimport {ParquetLoader as ParquetWorkerLoader} from './parquet-loader';\nimport {parseParquet, parseParquetFileInBatches} from './lib/parse-parquet';\n\nexport {ParquetWorkerLoader};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n ...ParquetWorkerLoader,\n parse: parseParquet,\n parseFileInBatches: parseParquetFileInBatches\n};\n\n// ParquetWriter\n\nexport {ParquetWriter as _ParquetWriter} from './parquet-writer';\n\n// EXPERIMENTAL - expose the internal parquetjs API\n\nexport {preloadCompressions} from './parquetjs/compression';\n\nexport {ParquetSchema} from './parquetjs/schema/schema';\nexport {ParquetReader} from './parquetjs/parser/parquet-reader';\nexport {ParquetEnvelopeReader} from './parquetjs/parser/parquet-envelope-reader';\n// export {ParquetWriter, ParquetEnvelopeWriter, ParquetTransformer} from './parquetjs/encoder/writer';\nexport {convertParquetToArrowSchema} from './lib/convert-schema';\n\n// TESTS\nexport const _typecheckParquetLoader: LoaderWithParser = ParquetLoader;\n"],"file":"index.js"}
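
The sources embedded in this map show how `ParquetLoader` is composed: the worker loader descriptor spread together with `parse` and `parseFileInBatches` implementations. A minimal usage sketch, assuming `@loaders.gl/core` is installed and `data.parquet` is a reachable URL (both assumptions, not part of this diff):

// Hedged sketch: pass the composed loader to loaders.gl's generic load().
import {load} from '@loaders.gl/core';
import {ParquetLoader} from '@loaders.gl/parquet';

const rows = await load('data.parquet', ParquetLoader);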
package/dist/es5/lib/convert-schema.js CHANGED
@@ -8,7 +8,7 @@ exports.PARQUET_TYPE_MAPPING = void 0;

  var _schema = require("@loaders.gl/schema");

- const PARQUET_TYPE_MAPPING = {
+ var PARQUET_TYPE_MAPPING = {
  BOOLEAN: _schema.Bool,
  INT32: _schema.Int32,
  INT64: _schema.Float64,
@@ -42,16 +42,16 @@ const PARQUET_TYPE_MAPPING = {
  exports.PARQUET_TYPE_MAPPING = PARQUET_TYPE_MAPPING;

  function convertParquetToArrowSchema(parquetSchema) {
- const fields = getFields(parquetSchema.schema);
+ var fields = getFields(parquetSchema.schema);
  return new _schema.Schema(fields);
  }

  function getFieldMetadata(field) {
- const metadata = new Map();
+ var metadata = new Map();

- for (const key in field) {
+ for (var key in field) {
  if (key !== 'name') {
- const value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];
+ var value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];
  metadata.set(key, value);
  }
  }
@@ -60,19 +60,19 @@ function getFieldMetadata(field) {
  }

  function getFields(schema) {
- const fields = [];
+ var fields = [];

- for (const name in schema) {
- const field = schema[name];
+ for (var name in schema) {
+ var field = schema[name];

  if (field.fields) {
- const childField = getFields(field.fields);
- const nestedField = new _schema.Field(name, new _schema.Struct(childField), field.optional);
+ var childField = getFields(field.fields);
+ var nestedField = new _schema.Field(name, new _schema.Struct(childField), field.optional);
  fields.push(nestedField);
  } else {
- const FieldType = PARQUET_TYPE_MAPPING[field.type];
- const metadata = getFieldMetadata(field);
- const arrowField = new _schema.Field(name, new FieldType(), field.optional, metadata);
+ var FieldType = PARQUET_TYPE_MAPPING[field.type];
+ var metadata = getFieldMetadata(field);
+ var arrowField = new _schema.Field(name, new FieldType(), field.optional, metadata);
  fields.push(arrowField);
  }
  }
package/dist/es5/lib/convert-schema.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/lib/convert-schema.ts"],"names":["PARQUET_TYPE_MAPPING","BOOLEAN","Bool","INT32","Int32","INT64","Float64","INT96","FLOAT","Float32","DOUBLE","BYTE_ARRAY","Binary","FIXED_LEN_BYTE_ARRAY","UTF8","Utf8","DATE","TIME_MILLIS","Int64","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","Uint16","UINT_32","Uint32","UINT_64","Uint64","INT_8","Int8","INT_16","Int16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","convertParquetToArrowSchema","parquetSchema","fields","getFields","schema","Schema","getFieldMetadata","field","metadata","Map","key","value","stringify","set","name","childField","nestedField","Field","Struct","optional","push","FieldType","type","arrowField"],"mappings":";;;;;;;;AAGA;;AAmBO,MAAMA,oBAA8D,GAAG;AAC5EC,EAAAA,OAAO,EAAEC,YADmE;AAE5EC,EAAAA,KAAK,EAAEC,aAFqE;AAG5EC,EAAAA,KAAK,EAAEC,eAHqE;AAI5EC,EAAAA,KAAK,EAAED,eAJqE;AAK5EE,EAAAA,KAAK,EAAEC,eALqE;AAM5EC,EAAAA,MAAM,EAAEJ,eANoE;AAO5EK,EAAAA,UAAU,EAAEC,cAPgE;AAQ5EC,EAAAA,oBAAoB,EAAED,cARsD;AAS5EE,EAAAA,IAAI,EAAEC,YATsE;AAU5EC,EAAAA,IAAI,EAAEZ,aAVsE;AAW5Ea,EAAAA,WAAW,EAAEC,aAX+D;AAY5EC,EAAAA,WAAW,EAAED,aAZ+D;AAa5EE,EAAAA,gBAAgB,EAAEF,aAb0D;AAc5EG,EAAAA,gBAAgB,EAAEH,aAd0D;AAe5EI,EAAAA,MAAM,EAAElB,aAfoE;AAgB5EmB,EAAAA,OAAO,EAAEC,cAhBmE;AAiB5EC,EAAAA,OAAO,EAAEC,cAjBmE;AAkB5EC,EAAAA,OAAO,EAAEC,cAlBmE;AAmB5EC,EAAAA,KAAK,EAAEC,YAnBqE;AAoB5EC,EAAAA,MAAM,EAAEC,aApBoE;AAqB5EC,EAAAA,MAAM,EAAE7B,aArBoE;AAsB5E8B,EAAAA,MAAM,EAAEhB,aAtBoE;AAuB5EiB,EAAAA,IAAI,EAAEvB,cAvBsE;AAwB5EwB,EAAAA,IAAI,EAAExB,cAxBsE;AA0B5EyB,EAAAA,QAAQ,EAAEzB,cA1BkE;AA2B5E0B,EAAAA,aAAa,EAAE7B,eA3B6D;AA4B5E8B,EAAAA,aAAa,EAAEjC,eA5B6D;AA6B5EkC,EAAAA,kBAAkB,EAAElC,eA7BwD;AA8B5EmC,EAAAA,4BAA4B,EAAEnC;AA9B8C,CAAvE;;;AAiCA,SAASoC,2BAAT,CAAqCC,aAArC,EAA2E;AAChF,QAAMC,MAAM,GAAGC,SAAS,CAACF,aAAa,CAACG,MAAf,CAAxB;AAGA,SAAO,IAAIC,cAAJ,CAAWH,MAAX,CAAP;AACD;;AAED,SAASI,gBAAT,CAA0BC,KAA1B,EAAoE;AAClE,QAAMC,QAAQ,GAAG,IAAIC,GAAJ,EAAjB;;AAEA,OAAK,MAAMC,GAAX,IAAkBH,KAAlB,EAAyB;AACvB,QAAIG,GAAG,KAAK,MAAZ,EAAoB;AAClB,YAAMC,KAAK,GAAG,OAAOJ,KAAK,CAACG,GAAD,CAAZ,KAAsB,QAAtB,GAAiCjB,IAAI,CAACmB,SAAL,CAAeL,KAAK,CAACG,GAAD,CAApB,CAAjC,GAA8DH,KAAK,CAACG,GAAD,CAAjF;AACAF,MAAAA,QAAQ,CAACK,GAAT,CAAaH,GAAb,EAAkBC,KAAlB;AACD;AACF;;AAED,SAAOH,QAAP;AACD;;AAED,SAASL,SAAT,CAAmBC,MAAnB,EAAqD;AACnD,QAAMF,MAAe,GAAG,EAAxB;;AAEA,OAAK,MAAMY,IAAX,IAAmBV,MAAnB,EAA2B;AACzB,UAAMG,KAAK,GAAGH,MAAM,CAACU,IAAD,CAApB;;AAEA,QAAIP,KAAK,CAACL,MAAV,EAAkB;AAChB,YAAMa,UAAU,GAAGZ,SAAS,CAACI,KAAK,CAACL,MAAP,CAA5B;AACA,YAAMc,WAAW,GAAG,IAAIC,aAAJ,CAAUH,IAAV,EAAgB,IAAII,cAAJ,CAAWH,UAAX,CAAhB,EAAwCR,KAAK,CAACY,QAA9C,CAApB;AACAjB,MAAAA,MAAM,CAACkB,IAAP,CAAYJ,WAAZ;AACD,KAJD,MAIO;AACL,YAAMK,SAAS,GAAG/D,oBAAoB,CAACiD,KAAK,CAACe,IAAP,CAAtC;AACA,YAAMd,QAAQ,GAAGF,gBAAgB,CAACC,KAAD,CAAjC;AACA,YAAMgB,UAAU,GAAG,IAAIN,aAAJ,CAAUH,IAAV,EAAgB,IAAIO,SAAJ,EAAhB,EAAiCd,KAAK,CAACY,QAAvC,EAAiDX,QAAjD,CAAnB;AACAN,MAAAA,MAAM,CAACkB,IAAP,CAAYG,UAAZ;AACD;AACF;;AAED,SAAOrB,MAAP;AACD","sourcesContent":["import type {ParquetSchema} from '../parquetjs/schema/schema';\nimport type {FieldDefinition, ParquetField, ParquetType} from '../parquetjs/schema/declare';\n\nimport {\n Schema,\n Struct,\n Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n 
DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interal type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertParquetToArrowSchema(parquetSchema: ParquetSchema): Schema {\n const fields = getFields(parquetSchema.schema);\n\n // TODO add metadata if needed.\n return new Schema(fields);\n}\n\nfunction getFieldMetadata(field: ParquetField): Map<string, string> {\n const metadata = new Map();\n\n for (const key in field) {\n if (key !== 'name') {\n const value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];\n metadata.set(key, value);\n }\n }\n\n return metadata;\n}\n\nfunction getFields(schema: FieldDefinition): Field[] {\n const fields: Field[] = [];\n\n for (const name in schema) {\n const field = schema[name];\n\n if (field.fields) {\n const childField = getFields(field.fields);\n const nestedField = new Field(name, new Struct(childField), field.optional);\n fields.push(nestedField);\n } else {\n const FieldType = PARQUET_TYPE_MAPPING[field.type];\n const metadata = getFieldMetadata(field);\n const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n fields.push(arrowField);\n }\n }\n\n return fields;\n}\n"],"file":"convert-schema.js"}
+ {"version":3,"sources":["../../../src/lib/convert-schema.ts"],"names":["PARQUET_TYPE_MAPPING","BOOLEAN","Bool","INT32","Int32","INT64","Float64","INT96","FLOAT","Float32","DOUBLE","BYTE_ARRAY","Binary","FIXED_LEN_BYTE_ARRAY","UTF8","Utf8","DATE","TIME_MILLIS","Int64","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","Uint16","UINT_32","Uint32","UINT_64","Uint64","INT_8","Int8","INT_16","Int16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","convertParquetToArrowSchema","parquetSchema","fields","getFields","schema","Schema","getFieldMetadata","field","metadata","Map","key","value","stringify","set","name","childField","nestedField","Field","Struct","optional","push","FieldType","type","arrowField"],"mappings":";;;;;;;;AAGA;;AAmBO,IAAMA,oBAA8D,GAAG;AAC5EC,EAAAA,OAAO,EAAEC,YADmE;AAE5EC,EAAAA,KAAK,EAAEC,aAFqE;AAG5EC,EAAAA,KAAK,EAAEC,eAHqE;AAI5EC,EAAAA,KAAK,EAAED,eAJqE;AAK5EE,EAAAA,KAAK,EAAEC,eALqE;AAM5EC,EAAAA,MAAM,EAAEJ,eANoE;AAO5EK,EAAAA,UAAU,EAAEC,cAPgE;AAQ5EC,EAAAA,oBAAoB,EAAED,cARsD;AAS5EE,EAAAA,IAAI,EAAEC,YATsE;AAU5EC,EAAAA,IAAI,EAAEZ,aAVsE;AAW5Ea,EAAAA,WAAW,EAAEC,aAX+D;AAY5EC,EAAAA,WAAW,EAAED,aAZ+D;AAa5EE,EAAAA,gBAAgB,EAAEF,aAb0D;AAc5EG,EAAAA,gBAAgB,EAAEH,aAd0D;AAe5EI,EAAAA,MAAM,EAAElB,aAfoE;AAgB5EmB,EAAAA,OAAO,EAAEC,cAhBmE;AAiB5EC,EAAAA,OAAO,EAAEC,cAjBmE;AAkB5EC,EAAAA,OAAO,EAAEC,cAlBmE;AAmB5EC,EAAAA,KAAK,EAAEC,YAnBqE;AAoB5EC,EAAAA,MAAM,EAAEC,aApBoE;AAqB5EC,EAAAA,MAAM,EAAE7B,aArBoE;AAsB5E8B,EAAAA,MAAM,EAAEhB,aAtBoE;AAuB5EiB,EAAAA,IAAI,EAAEvB,cAvBsE;AAwB5EwB,EAAAA,IAAI,EAAExB,cAxBsE;AA0B5EyB,EAAAA,QAAQ,EAAEzB,cA1BkE;AA2B5E0B,EAAAA,aAAa,EAAE7B,eA3B6D;AA4B5E8B,EAAAA,aAAa,EAAEjC,eA5B6D;AA6B5EkC,EAAAA,kBAAkB,EAAElC,eA7BwD;AA8B5EmC,EAAAA,4BAA4B,EAAEnC;AA9B8C,CAAvE;;;AAiCA,SAASoC,2BAAT,CAAqCC,aAArC,EAA2E;AAChF,MAAMC,MAAM,GAAGC,SAAS,CAACF,aAAa,CAACG,MAAf,CAAxB;AAGA,SAAO,IAAIC,cAAJ,CAAWH,MAAX,CAAP;AACD;;AAED,SAASI,gBAAT,CAA0BC,KAA1B,EAAoE;AAClE,MAAMC,QAAQ,GAAG,IAAIC,GAAJ,EAAjB;;AAEA,OAAK,IAAMC,GAAX,IAAkBH,KAAlB,EAAyB;AACvB,QAAIG,GAAG,KAAK,MAAZ,EAAoB;AAClB,UAAMC,KAAK,GAAG,OAAOJ,KAAK,CAACG,GAAD,CAAZ,KAAsB,QAAtB,GAAiCjB,IAAI,CAACmB,SAAL,CAAeL,KAAK,CAACG,GAAD,CAApB,CAAjC,GAA8DH,KAAK,CAACG,GAAD,CAAjF;AACAF,MAAAA,QAAQ,CAACK,GAAT,CAAaH,GAAb,EAAkBC,KAAlB;AACD;AACF;;AAED,SAAOH,QAAP;AACD;;AAED,SAASL,SAAT,CAAmBC,MAAnB,EAAqD;AACnD,MAAMF,MAAe,GAAG,EAAxB;;AAEA,OAAK,IAAMY,IAAX,IAAmBV,MAAnB,EAA2B;AACzB,QAAMG,KAAK,GAAGH,MAAM,CAACU,IAAD,CAApB;;AAEA,QAAIP,KAAK,CAACL,MAAV,EAAkB;AAChB,UAAMa,UAAU,GAAGZ,SAAS,CAACI,KAAK,CAACL,MAAP,CAA5B;AACA,UAAMc,WAAW,GAAG,IAAIC,aAAJ,CAAUH,IAAV,EAAgB,IAAII,cAAJ,CAAWH,UAAX,CAAhB,EAAwCR,KAAK,CAACY,QAA9C,CAApB;AACAjB,MAAAA,MAAM,CAACkB,IAAP,CAAYJ,WAAZ;AACD,KAJD,MAIO;AACL,UAAMK,SAAS,GAAG/D,oBAAoB,CAACiD,KAAK,CAACe,IAAP,CAAtC;AACA,UAAMd,QAAQ,GAAGF,gBAAgB,CAACC,KAAD,CAAjC;AACA,UAAMgB,UAAU,GAAG,IAAIN,aAAJ,CAAUH,IAAV,EAAgB,IAAIO,SAAJ,EAAhB,EAAiCd,KAAK,CAACY,QAAvC,EAAiDX,QAAjD,CAAnB;AACAN,MAAAA,MAAM,CAACkB,IAAP,CAAYG,UAAZ;AACD;AACF;;AAED,SAAOrB,MAAP;AACD","sourcesContent":["import type {ParquetSchema} from '../parquetjs/schema/schema';\nimport type {FieldDefinition, ParquetField, ParquetType} from '../parquetjs/schema/declare';\n\nimport {\n Schema,\n Struct,\n Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n 
DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interal type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertParquetToArrowSchema(parquetSchema: ParquetSchema): Schema {\n const fields = getFields(parquetSchema.schema);\n\n // TODO add metadata if needed.\n return new Schema(fields);\n}\n\nfunction getFieldMetadata(field: ParquetField): Map<string, string> {\n const metadata = new Map();\n\n for (const key in field) {\n if (key !== 'name') {\n const value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];\n metadata.set(key, value);\n }\n }\n\n return metadata;\n}\n\nfunction getFields(schema: FieldDefinition): Field[] {\n const fields: Field[] = [];\n\n for (const name in schema) {\n const field = schema[name];\n\n if (field.fields) {\n const childField = getFields(field.fields);\n const nestedField = new Field(name, new Struct(childField), field.optional);\n fields.push(nestedField);\n } else {\n const FieldType = PARQUET_TYPE_MAPPING[field.type];\n const metadata = getFieldMetadata(field);\n const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n fields.push(arrowField);\n }\n }\n\n return fields;\n}\n"],"file":"convert-schema.js"}
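
Per the embedded sources, `convertParquetToArrowSchema` walks a ParquetJS field definition and maps each `ParquetType` through `PARQUET_TYPE_MAPPING`; note that `INT64` maps to `Float64`, trading 64-bit integer precision for JS-number compatibility. A hedged sketch, assuming the ParquetJS-style `ParquetSchema` constructor this package re-exports accepts the usual field-definition object:

// Hedged sketch: build a ParquetJS schema, then convert it.
import {ParquetSchema, convertParquetToArrowSchema} from '@loaders.gl/parquet';

const parquetSchema = new ParquetSchema({
  name: {type: 'UTF8'},                     // mapped to Utf8
  quantity: {type: 'INT64', optional: true} // mapped to Float64 per the table
});
const arrowSchema = convertParquetToArrowSchema(parquetSchema);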
package/dist/es5/lib/parse-parquet.js CHANGED
@@ -1,38 +1,173 @@
  "use strict";

+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.parseParquet = parseParquet;
  exports.parseParquetFileInBatches = parseParquetFileInBatches;

+ var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
+
+ var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
+
+ var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
+
+ var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
+
+ var _asyncIterator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncIterator"));
+
  var _parquetReader = require("../parquetjs/parser/parquet-reader");

- async function parseParquet(arrayBuffer, options) {
- const blob = new Blob([arrayBuffer]);
+ function parseParquet(_x3, _x4) {
+ return _parseParquet.apply(this, arguments);
+ }
+
+ function _parseParquet() {
+ _parseParquet = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee2(arrayBuffer, options) {
+ var blob, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, _value, batch;
+
+ return _regenerator.default.wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ blob = new Blob([arrayBuffer]);
+ _iteratorNormalCompletion = true;
+ _didIteratorError = false;
+ _context2.prev = 3;
+ _iterator = (0, _asyncIterator2.default)(parseParquetFileInBatches(blob, options));
+
+ case 5:
+ _context2.next = 7;
+ return _iterator.next();
+
+ case 7:
+ _step = _context2.sent;
+ _iteratorNormalCompletion = _step.done;
+ _context2.next = 11;
+ return _step.value;
+
+ case 11:
+ _value = _context2.sent;
+
+ if (_iteratorNormalCompletion) {
+ _context2.next = 18;
+ break;
+ }
+
+ batch = _value;
+ return _context2.abrupt("return", batch);
+
+ case 15:
+ _iteratorNormalCompletion = true;
+ _context2.next = 5;
+ break;
+
+ case 18:
+ _context2.next = 24;
+ break;

- for await (const batch of parseParquetFileInBatches(blob, options)) {
- return batch;
- }
+ case 20:
+ _context2.prev = 20;
+ _context2.t0 = _context2["catch"](3);
+ _didIteratorError = true;
+ _iteratorError = _context2.t0;

- return null;
+ case 24:
+ _context2.prev = 24;
+ _context2.prev = 25;
+
+ if (!(!_iteratorNormalCompletion && _iterator.return != null)) {
+ _context2.next = 29;
+ break;
+ }
+
+ _context2.next = 29;
+ return _iterator.return();
+
+ case 29:
+ _context2.prev = 29;
+
+ if (!_didIteratorError) {
+ _context2.next = 32;
+ break;
+ }
+
+ throw _iteratorError;
+
+ case 32:
+ return _context2.finish(29);
+
+ case 33:
+ return _context2.finish(24);
+
+ case 34:
+ return _context2.abrupt("return", null);
+
+ case 35:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, null, [[3, 20, 24, 34], [25,, 29, 33]]);
+ }));
+ return _parseParquet.apply(this, arguments);
+ }
+
+ function parseParquetFileInBatches(_x, _x2) {
+ return _parseParquetFileInBatches.apply(this, arguments);
  }

- async function* parseParquetFileInBatches(blob, options) {
- const reader = await _parquetReader.ParquetReader.openBlob(blob);
- const rows = [];
+ function _parseParquetFileInBatches() {
+ _parseParquetFileInBatches = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(blob, options) {
+ var reader, rows, cursor, record;
+ return _regenerator.default.wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ _context.next = 2;
+ return (0, _awaitAsyncGenerator2.default)(_parquetReader.ParquetReader.openBlob(blob));
+
+ case 2:
+ reader = _context.sent;
+ rows = [];
+ _context.prev = 4;
+ cursor = reader.getCursor();
+
+ case 6:
+ _context.next = 8;
+ return (0, _awaitAsyncGenerator2.default)(cursor.next());
+
+ case 8:
+ if (!(record = _context.sent)) {
+ _context.next = 12;
+ break;
+ }
+
+ rows.push(record);
+ _context.next = 6;
+ break;
+
+ case 12:
+ _context.prev = 12;
+ _context.next = 15;
+ return (0, _awaitAsyncGenerator2.default)(reader.close());

- try {
- const cursor = reader.getCursor();
- let record;
+ case 15:
+ return _context.finish(12);

- while (record = await cursor.next()) {
- rows.push(record);
- }
- } finally {
- await reader.close();
- }
+ case 16:
+ _context.next = 18;
+ return rows;

- yield rows;
+ case 18:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, null, [[4,, 12, 16]]);
+ }));
+ return _parseParquetFileInBatches.apply(this, arguments);
  }
  //# sourceMappingURL=parse-parquet.js.map
package/dist/es5/lib/parse-parquet.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/lib/parse-parquet.ts"],"names":["parseParquet","arrayBuffer","options","blob","Blob","batch","parseParquetFileInBatches","reader","ParquetReader","openBlob","rows","cursor","getCursor","record","next","push","close"],"mappings":";;;;;;;;AAGA;;AAEO,eAAeA,YAAf,CAA4BC,WAA5B,EAAsDC,OAAtD,EAAsF;AAC3F,QAAMC,IAAI,GAAG,IAAIC,IAAJ,CAAS,CAACH,WAAD,CAAT,CAAb;;AACA,aAAW,MAAMI,KAAjB,IAA0BC,yBAAyB,CAACH,IAAD,EAAOD,OAAP,CAAnD,EAAoE;AAClE,WAAOG,KAAP;AACD;;AACD,SAAO,IAAP;AACD;;AAEM,gBAAgBC,yBAAhB,CAA0CH,IAA1C,EAAsDD,OAAtD,EAAsF;AAC3F,QAAMK,MAAM,GAAG,MAAMC,6BAAcC,QAAd,CAAuBN,IAAvB,CAArB;AACA,QAAMO,IAAa,GAAG,EAAtB;;AACA,MAAI;AACF,UAAMC,MAAM,GAAGJ,MAAM,CAACK,SAAP,EAAf;AACA,QAAIC,MAAJ;;AACA,WAAQA,MAAM,GAAG,MAAMF,MAAM,CAACG,IAAP,EAAvB,EAAuC;AACrCJ,MAAAA,IAAI,CAACK,IAAL,CAAUF,MAAV;AACD;AACF,GAND,SAMU;AACR,UAAMN,MAAM,CAACS,KAAP,EAAN;AACD;;AACD,QAAMN,IAAN;AACD","sourcesContent":["// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../parquet-loader';\n\nimport {ParquetReader} from '../parquetjs/parser/parquet-reader';\n\nexport async function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions) {\n const reader = await ParquetReader.openBlob(blob);\n const rows: any[][] = [];\n try {\n const cursor = reader.getCursor();\n let record: any[] | null;\n while ((record = await cursor.next())) {\n rows.push(record);\n }\n } finally {\n await reader.close();\n }\n yield rows;\n}\n"],"file":"parse-parquet.js"}
+ {"version":3,"sources":["../../../src/lib/parse-parquet.ts"],"names":["parseParquet","arrayBuffer","options","blob","Blob","parseParquetFileInBatches","batch","ParquetReader","openBlob","reader","rows","cursor","getCursor","next","record","push","close"],"mappings":";;;;;;;;;;;;;;;;;;;;AAGA;;SAEsBA,Y;;;;;4EAAf,kBAA4BC,WAA5B,EAAsDC,OAAtD;AAAA;;AAAA;AAAA;AAAA;AAAA;AACCC,YAAAA,IADD,GACQ,IAAIC,IAAJ,CAAS,CAACH,WAAD,CAAT,CADR;AAAA;AAAA;AAAA;AAAA,qDAEqBI,yBAAyB,CAACF,IAAD,EAAOD,OAAP,CAF9C;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAEYI,YAAAA,KAFZ;AAAA,8CAGIA,KAHJ;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;;AAAA;AAAA;;AAAA;AAAA,8CAKE,IALF;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,G;;;;SAQgBD,yB;;;;;2FAAhB,iBAA0CF,IAA1C,EAAsDD,OAAtD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sDACgBK,6BAAcC,QAAd,CAAuBL,IAAvB,CADhB;;AAAA;AACCM,YAAAA,MADD;AAECC,YAAAA,IAFD,GAEiB,EAFjB;AAAA;AAIGC,YAAAA,MAJH,GAIYF,MAAM,CAACG,SAAP,EAJZ;;AAAA;AAAA;AAAA,sDAMoBD,MAAM,CAACE,IAAP,EANpB;;AAAA;AAAA,kBAMKC,MANL;AAAA;AAAA;AAAA;;AAODJ,YAAAA,IAAI,CAACK,IAAL,CAAUD,MAAV;AAPC;AAAA;;AAAA;AAAA;AAAA;AAAA,sDAUGL,MAAM,CAACO,KAAP,EAVH;;AAAA;AAAA;;AAAA;AAAA;AAYL,mBAAMN,IAAN;;AAZK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,G","sourcesContent":["// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../parquet-loader';\n\nimport {ParquetReader} from '../parquetjs/parser/parquet-reader';\n\nexport async function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions) {\n const reader = await ParquetReader.openBlob(blob);\n const rows: any[][] = [];\n try {\n const cursor = reader.getCursor();\n let record: any[] | null;\n while ((record = await cursor.next())) {\n rows.push(record);\n }\n } finally {\n await reader.close();\n }\n yield rows;\n}\n"],"file":"parse-parquet.js"}
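
The original sources in this map are easier to follow than the regenerator output: `parseParquetFileInBatches` opens the blob, pulls every row through a cursor, and currently yields them all as a single batch; `parseParquet` just returns that first batch. A hedged iteration sketch (the deep dist import is shown only for illustration; `arrayBuffer` is assumed to hold the bytes of a .parquet file):

// Hedged sketch: with the implementation above, the loop body runs exactly once.
import {parseParquetFileInBatches} from '@loaders.gl/parquet/dist/es5/lib/parse-parquet';

declare const arrayBuffer: ArrayBuffer; // assumed input bytes
const blob = new Blob([arrayBuffer]);
for await (const rows of parseParquetFileInBatches(blob)) {
  console.log(rows.length); // all rows arrive in one batch today
}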
package/dist/es5/lib/read-array-buffer.js CHANGED
@@ -1,16 +1,53 @@
  "use strict";

+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.readArrayBuffer = readArrayBuffer;

- async function readArrayBuffer(file, start, length) {
- if (file instanceof Blob) {
- const slice = file.slice(start, start + length);
- return await slice.arrayBuffer();
- }
+ var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
+
+ var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
+
+ function readArrayBuffer(_x, _x2, _x3) {
+ return _readArrayBuffer.apply(this, arguments);
+ }
+
+ function _readArrayBuffer() {
+ _readArrayBuffer = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee(file, start, length) {
+ var slice;
+ return _regenerator.default.wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ if (!(file instanceof Blob)) {
+ _context.next = 5;
+ break;
+ }
+
+ slice = file.slice(start, start + length);
+ _context.next = 4;
+ return slice.arrayBuffer();
+
+ case 4:
+ return _context.abrupt("return", _context.sent);
+
+ case 5:
+ _context.next = 7;
+ return file.read(start, start + length);
+
+ case 7:
+ return _context.abrupt("return", _context.sent);

- return await file.read(start, start + length);
+ case 8:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee);
+ }));
+ return _readArrayBuffer.apply(this, arguments);
  }
  //# sourceMappingURL=read-array-buffer.js.map
package/dist/es5/lib/read-array-buffer.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/lib/read-array-buffer.ts"],"names":["readArrayBuffer","file","start","length","Blob","slice","arrayBuffer","read"],"mappings":";;;;;;;AAEO,eAAeA,eAAf,CACLC,IADK,EAELC,KAFK,EAGLC,MAHK,EAIiB;AACtB,MAAIF,IAAI,YAAYG,IAApB,EAA0B;AACxB,UAAMC,KAAK,GAAGJ,IAAI,CAACI,KAAL,CAAWH,KAAX,EAAkBA,KAAK,GAAGC,MAA1B,CAAd;AACA,WAAO,MAAME,KAAK,CAACC,WAAN,EAAb;AACD;;AACD,SAAO,MAAML,IAAI,CAACM,IAAL,CAAUL,KAAV,EAAiBA,KAAK,GAAGC,MAAzB,CAAb;AACD","sourcesContent":["// Random-Access read\n\nexport async function readArrayBuffer(\n file: Blob | ArrayBuffer | any,\n start: number,\n length: number\n): Promise<ArrayBuffer> {\n if (file instanceof Blob) {\n const slice = file.slice(start, start + length);\n return await slice.arrayBuffer();\n }\n return await file.read(start, start + length);\n}\n\n/**\n * Read a slice of a Blob or File, without loading the entire file into memory\n * The trick when reading File objects is to read successive \"slices\" of the File\n * Per spec https://w3c.github.io/FileAPI/, slicing a File only updates the start and end fields\n * Actually reading from file happens in `readAsArrayBuffer`\n * @param blob to read\n export async function readBlob(blob: Blob): Promise<ArrayBuffer> {\n return await new Promise((resolve, reject) => {\n const fileReader = new FileReader();\n fileReader.onload = (event: ProgressEvent<FileReader>) =>\n resolve(event?.target?.result as ArrayBuffer);\n // TODO - reject with a proper Error\n fileReader.onerror = (error: ProgressEvent<FileReader>) => reject(error);\n fileReader.readAsArrayBuffer(blob);\n });\n}\n*/\n"],"file":"read-array-buffer.js"}
+ {"version":3,"sources":["../../../src/lib/read-array-buffer.ts"],"names":["readArrayBuffer","file","start","length","Blob","slice","arrayBuffer","read"],"mappings":";;;;;;;;;;;;;SAEsBA,e;;;;;+EAAf,iBACLC,IADK,EAELC,KAFK,EAGLC,MAHK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAKDF,IAAI,YAAYG,IALf;AAAA;AAAA;AAAA;;AAMGC,YAAAA,KANH,GAMWJ,IAAI,CAACI,KAAL,CAAWH,KAAX,EAAkBA,KAAK,GAAGC,MAA1B,CANX;AAAA;AAAA,mBAOUE,KAAK,CAACC,WAAN,EAPV;;AAAA;AAAA;;AAAA;AAAA;AAAA,mBASQL,IAAI,CAACM,IAAL,CAAUL,KAAV,EAAiBA,KAAK,GAAGC,MAAzB,CATR;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,G","sourcesContent":["// Random-Access read\n\nexport async function readArrayBuffer(\n file: Blob | ArrayBuffer | any,\n start: number,\n length: number\n): Promise<ArrayBuffer> {\n if (file instanceof Blob) {\n const slice = file.slice(start, start + length);\n return await slice.arrayBuffer();\n }\n return await file.read(start, start + length);\n}\n\n/**\n * Read a slice of a Blob or File, without loading the entire file into memory\n * The trick when reading File objects is to read successive \"slices\" of the File\n * Per spec https://w3c.github.io/FileAPI/, slicing a File only updates the start and end fields\n * Actually reading from file happens in `readAsArrayBuffer`\n * @param blob to read\n export async function readBlob(blob: Blob): Promise<ArrayBuffer> {\n return await new Promise((resolve, reject) => {\n const fileReader = new FileReader();\n fileReader.onload = (event: ProgressEvent<FileReader>) =>\n resolve(event?.target?.result as ArrayBuffer);\n // TODO - reject with a proper Error\n fileReader.onerror = (error: ProgressEvent<FileReader>) => reject(error);\n fileReader.readAsArrayBuffer(blob);\n });\n}\n*/\n"],"file":"read-array-buffer.js"}
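
`readArrayBuffer` (full source visible in the map) is a small random-access shim: a `Blob` is sliced and the slice read via `arrayBuffer()`, anything else is assumed to expose a `read(start, end)` method. A hedged sketch of the duck-typed non-Blob side; `RandomAccessFile` is a hypothetical shape, not a package type:

// Hypothetical minimal object satisfying the non-Blob branch.
type RandomAccessFile = {read(start: number, end: number): Promise<ArrayBuffer>};

const memoryFile: RandomAccessFile = {
  read: async (start, end) => new Uint8Array(end - start).buffer
};
// readArrayBuffer(memoryFile, 0, 4) would resolve to a 4-byte ArrayBuffer.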
package/dist/es5/parquet-loader.js CHANGED
@@ -4,14 +4,14 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports._typecheckParquetLoader = exports.ParquetLoader = void 0;
- const VERSION = typeof "3.1.0-beta.3" !== 'undefined' ? "3.1.0-beta.3" : 'latest';
- const DEFAULT_PARQUET_LOADER_OPTIONS = {
+ var VERSION = typeof "3.1.1" !== 'undefined' ? "3.1.1" : 'latest';
+ var DEFAULT_PARQUET_LOADER_OPTIONS = {
  parquet: {
  type: 'object-row-table',
  url: undefined
  }
  };
- const ParquetLoader = {
+ var ParquetLoader = {
  name: 'Apache Parquet',
  id: 'parquet',
  module: 'parquet',
@@ -25,6 +25,6 @@ const ParquetLoader = {
  options: DEFAULT_PARQUET_LOADER_OPTIONS
  };
  exports.ParquetLoader = ParquetLoader;
- const _typecheckParquetLoader = ParquetLoader;
+ var _typecheckParquetLoader = ParquetLoader;
  exports._typecheckParquetLoader = _typecheckParquetLoader;
  //# sourceMappingURL=parquet-loader.js.map
package/dist/es5/parquet-loader.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/parquet-loader.ts"],"names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","url","undefined","ParquetLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","_typecheckParquetLoader"],"mappings":";;;;;;AAIA,MAAMA,OAAO,GAAG,0BAAuB,WAAvB,oBAAmD,QAAnE;AASA,MAAMC,8BAAoD,GAAG;AAC3DC,EAAAA,OAAO,EAAE;AACPC,IAAAA,IAAI,EAAE,kBADC;AAEPC,IAAAA,GAAG,EAAEC;AAFE;AADkD,CAA7D;AAQO,MAAMC,aAAa,GAAG;AAC3BC,EAAAA,IAAI,EAAE,gBADqB;AAE3BC,EAAAA,EAAE,EAAE,SAFuB;AAG3BC,EAAAA,MAAM,EAAE,SAHmB;AAI3BC,EAAAA,OAAO,EAAEV,OAJkB;AAK3BW,EAAAA,MAAM,EAAE,IALmB;AAM3BC,EAAAA,QAAQ,EAAE,OANiB;AAO3BC,EAAAA,UAAU,EAAE,CAAC,SAAD,CAPe;AAQ3BC,EAAAA,SAAS,EAAE,CAAC,0BAAD,CARgB;AAS3BC,EAAAA,MAAM,EAAE,IATmB;AAU3BC,EAAAA,KAAK,EAAE,CAAC,MAAD,EAAS,MAAT,CAVoB;AAW3BC,EAAAA,OAAO,EAAEhB;AAXkB,CAAtB;;AAcA,MAAMiB,uBAA+B,GAAGZ,aAAxC","sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'object-row-table';\n url?: string;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'object-row-table',\n url: undefined\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetLoader;\n"],"file":"parquet-loader.js"}
+ {"version":3,"sources":["../../src/parquet-loader.ts"],"names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","url","undefined","ParquetLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","_typecheckParquetLoader"],"mappings":";;;;;;AAIA,IAAMA,OAAO,GAAG,mBAAuB,WAAvB,aAAmD,QAAnE;AASA,IAAMC,8BAAoD,GAAG;AAC3DC,EAAAA,OAAO,EAAE;AACPC,IAAAA,IAAI,EAAE,kBADC;AAEPC,IAAAA,GAAG,EAAEC;AAFE;AADkD,CAA7D;AAQO,IAAMC,aAAa,GAAG;AAC3BC,EAAAA,IAAI,EAAE,gBADqB;AAE3BC,EAAAA,EAAE,EAAE,SAFuB;AAG3BC,EAAAA,MAAM,EAAE,SAHmB;AAI3BC,EAAAA,OAAO,EAAEV,OAJkB;AAK3BW,EAAAA,MAAM,EAAE,IALmB;AAM3BC,EAAAA,QAAQ,EAAE,OANiB;AAO3BC,EAAAA,UAAU,EAAE,CAAC,SAAD,CAPe;AAQ3BC,EAAAA,SAAS,EAAE,CAAC,0BAAD,CARgB;AAS3BC,EAAAA,MAAM,EAAE,IATmB;AAU3BC,EAAAA,KAAK,EAAE,CAAC,MAAD,EAAS,MAAT,CAVoB;AAW3BC,EAAAA,OAAO,EAAEhB;AAXkB,CAAtB;;AAcA,IAAMiB,uBAA+B,GAAGZ,aAAxC","sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'object-row-table';\n url?: string;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'object-row-table',\n url: undefined\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetLoader;\n"],"file":"parquet-loader.js"}
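
The odd-looking `typeof "3.1.1" !== 'undefined'` guard is an artifact of version inlining: per the source comment, babel-plugin-version-inline substitutes the literal package version for `__VERSION__`, so the runtime guard becomes a constant truth. A sketch of the before/after (the `declare` line stands in for the build-time global):

// Source, before inlining:
declare const __VERSION__: string | undefined;
const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
// Dist, after inlining for this release, the guard always passes:
// var VERSION = typeof "3.1.1" !== 'undefined' ? "3.1.1" : 'latest'; // => "3.1.1"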
package/dist/es5/parquet-writer.js CHANGED
@@ -4,16 +4,16 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.ParquetWriter = void 0;
- const VERSION = typeof "3.1.0-beta.3" !== 'undefined' ? "3.1.0-beta.3" : 'latest';
- const DEFAULT_PARQUET_LOADER_OPTIONS = {};
- const ParquetWriter = {
+ var VERSION = typeof "3.1.1" !== 'undefined' ? "3.1.1" : 'latest';
+ var DEFAULT_PARQUET_LOADER_OPTIONS = {};
+ var ParquetWriter = {
  name: 'Apache Parquet',
  id: 'parquet',
  module: 'parquet',
  version: VERSION,
  extensions: ['parquet'],
  mimeTypes: ['application/octet-stream'],
- encodeSync,
+ encodeSync: encodeSync,
  binary: true,
  options: DEFAULT_PARQUET_LOADER_OPTIONS
  };
package/dist/es5/parquet-writer.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/parquet-writer.ts"],"names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","ParquetWriter","name","id","module","version","extensions","mimeTypes","encodeSync","binary","options","data","ArrayBuffer"],"mappings":";;;;;;AAIA,MAAMA,OAAO,GAAG,0BAAuB,WAAvB,oBAAmD,QAAnE;AAIA,MAAMC,8BAA8B,GAAG,EAAvC;AAEO,MAAMC,aAAqB,GAAG;AACnCC,EAAAA,IAAI,EAAE,gBAD6B;AAEnCC,EAAAA,EAAE,EAAE,SAF+B;AAGnCC,EAAAA,MAAM,EAAE,SAH2B;AAInCC,EAAAA,OAAO,EAAEN,OAJ0B;AAKnCO,EAAAA,UAAU,EAAE,CAAC,SAAD,CALuB;AAMnCC,EAAAA,SAAS,EAAE,CAAC,0BAAD,CANwB;AAOnCC,EAAAA,UAPmC;AAQnCC,EAAAA,MAAM,EAAE,IAR2B;AASnCC,EAAAA,OAAO,EAAEV;AAT0B,CAA9B;;;AAYP,SAASQ,UAAT,CAAoBG,IAApB,EAA0BD,OAA1B,EAA0D;AACxD,SAAO,IAAIE,WAAJ,CAAgB,CAAhB,CAAP;AACD","sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetWriterOptions = {};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS = {};\n\nexport const ParquetWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encodeSync,\n binary: true,\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nfunction encodeSync(data, options?: ParquetWriterOptions) {\n return new ArrayBuffer(0);\n}\n"],"file":"parquet-writer.js"}
+ {"version":3,"sources":["../../src/parquet-writer.ts"],"names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","ParquetWriter","name","id","module","version","extensions","mimeTypes","encodeSync","binary","options","data","ArrayBuffer"],"mappings":";;;;;;AAIA,IAAMA,OAAO,GAAG,mBAAuB,WAAvB,aAAmD,QAAnE;AAIA,IAAMC,8BAA8B,GAAG,EAAvC;AAEO,IAAMC,aAAqB,GAAG;AACnCC,EAAAA,IAAI,EAAE,gBAD6B;AAEnCC,EAAAA,EAAE,EAAE,SAF+B;AAGnCC,EAAAA,MAAM,EAAE,SAH2B;AAInCC,EAAAA,OAAO,EAAEN,OAJ0B;AAKnCO,EAAAA,UAAU,EAAE,CAAC,SAAD,CALuB;AAMnCC,EAAAA,SAAS,EAAE,CAAC,0BAAD,CANwB;AAOnCC,EAAAA,UAAU,EAAVA,UAPmC;AAQnCC,EAAAA,MAAM,EAAE,IAR2B;AASnCC,EAAAA,OAAO,EAAEV;AAT0B,CAA9B;;;AAYP,SAASQ,UAAT,CAAoBG,IAApB,EAA0BD,OAA1B,EAA0D;AACxD,SAAO,IAAIE,WAAJ,CAAgB,CAAhB,CAAP;AACD","sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetWriterOptions = {};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS = {};\n\nexport const ParquetWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encodeSync,\n binary: true,\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nfunction encodeSync(data, options?: ParquetWriterOptions) {\n return new ArrayBuffer(0);\n}\n"],"file":"parquet-writer.js"}
package/dist/es5/parquetjs/codecs/dictionary.js CHANGED
@@ -1,19 +1,27 @@
  "use strict";

+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.decodeValues = decodeValues;
  exports.encodeValues = encodeValues;

+ var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
+
  var _rle = require("./rle");

+ function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
+
+ function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
+
  function decodeValues(type, cursor, count, opts) {
  opts.bitWidth = cursor.buffer.slice(cursor.offset, cursor.offset + 1).readInt8(0);
  cursor.offset += 1;
- return (0, _rle.decodeValues)(type, cursor, count, { ...opts,
+ return (0, _rle.decodeValues)(type, cursor, count, _objectSpread(_objectSpread({}, opts), {}, {
  disableEnvelope: true
- });
+ }));
  }

  function encodeValues(type, cursor, count, opts) {
package/dist/es5/parquetjs/codecs/dictionary.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/parquetjs/codecs/dictionary.ts"],"names":["decodeValues","type","cursor","count","opts","bitWidth","buffer","slice","offset","readInt8","disableEnvelope","encodeValues","Error"],"mappings":";;;;;;;;AAAA;;AAEO,SAASA,YAAT,CAAsBC,IAAtB,EAA4BC,MAA5B,EAAoCC,KAApC,EAA2CC,IAA3C,EAAiD;AACtDA,EAAAA,IAAI,CAACC,QAAL,GAAgBH,MAAM,CAACI,MAAP,CAAcC,KAAd,CAAoBL,MAAM,CAACM,MAA3B,EAAmCN,MAAM,CAACM,MAAP,GAAgB,CAAnD,EAAsDC,QAAtD,CAA+D,CAA/D,CAAhB;AACAP,EAAAA,MAAM,CAACM,MAAP,IAAiB,CAAjB;AACA,SAAO,uBAAgBP,IAAhB,EAAsBC,MAAtB,EAA8BC,KAA9B,EAAqC,EAAC,GAAGC,IAAJ;AAAUM,IAAAA,eAAe,EAAE;AAA3B,GAArC,CAAP;AACD;;AAEM,SAASC,YAAT,CAAsBV,IAAtB,EAA4BC,MAA5B,EAAoCC,KAApC,EAA2CC,IAA3C,EAAiD;AACtD,QAAM,IAAIQ,KAAJ,CAAU,kDAAV,CAAN;AACD","sourcesContent":["import {decodeValues as decodeRleValues} from './rle';\n\nexport function decodeValues(type, cursor, count, opts) {\n opts.bitWidth = cursor.buffer.slice(cursor.offset, cursor.offset + 1).readInt8(0);\n cursor.offset += 1;\n return decodeRleValues(type, cursor, count, {...opts, disableEnvelope: true});\n}\n\nexport function encodeValues(type, cursor, count, opts) {\n throw new Error('Encode dictionary functionality is not supported');\n}\n"],"file":"dictionary.js"}
+ {"version":3,"sources":["../../../../src/parquetjs/codecs/dictionary.ts"],"names":["decodeValues","type","cursor","count","opts","bitWidth","buffer","slice","offset","readInt8","disableEnvelope","encodeValues","Error"],"mappings":";;;;;;;;;;;;AAAA;;;;;;AAEO,SAASA,YAAT,CAAsBC,IAAtB,EAA4BC,MAA5B,EAAoCC,KAApC,EAA2CC,IAA3C,EAAiD;AACtDA,EAAAA,IAAI,CAACC,QAAL,GAAgBH,MAAM,CAACI,MAAP,CAAcC,KAAd,CAAoBL,MAAM,CAACM,MAA3B,EAAmCN,MAAM,CAACM,MAAP,GAAgB,CAAnD,EAAsDC,QAAtD,CAA+D,CAA/D,CAAhB;AACAP,EAAAA,MAAM,CAACM,MAAP,IAAiB,CAAjB;AACA,SAAO,uBAAgBP,IAAhB,EAAsBC,MAAtB,EAA8BC,KAA9B,kCAAyCC,IAAzC;AAA+CM,IAAAA,eAAe,EAAE;AAAhE,KAAP;AACD;;AAEM,SAASC,YAAT,CAAsBV,IAAtB,EAA4BC,MAA5B,EAAoCC,KAApC,EAA2CC,IAA3C,EAAiD;AACtD,QAAM,IAAIQ,KAAJ,CAAU,kDAAV,CAAN;AACD","sourcesContent":["import {decodeValues as decodeRleValues} from './rle';\n\nexport function decodeValues(type, cursor, count, opts) {\n opts.bitWidth = cursor.buffer.slice(cursor.offset, cursor.offset + 1).readInt8(0);\n cursor.offset += 1;\n return decodeRleValues(type, cursor, count, {...opts, disableEnvelope: true});\n}\n\nexport function encodeValues(type, cursor, count, opts) {\n throw new Error('Encode dictionary functionality is not supported');\n}\n"],"file":"dictionary.js"}
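
Per the embedded source, the dictionary codec's `decodeValues` reads one byte at `cursor.offset` as the dictionary-index bit width, advances the cursor, and delegates to the RLE decoder with `disableEnvelope: true`; `encodeValues` deliberately throws. A hedged sketch of the cursor shape it consumes, assuming the Node `Buffer`-based cursors used throughout this parquetjs fork:

// Hypothetical cursor: buffer[offset] holds the index bit width (here 2),
// followed by the RLE/bit-packed run of dictionary indices.
type ParquetCursor = {buffer: Buffer; offset: number};
const cursor: ParquetCursor = {buffer: Buffer.from([2 /* , ...rle data */]), offset: 0};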