@loaders.gl/parquet 3.3.2 → 3.4.0-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119)
  1. package/dist/dist.min.js +16 -16
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/index.js +46 -5
  4. package/dist/es5/index.js.map +1 -1
  5. package/dist/es5/lib/arrow/convert-columns-to-row-group.js +2 -0
  6. package/dist/es5/lib/arrow/convert-columns-to-row-group.js.map +1 -0
  7. package/dist/es5/lib/arrow/convert-row-group-to-columns.js +20 -0
  8. package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -0
  9. package/dist/es5/lib/arrow/convert-schema-from-parquet.js +98 -0
  10. package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -0
  11. package/dist/es5/lib/{convert-schema.js → arrow/convert-schema-to-parquet.js} +5 -31
  12. package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -0
  13. package/dist/es5/lib/geo/decode-geo-metadata.js +82 -0
  14. package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -0
  15. package/dist/es5/lib/geo/geoparquet-schema.js +83 -0
  16. package/dist/es5/lib/geo/geoparquet-schema.js.map +1 -0
  17. package/dist/es5/lib/parsers/parse-parquet-to-columns.js +177 -0
  18. package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -0
  19. package/dist/es5/lib/{parse-parquet.js → parsers/parse-parquet-to-rows.js} +2 -2
  20. package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -0
  21. package/dist/es5/lib/wip/convert-schema-deep.rs.disabled +976 -0
  22. package/dist/es5/parquet-loader.js +3 -2
  23. package/dist/es5/parquet-loader.js.map +1 -1
  24. package/dist/es5/parquet-wasm-loader.js +1 -1
  25. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  26. package/dist/es5/parquet-wasm-writer.js +1 -1
  27. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  28. package/dist/es5/parquet-writer.js +1 -1
  29. package/dist/es5/parquet-writer.js.map +1 -1
  30. package/dist/esm/index.js +12 -2
  31. package/dist/esm/index.js.map +1 -1
  32. package/dist/esm/lib/arrow/convert-columns-to-row-group.js +2 -0
  33. package/dist/esm/lib/arrow/convert-columns-to-row-group.js.map +1 -0
  34. package/dist/esm/lib/arrow/convert-row-group-to-columns.js +10 -0
  35. package/dist/esm/lib/arrow/convert-row-group-to-columns.js.map +1 -0
  36. package/dist/esm/lib/{convert-schema.js → arrow/convert-schema-from-parquet.js} +32 -16
  37. package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -0
  38. package/dist/esm/lib/arrow/convert-schema-to-parquet.js +40 -0
  39. package/dist/esm/lib/arrow/convert-schema-to-parquet.js.map +1 -0
  40. package/dist/esm/lib/geo/decode-geo-metadata.js +64 -0
  41. package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -0
  42. package/dist/esm/lib/geo/geoparquet-schema.js +78 -0
  43. package/dist/esm/lib/geo/geoparquet-schema.js.map +1 -0
  44. package/dist/esm/lib/parsers/parse-parquet-to-columns.js +37 -0
  45. package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -0
  46. package/dist/esm/lib/{parse-parquet.js → parsers/parse-parquet-to-rows.js} +2 -2
  47. package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -0
  48. package/dist/esm/lib/wip/convert-schema-deep.rs.disabled +976 -0
  49. package/dist/esm/parquet-loader.js +3 -2
  50. package/dist/esm/parquet-loader.js.map +1 -1
  51. package/dist/esm/parquet-wasm-loader.js +1 -1
  52. package/dist/esm/parquet-wasm-loader.js.map +1 -1
  53. package/dist/esm/parquet-wasm-writer.js +1 -1
  54. package/dist/esm/parquet-wasm-writer.js.map +1 -1
  55. package/dist/esm/parquet-writer.js +1 -1
  56. package/dist/esm/parquet-writer.js.map +1 -1
  57. package/dist/index.d.ts +23 -3
  58. package/dist/index.d.ts.map +1 -1
  59. package/dist/index.js +24 -6
  60. package/dist/lib/arrow/convert-columns-to-row-group.d.ts +1 -0
  61. package/dist/lib/arrow/convert-columns-to-row-group.d.ts.map +1 -0
  62. package/dist/lib/arrow/convert-columns-to-row-group.js +1 -0
  63. package/dist/lib/arrow/convert-row-group-to-columns.d.ts +4 -0
  64. package/dist/lib/arrow/convert-row-group-to-columns.d.ts.map +1 -0
  65. package/dist/lib/arrow/convert-row-group-to-columns.js +12 -0
  66. package/dist/lib/arrow/convert-schema-from-parquet.d.ts +9 -0
  67. package/dist/lib/arrow/convert-schema-from-parquet.d.ts.map +1 -0
  68. package/dist/lib/{convert-schema.js → arrow/convert-schema-from-parquet.js} +30 -18
  69. package/dist/lib/arrow/convert-schema-to-parquet.d.ts +7 -0
  70. package/dist/lib/arrow/convert-schema-to-parquet.d.ts.map +1 -0
  71. package/dist/lib/arrow/convert-schema-to-parquet.js +72 -0
  72. package/dist/lib/geo/decode-geo-metadata.d.ts +31 -0
  73. package/dist/lib/geo/decode-geo-metadata.d.ts.map +1 -0
  74. package/dist/lib/geo/decode-geo-metadata.js +73 -0
  75. package/dist/lib/geo/geoparquet-schema.d.ts +80 -0
  76. package/dist/lib/geo/geoparquet-schema.d.ts.map +1 -0
  77. package/dist/lib/geo/geoparquet-schema.js +69 -0
  78. package/dist/lib/parsers/parse-parquet-to-columns.d.ts +5 -0
  79. package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -0
  80. package/dist/lib/parsers/parse-parquet-to-columns.js +40 -0
  81. package/dist/lib/parsers/parse-parquet-to-rows.d.ts +4 -0
  82. package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -0
  83. package/dist/lib/{parse-parquet.js → parsers/parse-parquet-to-rows.js} +1 -1
  84. package/dist/parquet-loader.d.ts +1 -0
  85. package/dist/parquet-loader.d.ts.map +1 -1
  86. package/dist/parquet-loader.js +2 -1
  87. package/dist/parquet-worker.js +19 -19
  88. package/dist/parquet-worker.js.map +3 -3
  89. package/package.json +5 -5
  90. package/src/index.ts +22 -2
  91. package/src/lib/arrow/convert-columns-to-row-group.ts +0 -0
  92. package/src/lib/arrow/convert-row-group-to-columns.ts +15 -0
  93. package/src/lib/{convert-schema.ts → arrow/convert-schema-from-parquet.ts} +41 -22
  94. package/src/lib/arrow/convert-schema-to-parquet.ts +102 -0
  95. package/src/lib/geo/decode-geo-metadata.ts +99 -0
  96. package/src/lib/geo/geoparquet-schema.ts +69 -0
  97. package/src/lib/parsers/parse-parquet-to-columns.ts +49 -0
  98. package/src/lib/{parse-parquet.ts → parsers/parse-parquet-to-rows.ts} +2 -2
  99. package/src/lib/wip/convert-schema-deep.rs.disabled +976 -0
  100. package/src/parquet-loader.ts +3 -1
  101. package/dist/es5/lib/convert-schema.js.map +0 -1
  102. package/dist/es5/lib/parse-parquet.js.map +0 -1
  103. package/dist/es5/lib/read-array-buffer.js +0 -43
  104. package/dist/es5/lib/read-array-buffer.js.map +0 -1
  105. package/dist/esm/lib/convert-schema.js.map +0 -1
  106. package/dist/esm/lib/parse-parquet.js.map +0 -1
  107. package/dist/esm/lib/read-array-buffer.js +0 -10
  108. package/dist/esm/lib/read-array-buffer.js.map +0 -1
  109. package/dist/lib/convert-schema.d.ts +0 -8
  110. package/dist/lib/convert-schema.d.ts.map +0 -1
  111. package/dist/lib/parse-parquet.d.ts +0 -4
  112. package/dist/lib/parse-parquet.d.ts.map +0 -1
  113. package/dist/lib/read-array-buffer.d.ts +0 -19
  114. package/dist/lib/read-array-buffer.d.ts.map +0 -1
  115. package/dist/lib/read-array-buffer.js +0 -29
  116. package/src/lib/read-array-buffer.ts +0 -31
  117. /package/dist/es5/lib/{convert-schema-deep.ts.disabled → wip/convert-schema-deep.java.disabled} +0 -0
  118. /package/dist/esm/lib/{convert-schema-deep.ts.disabled → wip/convert-schema-deep.java.disabled} +0 -0
  119. /package/src/lib/{convert-schema-deep.ts.disabled → wip/convert-schema-deep.java.disabled} +0 -0
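Of the 119 files, the hunks reproduced below cover the new dist/es5/lib/parsers/parse-parquet-to-columns.js module, its sourcemap, and the renamed dist/es5/lib/parsers/parse-parquet-to-rows.js. The headline additions in 3.4.0-alpha.2 are the columnar parsing path (lib/parsers/parse-parquet-to-columns), Arrow-style schema conversion (lib/arrow/convert-schema-from-parquet and convert-schema-to-parquet), and GeoParquet metadata decoding (lib/geo/decode-geo-metadata); the old lib/read-array-buffer helper is removed. A minimal TypeScript sketch of how the new columnar API might be consumed follows; the function names and signatures come from the sourcemaps below, but importing them from the package root is an assumption (the index.ts diff adds exports without showing their names here):

// Usage sketch only. Root re-export of parseParquetFileInColumnarBatches is assumed,
// not confirmed by this diff; the function itself is defined in
// src/lib/parsers/parse-parquet-to-columns.ts.
import {parseParquetFileInColumnarBatches} from '@loaders.gl/parquet';

async function logRowGroups(blob: Blob): Promise<void> {
  // Each yielded batch is one Parquet row group converted to columns.
  for await (const batch of parseParquetFileInColumnarBatches(blob)) {
    // Per the new module: shape 'columnar-table', batchType 'data',
    // schema with geo metadata unpacked, length = rowGroup.rowCount.
    console.log(batch.shape, batch.length);
  }
}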
@@ -0,0 +1,177 @@
+ "use strict";
+
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+ Object.defineProperty(exports, "__esModule", {
+ value: true
+ });
+ exports.parseParquetFileInColumnarBatches = parseParquetFileInColumnarBatches;
+ exports.parseParquetInColumns = parseParquetInColumns;
+ var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
+ var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
+ var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
+ var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
+ var _loaderUtils = require("@loaders.gl/loader-utils");
+ var _parquetReader = require("../../parquetjs/parser/parquet-reader");
+ var _convertSchemaFromParquet = require("../arrow/convert-schema-from-parquet");
+ var _convertRowGroupToColumns = require("../arrow/convert-row-group-to-columns");
+ var _decodeGeoMetadata = require("../geo/decode-geo-metadata");
+ function _asyncIterator(iterable) { var method, async, sync, retry = 2; for ("undefined" != typeof Symbol && (async = Symbol.asyncIterator, sync = Symbol.iterator); retry--;) { if (async && null != (method = iterable[async])) return method.call(iterable); if (sync && null != (method = iterable[sync])) return new AsyncFromSyncIterator(method.call(iterable)); async = "@@asyncIterator", sync = "@@iterator"; } throw new TypeError("Object is not async iterable"); }
+ function AsyncFromSyncIterator(s) { function AsyncFromSyncIteratorContinuation(r) { if (Object(r) !== r) return Promise.reject(new TypeError(r + " is not an object.")); var done = r.done; return Promise.resolve(r.value).then(function (value) { return { value: value, done: done }; }); } return AsyncFromSyncIterator = function AsyncFromSyncIterator(s) { this.s = s, this.n = s.next; }, AsyncFromSyncIterator.prototype = { s: null, n: null, next: function next() { return AsyncFromSyncIteratorContinuation(this.n.apply(this.s, arguments)); }, return: function _return(value) { var ret = this.s.return; return void 0 === ret ? Promise.resolve({ value: value, done: !0 }) : AsyncFromSyncIteratorContinuation(ret.apply(this.s, arguments)); }, throw: function _throw(value) { var thr = this.s.return; return void 0 === thr ? Promise.reject(value) : AsyncFromSyncIteratorContinuation(thr.apply(this.s, arguments)); } }, new AsyncFromSyncIterator(s); }
+ function parseParquetInColumns(_x3, _x4) {
+ return _parseParquetInColumns.apply(this, arguments);
+ }
+ function _parseParquetInColumns() {
+ _parseParquetInColumns = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee2(arrayBuffer, options) {
+ var blob, _iteratorAbruptCompletion, _didIteratorError, _iteratorError, _iterator, _step, batch;
+ return _regenerator.default.wrap(function _callee2$(_context2) {
+ while (1) {
+ switch (_context2.prev = _context2.next) {
+ case 0:
+ blob = new Blob([arrayBuffer]);
+ _iteratorAbruptCompletion = false;
+ _didIteratorError = false;
+ _context2.prev = 3;
+ _iterator = _asyncIterator(parseParquetFileInColumnarBatches(blob, options));
+ case 5:
+ _context2.next = 7;
+ return _iterator.next();
+ case 7:
+ if (!(_iteratorAbruptCompletion = !(_step = _context2.sent).done)) {
+ _context2.next = 13;
+ break;
+ }
+ batch = _step.value;
+ return _context2.abrupt("return", batch);
+ case 10:
+ _iteratorAbruptCompletion = false;
+ _context2.next = 5;
+ break;
+ case 13:
+ _context2.next = 19;
+ break;
+ case 15:
+ _context2.prev = 15;
+ _context2.t0 = _context2["catch"](3);
+ _didIteratorError = true;
+ _iteratorError = _context2.t0;
+ case 19:
+ _context2.prev = 19;
+ _context2.prev = 20;
+ if (!(_iteratorAbruptCompletion && _iterator.return != null)) {
+ _context2.next = 24;
+ break;
+ }
+ _context2.next = 24;
+ return _iterator.return();
+ case 24:
+ _context2.prev = 24;
+ if (!_didIteratorError) {
+ _context2.next = 27;
+ break;
+ }
+ throw _iteratorError;
+ case 27:
+ return _context2.finish(24);
+ case 28:
+ return _context2.finish(19);
+ case 29:
+ return _context2.abrupt("return", null);
+ case 30:
+ case "end":
+ return _context2.stop();
+ }
+ }
+ }, _callee2, null, [[3, 15, 19, 29], [20,, 24, 28]]);
+ }));
+ return _parseParquetInColumns.apply(this, arguments);
+ }
+ function parseParquetFileInColumnarBatches(_x, _x2) {
+ return _parseParquetFileInColumnarBatches.apply(this, arguments);
+ }
+ function _parseParquetFileInColumnarBatches() {
+ _parseParquetFileInColumnarBatches = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(blob, options) {
+ var file, reader, parquetSchema, parquetMetadata, schema, rowGroups, _iteratorAbruptCompletion2, _didIteratorError2, _iteratorError2, _iterator2, _step2, rowGroup;
+ return _regenerator.default.wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ file = (0, _loaderUtils.makeReadableFile)(blob);
+ reader = new _parquetReader.ParquetReader(file);
+ _context.next = 4;
+ return (0, _awaitAsyncGenerator2.default)(reader.getSchema());
+ case 4:
+ parquetSchema = _context.sent;
+ _context.next = 7;
+ return (0, _awaitAsyncGenerator2.default)(reader.getFileMetadata());
+ case 7:
+ parquetMetadata = _context.sent;
+ schema = (0, _convertSchemaFromParquet.convertSchemaFromParquet)(parquetSchema, parquetMetadata);
+ (0, _decodeGeoMetadata.unpackGeoMetadata)(schema);
+ rowGroups = reader.rowGroupIterator(options === null || options === void 0 ? void 0 : options.parquet);
+ _iteratorAbruptCompletion2 = false;
+ _didIteratorError2 = false;
+ _context.prev = 13;
+ _iterator2 = _asyncIterator(rowGroups);
+ case 15:
+ _context.next = 17;
+ return (0, _awaitAsyncGenerator2.default)(_iterator2.next());
+ case 17:
+ if (!(_iteratorAbruptCompletion2 = !(_step2 = _context.sent).done)) {
+ _context.next = 24;
+ break;
+ }
+ rowGroup = _step2.value;
+ _context.next = 21;
+ return convertRowGroupToTableBatch(schema, rowGroup);
+ case 21:
+ _iteratorAbruptCompletion2 = false;
+ _context.next = 15;
+ break;
+ case 24:
+ _context.next = 30;
+ break;
+ case 26:
+ _context.prev = 26;
+ _context.t0 = _context["catch"](13);
+ _didIteratorError2 = true;
+ _iteratorError2 = _context.t0;
+ case 30:
+ _context.prev = 30;
+ _context.prev = 31;
+ if (!(_iteratorAbruptCompletion2 && _iterator2.return != null)) {
+ _context.next = 35;
+ break;
+ }
+ _context.next = 35;
+ return (0, _awaitAsyncGenerator2.default)(_iterator2.return());
+ case 35:
+ _context.prev = 35;
+ if (!_didIteratorError2) {
+ _context.next = 38;
+ break;
+ }
+ throw _iteratorError2;
+ case 38:
+ return _context.finish(35);
+ case 39:
+ return _context.finish(30);
+ case 40:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee, null, [[13, 26, 30, 40], [31,, 35, 39]]);
+ }));
+ return _parseParquetFileInColumnarBatches.apply(this, arguments);
+ }
+ function convertRowGroupToTableBatch(schema, rowGroup) {
+ var data = (0, _convertRowGroupToColumns.convertParquetRowGroupToColumns)(schema, rowGroup);
+ return {
+ shape: 'columnar-table',
+ batchType: 'data',
+ schema: schema,
+ data: data,
+ length: rowGroup.rowCount
+ };
+ }
+ //# sourceMappingURL=parse-parquet-to-columns.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"parse-parquet-to-columns.js","names":["parseParquetInColumns","arrayBuffer","options","blob","Blob","parseParquetFileInColumnarBatches","batch","file","makeReadableFile","reader","ParquetReader","getSchema","parquetSchema","getFileMetadata","parquetMetadata","schema","convertSchemaFromParquet","unpackGeoMetadata","rowGroups","rowGroupIterator","parquet","rowGroup","convertRowGroupToTableBatch","data","convertParquetRowGroupToColumns","shape","batchType","length","rowCount"],"sources":["../../../../src/lib/parsers/parse-parquet-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {ColumnarTableBatch, Schema} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {ParquetBuffer} from '../../parquetjs/schema/declare';\nimport {convertSchemaFromParquet} from '../arrow/convert-schema-from-parquet';\nimport {convertParquetRowGroupToColumns} from '../arrow/convert-row-group-to-columns';\nimport {unpackGeoMetadata} from '../geo/decode-geo-metadata';\n\nexport async function parseParquetInColumns(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInColumnarBatches(\n blob: Blob,\n options?: ParquetLoaderOptions\n): AsyncIterable<ColumnarTableBatch> {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const parquetSchema = await reader.getSchema();\n const parquetMetadata = await reader.getFileMetadata();\n const schema = convertSchemaFromParquet(parquetSchema, parquetMetadata);\n unpackGeoMetadata(schema);\n const rowGroups = reader.rowGroupIterator(options?.parquet);\n for await (const rowGroup of rowGroups) {\n yield convertRowGroupToTableBatch(schema, rowGroup);\n }\n}\n\nfunction convertRowGroupToTableBatch(schema: Schema, rowGroup: ParquetBuffer): ColumnarTableBatch {\n const data = convertParquetRowGroupToColumns(schema, rowGroup);\n return {\n shape: 'columnar-table',\n batchType: 'data',\n schema,\n data,\n length: rowGroup.rowCount\n };\n}\n"],"mappings":";;;;;;;;;;;;AAIA;AAEA;AAEA;AACA;AACA;AAA6D;AAAA;AAAA,SAEvCA,qBAAqB;EAAA;AAAA;AAAA;EAAA,mFAApC,kBACLC,WAAwB,EACxBC,OAA8B;IAAA;IAAA;MAAA;QAAA;UAAA;YAExBC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,WAAW,CAAC,CAAC;YAAA;YAAA;YAAA;YAAA,2BACVI,iCAAiC,CAACF,IAAI,EAAED,OAAO,CAAC;UAAA;YAAA;YAAA;UAAA;YAAA;cAAA;cAAA;YAAA;YAAzDI,KAAK;YAAA,kCACbA,KAAK;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;cAAA;cAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;cAAA;cAAA;YAAA;YAAA;UAAA;YAAA;UAAA;YAAA;UAAA;YAAA,kCAEP,IAAI;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACZ;EAAA;AAAA;AAAA,SAEsBD,iCAAiC;EAAA;AAAA;AAAA;EAAA,iGAAjD,iBACLF,IAAU,EACVD,OAA8B;IAAA;IAAA;MAAA;QAAA;UAAA;YAExBK,IAAI,GAAG,IAAAC,6BAAgB,EAACL,IAAI,CAAC;YAC7BM,MAAM,GAAG,IAAIC,4BAAa,CAACH,IAAI,CAAC;YAAA;YAAA,0CACVE,MAAM,CAACE,SAAS,EAAE;UAAA;YAAxCC,aAAa;YAAA;YAAA,0CACWH,MAAM,CAACI,eAAe,EAAE;UAAA;YAAhDC,eAAe;YACfC,MAAM,GAAG,IAAAC,kDAAwB,EAACJ,aAAa,EAAEE,eAAe,CAAC;YACvE,IAAAG,oCAAiB,EAACF,MAAM,CAAC;YACnBG,SAAS,GAAGT,MAAM,CAACU,gBAAgB,CAACjB,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEkB,OAAO,CAAC;YAAA;YAAA;YAAA;YAAA,4BAC9BF,SAAS;UAAA;YAAA;YAAA;UAAA;YAAA;cAAA;cAAA;YAAA;YAArBG,QAAQ;YAAA;YACvB,OAAMC,2BAA2B,CAACP,MAAM,EAAEM,QAAQ,CAAC;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;cAAA;cAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;cAAA;cAAA;YAAA;YAAA;UAAA;YAAA;UAAA;YAAA;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CAEtD;EAAA;AAAA;AAED,SAASC,2BAA2B,CAACP,MAAc,EAAEM,QAAuB,EAAsB;EAChG,IAAME,IAAI,GAAG,IAAAC,yDAA+B,EAACT,MAAM,EAAEM,QAAQ,CAAC;EAC9D,OAAO;IACLI,KAAK,EAAE,gBAAgB;IACvBC,SAAS,EAAE,MAAM;IACjBX,MAAM,EAANA,MAAM;IACNQ,IAAI,EAAJA,IAAI;IACJI,MAAM,EAAEN,QAAQ,CAACO;EACnB,CAAC;AACH"}
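For readability, here is the TypeScript source that the transpiled es5 module above is generated from, recovered from the sourcesContent field of the sourcemap. The header comment and the commented-out import are trimmed, and the two inline comments are added here as explanation; the note on unpackGeoMetadata is an inference, since its implementation is not part of this excerpt:

import {ColumnarTableBatch, Schema} from '@loaders.gl/schema';
import {makeReadableFile} from '@loaders.gl/loader-utils';
import type {ParquetLoaderOptions} from '../../parquet-loader';
import {ParquetReader} from '../../parquetjs/parser/parquet-reader';
import {ParquetBuffer} from '../../parquetjs/schema/declare';
import {convertSchemaFromParquet} from '../arrow/convert-schema-from-parquet';
import {convertParquetRowGroupToColumns} from '../arrow/convert-row-group-to-columns';
import {unpackGeoMetadata} from '../geo/decode-geo-metadata';

export async function parseParquetInColumns(
  arrayBuffer: ArrayBuffer,
  options?: ParquetLoaderOptions
) {
  const blob = new Blob([arrayBuffer]);
  // Returns the first columnar batch, or null for an empty file.
  for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {
    return batch;
  }
  return null;
}

export async function* parseParquetFileInColumnarBatches(
  blob: Blob,
  options?: ParquetLoaderOptions
): AsyncIterable<ColumnarTableBatch> {
  const file = makeReadableFile(blob);
  const reader = new ParquetReader(file);
  const parquetSchema = await reader.getSchema();
  const parquetMetadata = await reader.getFileMetadata();
  const schema = convertSchemaFromParquet(parquetSchema, parquetMetadata);
  // Presumably decodes GeoParquet 'geo' metadata carried on the schema.
  unpackGeoMetadata(schema);
  const rowGroups = reader.rowGroupIterator(options?.parquet);
  for await (const rowGroup of rowGroups) {
    yield convertRowGroupToTableBatch(schema, rowGroup);
  }
}

function convertRowGroupToTableBatch(schema: Schema, rowGroup: ParquetBuffer): ColumnarTableBatch {
  const data = convertParquetRowGroupToColumns(schema, rowGroup);
  return {
    shape: 'columnar-table',
    batchType: 'data',
    schema,
    data,
    length: rowGroup.rowCount
  };
}

The remaining hunks below cover the renamed dist/es5/lib/parsers/parse-parquet-to-rows.js (formerly lib/parse-parquet.js) and its new sourcemap; only the relative import path of parquet-reader and the sourceMappingURL comment change.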
@@ -11,7 +11,7 @@ var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/
  var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
  var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
  var _loaderUtils = require("@loaders.gl/loader-utils");
- var _parquetReader = require("../parquetjs/parser/parquet-reader");
+ var _parquetReader = require("../../parquetjs/parser/parquet-reader");
  function _asyncIterator(iterable) { var method, async, sync, retry = 2; for ("undefined" != typeof Symbol && (async = Symbol.asyncIterator, sync = Symbol.iterator); retry--;) { if (async && null != (method = iterable[async])) return method.call(iterable); if (sync && null != (method = iterable[sync])) return new AsyncFromSyncIterator(method.call(iterable)); async = "@@asyncIterator", sync = "@@iterator"; } throw new TypeError("Object is not async iterable"); }
  function AsyncFromSyncIterator(s) { function AsyncFromSyncIteratorContinuation(r) { if (Object(r) !== r) return Promise.reject(new TypeError(r + " is not an object.")); var done = r.done; return Promise.resolve(r.value).then(function (value) { return { value: value, done: done }; }); } return AsyncFromSyncIterator = function AsyncFromSyncIterator(s) { this.s = s, this.n = s.next; }, AsyncFromSyncIterator.prototype = { s: null, n: null, next: function next() { return AsyncFromSyncIteratorContinuation(this.n.apply(this.s, arguments)); }, return: function _return(value) { var ret = this.s.return; return void 0 === ret ? Promise.resolve({ value: value, done: !0 }) : AsyncFromSyncIteratorContinuation(ret.apply(this.s, arguments)); }, throw: function _throw(value) { var thr = this.s.return; return void 0 === thr ? Promise.reject(value) : AsyncFromSyncIteratorContinuation(thr.apply(this.s, arguments)); } }, new AsyncFromSyncIterator(s); }
  function parseParquet(_x3, _x4) {
@@ -151,4 +151,4 @@ function _parseParquetFileInBatches() {
  }));
  return _parseParquetFileInBatches.apply(this, arguments);
  }
- //# sourceMappingURL=parse-parquet.js.map
+ //# sourceMappingURL=parse-parquet-to-rows.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"parse-parquet-to-rows.js","names":["parseParquet","arrayBuffer","options","blob","Blob","parseParquetFileInBatches","batch","file","makeReadableFile","reader","ParquetReader","rowBatches","rowBatchIterator","parquet","rows"],"sources":["../../../../src/lib/parsers/parse-parquet-to-rows.ts"],"sourcesContent":["// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n// import {ColumnarTableBatch} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\n\nexport async function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions) {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const rowBatches = reader.rowBatchIterator(options?.parquet);\n for await (const rows of rowBatches) {\n yield rows;\n }\n}\n\n// export async function* parseParquetFileInColumnarBatches(blob: Blob, options?: {columnList?: string[][]}): AsyncIterable<ColumnarTableBatch> {\n// const rowGroupReader = new ParquetRowGroupReader({data: blob, columnList: options?.columnList});\n// try {\n// for await (const rowGroup of rowGroupReader) {\n// yield convertRowGroupToTableBatch(rowGroup);\n// }\n// } finally {\n// await rowGroupReader.close();\n// }\n// }\n\n// function convertRowGroupToTableBatch(rowGroup): ColumnarTableBatch {\n// // @ts-expect-error\n// return {\n// data: rowGroup\n// };\n// }\n"],"mappings":";;;;;;;;;;;;AAEA;AAEA;AAAoE;AAAA;AAAA,SAE9CA,YAAY;EAAA;AAAA;AAAA;EAAA,0EAA3B,kBAA4BC,WAAwB,EAAEC,OAA8B;IAAA;IAAA;MAAA;QAAA;UAAA;YACnFC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,WAAW,CAAC,CAAC;YAAA;YAAA;YAAA;YAAA,2BACVI,yBAAyB,CAACF,IAAI,EAAED,OAAO,CAAC;UAAA;YAAA;YAAA;UAAA;YAAA;cAAA;cAAA;YAAA;YAAjDI,KAAK;YAAA,kCACbA,KAAK;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;cAAA;cAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;cAAA;cAAA;YAAA;YAAA;UAAA;YAAA;UAAA;YAAA;UAAA;YAAA,kCAEP,IAAI;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACZ;EAAA;AAAA;AAAA,SAEsBD,yBAAyB;EAAA;AAAA;AAAA;EAAA,yFAAzC,iBAA0CF,IAAU,EAAED,OAA8B;IAAA;IAAA;MAAA;QAAA;UAAA;YACnFK,IAAI,GAAG,IAAAC,6BAAgB,EAACL,IAAI,CAAC;YAC7BM,MAAM,GAAG,IAAIC,4BAAa,CAACH,IAAI,CAAC;YAChCI,UAAU,GAAGF,MAAM,CAACG,gBAAgB,CAACV,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEW,OAAO,CAAC;YAAA;YAAA;YAAA;YAAA,4BACnCF,UAAU;UAAA;YAAA;YAAA;UAAA;YAAA;cAAA;cAAA;YAAA;YAAlBG,IAAI;YAAA;YACnB,OAAMA,IAAI;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;cAAA;cAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;cAAA;cAAA;YAAA;YAAA;UAAA;YAAA;UAAA;YAAA;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CAEb;EAAA;AAAA"}