@loaders.gl/parquet 3.4.13 → 3.4.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +15 -24
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/bundle.js +1 -1
- package/dist/es5/bundle.js.map +1 -1
- package/dist/es5/constants.js +5 -5
- package/dist/es5/constants.js.map +1 -1
- package/dist/es5/index.js +24 -24
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js +2 -7
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -1
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js +22 -33
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -1
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js +2 -2
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -1
- package/dist/es5/lib/geo/decode-geo-metadata.js +16 -27
- package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -1
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js +20 -151
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -1
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js +13 -138
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -1
- package/dist/es5/lib/wasm/encode-parquet-wasm.js +8 -29
- package/dist/es5/lib/wasm/encode-parquet-wasm.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/index.js +1 -1
- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js +10 -33
- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js +4 -22
- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
- package/dist/es5/lib/wasm/parse-parquet-wasm.js +13 -46
- package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/es5/parquet-loader.js +4 -4
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +4 -4
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +3 -3
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +4 -4
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/codecs/dictionary.js +3 -6
- package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
- package/dist/es5/parquetjs/codecs/index.js +4 -5
- package/dist/es5/parquetjs/codecs/index.js.map +1 -1
- package/dist/es5/parquetjs/codecs/plain.js +41 -41
- package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
- package/dist/es5/parquetjs/codecs/rle.js +25 -30
- package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +26 -90
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/encoder/parquet-encoder.js +245 -536
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/BsonType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js +123 -133
- package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js +138 -150
- package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js +241 -251
- package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js +58 -70
- package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js +97 -107
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js +136 -146
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DateType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DecimalType.js +58 -68
- package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js +71 -81
- package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Encoding.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/EnumType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js +164 -174
- package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/IntType.js +58 -68
- package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/JsonType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/KeyValue.js +58 -68
- package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ListType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/LogicalType.js +274 -310
- package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MapType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/NullType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js +56 -66
- package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js +71 -81
- package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageHeader.js +136 -146
- package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageLocation.js +71 -81
- package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageType.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/RowGroup.js +105 -115
- package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js +162 -172
- package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js +71 -81
- package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Statistics.js +106 -116
- package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/StringType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeType.js +58 -68
- package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js +76 -90
- package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimestampType.js +58 -68
- package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Type.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/UUIDType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/index.js +43 -43
- package/dist/es5/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/es5/parquetjs/parser/decoders.js +195 -327
- package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
- package/dist/es5/parquetjs/parser/parquet-reader.js +155 -582
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js +10 -11
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/schema.js +65 -82
- package/dist/es5/parquetjs/schema/schema.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +56 -87
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js +40 -40
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/file-utils.js +8 -12
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/es5/parquetjs/utils/read-utils.js +22 -39
- package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
- package/dist/esm/parquet-loader.js +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/parquet-worker.js +15 -24
- package/dist/parquet-worker.js.map +3 -3
- package/package.json +6 -6
--- package/dist/es5/lib/parsers/parse-parquet-to-columns.js
+++ package/dist/es5/lib/parsers/parse-parquet-to-columns.js
@@ -1,172 +1,41 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.parseParquetFileInColumnarBatches = parseParquetFileInColumnarBatches;
 exports.parseParquetInColumns = parseParquetInColumns;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
-var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
 var _loaderUtils = require("@loaders.gl/loader-utils");
 var _parquetReader = require("../../parquetjs/parser/parquet-reader");
 var _convertSchemaFromParquet = require("../arrow/convert-schema-from-parquet");
 var _convertRowGroupToColumns = require("../arrow/convert-row-group-to-columns");
 var _decodeGeoMetadata = require("../geo/decode-geo-metadata");
-
-
-
-
+async function parseParquetInColumns(arrayBuffer, options) {
+  const blob = new Blob([arrayBuffer]);
+  for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {
+    return batch;
+  }
+  return null;
 }
-function
-
-
-
-
-
-
-
-
-
-
-        case 5:
-          _context2.next = 7;
-          return _iterator.next();
-        case 7:
-          if (!(_iteratorAbruptCompletion = !(_step = _context2.sent).done)) {
-            _context2.next = 13;
-            break;
-          }
-          batch = _step.value;
-          return _context2.abrupt("return", batch);
-        case 10:
-          _iteratorAbruptCompletion = false;
-          _context2.next = 5;
-          break;
-        case 13:
-          _context2.next = 19;
-          break;
-        case 15:
-          _context2.prev = 15;
-          _context2.t0 = _context2["catch"](3);
-          _didIteratorError = true;
-          _iteratorError = _context2.t0;
-        case 19:
-          _context2.prev = 19;
-          _context2.prev = 20;
-          if (!(_iteratorAbruptCompletion && _iterator.return != null)) {
-            _context2.next = 24;
-            break;
-          }
-          _context2.next = 24;
-          return _iterator.return();
-        case 24:
-          _context2.prev = 24;
-          if (!_didIteratorError) {
-            _context2.next = 27;
-            break;
-          }
-          throw _iteratorError;
-        case 27:
-          return _context2.finish(24);
-        case 28:
-          return _context2.finish(19);
-        case 29:
-          return _context2.abrupt("return", null);
-        case 30:
-        case "end":
-          return _context2.stop();
-      }
-    }, _callee2, null, [[3, 15, 19, 29], [20,, 24, 28]]);
-  }));
-  return _parseParquetInColumns.apply(this, arguments);
-}
-function parseParquetFileInColumnarBatches(_x, _x2) {
-  return _parseParquetFileInColumnarBatches.apply(this, arguments);
-}
-function _parseParquetFileInColumnarBatches() {
-  _parseParquetFileInColumnarBatches = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(blob, options) {
-    var file, reader, parquetSchema, parquetMetadata, schema, rowGroups, _iteratorAbruptCompletion2, _didIteratorError2, _iteratorError2, _iterator2, _step2, rowGroup;
-    return _regenerator.default.wrap(function _callee$(_context) {
-      while (1) switch (_context.prev = _context.next) {
-        case 0:
-          file = (0, _loaderUtils.makeReadableFile)(blob);
-          reader = new _parquetReader.ParquetReader(file);
-          _context.next = 4;
-          return (0, _awaitAsyncGenerator2.default)(reader.getSchema());
-        case 4:
-          parquetSchema = _context.sent;
-          _context.next = 7;
-          return (0, _awaitAsyncGenerator2.default)(reader.getFileMetadata());
-        case 7:
-          parquetMetadata = _context.sent;
-          schema = (0, _convertSchemaFromParquet.convertSchemaFromParquet)(parquetSchema, parquetMetadata);
-          (0, _decodeGeoMetadata.unpackGeoMetadata)(schema);
-          rowGroups = reader.rowGroupIterator(options === null || options === void 0 ? void 0 : options.parquet);
-          _iteratorAbruptCompletion2 = false;
-          _didIteratorError2 = false;
-          _context.prev = 13;
-          _iterator2 = _asyncIterator(rowGroups);
-        case 15:
-          _context.next = 17;
-          return (0, _awaitAsyncGenerator2.default)(_iterator2.next());
-        case 17:
-          if (!(_iteratorAbruptCompletion2 = !(_step2 = _context.sent).done)) {
-            _context.next = 24;
-            break;
-          }
-          rowGroup = _step2.value;
-          _context.next = 21;
-          return convertRowGroupToTableBatch(schema, rowGroup);
-        case 21:
-          _iteratorAbruptCompletion2 = false;
-          _context.next = 15;
-          break;
-        case 24:
-          _context.next = 30;
-          break;
-        case 26:
-          _context.prev = 26;
-          _context.t0 = _context["catch"](13);
-          _didIteratorError2 = true;
-          _iteratorError2 = _context.t0;
-        case 30:
-          _context.prev = 30;
-          _context.prev = 31;
-          if (!(_iteratorAbruptCompletion2 && _iterator2.return != null)) {
-            _context.next = 35;
-            break;
-          }
-          _context.next = 35;
-          return (0, _awaitAsyncGenerator2.default)(_iterator2.return());
-        case 35:
-          _context.prev = 35;
-          if (!_didIteratorError2) {
-            _context.next = 38;
-            break;
-          }
-          throw _iteratorError2;
-        case 38:
-          return _context.finish(35);
-        case 39:
-          return _context.finish(30);
-        case 40:
-        case "end":
-          return _context.stop();
-      }
-    }, _callee, null, [[13, 26, 30, 40], [31,, 35, 39]]);
-  }));
-  return _parseParquetFileInColumnarBatches.apply(this, arguments);
+async function* parseParquetFileInColumnarBatches(blob, options) {
+  const file = (0, _loaderUtils.makeReadableFile)(blob);
+  const reader = new _parquetReader.ParquetReader(file);
+  const parquetSchema = await reader.getSchema();
+  const parquetMetadata = await reader.getFileMetadata();
+  const schema = (0, _convertSchemaFromParquet.convertSchemaFromParquet)(parquetSchema, parquetMetadata);
+  (0, _decodeGeoMetadata.unpackGeoMetadata)(schema);
+  const rowGroups = reader.rowGroupIterator(options === null || options === void 0 ? void 0 : options.parquet);
+  for await (const rowGroup of rowGroups) {
+    yield convertRowGroupToTableBatch(schema, rowGroup);
+  }
 }
 function convertRowGroupToTableBatch(schema, rowGroup) {
-
+  const data = (0, _convertRowGroupToColumns.convertParquetRowGroupToColumns)(schema, rowGroup);
   return {
     shape: 'columnar-table',
     batchType: 'data',
-    schema
-    data
+    schema,
+    data,
     length: rowGroup.rowCount
   };
 }
--- package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map
+++ package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map
@@ -1 +1 @@
-{"version":3,"file":"parse-parquet-to-columns.js","names":["_loaderUtils","require","_parquetReader","_convertSchemaFromParquet","_convertRowGroupToColumns","_decodeGeoMetadata","
+{"version":3,"file":"parse-parquet-to-columns.js","names":["_loaderUtils","require","_parquetReader","_convertSchemaFromParquet","_convertRowGroupToColumns","_decodeGeoMetadata","parseParquetInColumns","arrayBuffer","options","blob","Blob","batch","parseParquetFileInColumnarBatches","file","makeReadableFile","reader","ParquetReader","parquetSchema","getSchema","parquetMetadata","getFileMetadata","schema","convertSchemaFromParquet","unpackGeoMetadata","rowGroups","rowGroupIterator","parquet","rowGroup","convertRowGroupToTableBatch","data","convertParquetRowGroupToColumns","shape","batchType","length","rowCount"],"sources":["../../../../src/lib/parsers/parse-parquet-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {ColumnarTableBatch, Schema} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {ParquetBuffer} from '../../parquetjs/schema/declare';\nimport {convertSchemaFromParquet} from '../arrow/convert-schema-from-parquet';\nimport {convertParquetRowGroupToColumns} from '../arrow/convert-row-group-to-columns';\nimport {unpackGeoMetadata} from '../geo/decode-geo-metadata';\n\nexport async function parseParquetInColumns(\n  arrayBuffer: ArrayBuffer,\n  options?: ParquetLoaderOptions\n) {\n  const blob = new Blob([arrayBuffer]);\n  for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {\n    return batch;\n  }\n  return null;\n}\n\nexport async function* parseParquetFileInColumnarBatches(\n  blob: Blob,\n  options?: ParquetLoaderOptions\n): AsyncIterable<ColumnarTableBatch> {\n  const file = makeReadableFile(blob);\n  const reader = new ParquetReader(file);\n  const parquetSchema = await reader.getSchema();\n  const parquetMetadata = await reader.getFileMetadata();\n  const schema = convertSchemaFromParquet(parquetSchema, parquetMetadata);\n  unpackGeoMetadata(schema);\n  const rowGroups = reader.rowGroupIterator(options?.parquet);\n  for await (const rowGroup of rowGroups) {\n    yield convertRowGroupToTableBatch(schema, rowGroup);\n  }\n}\n\nfunction convertRowGroupToTableBatch(schema: Schema, rowGroup: ParquetBuffer): ColumnarTableBatch {\n  const data = convertParquetRowGroupToColumns(schema, rowGroup);\n  return {\n    shape: 'columnar-table',\n    batchType: 'data',\n    schema,\n    data,\n    length: rowGroup.rowCount\n  };\n}\n"],"mappings":";;;;;;;AAIA,IAAAA,YAAA,GAAAC,OAAA;AAEA,IAAAC,cAAA,GAAAD,OAAA;AAEA,IAAAE,yBAAA,GAAAF,OAAA;AACA,IAAAG,yBAAA,GAAAH,OAAA;AACA,IAAAI,kBAAA,GAAAJ,OAAA;AAEO,eAAeK,qBAAqBA,CACzCC,WAAwB,EACxBC,OAA8B,EAC9B;EACA,MAAMC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,WAAW,CAAC,CAAC;EACpC,WAAW,MAAMI,KAAK,IAAIC,iCAAiC,CAACH,IAAI,EAAED,OAAO,CAAC,EAAE;IAC1E,OAAOG,KAAK;EACd;EACA,OAAO,IAAI;AACb;AAEO,gBAAgBC,iCAAiCA,CACtDH,IAAU,EACVD,OAA8B,EACK;EACnC,MAAMK,IAAI,GAAG,IAAAC,6BAAgB,EAACL,IAAI,CAAC;EACnC,MAAMM,MAAM,GAAG,IAAIC,4BAAa,CAACH,IAAI,CAAC;EACtC,MAAMI,aAAa,GAAG,MAAMF,MAAM,CAACG,SAAS,CAAC,CAAC;EAC9C,MAAMC,eAAe,GAAG,MAAMJ,MAAM,CAACK,eAAe,CAAC,CAAC;EACtD,MAAMC,MAAM,GAAG,IAAAC,kDAAwB,EAACL,aAAa,EAAEE,eAAe,CAAC;EACvE,IAAAI,oCAAiB,EAACF,MAAM,CAAC;EACzB,MAAMG,SAAS,GAAGT,MAAM,CAACU,gBAAgB,CAACjB,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEkB,OAAO,CAAC;EAC3D,WAAW,MAAMC,QAAQ,IAAIH,SAAS,EAAE;IACtC,MAAMI,2BAA2B,CAACP,MAAM,EAAEM,QAAQ,CAAC;EACrD;AACF;AAEA,SAASC,2BAA2BA,CAACP,MAAc,EAAEM,QAAuB,EAAsB;EAChG,MAAME,IAAI,GAAG,IAAAC,yDAA+B,EAACT,MAAM,EAAEM,QAAQ,CAAC;EAC9D,OAAO;IACLI,KAAK,EAAE,gBAAgB;IACvBC,SAAS,EAAE,MAAM;IACjBX,MAAM;IACNQ,IAAI;IACJI,MAAM,EAAEN,QAAQ,CAACO;EACnB,CAAC;AACH"}
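
Note: the source map above embeds the original TypeScript of parse-parquet-to-columns.ts, which is what the new ES5 output compiles from. As a minimal sketch of the pattern that output now relies on (all names and data below are illustrative, not the package's public API): an async generator yields one columnar batch per row group, and a wrapper returns just the first yielded batch, exactly like parseParquetInColumns above.

// Sketch of the async-generator pattern in the new output. Names and data
// are illustrative; only the control flow mirrors the dist code.
type ColumnarBatch = {
  shape: 'columnar-table';
  batchType: 'data';
  data: Record<string, unknown[]>;
  length: number;
};

async function* batchesFromRowGroups(rowGroups: number[][]): AsyncGenerator<ColumnarBatch> {
  for (const rows of rowGroups) {
    // One batch per row group, as convertRowGroupToTableBatch does above.
    yield {shape: 'columnar-table', batchType: 'data', data: {value: rows}, length: rows.length};
  }
}

async function firstBatch(rowGroups: number[][]): Promise<ColumnarBatch | null> {
  for await (const batch of batchesFromRowGroups(rowGroups)) {
    return batch; // same shape as parseParquetInColumns: first batch only
  }
  return null;
}

firstBatch([[1, 2, 3], [4, 5]]).then((batch) => console.log(batch?.length)); // 3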
--- package/dist/es5/lib/parsers/parse-parquet-to-rows.js
+++ package/dist/es5/lib/parsers/parse-parquet-to-rows.js
@@ -1,150 +1,25 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.parseParquet = parseParquet;
 exports.parseParquetFileInBatches = parseParquetFileInBatches;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
-var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
 var _loaderUtils = require("@loaders.gl/loader-utils");
 var _parquetReader = require("../../parquetjs/parser/parquet-reader");
-
-
-
-
+async function parseParquet(arrayBuffer, options) {
+  const blob = new Blob([arrayBuffer]);
+  for await (const batch of parseParquetFileInBatches(blob, options)) {
+    return batch;
+  }
+  return null;
 }
-function
-
-
-
-
-
-
-          _iteratorAbruptCompletion = false;
-          _didIteratorError = false;
-          _context2.prev = 3;
-          _iterator = _asyncIterator(parseParquetFileInBatches(blob, options));
-        case 5:
-          _context2.next = 7;
-          return _iterator.next();
-        case 7:
-          if (!(_iteratorAbruptCompletion = !(_step = _context2.sent).done)) {
-            _context2.next = 13;
-            break;
-          }
-          batch = _step.value;
-          return _context2.abrupt("return", batch);
-        case 10:
-          _iteratorAbruptCompletion = false;
-          _context2.next = 5;
-          break;
-        case 13:
-          _context2.next = 19;
-          break;
-        case 15:
-          _context2.prev = 15;
-          _context2.t0 = _context2["catch"](3);
-          _didIteratorError = true;
-          _iteratorError = _context2.t0;
-        case 19:
-          _context2.prev = 19;
-          _context2.prev = 20;
-          if (!(_iteratorAbruptCompletion && _iterator.return != null)) {
-            _context2.next = 24;
-            break;
-          }
-          _context2.next = 24;
-          return _iterator.return();
-        case 24:
-          _context2.prev = 24;
-          if (!_didIteratorError) {
-            _context2.next = 27;
-            break;
-          }
-          throw _iteratorError;
-        case 27:
-          return _context2.finish(24);
-        case 28:
-          return _context2.finish(19);
-        case 29:
-          return _context2.abrupt("return", null);
-        case 30:
-        case "end":
-          return _context2.stop();
-      }
-    }, _callee2, null, [[3, 15, 19, 29], [20,, 24, 28]]);
-  }));
-  return _parseParquet.apply(this, arguments);
-}
-function parseParquetFileInBatches(_x, _x2) {
-  return _parseParquetFileInBatches.apply(this, arguments);
-}
-function _parseParquetFileInBatches() {
-  _parseParquetFileInBatches = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(blob, options) {
-    var file, reader, rowBatches, _iteratorAbruptCompletion2, _didIteratorError2, _iteratorError2, _iterator2, _step2, rows;
-    return _regenerator.default.wrap(function _callee$(_context) {
-      while (1) switch (_context.prev = _context.next) {
-        case 0:
-          file = (0, _loaderUtils.makeReadableFile)(blob);
-          reader = new _parquetReader.ParquetReader(file);
-          rowBatches = reader.rowBatchIterator(options === null || options === void 0 ? void 0 : options.parquet);
-          _iteratorAbruptCompletion2 = false;
-          _didIteratorError2 = false;
-          _context.prev = 5;
-          _iterator2 = _asyncIterator(rowBatches);
-        case 7:
-          _context.next = 9;
-          return (0, _awaitAsyncGenerator2.default)(_iterator2.next());
-        case 9:
-          if (!(_iteratorAbruptCompletion2 = !(_step2 = _context.sent).done)) {
-            _context.next = 16;
-            break;
-          }
-          rows = _step2.value;
-          _context.next = 13;
-          return rows;
-        case 13:
-          _iteratorAbruptCompletion2 = false;
-          _context.next = 7;
-          break;
-        case 16:
-          _context.next = 22;
-          break;
-        case 18:
-          _context.prev = 18;
-          _context.t0 = _context["catch"](5);
-          _didIteratorError2 = true;
-          _iteratorError2 = _context.t0;
-        case 22:
-          _context.prev = 22;
-          _context.prev = 23;
-          if (!(_iteratorAbruptCompletion2 && _iterator2.return != null)) {
-            _context.next = 27;
-            break;
-          }
-          _context.next = 27;
-          return (0, _awaitAsyncGenerator2.default)(_iterator2.return());
-        case 27:
-          _context.prev = 27;
-          if (!_didIteratorError2) {
-            _context.next = 30;
-            break;
-          }
-          throw _iteratorError2;
-        case 30:
-          return _context.finish(27);
-        case 31:
-          return _context.finish(22);
-        case 32:
-        case "end":
-          return _context.stop();
-      }
-    }, _callee, null, [[5, 18, 22, 32], [23,, 27, 31]]);
-  }));
-  return _parseParquetFileInBatches.apply(this, arguments);
+async function* parseParquetFileInBatches(blob, options) {
+  const file = (0, _loaderUtils.makeReadableFile)(blob);
+  const reader = new _parquetReader.ParquetReader(file);
+  const rowBatches = reader.rowBatchIterator(options === null || options === void 0 ? void 0 : options.parquet);
+  for await (const rows of rowBatches) {
+    yield rows;
+  }
 }
 //# sourceMappingURL=parse-parquet-to-rows.js.map
--- package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map
+++ package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map
@@ -1 +1 @@
-{"version":3,"file":"parse-parquet-to-rows.js","names":["_loaderUtils","require","_parquetReader","
+{"version":3,"file":"parse-parquet-to-rows.js","names":["_loaderUtils","require","_parquetReader","parseParquet","arrayBuffer","options","blob","Blob","batch","parseParquetFileInBatches","file","makeReadableFile","reader","ParquetReader","rowBatches","rowBatchIterator","parquet","rows"],"sources":["../../../../src/lib/parsers/parse-parquet-to-rows.ts"],"sourcesContent":["// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n// import {ColumnarTableBatch} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\n\nexport async function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {\n  const blob = new Blob([arrayBuffer]);\n  for await (const batch of parseParquetFileInBatches(blob, options)) {\n    return batch;\n  }\n  return null;\n}\n\nexport async function* parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions) {\n  const file = makeReadableFile(blob);\n  const reader = new ParquetReader(file);\n  const rowBatches = reader.rowBatchIterator(options?.parquet);\n  for await (const rows of rowBatches) {\n    yield rows;\n  }\n}\n\n// export async function* parseParquetFileInColumnarBatches(blob: Blob, options?: {columnList?: string[][]}): AsyncIterable<ColumnarTableBatch> {\n//   const rowGroupReader = new ParquetRowGroupReader({data: blob, columnList: options?.columnList});\n//   try {\n//     for await (const rowGroup of rowGroupReader) {\n//       yield convertRowGroupToTableBatch(rowGroup);\n//     }\n//   } finally {\n//     await rowGroupReader.close();\n//   }\n// }\n\n// function convertRowGroupToTableBatch(rowGroup): ColumnarTableBatch {\n//   // @ts-expect-error\n//   return {\n//     data: rowGroup\n//   };\n// }\n"],"mappings":";;;;;;;AAEA,IAAAA,YAAA,GAAAC,OAAA;AAEA,IAAAC,cAAA,GAAAD,OAAA;AAEO,eAAeE,YAAYA,CAACC,WAAwB,EAAEC,OAA8B,EAAE;EAC3F,MAAMC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,WAAW,CAAC,CAAC;EACpC,WAAW,MAAMI,KAAK,IAAIC,yBAAyB,CAACH,IAAI,EAAED,OAAO,CAAC,EAAE;IAClE,OAAOG,KAAK;EACd;EACA,OAAO,IAAI;AACb;AAEO,gBAAgBC,yBAAyBA,CAACH,IAAU,EAAED,OAA8B,EAAE;EAC3F,MAAMK,IAAI,GAAG,IAAAC,6BAAgB,EAACL,IAAI,CAAC;EACnC,MAAMM,MAAM,GAAG,IAAIC,4BAAa,CAACH,IAAI,CAAC;EACtC,MAAMI,UAAU,GAAGF,MAAM,CAACG,gBAAgB,CAACV,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEW,OAAO,CAAC;EAC5D,WAAW,MAAMC,IAAI,IAAIH,UAAU,EAAE;IACnC,MAAMG,IAAI;EACZ;AACF"}
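
Note: most of the deleted lines in the two hunks above are regenerator bookkeeping for async iteration (the `_iteratorAbruptCompletion` flags, the `_iterator.return()` calls, the `finally`-style `_context.finish(...)` states). Native `for await` provides the same cleanup protocol automatically. A minimal sketch, assuming nothing beyond standard async generators:

// Early `return` out of a `for await` loop calls the iterator's return(),
// which runs the generator's finally block -- the protocol the removed
// regenerator state machine implemented by hand.
async function* numbered(): AsyncGenerator<number> {
  try {
    yield 1;
    yield 2;
  } finally {
    console.log('generator cleanup ran');
  }
}

async function firstOnly(): Promise<number | null> {
  for await (const n of numbered()) {
    return n; // abrupt completion triggers iterator.return()
  }
  return null;
}

firstOnly().then((n) => console.log('first value:', n)); // cleanup ran, then 1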
--- package/dist/es5/lib/wasm/encode-parquet-wasm.js
+++ package/dist/es5/lib/wasm/encode-parquet-wasm.js
@@ -1,41 +1,20 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.encode = encode;
 exports.tableToIPC = tableToIPC;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
 var _apacheArrow = require("apache-arrow");
 var _loadWasm = require("./load-wasm");
-function encode(
-
-
-
-
-
-
-
-      while (1) switch (_context.prev = _context.next) {
-        case 0:
-          wasmUrl = options === null || options === void 0 ? void 0 : (_options$parquet = options.parquet) === null || _options$parquet === void 0 ? void 0 : _options$parquet.wasmUrl;
-          _context.next = 3;
-          return (0, _loadWasm.loadWasm)(wasmUrl);
-        case 3:
-          wasm = _context.sent;
-          arrowIPCBytes = tableToIPC(table);
-          writerProperties = new wasm.WriterPropertiesBuilder().build();
-          parquetBytes = wasm.writeParquet(arrowIPCBytes, writerProperties);
-          return _context.abrupt("return", parquetBytes.buffer.slice(parquetBytes.byteOffset, parquetBytes.byteLength + parquetBytes.byteOffset));
-        case 8:
-        case "end":
-          return _context.stop();
-      }
-    }, _callee);
-  }));
-  return _encode.apply(this, arguments);
+async function encode(table, options) {
+  var _options$parquet;
+  const wasmUrl = options === null || options === void 0 ? void 0 : (_options$parquet = options.parquet) === null || _options$parquet === void 0 ? void 0 : _options$parquet.wasmUrl;
+  const wasm = await (0, _loadWasm.loadWasm)(wasmUrl);
+  const arrowIPCBytes = tableToIPC(table);
+  const writerProperties = new wasm.WriterPropertiesBuilder().build();
+  const parquetBytes = wasm.writeParquet(arrowIPCBytes, writerProperties);
+  return parquetBytes.buffer.slice(parquetBytes.byteOffset, parquetBytes.byteLength + parquetBytes.byteOffset);
 }
 function tableToIPC(table) {
   return _apacheArrow.RecordBatchStreamWriter.writeAll(table).toUint8Array(true);
--- package/dist/es5/lib/wasm/encode-parquet-wasm.js.map
+++ package/dist/es5/lib/wasm/encode-parquet-wasm.js.map
@@ -1 +1 @@
-{"version":3,"file":"encode-parquet-wasm.js","names":["_apacheArrow","require","_loadWasm","encode","
+{"version":3,"file":"encode-parquet-wasm.js","names":["_apacheArrow","require","_loadWasm","encode","table","options","_options$parquet","wasmUrl","parquet","wasm","loadWasm","arrowIPCBytes","tableToIPC","writerProperties","WriterPropertiesBuilder","build","parquetBytes","writeParquet","buffer","slice","byteOffset","byteLength","RecordBatchStreamWriter","writeAll","toUint8Array"],"sources":["../../../../src/lib/wasm/encode-parquet-wasm.ts"],"sourcesContent":["import type {Table} from 'apache-arrow';\nimport type {WriterOptions} from '@loaders.gl/loader-utils';\n\nimport {RecordBatchStreamWriter} from 'apache-arrow';\nimport {loadWasm} from './load-wasm';\n\nexport type ParquetWriterOptions = WriterOptions & {\n  parquet?: {\n    wasmUrl?: string;\n  };\n};\n\n/**\n * Encode Arrow Table to Parquet buffer\n */\nexport async function encode(table: Table, options?: ParquetWriterOptions): Promise<ArrayBuffer> {\n  const wasmUrl = options?.parquet?.wasmUrl;\n  const wasm = await loadWasm(wasmUrl);\n\n  const arrowIPCBytes = tableToIPC(table);\n  // TODO: provide options for how to write table.\n  const writerProperties = new wasm.WriterPropertiesBuilder().build();\n  const parquetBytes = wasm.writeParquet(arrowIPCBytes, writerProperties);\n  return parquetBytes.buffer.slice(\n    parquetBytes.byteOffset,\n    parquetBytes.byteLength + parquetBytes.byteOffset\n  );\n}\n\n/**\n * Serialize a {@link Table} to the IPC format. This function is a convenience\n * wrapper for {@link RecordBatchStreamWriter} and {@link RecordBatchFileWriter}.\n * Opposite of {@link tableFromIPC}.\n *\n * @param table The Table to serialize.\n * @param type Whether to serialize the Table as a file or a stream.\n */\nexport function tableToIPC(table: Table): Uint8Array {\n  return RecordBatchStreamWriter.writeAll(table).toUint8Array(true);\n}\n"],"mappings":";;;;;;;AAGA,IAAAA,YAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AAWO,eAAeE,MAAMA,CAACC,KAAY,EAAEC,OAA8B,EAAwB;EAAA,IAAAC,gBAAA;EAC/F,MAAMC,OAAO,GAAGF,OAAO,aAAPA,OAAO,wBAAAC,gBAAA,GAAPD,OAAO,CAAEG,OAAO,cAAAF,gBAAA,uBAAhBA,gBAAA,CAAkBC,OAAO;EACzC,MAAME,IAAI,GAAG,MAAM,IAAAC,kBAAQ,EAACH,OAAO,CAAC;EAEpC,MAAMI,aAAa,GAAGC,UAAU,CAACR,KAAK,CAAC;EAEvC,MAAMS,gBAAgB,GAAG,IAAIJ,IAAI,CAACK,uBAAuB,CAAC,CAAC,CAACC,KAAK,CAAC,CAAC;EACnE,MAAMC,YAAY,GAAGP,IAAI,CAACQ,YAAY,CAACN,aAAa,EAAEE,gBAAgB,CAAC;EACvE,OAAOG,YAAY,CAACE,MAAM,CAACC,KAAK,CAC9BH,YAAY,CAACI,UAAU,EACvBJ,YAAY,CAACK,UAAU,GAAGL,YAAY,CAACI,UACzC,CAAC;AACH;AAUO,SAASR,UAAUA,CAACR,KAAY,EAAc;EACnD,OAAOkB,oCAAuB,CAACC,QAAQ,CAACnB,KAAK,CAAC,CAACoB,YAAY,CAAC,IAAI,CAAC;AACnE"}
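
Note: the trailing buffer arithmetic in encode() above exists because writeParquet() hands back a Uint8Array that may be a view into a larger WASM-backed buffer. A minimal sketch of that slice, with illustrative sizes (nothing below is parquet-wasm API):

// A Uint8Array view does not own its ArrayBuffer; slicing from byteOffset to
// byteOffset + byteLength copies out exactly the viewed bytes.
const backing = new ArrayBuffer(16);
const view = new Uint8Array(backing, 4, 8); // byteOffset 4, byteLength 8

const exact: ArrayBuffer = view.buffer.slice(view.byteOffset, view.byteOffset + view.byteLength);
console.log(exact.byteLength); // 8, independent of the 16-byte backing buffer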
--- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js
+++ package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js
@@ -1,42 +1,19 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-var _typeof = require("@babel/runtime/helpers/typeof");
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.loadWasm = loadWasm;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
 var wasmEsm = _interopRequireWildcard(require("parquet-wasm/esm2/arrow1"));
-function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function
-function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null ||
-
-function loadWasm(
-
-
-
-
-
-
-        case 0:
-          if (!(cached !== null)) {
-            _context.next = 2;
-            break;
-          }
-          return _context.abrupt("return", cached);
-        case 2:
-          _context.next = 4;
-          return wasmEsm.default(wasmUrl);
-        case 4:
-          cached = wasmEsm;
-          return _context.abrupt("return", wasmEsm);
-        case 6:
-        case "end":
-          return _context.stop();
-      }
-    }, _callee);
-  }));
-  return _loadWasm.apply(this, arguments);
+function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
+function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
+let cached = null;
+async function loadWasm(wasmUrl) {
+  if (cached !== null) {
+    return cached;
+  }
+  await wasmEsm.default(wasmUrl);
+  cached = wasmEsm;
+  return wasmEsm;
 }
 //# sourceMappingURL=load-wasm-browser.js.map
--- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js.map
+++ package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js.map
@@ -1 +1 @@
-{"version":3,"file":"load-wasm-browser.js","names":["wasmEsm","_interopRequireWildcard","require","_getRequireWildcardCache","nodeInterop","WeakMap","cacheBabelInterop","cacheNodeInterop","obj","__esModule","
+{"version":3,"file":"load-wasm-browser.js","names":["wasmEsm","_interopRequireWildcard","require","_getRequireWildcardCache","nodeInterop","WeakMap","cacheBabelInterop","cacheNodeInterop","obj","__esModule","default","cache","has","get","newObj","hasPropertyDescriptor","Object","defineProperty","getOwnPropertyDescriptor","key","prototype","hasOwnProperty","call","desc","set","cached","loadWasm","wasmUrl"],"sources":["../../../../../src/lib/wasm/load-wasm/load-wasm-browser.ts"],"sourcesContent":["import * as wasmEsm from 'parquet-wasm/esm2/arrow1';\n\nlet cached: typeof wasmEsm | null = null;\n\nexport async function loadWasm(wasmUrl?: string) {\n  if (cached !== null) {\n    return cached;\n  }\n\n  // For ESM bundles, need to await the default export, which loads the WASM\n  await wasmEsm.default(wasmUrl);\n  cached = wasmEsm;\n\n  return wasmEsm;\n}\n"],"mappings":";;;;;;AAAA,IAAAA,OAAA,GAAAC,uBAAA,CAAAC,OAAA;AAAoD,SAAAC,yBAAAC,WAAA,eAAAC,OAAA,kCAAAC,iBAAA,OAAAD,OAAA,QAAAE,gBAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,WAAA,WAAAA,WAAA,GAAAG,gBAAA,GAAAD,iBAAA,KAAAF,WAAA;AAAA,SAAAH,wBAAAO,GAAA,EAAAJ,WAAA,SAAAA,WAAA,IAAAI,GAAA,IAAAA,GAAA,CAAAC,UAAA,WAAAD,GAAA,QAAAA,GAAA,oBAAAA,GAAA,wBAAAA,GAAA,4BAAAE,OAAA,EAAAF,GAAA,UAAAG,KAAA,GAAAR,wBAAA,CAAAC,WAAA,OAAAO,KAAA,IAAAA,KAAA,CAAAC,GAAA,CAAAJ,GAAA,YAAAG,KAAA,CAAAE,GAAA,CAAAL,GAAA,SAAAM,MAAA,WAAAC,qBAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,GAAA,IAAAX,GAAA,QAAAW,GAAA,kBAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAd,GAAA,EAAAW,GAAA,SAAAI,IAAA,GAAAR,qBAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAV,GAAA,EAAAW,GAAA,cAAAI,IAAA,KAAAA,IAAA,CAAAV,GAAA,IAAAU,IAAA,CAAAC,GAAA,KAAAR,MAAA,CAAAC,cAAA,CAAAH,MAAA,EAAAK,GAAA,EAAAI,IAAA,YAAAT,MAAA,CAAAK,GAAA,IAAAX,GAAA,CAAAW,GAAA,SAAAL,MAAA,CAAAJ,OAAA,GAAAF,GAAA,MAAAG,KAAA,IAAAA,KAAA,CAAAa,GAAA,CAAAhB,GAAA,EAAAM,MAAA,YAAAA,MAAA;AAEpD,IAAIW,MAA6B,GAAG,IAAI;AAEjC,eAAeC,QAAQA,CAACC,OAAgB,EAAE;EAC/C,IAAIF,MAAM,KAAK,IAAI,EAAE;IACnB,OAAOA,MAAM;EACf;EAGA,MAAMzB,OAAO,CAACU,OAAO,CAACiB,OAAO,CAAC;EAC9BF,MAAM,GAAGzB,OAAO;EAEhB,OAAOA,OAAO;AAChB"}
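
Note: load-wasm-browser.js keeps a module-level `cached` reference because, per the embedded source comment, the ESM build of parquet-wasm exposes an async default export that fetches and instantiates the WASM binary; it should run once, after which the module namespace is reused. A sketch of that memoization, with a stand-in initializer (initWasm below is hypothetical, not parquet-wasm API):

// Memoized one-time initialization, mirroring the caching in the new output.
type WasmModule = Record<string, unknown>;

// Stand-in for `wasmEsm.default(wasmUrl)`; in the real module this fetches
// and instantiates the .wasm binary.
async function initWasm(wasmUrl?: string): Promise<WasmModule> {
  console.log('instantiating WASM from', wasmUrl ?? '(default location)');
  return {};
}

let cached: WasmModule | null = null;

async function loadWasm(wasmUrl?: string): Promise<WasmModule> {
  if (cached !== null) {
    return cached; // already instantiated; skip the fetch and compile
  }
  cached = await initWasm(wasmUrl);
  return cached;
}

One property of this pattern (true of the original as well): two concurrent first calls both run the initializer; caching the promise instead of the module would serialize them.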
--- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js
+++ package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js
@@ -1,31 +1,13 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-var _typeof = require("@babel/runtime/helpers/typeof");
 Object.defineProperty(exports, "__esModule", {
   value: true
 });
 exports.loadWasm = loadWasm;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
 var wasmNode = _interopRequireWildcard(require("parquet-wasm/node/arrow1"));
-function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function
-function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null ||
-function loadWasm(
-  return
-}
-function _loadWasm() {
-  _loadWasm = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee(wasmUrl) {
-    return _regenerator.default.wrap(function _callee$(_context) {
-      while (1) switch (_context.prev = _context.next) {
-        case 0:
-          return _context.abrupt("return", wasmNode);
-        case 1:
-        case "end":
-          return _context.stop();
-      }
-    }, _callee);
-  }));
-  return _loadWasm.apply(this, arguments);
+function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
+function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
+async function loadWasm(wasmUrl) {
+  return wasmNode;
 }
 //# sourceMappingURL=load-wasm-node.js.map
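
Note: the Node variant above needs no asynchronous setup, presumably because require('parquet-wasm/node/arrow1') runs any WASM instantiation at import time; loadWasm() stays async only so the browser and Node builds share a call signature. A sketch with stand-in modules (nothing below is parquet-wasm API):

// The Node-style build can hand back its module directly; the browser build
// must await an explicit init first. Callers can `await loadWasm(...)` on
// either platform without caring which one they got.
type WasmModule = {ready: boolean};

const nodeStyleModule: WasmModule = {ready: true}; // instantiated at import time

async function loadWasmNode(_wasmUrl?: string): Promise<WasmModule> {
  return nodeStyleModule; // nothing to await, mirroring load-wasm-node.js
}

loadWasmNode().then((wasm) => console.log('ready:', wasm.ready)); // ready: true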