@loaders.gl/parquet 4.0.2 → 4.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/arrow1_bg.wasm +0 -0
- package/dist/constants.d.ts.map +1 -1
- package/dist/constants.js.map +1 -1
- package/dist/index.cjs +50 -33
- package/dist/index.d.ts +2 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +14 -7
- package/dist/index.js.map +1 -1
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts +4 -4
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -1
- package/dist/lib/parsers/parse-parquet-to-columns.js +5 -8
- package/dist/lib/parsers/parse-parquet-to-columns.js.map +1 -1
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts +4 -4
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -1
- package/dist/lib/parsers/parse-parquet-to-rows.js +30 -19
- package/dist/lib/parsers/parse-parquet-to-rows.js.map +1 -1
- package/dist/lib/wasm/encode-parquet-wasm.d.ts +2 -10
- package/dist/lib/wasm/encode-parquet-wasm.d.ts.map +1 -1
- package/dist/lib/wasm/encode-parquet-wasm.js +4 -5
- package/dist/lib/wasm/encode-parquet-wasm.js.map +1 -1
- package/dist/lib/wasm/load-wasm-browser.d.ts.map +1 -0
- package/dist/lib/wasm/load-wasm-browser.js.map +1 -0
- package/dist/lib/wasm/load-wasm-node.d.ts.map +1 -0
- package/dist/lib/wasm/load-wasm-node.js.map +1 -0
- package/dist/lib/wasm/{load-wasm/index.d.ts → load-wasm.d.ts} +1 -1
- package/dist/lib/wasm/load-wasm.d.ts.map +1 -0
- package/dist/lib/wasm/{load-wasm/index.js → load-wasm.js} +1 -1
- package/dist/lib/wasm/load-wasm.js.map +1 -0
- package/dist/lib/wasm/parse-parquet-wasm.d.ts +2 -2
- package/dist/lib/wasm/parse-parquet-wasm.d.ts.map +1 -1
- package/dist/lib/wasm/parse-parquet-wasm.js +9 -7
- package/dist/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/parquet-loader.d.ts +3 -3
- package/dist/parquet-loader.d.ts.map +1 -1
- package/dist/parquet-loader.js +1 -1
- package/dist/parquet-loader.js.map +1 -1
- package/dist/parquet-wasm-loader.d.ts +2 -2
- package/dist/parquet-wasm-loader.d.ts.map +1 -1
- package/dist/parquet-wasm-loader.js.map +1 -1
- package/dist/parquet-wasm-writer.d.ts +3 -3
- package/dist/parquet-wasm-writer.d.ts.map +1 -1
- package/dist/parquet-wasm-writer.js +2 -2
- package/dist/parquet-wasm-writer.js.map +1 -1
- package/dist/parquet-writer.d.ts +2 -2
- package/dist/parquet-writer.d.ts.map +1 -1
- package/dist/parquet-writer.js.map +1 -1
- package/dist/parquetjs/parquet-thrift/index.d.ts +1 -1
- package/dist/parquetjs/parquet-thrift/index.d.ts.map +1 -1
- package/dist/parquetjs/parquet-thrift/index.js +1 -1
- package/dist/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/polyfills/buffer/buffer-polyfill.browser.d.ts.map +1 -0
- package/dist/polyfills/buffer/buffer-polyfill.browser.js.map +1 -0
- package/dist/polyfills/buffer/buffer-polyfill.node.d.ts.map +1 -0
- package/dist/polyfills/buffer/buffer-polyfill.node.js.map +1 -0
- package/dist/polyfills/buffer/buffer.d.ts.map +1 -0
- package/dist/polyfills/buffer/buffer.js.map +1 -0
- package/dist/polyfills/buffer/index.d.ts.map +1 -0
- package/dist/polyfills/buffer/index.js.map +1 -0
- package/dist/polyfills/buffer/install-buffer-polyfill.d.ts.map +1 -0
- package/dist/polyfills/buffer/install-buffer-polyfill.js.map +1 -0
- package/dist/polyfills/util.d.ts +9 -0
- package/dist/polyfills/util.d.ts.map +1 -0
- package/dist/polyfills/util.js +3 -0
- package/dist/polyfills/util.js.map +1 -0
- package/dist/workers/parquet-worker.js.map +1 -1
- package/package.json +16 -14
- package/src/constants.ts +3 -0
- package/src/index.ts +18 -17
- package/src/lib/parsers/parse-parquet-to-columns.ts +9 -11
- package/src/lib/parsers/parse-parquet-to-rows.ts +49 -25
- package/src/lib/wasm/encode-parquet-wasm.ts +8 -13
- package/src/lib/wasm/parse-parquet-wasm.ts +12 -12
- package/src/parquet-loader.ts +4 -9
- package/src/parquet-wasm-loader.ts +2 -2
- package/src/parquet-wasm-writer.ts +5 -5
- package/src/parquet-writer.ts +2 -2
- package/src/parquetjs/parquet-thrift/index.ts +1 -1
- package/src/polyfills/util.js +7 -0
- package/src/workers/parquet-worker.ts +3 -0
- package/dist/buffer-polyfill/buffer-polyfill.browser.d.ts.map +0 -1
- package/dist/buffer-polyfill/buffer-polyfill.browser.js.map +0 -1
- package/dist/buffer-polyfill/buffer-polyfill.node.d.ts.map +0 -1
- package/dist/buffer-polyfill/buffer-polyfill.node.js.map +0 -1
- package/dist/buffer-polyfill/buffer.d.ts.map +0 -1
- package/dist/buffer-polyfill/buffer.js.map +0 -1
- package/dist/buffer-polyfill/index.d.ts.map +0 -1
- package/dist/buffer-polyfill/index.js.map +0 -1
- package/dist/buffer-polyfill/install-buffer-polyfill.d.ts.map +0 -1
- package/dist/buffer-polyfill/install-buffer-polyfill.js.map +0 -1
- package/dist/lib/wasm/load-wasm/index.d.ts.map +0 -1
- package/dist/lib/wasm/load-wasm/index.js.map +0 -1
- package/dist/lib/wasm/load-wasm/load-wasm-browser.d.ts.map +0 -1
- package/dist/lib/wasm/load-wasm/load-wasm-browser.js.map +0 -1
- package/dist/lib/wasm/load-wasm/load-wasm-node.d.ts.map +0 -1
- package/dist/lib/wasm/load-wasm/load-wasm-node.js.map +0 -1
- /package/dist/lib/wasm/{load-wasm/load-wasm-browser.d.ts → load-wasm-browser.d.ts} +0 -0
- /package/dist/lib/wasm/{load-wasm/load-wasm-browser.js → load-wasm-browser.js} +0 -0
- /package/dist/lib/wasm/{load-wasm/load-wasm-node.d.ts → load-wasm-node.d.ts} +0 -0
- /package/dist/lib/wasm/{load-wasm/load-wasm-node.js → load-wasm-node.js} +0 -0
- /package/dist/{buffer-polyfill → polyfills/buffer}/buffer-polyfill.browser.d.ts +0 -0
- /package/dist/{buffer-polyfill → polyfills/buffer}/buffer-polyfill.browser.js +0 -0
- /package/dist/{buffer-polyfill → polyfills/buffer}/buffer-polyfill.node.d.ts +0 -0
- /package/dist/{buffer-polyfill → polyfills/buffer}/buffer-polyfill.node.js +0 -0
- /package/dist/{buffer-polyfill → polyfills/buffer}/buffer.d.ts +0 -0
- /package/dist/{buffer-polyfill → polyfills/buffer}/buffer.js +0 -0
- /package/dist/{buffer-polyfill → polyfills/buffer}/index.d.ts +0 -0
- /package/dist/{buffer-polyfill → polyfills/buffer}/index.js +0 -0
- /package/dist/{buffer-polyfill → polyfills/buffer}/install-buffer-polyfill.d.ts +0 -0
- /package/dist/{buffer-polyfill → polyfills/buffer}/install-buffer-polyfill.js +0 -0
- /package/src/lib/wasm/{load-wasm/load-wasm-browser.ts → load-wasm-browser.ts} +0 -0
- /package/src/lib/wasm/{load-wasm/load-wasm-node.ts → load-wasm-node.ts} +0 -0
- /package/src/lib/wasm/{load-wasm/index.ts → load-wasm.ts} +0 -0
- /package/src/{buffer-polyfill → polyfills/buffer}/buffer-polyfill.browser.ts +0 -0
- /package/src/{buffer-polyfill → polyfills/buffer}/buffer-polyfill.node.ts +0 -0
- /package/src/{buffer-polyfill → polyfills/buffer}/buffer.ts +0 -0
- /package/src/{buffer-polyfill → polyfills/buffer}/index.ts +0 -0
- /package/src/{buffer-polyfill → polyfills/buffer}/install-buffer-polyfill.ts +0 -0
package/dist/arrow1_bg.wasm
CHANGED
Binary file
package/dist/constants.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":"
+
{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../src/constants.ts"],"names":[],"mappings":"AAIA;;GAEG;AACH,eAAO,MAAM,aAAa,SAAS,CAAC;AACpC,eAAO,MAAM,uBAAuB,SAAS,CAAC;AAE9C;;GAEG;AACH,eAAO,MAAM,eAAe,IAAI,CAAC;AAEjC;;GAEG;AACH,eAAO,MAAM,kBAAkB,UAAU,CAAC;AAC1C,eAAO,MAAM,sBAAsB,QAAQ,CAAC"}
package/dist/constants.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"constants.js","names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","PARQUET_VERSION","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING"],"sources":["../src/constants.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/**\n * Parquet File Magic String\n */\nexport const PARQUET_MAGIC = 'PAR1';\nexport const PARQUET_MAGIC_ENCRYPTED = 'PARE';\n\n/**\n * Parquet File Format Version\n */\nexport const PARQUET_VERSION = 1;\n\n/**\n * Internal type used for repetition/definition levels\n */\nexport const PARQUET_RDLVL_TYPE = 'INT32';\nexport const PARQUET_RDLVL_ENCODING = 'RLE';\n"],"mappings":"
+
{"version":3,"file":"constants.js","names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","PARQUET_VERSION","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING"],"sources":["../src/constants.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\n// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/**\n * Parquet File Magic String\n */\nexport const PARQUET_MAGIC = 'PAR1';\nexport const PARQUET_MAGIC_ENCRYPTED = 'PARE';\n\n/**\n * Parquet File Format Version\n */\nexport const PARQUET_VERSION = 1;\n\n/**\n * Internal type used for repetition/definition levels\n */\nexport const PARQUET_RDLVL_TYPE = 'INT32';\nexport const PARQUET_RDLVL_ENCODING = 'RLE';\n"],"mappings":"AAOA,OAAO,MAAMA,aAAa,GAAG,MAAM;AACnC,OAAO,MAAMC,uBAAuB,GAAG,MAAM;AAK7C,OAAO,MAAMC,eAAe,GAAG,CAAC;AAKhC,OAAO,MAAMC,kBAAkB,GAAG,OAAO;AACzC,OAAO,MAAMC,sBAAsB,GAAG,KAAK"}
package/dist/index.cjs
CHANGED
@@ -46,7 +46,7 @@ __export(src_exports, {
 });
 module.exports = __toCommonJS(src_exports);
 
-// src/buffer-polyfill/buffer.ts
+// src/polyfills/buffer/buffer.ts
 var import_base64_js = __toESM(require("base64-js"), 1);
 var import_ieee754 = __toESM(require("ieee754"), 1);
 var kMaxLength = 2147483647;
@@ -1765,17 +1765,20 @@ var hexSliceLookupTable = function() {
   return table;
 }();
 
-// src/buffer-polyfill/buffer-polyfill.browser.ts
+// src/polyfills/buffer/buffer-polyfill.browser.ts
 function installBufferPolyfill() {
   globalThis.Buffer = globalThis.Buffer || Buffer2;
   return globalThis.Buffer;
 }
 
-// src/buffer-polyfill/install-buffer-polyfill.ts
+// src/polyfills/buffer/install-buffer-polyfill.ts
 globalThis.process = globalThis.process || {};
 globalThis.process.env = globalThis.process.env || {};
 var Buffer3 = installBufferPolyfill();
 
+// src/index.ts
+var import_loader_utils = require("@loaders.gl/loader-utils");
+
 // src/parquet-loader.ts
 var VERSION = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
 var ParquetLoader = {
@@ -1816,7 +1819,6 @@ ParquetLoader.Buffer = Buffer;
 ParquetColumnarLoader.Buffer = Buffer;
 
 // src/lib/parsers/parse-parquet-to-rows.ts
-var import_loader_utils = require("@loaders.gl/loader-utils");
 var import_gis2 = require("@loaders.gl/gis");
 var import_wkt = require("@loaders.gl/wkt");
 
@@ -7198,11 +7200,9 @@ async function getSchemaFromParquetReader(reader) {
 }
 
 // src/lib/parsers/parse-parquet-to-rows.ts
-async function parseParquet(arrayBuffer, options) {
+async function parseParquetFile(file, options) {
   var _a, _b;
   installBufferPolyfill();
-  const blob = new Blob([arrayBuffer]);
-  const file = new import_loader_utils.BlobFile(blob);
   const reader = new ParquetReader(file, {
     preserveBinary: (_a = options == null ? void 0 : options.parquet) == null ? void 0 : _a.preserveBinary
   });
@@ -7220,12 +7220,41 @@ async function parseParquet(arrayBuffer, options) {
     data: rows
   };
   const shape = (_b = options == null ? void 0 : options.parquet) == null ? void 0 : _b.shape;
+  return convertTable(objectRowTable, shape);
+}
+async function* parseParquetFileInBatches(file, options) {
+  var _a, _b;
+  const reader = new ParquetReader(file, {
+    preserveBinary: (_a = options == null ? void 0 : options.parquet) == null ? void 0 : _a.preserveBinary
+  });
+  const schema = await getSchemaFromParquetReader(reader);
+  const rowBatches = reader.rowBatchIterator(options == null ? void 0 : options.parquet);
+  for await (const rows of rowBatches) {
+    const objectRowTable = {
+      shape: "object-row-table",
+      schema,
+      data: rows
+    };
+    const shape = (_b = options == null ? void 0 : options.parquet) == null ? void 0 : _b.shape;
+    const table = convertTable(objectRowTable, shape);
+    yield {
+      batchType: "data",
+      schema,
+      ...table,
+      length: rows.length
+    };
+  }
+}
+function convertTable(objectRowTable, shape) {
   switch (shape) {
     case "object-row-table":
       return objectRowTable;
     case "geojson-table":
       try {
-        return (0, import_gis2.convertWKBTableToGeoJSON)(objectRowTable, schema, [import_wkt.WKTLoader, import_wkt.WKBLoader]);
+        return (0, import_gis2.convertWKBTableToGeoJSON)(objectRowTable, objectRowTable.schema, [
+          import_wkt.WKTLoader,
+          import_wkt.WKBLoader
+        ]);
       } catch (error) {
         return objectRowTable;
       }
@@ -7233,28 +7262,11 @@ async function parseParquet(arrayBuffer, options) {
       throw new Error(shape);
   }
 }
-async function* parseParquetFileInBatches(reader, options) {
-  const schema = await getSchemaFromParquetReader(reader);
-  const rowBatches = reader.rowBatchIterator(options == null ? void 0 : options.parquet);
-  for await (const rows of rowBatches) {
-    yield {
-      batchType: "data",
-      shape: "object-row-table",
-      schema,
-      data: rows,
-      length: rows.length
-    };
-  }
-}
 
 // src/lib/parsers/parse-parquet-to-columns.ts
-
-async function parseParquetInColumns(arrayBuffer, options) {
+async function parseParquetFileInColumns(file, options) {
   installBufferPolyfill();
-  const blob = new Blob([arrayBuffer]);
-  const file = new import_loader_utils2.BlobFile(blob);
-  const reader = new ParquetReader(file);
-  for await (const batch of parseParquetFileInColumnarBatches(reader, options)) {
+  for await (const batch of parseParquetFileInColumnarBatches(file, options)) {
     return {
       shape: "columnar-table",
      schema: batch.schema,
@@ -7263,7 +7275,8 @@ async function parseParquetInColumns(arrayBuffer, options) {
   }
   throw new Error("empty table");
 }
-async function* parseParquetFileInColumnarBatches(reader, options) {
+async function* parseParquetFileInColumnarBatches(file, options) {
+  const reader = new ParquetReader(file);
   const schema = await getSchemaFromParquetReader(reader);
   const parquetSchema = await reader.getSchema();
   const rowGroups = reader.rowGroupIterator(options == null ? void 0 : options.parquet);
@@ -7680,14 +7693,18 @@ function encodeFooter(schema, rowCount, rowGroups, userMetadata) {
 // src/index.ts
 var ParquetLoader2 = {
   ...ParquetLoader,
-  parse: parseParquet,
-
+  parse(arrayBuffer, options) {
+    return parseParquetFile(new import_loader_utils.BlobFile(arrayBuffer), options);
+  },
+  parseFile: parseParquetFile,
   parseFileInBatches: parseParquetFileInBatches
 };
 var ParquetColumnarLoader2 = {
-  ...ParquetColumnarLoader,
-  parse: parseParquetInColumns,
-
+  ...ParquetColumnarLoader,
+  parse(arrayBuffer, options) {
+    return parseParquetFileInColumns(new import_loader_utils.BlobFile(arrayBuffer), options);
+  },
+  parseFile: parseParquetFileInColumns,
   parseFileInBatches: parseParquetFileInColumnarBatches
 };
 /* !
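The core change in this bundle: row and columnar parsing now go through file-based functions (parseParquetFile, parseParquetFileInColumns) that accept a ReadableFile, and the Blob wrapping moved out of the parsers into the loaders' parse() methods. A minimal sketch of the new call path, using only names from the diff above (the surrounding setup is assumed):

    // 4.0.3: parse() adapts an ArrayBuffer to the file-based parser
    const table = await ParquetLoader.parse(arrayBuffer, options);
    // ...which is equivalent to calling the file-based API directly:
    const file = new BlobFile(arrayBuffer);
    const sameTable = await parseParquetFile(file, options);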
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-export { Buffer } from './buffer-polyfill/install-buffer-polyfill';
+export { Buffer } from './polyfills/buffer/install-buffer-polyfill';
 import type { LoaderWithParser } from '@loaders.gl/loader-utils';
 import type { ObjectRowTable, ObjectRowTableBatch, ColumnarTable, ColumnarTableBatch, GeoJSONTable, GeoJSONTableBatch } from '@loaders.gl/schema';
 import { ParquetLoader as ParquetWorkerLoader, ParquetLoaderOptions } from './parquet-loader';
@@ -13,5 +13,5 @@ export { ParquetSchema } from './parquetjs/schema/schema';
 export { ParquetReader } from './parquetjs/parser/parquet-reader';
 export { ParquetEncoder } from './parquetjs/encoder/parquet-encoder';
 export { convertParquetSchema, convertParquetSchema as convertParquetToArrowSchema } from './lib/arrow/convert-schema-from-parquet';
-export { BufferPolyfill, installBufferPolyfill } from './buffer-polyfill';
+export { BufferPolyfill, installBufferPolyfill } from './polyfills/buffer';
 //# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,MAAM,EAAC,MAAM,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,MAAM,EAAC,MAAM,4CAA4C,CAAC;AAElE,OAAO,KAAK,EAAC,gBAAgB,EAAC,MAAM,0BAA0B,CAAC;AAC/D,OAAO,KAAK,EACV,cAAc,EACd,mBAAmB,EACnB,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,iBAAiB,EAClB,MAAM,oBAAoB,CAAC;AAO5B,OAAO,EACL,aAAa,IAAI,mBAAmB,EAEpC,oBAAoB,EACrB,MAAM,kBAAkB,CAAC;AAW1B,OAAO,EAAC,mBAAmB,EAAC,CAAC;AAG7B,6BAA6B;AAC7B,eAAO,MAAM,aAAa,EAAE,gBAAgB,CAC1C,cAAc,GAAG,YAAY,EAC7B,mBAAmB,GAAG,iBAAiB,EACvC,oBAAoB,CAQrB,CAAC;AAEF,6BAA6B;AAC7B,eAAO,MAAM,qBAAqB,EAAE,gBAAgB,CAClD,aAAa,EACb,kBAAkB,EAClB,oBAAoB,CAQrB,CAAC;AASF,OAAO,EAAC,aAAa,IAAI,cAAc,EAAC,MAAM,kBAAkB,CAAC;AAKjE,OAAO,EAAC,mBAAmB,EAAC,MAAM,yBAAyB,CAAC;AAE5D,OAAO,EAAC,aAAa,EAAC,MAAM,2BAA2B,CAAC;AACxD,OAAO,EAAC,aAAa,EAAC,MAAM,mCAAmC,CAAC;AAChE,OAAO,EAAC,cAAc,EAAC,MAAM,qCAAqC,CAAC;AAEnE,OAAO,EACL,oBAAoB,EACpB,oBAAoB,IAAI,2BAA2B,EACpD,MAAM,yCAAyC,CAAC;AAGjD,OAAO,EAAC,cAAc,EAAE,qBAAqB,EAAC,MAAM,oBAAoB,CAAC"}
package/dist/index.js
CHANGED
@@ -1,16 +1,23 @@
-export { Buffer } from "./buffer-polyfill/install-buffer-polyfill.js";
-import { ParquetLoader as ParquetWorkerLoader, ParquetColumnarLoader as ParquetColumnarWorkerLoader } from "./parquet-loader.js";
-import { parseParquet, parseParquetFileInBatches } from "./lib/parsers/parse-parquet-to-rows.js";
-import { parseParquetInColumns, parseParquetFileInColumnarBatches } from "./lib/parsers/parse-parquet-to-columns.js";
+export { Buffer } from "./polyfills/buffer/install-buffer-polyfill.js";
+import { BlobFile } from '@loaders.gl/loader-utils';
+import { ParquetLoader as ParquetWorkerLoader, ParquetColumnarLoader as ParquetColumnarWorkerLoader } from "./parquet-loader.js";
+import { parseParquetFile, parseParquetFileInBatches } from "./lib/parsers/parse-parquet-to-rows.js";
+import { parseParquetFileInColumns, parseParquetFileInColumnarBatches } from "./lib/parsers/parse-parquet-to-columns.js";
 export { ParquetWorkerLoader };
 export const ParquetLoader = {
   ...ParquetWorkerLoader,
-  parse: parseParquet,
+  parse(arrayBuffer, options) {
+    return parseParquetFile(new BlobFile(arrayBuffer), options);
+  },
+  parseFile: parseParquetFile,
   parseFileInBatches: parseParquetFileInBatches
 };
 export const ParquetColumnarLoader = {
   ...ParquetColumnarWorkerLoader,
-  parse: parseParquetInColumns,
+  parse(arrayBuffer, options) {
+    return parseParquetFileInColumns(new BlobFile(arrayBuffer), options);
+  },
+  parseFile: parseParquetFileInColumns,
   parseFileInBatches: parseParquetFileInColumnarBatches
 };
 export { ParquetWriter as _ParquetWriter } from "./parquet-writer.js";
@@ -19,5 +26,5 @@ export { ParquetSchema } from "./parquetjs/schema/schema.js";
 export { ParquetReader } from "./parquetjs/parser/parquet-reader.js";
 export { ParquetEncoder } from "./parquetjs/encoder/parquet-encoder.js";
 export { convertParquetSchema, convertParquetSchema as convertParquetToArrowSchema } from "./lib/arrow/convert-schema-from-parquet.js";
-export { BufferPolyfill, installBufferPolyfill } from "./buffer-polyfill/index.js";
+export { BufferPolyfill, installBufferPolyfill } from "./polyfills/buffer/index.js";
 //# sourceMappingURL=index.js.map
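For consumers, the practical effect of the new parseFile and parseFileInBatches entries is that the loader can read from any ReadableFile instead of requiring a fully materialized ArrayBuffer. A hedged usage sketch, assuming the standard @loaders.gl/core load() entry point and a hypothetical local data.parquet path:

    import {load} from '@loaders.gl/core';
    import {ParquetLoader} from '@loaders.gl/parquet';

    // load() delivers the fetched ArrayBuffer to ParquetLoader.parse() shown above
    const table = await load('data.parquet', ParquetLoader, {
      parquet: {shape: 'object-row-table'}
    });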
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","names":["Buffer","ParquetLoader","ParquetWorkerLoader","ParquetColumnarWorkerLoader","
+
{"version":3,"file":"index.js","names":["Buffer","BlobFile","ParquetLoader","ParquetWorkerLoader","ParquetColumnarLoader","ParquetColumnarWorkerLoader","parseParquetFile","parseParquetFileInBatches","parseParquetFileInColumns","parseParquetFileInColumnarBatches","parse","arrayBuffer","options","parseFile","parseFileInBatches","ParquetWriter","_ParquetWriter","preloadCompressions","ParquetSchema","ParquetReader","ParquetEncoder","convertParquetSchema","convertParquetToArrowSchema","BufferPolyfill","installBufferPolyfill"],"sources":["../src/index.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nexport {Buffer} from './polyfills/buffer/install-buffer-polyfill';\n\nimport type {LoaderWithParser} from '@loaders.gl/loader-utils';\nimport type {\n ObjectRowTable,\n ObjectRowTableBatch,\n ColumnarTable,\n ColumnarTableBatch,\n GeoJSONTable,\n GeoJSONTableBatch\n} from '@loaders.gl/schema';\n\n// import {ArrowTable, ArrowTableBatch} from '@loaders.gl/arrow';\n\n// ParquetLoader\n\nimport {BlobFile} from '@loaders.gl/loader-utils';\nimport {\n ParquetLoader as ParquetWorkerLoader,\n ParquetColumnarLoader as ParquetColumnarWorkerLoader,\n ParquetLoaderOptions\n} from './parquet-loader';\nimport {parseParquetFile, parseParquetFileInBatches} from './lib/parsers/parse-parquet-to-rows';\nimport {\n parseParquetFileInColumns,\n parseParquetFileInColumnarBatches\n} from './lib/parsers/parse-parquet-to-columns';\n\n// import type {ParquetWasmLoaderOptions} from './lib/wasm/parse-parquet-wasm';\n// import {parseParquetWasm} from './lib/wasm/parse-parquet-wasm';\n// import {ParquetWasmLoader as ParquetWasmWorkerLoader} from './parquet-wasm-loader';\n\nexport {ParquetWorkerLoader};\n// export {ParquetWasmWorkerLoader};\n\n/** ParquetJS table loader */\nexport const ParquetLoader: LoaderWithParser<\n ObjectRowTable | GeoJSONTable,\n ObjectRowTableBatch | GeoJSONTableBatch,\n ParquetLoaderOptions\n> = {\n ...ParquetWorkerLoader,\n parse(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {\n return parseParquetFile(new BlobFile(arrayBuffer), options);\n },\n parseFile: parseParquetFile,\n parseFileInBatches: parseParquetFileInBatches\n};\n\n/** ParquetJS table loader */\nexport const ParquetColumnarLoader: LoaderWithParser<\n ColumnarTable,\n ColumnarTableBatch,\n ParquetLoaderOptions\n> = {\n ...ParquetColumnarWorkerLoader,\n parse(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {\n return parseParquetFileInColumns(new BlobFile(arrayBuffer), options);\n },\n parseFile: parseParquetFileInColumns,\n parseFileInBatches: parseParquetFileInColumnarBatches\n};\n\n// export const ParquetWasmLoader: LoaderWithParser<ArrowTable, never, ParquetWasmLoaderOptions> = {\n// ...ParquetWasmWorkerLoader,\n// parse: parseParquetWasm\n// };\n\n// ParquetWriter\n\nexport {ParquetWriter as _ParquetWriter} from './parquet-writer';\n// export {ParquetWasmWriter} from './parquet-wasm-writer';\n\n// EXPERIMENTAL - expose the internal parquetjs API\n\nexport {preloadCompressions} from './parquetjs/compression';\n\nexport {ParquetSchema} from './parquetjs/schema/schema';\nexport {ParquetReader} from './parquetjs/parser/parquet-reader';\nexport {ParquetEncoder} from './parquetjs/encoder/parquet-encoder';\n\nexport {\n convertParquetSchema,\n convertParquetSchema as convertParquetToArrowSchema\n} from './lib/arrow/convert-schema-from-parquet';\n\n// Experimental\nexport {BufferPolyfill, installBufferPolyfill} from 
'./polyfills/buffer';\n"],"mappings":"SAGQA,MAAM;AAgBd,SAAQC,QAAQ,QAAO,0BAA0B;AAAC,SAEhDC,aAAa,IAAIC,mBAAmB,EACpCC,qBAAqB,IAAIC,2BAA2B;AAAA,SAG9CC,gBAAgB,EAAEC,yBAAyB;AAAA,SAEjDC,yBAAyB,EACzBC,iCAAiC;AAOnC,SAAQN,mBAAmB;AAI3B,OAAO,MAAMD,aAIZ,GAAG;EACF,GAAGC,mBAAmB;EACtBO,KAAKA,CAACC,WAAwB,EAAEC,OAA8B,EAAE;IAC9D,OAAON,gBAAgB,CAAC,IAAIL,QAAQ,CAACU,WAAW,CAAC,EAAEC,OAAO,CAAC;EAC7D,CAAC;EACDC,SAAS,EAAEP,gBAAgB;EAC3BQ,kBAAkB,EAAEP;AACtB,CAAC;AAGD,OAAO,MAAMH,qBAIZ,GAAG;EACF,GAAGC,2BAA2B;EAC9BK,KAAKA,CAACC,WAAwB,EAAEC,OAA8B,EAAE;IAC9D,OAAOJ,yBAAyB,CAAC,IAAIP,QAAQ,CAACU,WAAW,CAAC,EAAEC,OAAO,CAAC;EACtE,CAAC;EACDC,SAAS,EAAEL,yBAAyB;EACpCM,kBAAkB,EAAEL;AACtB,CAAC;AAAC,SASMM,aAAa,IAAIC,cAAc;AAAA,SAK/BC,mBAAmB;AAAA,SAEnBC,aAAa;AAAA,SACbC,aAAa;AAAA,SACbC,cAAc;AAAA,SAGpBC,oBAAoB,EACpBA,oBAAoB,IAAIC,2BAA2B;AAAA,SAI7CC,cAAc,EAAEC,qBAAqB"}
package/dist/lib/parsers/parse-parquet-to-columns.d.ts
CHANGED
@@ -1,6 +1,6 @@
-import { ColumnarTable, ColumnarTableBatch } from '@loaders.gl/schema';
+import type { ColumnarTable, ColumnarTableBatch } from '@loaders.gl/schema';
+import type { ReadableFile } from '@loaders.gl/loader-utils';
 import type { ParquetLoaderOptions } from '../../parquet-loader';
-import { ParquetReader } from '../../parquetjs/parser/parquet-reader';
-export declare function parseParquetInColumns(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions): Promise<ColumnarTable>;
-export declare function parseParquetFileInColumnarBatches(reader: ParquetReader, options?: ParquetLoaderOptions): AsyncIterable<ColumnarTableBatch>;
+export declare function parseParquetFileInColumns(file: ReadableFile, options?: ParquetLoaderOptions): Promise<ColumnarTable>;
+export declare function parseParquetFileInColumnarBatches(file: ReadableFile, options?: ParquetLoaderOptions): AsyncIterable<ColumnarTableBatch>;
 //# sourceMappingURL=parse-parquet-to-columns.d.ts.map
package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parse-parquet-to-columns.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-parquet-to-columns.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,aAAa,EAAE,kBAAkB,EAAS,MAAM,oBAAoB,CAAC;
+
{"version":3,"file":"parse-parquet-to-columns.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-parquet-to-columns.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAC,aAAa,EAAE,kBAAkB,EAAS,MAAM,oBAAoB,CAAC;AAClF,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,0BAA0B,CAAC;AAC3D,OAAO,KAAK,EAAC,oBAAoB,EAAC,MAAM,sBAAsB,CAAC;AAQ/D,wBAAsB,yBAAyB,CAC7C,IAAI,EAAE,YAAY,EAClB,OAAO,CAAC,EAAE,oBAAoB,GAC7B,OAAO,CAAC,aAAa,CAAC,CAUxB;AAED,wBAAuB,iCAAiC,CACtD,IAAI,EAAE,YAAY,EAClB,OAAO,CAAC,EAAE,oBAAoB,GAC7B,aAAa,CAAC,kBAAkB,CAAC,CAanC"}
package/dist/lib/parsers/parse-parquet-to-columns.js
CHANGED
@@ -1,14 +1,10 @@
-import { BlobFile } from '@loaders.gl/loader-utils';
 import { ParquetReader } from "../../parquetjs/parser/parquet-reader.js";
 import { materializeColumns } from "../../parquetjs/schema/shred.js";
 import { getSchemaFromParquetReader } from "./get-parquet-schema.js";
-import { installBufferPolyfill } from "../../buffer-polyfill/index.js";
-export async function parseParquetInColumns(arrayBuffer, options) {
+import { installBufferPolyfill } from "../../polyfills/buffer/index.js";
+export async function parseParquetFileInColumns(file, options) {
   installBufferPolyfill();
-  const blob = new Blob([arrayBuffer]);
-  const file = new BlobFile(blob);
-  const reader = new ParquetReader(file);
-  for await (const batch of parseParquetFileInColumnarBatches(reader, options)) {
+  for await (const batch of parseParquetFileInColumnarBatches(file, options)) {
     return {
       shape: 'columnar-table',
       schema: batch.schema,
@@ -17,7 +13,8 @@ export async function parseParquetInColumns(arrayBuffer, options) {
   }
   throw new Error('empty table');
 }
-export async function* parseParquetFileInColumnarBatches(reader, options) {
+export async function* parseParquetFileInColumnarBatches(file, options) {
+  const reader = new ParquetReader(file);
   const schema = await getSchemaFromParquetReader(reader);
   const parquetSchema = await reader.getSchema();
   const rowGroups = reader.rowGroupIterator(options === null || options === void 0 ? void 0 : options.parquet);
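Since parseParquetFileInColumnarBatches now constructs its own ParquetReader from the passed file, callers no longer manage the reader. A minimal sketch, assuming an ArrayBuffer in hand (BlobFile accepts one directly, as index.js above shows):

    import {BlobFile} from '@loaders.gl/loader-utils';

    const file = new BlobFile(arrayBuffer);
    for await (const batch of parseParquetFileInColumnarBatches(file, options)) {
      // each batch: {shape: 'columnar-table', batchType: 'data', schema, data, length}
      console.log(batch.length); // row count of this row group
    }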
package/dist/lib/parsers/parse-parquet-to-columns.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parse-parquet-to-columns.js","names":["
+
{"version":3,"file":"parse-parquet-to-columns.js","names":["ParquetReader","materializeColumns","getSchemaFromParquetReader","installBufferPolyfill","parseParquetFileInColumns","file","options","batch","parseParquetFileInColumnarBatches","shape","schema","data","Error","reader","parquetSchema","getSchema","rowGroups","rowGroupIterator","parquet","rowGroup","convertRowGroupToTableBatch","batchType","length","rowCount"],"sources":["../../../src/lib/parsers/parse-parquet-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport type {ColumnarTable, ColumnarTableBatch, Schema} from '@loaders.gl/schema';\nimport type {ReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {ParquetRowGroup} from '../../parquetjs/schema/declare';\nimport {ParquetSchema} from '../../parquetjs/schema/schema';\nimport {materializeColumns} from '../../parquetjs/schema/shred';\nimport {getSchemaFromParquetReader} from './get-parquet-schema';\nimport {installBufferPolyfill} from '../../polyfills/buffer';\n\nexport async function parseParquetFileInColumns(\n file: ReadableFile,\n options?: ParquetLoaderOptions\n): Promise<ColumnarTable> {\n installBufferPolyfill();\n for await (const batch of parseParquetFileInColumnarBatches(file, options)) {\n return {\n shape: 'columnar-table',\n schema: batch.schema,\n data: batch.data\n };\n }\n throw new Error('empty table');\n}\n\nexport async function* parseParquetFileInColumnarBatches(\n file: ReadableFile,\n options?: ParquetLoaderOptions\n): AsyncIterable<ColumnarTableBatch> {\n const reader = new ParquetReader(file);\n\n // Extract schema and geo metadata\n const schema = await getSchemaFromParquetReader(reader);\n\n const parquetSchema = await reader.getSchema();\n\n // Iterate over row batches\n const rowGroups = reader.rowGroupIterator(options?.parquet);\n for await (const rowGroup of rowGroups) {\n yield convertRowGroupToTableBatch(rowGroup, parquetSchema, schema);\n }\n}\n\nfunction convertRowGroupToTableBatch(\n rowGroup: ParquetRowGroup,\n parquetSchema: ParquetSchema,\n schema: Schema\n): ColumnarTableBatch {\n // const data = convertParquetRowGroupToColumns(schema, rowGroup);\n const data = materializeColumns(parquetSchema, rowGroup);\n return {\n shape: 'columnar-table',\n batchType: 'data',\n schema,\n data,\n length: rowGroup.rowCount\n };\n}\n"],"mappings":"SAMQA,aAAa;AAAA,SAGbC,kBAAkB;AAAA,SAClBC,0BAA0B;AAAA,SAC1BC,qBAAqB;AAE7B,OAAO,eAAeC,yBAAyBA,CAC7CC,IAAkB,EAClBC,OAA8B,EACN;EACxBH,qBAAqB,CAAC,CAAC;EACvB,WAAW,MAAMI,KAAK,IAAIC,iCAAiC,CAACH,IAAI,EAAEC,OAAO,CAAC,EAAE;IAC1E,OAAO;MACLG,KAAK,EAAE,gBAAgB;MACvBC,MAAM,EAAEH,KAAK,CAACG,MAAM;MACpBC,IAAI,EAAEJ,KAAK,CAACI;IACd,CAAC;EACH;EACA,MAAM,IAAIC,KAAK,CAAC,aAAa,CAAC;AAChC;AAEA,OAAO,gBAAgBJ,iCAAiCA,CACtDH,IAAkB,EAClBC,OAA8B,EACK;EACnC,MAAMO,MAAM,GAAG,IAAIb,aAAa,CAACK,IAAI,CAAC;EAGtC,MAAMK,MAAM,GAAG,MAAMR,0BAA0B,CAACW,MAAM,CAAC;EAEvD,MAAMC,aAAa,GAAG,MAAMD,MAAM,CAACE,SAAS,CAAC,CAAC;EAG9C,MAAMC,SAAS,GAAGH,MAAM,CAACI,gBAAgB,CAACX,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEY,OAAO,CAAC;EAC3D,WAAW,MAAMC,QAAQ,IAAIH,SAAS,EAAE;IACtC,MAAMI,2BAA2B,CAACD,QAAQ,EAAEL,aAAa,EAAEJ,MAAM,CAAC;EACpE;AACF;AAEA,SAASU,2BAA2BA,CAClCD,QAAyB,EACzBL,aAA4B,EAC5BJ,MAAc,EACM;EAEpB,MAAMC,IAAI,GAAGV,kBAAkB,CAACa,aAAa,EAAEK,QAAQ,CAAC;EACxD,OAAO;IACLV,KAAK,EAAE,gBAAgB;IACvBY,SAAS,EAAE,MAAM;IACjBX,MAAM;IACNC,IAAI;IACJW,MAAM,EAAEH,QAAQ,CAACI;EACnB,CAAC;AACH"}
package/dist/lib/parsers/parse-parquet-to-rows.d.ts
CHANGED
@@ -1,6 +1,6 @@
-import { ObjectRowTable, ObjectRowTableBatch } from '@loaders.gl/schema';
+import type { ReadableFile } from '@loaders.gl/loader-utils';
+import type { GeoJSONTable, GeoJSONTableBatch, ObjectRowTable, ObjectRowTableBatch } from '@loaders.gl/schema';
 import type { ParquetLoaderOptions } from '../../parquet-loader';
-import { ParquetReader } from '../../parquetjs/parser/parquet-reader';
-export declare function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions): Promise<ObjectRowTable>;
-export declare function parseParquetFileInBatches(reader: ParquetReader, options?: ParquetLoaderOptions): AsyncIterable<ObjectRowTableBatch>;
+export declare function parseParquetFile(file: ReadableFile, options?: ParquetLoaderOptions): Promise<ObjectRowTable | GeoJSONTable>;
+export declare function parseParquetFileInBatches(file: ReadableFile, options?: ParquetLoaderOptions): AsyncIterable<ObjectRowTableBatch | GeoJSONTableBatch>;
 //# sourceMappingURL=parse-parquet-to-rows.d.ts.map
package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parse-parquet-to-rows.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-parquet-to-rows.ts"],"names":[],"mappings":"
+
{"version":3,"file":"parse-parquet-to-rows.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-parquet-to-rows.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,0BAA0B,CAAC;AAC3D,OAAO,KAAK,EACV,YAAY,EACZ,iBAAiB,EACjB,cAAc,EACd,mBAAmB,EACpB,MAAM,oBAAoB,CAAC;AAI5B,OAAO,KAAK,EAAC,oBAAoB,EAAC,MAAM,sBAAsB,CAAC;AAM/D,wBAAsB,gBAAgB,CACpC,IAAI,EAAE,YAAY,EAClB,OAAO,CAAC,EAAE,oBAAoB,GAC7B,OAAO,CAAC,cAAc,GAAG,YAAY,CAAC,CA0BxC;AAED,wBAAuB,yBAAyB,CAC9C,IAAI,EAAE,YAAY,EAClB,OAAO,CAAC,EAAE,oBAAoB,GAC7B,aAAa,CAAC,mBAAmB,GAAG,iBAAiB,CAAC,CAuBxD"}
package/dist/lib/parsers/parse-parquet-to-rows.js
CHANGED
@@ -1,14 +1,11 @@
-import { BlobFile } from '@loaders.gl/loader-utils';
 import { convertWKBTableToGeoJSON } from '@loaders.gl/gis';
 import { WKTLoader, WKBLoader } from '@loaders.gl/wkt';
 import { ParquetReader } from "../../parquetjs/parser/parquet-reader.js";
 import { getSchemaFromParquetReader } from "./get-parquet-schema.js";
-import { installBufferPolyfill } from "../../buffer-polyfill/index.js";
-export async function parseParquet(arrayBuffer, options) {
+import { installBufferPolyfill } from "../../polyfills/buffer/index.js";
+export async function parseParquetFile(file, options) {
   var _options$parquet, _options$parquet2;
   installBufferPolyfill();
-  const blob = new Blob([arrayBuffer]);
-  const file = new BlobFile(blob);
   const reader = new ParquetReader(file, {
     preserveBinary: options === null || options === void 0 ? void 0 : (_options$parquet = options.parquet) === null || _options$parquet === void 0 ? void 0 : _options$parquet.preserveBinary
   });
@@ -26,12 +23,39 @@ export async function parseParquet(arrayBuffer, options) {
     data: rows
   };
   const shape = options === null || options === void 0 ? void 0 : (_options$parquet2 = options.parquet) === null || _options$parquet2 === void 0 ? void 0 : _options$parquet2.shape;
+  return convertTable(objectRowTable, shape);
+}
+export async function* parseParquetFileInBatches(file, options) {
+  var _options$parquet3;
+  const reader = new ParquetReader(file, {
+    preserveBinary: options === null || options === void 0 ? void 0 : (_options$parquet3 = options.parquet) === null || _options$parquet3 === void 0 ? void 0 : _options$parquet3.preserveBinary
+  });
+  const schema = await getSchemaFromParquetReader(reader);
+  const rowBatches = reader.rowBatchIterator(options === null || options === void 0 ? void 0 : options.parquet);
+  for await (const rows of rowBatches) {
+    var _options$parquet4;
+    const objectRowTable = {
+      shape: 'object-row-table',
+      schema,
+      data: rows
+    };
+    const shape = options === null || options === void 0 ? void 0 : (_options$parquet4 = options.parquet) === null || _options$parquet4 === void 0 ? void 0 : _options$parquet4.shape;
+    const table = convertTable(objectRowTable, shape);
+    yield {
+      batchType: 'data',
+      schema,
+      ...table,
+      length: rows.length
+    };
+  }
+}
+function convertTable(objectRowTable, shape) {
   switch (shape) {
     case 'object-row-table':
       return objectRowTable;
     case 'geojson-table':
       try {
-        return convertWKBTableToGeoJSON(objectRowTable, schema, [WKTLoader, WKBLoader]);
+        return convertWKBTableToGeoJSON(objectRowTable, objectRowTable.schema, [WKTLoader, WKBLoader]);
       } catch (error) {
        return objectRowTable;
       }
@@ -39,17 +63,4 @@ export async function parseParquet(arrayBuffer, options) {
       throw new Error(shape);
   }
 }
-export async function* parseParquetFileInBatches(reader, options) {
-  const schema = await getSchemaFromParquetReader(reader);
-  const rowBatches = reader.rowBatchIterator(options === null || options === void 0 ? void 0 : options.parquet);
-  for await (const rows of rowBatches) {
-    yield {
-      batchType: 'data',
-      shape: 'object-row-table',
-      schema,
-      data: rows,
-      length: rows.length
-    };
-  }
-}
 //# sourceMappingURL=parse-parquet-to-rows.js.map
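Two behavioral notes fall out of this hunk: each yielded batch is now run through convertTable(), so options.parquet.shape applies per batch, and convertWKBTableToGeoJSON reads the schema off the table itself rather than a closed-over variable, falling back to the untouched object-row table on conversion failure. A sketch of batch iteration under these semantics (ArrayBuffer source assumed):

    import {BlobFile} from '@loaders.gl/loader-utils';

    const file = new BlobFile(arrayBuffer);
    const batches = parseParquetFileInBatches(file, {parquet: {shape: 'geojson-table'}});
    for await (const batch of batches) {
      // batch spreads the converted table plus {batchType: 'data', schema, length}
      console.log(batch.batchType, batch.length);
    }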
package/dist/lib/parsers/parse-parquet-to-rows.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parse-parquet-to-rows.js","names":["
+
{"version":3,"file":"parse-parquet-to-rows.js","names":["convertWKBTableToGeoJSON","WKTLoader","WKBLoader","ParquetReader","getSchemaFromParquetReader","installBufferPolyfill","parseParquetFile","file","options","_options$parquet","_options$parquet2","reader","preserveBinary","parquet","schema","rows","rowBatches","rowBatchIterator","rowBatch","row","push","objectRowTable","shape","data","convertTable","parseParquetFileInBatches","_options$parquet3","_options$parquet4","table","batchType","length","error","Error"],"sources":["../../../src/lib/parsers/parse-parquet-to-rows.ts"],"sourcesContent":["// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n// import {ColumnarTableBatch} from '@loaders.gl/schema';\nimport type {ReadableFile} from '@loaders.gl/loader-utils';\nimport type {\n GeoJSONTable,\n GeoJSONTableBatch,\n ObjectRowTable,\n ObjectRowTableBatch\n} from '@loaders.gl/schema';\nimport {convertWKBTableToGeoJSON} from '@loaders.gl/gis';\nimport {WKTLoader, WKBLoader} from '@loaders.gl/wkt';\n\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport type {ParquetRow} from '../../parquetjs/schema/declare';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {getSchemaFromParquetReader} from './get-parquet-schema';\nimport {installBufferPolyfill} from '../../polyfills/buffer';\n\nexport async function parseParquetFile(\n file: ReadableFile,\n options?: ParquetLoaderOptions\n): Promise<ObjectRowTable | GeoJSONTable> {\n installBufferPolyfill();\n\n const reader = new ParquetReader(file, {\n preserveBinary: options?.parquet?.preserveBinary\n });\n\n const schema = await getSchemaFromParquetReader(reader);\n\n const rows: ParquetRow[] = [];\n\n const rowBatches = reader.rowBatchIterator(options?.parquet);\n for await (const rowBatch of rowBatches) {\n // we have only one input batch so return\n for (const row of rowBatch) {\n rows.push(row);\n }\n }\n const objectRowTable: ObjectRowTable = {\n shape: 'object-row-table',\n schema,\n data: rows\n };\n\n const shape = options?.parquet?.shape;\n return convertTable(objectRowTable, shape);\n}\n\nexport async function* parseParquetFileInBatches(\n file: ReadableFile,\n options?: ParquetLoaderOptions\n): AsyncIterable<ObjectRowTableBatch | GeoJSONTableBatch> {\n const reader = new ParquetReader(file, {\n preserveBinary: options?.parquet?.preserveBinary\n });\n\n const schema = await getSchemaFromParquetReader(reader);\n const rowBatches = reader.rowBatchIterator(options?.parquet);\n for await (const rows of rowBatches) {\n const objectRowTable: ObjectRowTable = {\n shape: 'object-row-table',\n schema,\n data: rows\n };\n const shape = options?.parquet?.shape;\n const table = convertTable(objectRowTable, shape);\n\n yield {\n batchType: 'data',\n schema,\n ...table,\n length: rows.length\n };\n }\n}\n\nfunction convertTable(\n objectRowTable: ObjectRowTable,\n shape?: 'object-row-table' | 'geojson-table'\n): ObjectRowTable | GeoJSONTable {\n switch (shape) {\n case 'object-row-table':\n return objectRowTable;\n\n case 'geojson-table':\n try {\n return convertWKBTableToGeoJSON(objectRowTable, objectRowTable.schema!, [\n WKTLoader,\n WKBLoader\n ]);\n } catch (error) {\n return objectRowTable;\n }\n\n default:\n throw new Error(shape);\n 
}\n}\n"],"mappings":"AASA,SAAQA,wBAAwB,QAAO,iBAAiB;AACxD,SAAQC,SAAS,EAAEC,SAAS,QAAO,iBAAiB;AAAC,SAI7CC,aAAa;AAAA,SACbC,0BAA0B;AAAA,SAC1BC,qBAAqB;AAE7B,OAAO,eAAeC,gBAAgBA,CACpCC,IAAkB,EAClBC,OAA8B,EACU;EAAA,IAAAC,gBAAA,EAAAC,iBAAA;EACxCL,qBAAqB,CAAC,CAAC;EAEvB,MAAMM,MAAM,GAAG,IAAIR,aAAa,CAACI,IAAI,EAAE;IACrCK,cAAc,EAAEJ,OAAO,aAAPA,OAAO,wBAAAC,gBAAA,GAAPD,OAAO,CAAEK,OAAO,cAAAJ,gBAAA,uBAAhBA,gBAAA,CAAkBG;EACpC,CAAC,CAAC;EAEF,MAAME,MAAM,GAAG,MAAMV,0BAA0B,CAACO,MAAM,CAAC;EAEvD,MAAMI,IAAkB,GAAG,EAAE;EAE7B,MAAMC,UAAU,GAAGL,MAAM,CAACM,gBAAgB,CAACT,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEK,OAAO,CAAC;EAC5D,WAAW,MAAMK,QAAQ,IAAIF,UAAU,EAAE;IAEvC,KAAK,MAAMG,GAAG,IAAID,QAAQ,EAAE;MAC1BH,IAAI,CAACK,IAAI,CAACD,GAAG,CAAC;IAChB;EACF;EACA,MAAME,cAA8B,GAAG;IACrCC,KAAK,EAAE,kBAAkB;IACzBR,MAAM;IACNS,IAAI,EAAER;EACR,CAAC;EAED,MAAMO,KAAK,GAAGd,OAAO,aAAPA,OAAO,wBAAAE,iBAAA,GAAPF,OAAO,CAAEK,OAAO,cAAAH,iBAAA,uBAAhBA,iBAAA,CAAkBY,KAAK;EACrC,OAAOE,YAAY,CAACH,cAAc,EAAEC,KAAK,CAAC;AAC5C;AAEA,OAAO,gBAAgBG,yBAAyBA,CAC9ClB,IAAkB,EAClBC,OAA8B,EAC0B;EAAA,IAAAkB,iBAAA;EACxD,MAAMf,MAAM,GAAG,IAAIR,aAAa,CAACI,IAAI,EAAE;IACrCK,cAAc,EAAEJ,OAAO,aAAPA,OAAO,wBAAAkB,iBAAA,GAAPlB,OAAO,CAAEK,OAAO,cAAAa,iBAAA,uBAAhBA,iBAAA,CAAkBd;EACpC,CAAC,CAAC;EAEF,MAAME,MAAM,GAAG,MAAMV,0BAA0B,CAACO,MAAM,CAAC;EACvD,MAAMK,UAAU,GAAGL,MAAM,CAACM,gBAAgB,CAACT,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEK,OAAO,CAAC;EAC5D,WAAW,MAAME,IAAI,IAAIC,UAAU,EAAE;IAAA,IAAAW,iBAAA;IACnC,MAAMN,cAA8B,GAAG;MACrCC,KAAK,EAAE,kBAAkB;MACzBR,MAAM;MACNS,IAAI,EAAER;IACR,CAAC;IACD,MAAMO,KAAK,GAAGd,OAAO,aAAPA,OAAO,wBAAAmB,iBAAA,GAAPnB,OAAO,CAAEK,OAAO,cAAAc,iBAAA,uBAAhBA,iBAAA,CAAkBL,KAAK;IACrC,MAAMM,KAAK,GAAGJ,YAAY,CAACH,cAAc,EAAEC,KAAK,CAAC;IAEjD,MAAM;MACJO,SAAS,EAAE,MAAM;MACjBf,MAAM;MACN,GAAGc,KAAK;MACRE,MAAM,EAAEf,IAAI,CAACe;IACf,CAAC;EACH;AACF;AAEA,SAASN,YAAYA,CACnBH,cAA8B,EAC9BC,KAA4C,EACb;EAC/B,QAAQA,KAAK;IACX,KAAK,kBAAkB;MACrB,OAAOD,cAAc;IAEvB,KAAK,eAAe;MAClB,IAAI;QACF,OAAOrB,wBAAwB,CAACqB,cAAc,EAAEA,cAAc,CAACP,MAAM,EAAG,CACtEb,SAAS,EACTC,SAAS,CACV,CAAC;MACJ,CAAC,CAAC,OAAO6B,KAAK,EAAE;QACd,OAAOV,cAAc;MACvB;IAEF;MACE,MAAM,IAAIW,KAAK,CAACV,KAAK,CAAC;EAC1B;AACF"}
package/dist/lib/wasm/encode-parquet-wasm.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import type { WriterOptions } from '@loaders.gl/loader-utils';
-import * as arrow from 'apache-arrow';
+import type { ArrowTable } from '@loaders.gl/arrow';
 export type ParquetWriterOptions = WriterOptions & {
     parquet?: {
         wasmUrl?: string;
@@ -8,13 +8,5 @@ export type ParquetWriterOptions = WriterOptions & {
 /**
  * Encode Arrow arrow.Table to Parquet buffer
  */
-export declare function encode(table: arrow.Table, options?: ParquetWriterOptions): Promise<ArrayBuffer>;
-/**
- * Serialize a table to the IPC format. This function is a convenience
- * Opposite of {@link tableFromIPC}.
- *
- * @param table The arrow.Table to serialize.
- * @param type Whether to serialize the arrow.Table as a file or a stream.
- */
-export declare function tableToIPC(table: arrow.Table): Uint8Array;
+export declare function encode(table: ArrowTable, options?: ParquetWriterOptions): Promise<ArrayBuffer>;
 //# sourceMappingURL=encode-parquet-wasm.d.ts.map
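encode() now takes a loaders.gl ArrowTable wrapper rather than a bare arrow.Table, and tableToIPC is gone from the public surface. A sketch of building the wrapper; the 'arrow-table' shape tag is an assumption based on @loaders.gl/arrow conventions and is not shown in this diff:

    import * as arrow from 'apache-arrow';

    const data = arrow.tableFromArrays({id: Int32Array.from([1, 2, 3])});
    const table = {shape: 'arrow-table', data}; // assumed ArrowTable wrapper shape
    const parquetArrayBuffer = await encode(table);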
package/dist/lib/wasm/encode-parquet-wasm.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"encode-parquet-wasm.d.ts","sourceRoot":"","sources":["../../../src/lib/wasm/encode-parquet-wasm.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,0BAA0B,CAAC;
+
{"version":3,"file":"encode-parquet-wasm.d.ts","sourceRoot":"","sources":["../../../src/lib/wasm/encode-parquet-wasm.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,0BAA0B,CAAC;AAC5D,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAKlD,MAAM,MAAM,oBAAoB,GAAG,aAAa,GAAG;IACjD,OAAO,CAAC,EAAE;QACR,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;CACH,CAAC;AAEF;;GAEG;AACH,wBAAsB,MAAM,CAC1B,KAAK,EAAE,UAAU,EACjB,OAAO,CAAC,EAAE,oBAAoB,GAC7B,OAAO,CAAC,WAAW,CAAC,CAiBtB"}
package/dist/lib/wasm/encode-parquet-wasm.js
CHANGED
@@ -1,15 +1,14 @@
 import * as arrow from 'apache-arrow';
-import { loadWasm } from "./load-wasm/index.js";
+import { loadWasm } from "./load-wasm.js";
 export async function encode(table, options) {
   var _options$parquet;
   const wasmUrl = options === null || options === void 0 ? void 0 : (_options$parquet = options.parquet) === null || _options$parquet === void 0 ? void 0 : _options$parquet.wasmUrl;
   const wasm = await loadWasm(wasmUrl);
-  const arrowIPCBytes = tableToIPC(table);
+  const arrowTable = table.data;
+  const writer = arrow.RecordBatchStreamWriter.writeAll(arrowTable);
+  const arrowIPCBytes = writer.toUint8Array(true);
   const writerProperties = new wasm.WriterPropertiesBuilder().build();
   const parquetBytes = wasm.writeParquet(arrowIPCBytes, writerProperties);
   return parquetBytes.buffer.slice(parquetBytes.byteOffset, parquetBytes.byteLength + parquetBytes.byteOffset);
 }
-export function tableToIPC(table) {
-  return arrow.RecordBatchStreamWriter.writeAll(table).toUint8Array(true);
-}
 //# sourceMappingURL=encode-parquet-wasm.js.map
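The removed tableToIPC helper is folded inline: encode() now unwraps table.data and serializes the IPC stream itself (the old call site at line 7 is truncated in this diff; its exact text is presumed). The equivalence, in isolation, with the arrow import from the file above:

    // 4.0.2 (presumed): const arrowIPCBytes = tableToIPC(arrowTable);
    // 4.0.3 inline form:
    const writer = arrow.RecordBatchStreamWriter.writeAll(arrowTable);
    const arrowIPCBytes = writer.toUint8Array(true); // sync=true returns the Uint8Array directly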
package/dist/lib/wasm/encode-parquet-wasm.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"encode-parquet-wasm.js","names":["arrow","loadWasm","encode","table","options","_options$parquet","wasmUrl","parquet","wasm","arrowIPCBytes","
+
{"version":3,"file":"encode-parquet-wasm.js","names":["arrow","loadWasm","encode","table","options","_options$parquet","wasmUrl","parquet","wasm","arrowTable","data","writer","RecordBatchStreamWriter","writeAll","arrowIPCBytes","toUint8Array","writerProperties","WriterPropertiesBuilder","build","parquetBytes","writeParquet","buffer","slice","byteOffset","byteLength"],"sources":["../../../src/lib/wasm/encode-parquet-wasm.ts"],"sourcesContent":["import type {WriterOptions} from '@loaders.gl/loader-utils';\nimport type {ArrowTable} from '@loaders.gl/arrow';\n\nimport * as arrow from 'apache-arrow';\nimport {loadWasm} from './load-wasm';\n\nexport type ParquetWriterOptions = WriterOptions & {\n parquet?: {\n wasmUrl?: string;\n };\n};\n\n/**\n * Encode Arrow arrow.Table to Parquet buffer\n */\nexport async function encode(\n table: ArrowTable,\n options?: ParquetWriterOptions\n): Promise<ArrayBuffer> {\n const wasmUrl = options?.parquet?.wasmUrl;\n const wasm = await loadWasm(wasmUrl);\n\n const arrowTable: arrow.Table = table.data;\n\n // Serialize a table to the IPC format.\n const writer = arrow.RecordBatchStreamWriter.writeAll(arrowTable);\n const arrowIPCBytes = writer.toUint8Array(true);\n\n // TODO: provide options for how to write table.\n const writerProperties = new wasm.WriterPropertiesBuilder().build();\n const parquetBytes = wasm.writeParquet(arrowIPCBytes, writerProperties);\n return parquetBytes.buffer.slice(\n parquetBytes.byteOffset,\n parquetBytes.byteLength + parquetBytes.byteOffset\n );\n}\n"],"mappings":"AAGA,OAAO,KAAKA,KAAK,MAAM,cAAc;AAAC,SAC9BC,QAAQ;AAWhB,OAAO,eAAeC,MAAMA,CAC1BC,KAAiB,EACjBC,OAA8B,EACR;EAAA,IAAAC,gBAAA;EACtB,MAAMC,OAAO,GAAGF,OAAO,aAAPA,OAAO,wBAAAC,gBAAA,GAAPD,OAAO,CAAEG,OAAO,cAAAF,gBAAA,uBAAhBA,gBAAA,CAAkBC,OAAO;EACzC,MAAME,IAAI,GAAG,MAAMP,QAAQ,CAACK,OAAO,CAAC;EAEpC,MAAMG,UAAuB,GAAGN,KAAK,CAACO,IAAI;EAG1C,MAAMC,MAAM,GAAGX,KAAK,CAACY,uBAAuB,CAACC,QAAQ,CAACJ,UAAU,CAAC;EACjE,MAAMK,aAAa,GAAGH,MAAM,CAACI,YAAY,CAAC,IAAI,CAAC;EAG/C,MAAMC,gBAAgB,GAAG,IAAIR,IAAI,CAACS,uBAAuB,CAAC,CAAC,CAACC,KAAK,CAAC,CAAC;EACnE,MAAMC,YAAY,GAAGX,IAAI,CAACY,YAAY,CAACN,aAAa,EAAEE,gBAAgB,CAAC;EACvE,OAAOG,YAAY,CAACE,MAAM,CAACC,KAAK,CAC9BH,YAAY,CAACI,UAAU,EACvBJ,YAAY,CAACK,UAAU,GAAGL,YAAY,CAACI,UACzC,CAAC;AACH"}
package/dist/lib/wasm/load-wasm-browser.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"load-wasm-browser.d.ts","sourceRoot":"","sources":["../../../src/lib/wasm/load-wasm-browser.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,OAAO,MAAM,0BAA0B,CAAC;AAIpD,wBAAsB,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,2BAU9C"}
package/dist/lib/wasm/load-wasm-browser.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"load-wasm-browser.js","names":["wasmEsm","cached","loadWasm","wasmUrl","default"],"sources":["../../../src/lib/wasm/load-wasm-browser.ts"],"sourcesContent":["import * as wasmEsm from 'parquet-wasm/esm2/arrow1';\n\nlet cached: typeof wasmEsm | null = null;\n\nexport async function loadWasm(wasmUrl?: string) {\n if (cached !== null) {\n return cached;\n }\n\n // For ESM bundles, need to await the default export, which loads the WASM\n await wasmEsm.default(wasmUrl);\n cached = wasmEsm;\n\n return wasmEsm;\n}\n"],"mappings":"AAAA,OAAO,KAAKA,OAAO,MAAM,0BAA0B;AAEnD,IAAIC,MAA6B,GAAG,IAAI;AAExC,OAAO,eAAeC,QAAQA,CAACC,OAAgB,EAAE;EAC/C,IAAIF,MAAM,KAAK,IAAI,EAAE;IACnB,OAAOA,MAAM;EACf;EAGA,MAAMD,OAAO,CAACI,OAAO,CAACD,OAAO,CAAC;EAC9BF,MAAM,GAAGD,OAAO;EAEhB,OAAOA,OAAO;AAChB"}
package/dist/lib/wasm/load-wasm-node.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"load-wasm-node.d.ts","sourceRoot":"","sources":["../../../src/lib/wasm/load-wasm-node.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,QAAQ,MAAM,0BAA0B,CAAC;AAErD,wBAAsB,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,4BAE9C"}
package/dist/lib/wasm/load-wasm-node.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"load-wasm-node.js","names":["wasmNode","loadWasm","wasmUrl"],"sources":["../../../src/lib/wasm/load-wasm-node.ts"],"sourcesContent":["import * as wasmNode from 'parquet-wasm/node/arrow1';\n\nexport async function loadWasm(wasmUrl?: string) {\n return wasmNode;\n}\n"],"mappings":"AAAA,OAAO,KAAKA,QAAQ,MAAM,0BAA0B;AAEpD,OAAO,eAAeC,QAAQA,CAACC,OAAgB,EAAE;EAC/C,OAAOF,QAAQ;AACjB"}
package/dist/lib/wasm/{load-wasm/index.d.ts → load-wasm.d.ts}
CHANGED
@@ -1,2 +1,2 @@
 export { loadWasm } from './load-wasm-node';
-//# sourceMappingURL=index.d.ts.map
+//# sourceMappingURL=load-wasm.d.ts.map
package/dist/lib/wasm/load-wasm.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"load-wasm.d.ts","sourceRoot":"","sources":["../../../src/lib/wasm/load-wasm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,QAAQ,EAAC,MAAM,kBAAkB,CAAC"}
package/dist/lib/wasm/{load-wasm/index.js → load-wasm.js}
CHANGED
@@ -1,2 +1,2 @@
 export { loadWasm } from "./load-wasm-node.js";
-//# sourceMappingURL=index.js.map
+//# sourceMappingURL=load-wasm.js.map
package/dist/lib/wasm/load-wasm.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"load-wasm.js","names":["loadWasm"],"sources":["../../../src/lib/wasm/load-wasm.ts"],"sourcesContent":["export {loadWasm} from './load-wasm-node';\n"],"mappings":"SAAQA,QAAQ"}
package/dist/lib/wasm/parse-parquet-wasm.d.ts
CHANGED
@@ -1,10 +1,10 @@
 import type { LoaderOptions } from '@loaders.gl/loader-utils';
-import
+import type { ArrowTable } from '@loaders.gl/arrow';
 export type ParquetWasmLoaderOptions = LoaderOptions & {
     parquet?: {
         type?: 'arrow-table';
         wasmUrl?: string;
     };
 };
-export declare function parseParquetWasm(arrayBuffer: ArrayBuffer, options?: ParquetWasmLoaderOptions): Promise<
+export declare function parseParquetWasm(arrayBuffer: ArrayBuffer, options?: ParquetWasmLoaderOptions): Promise<ArrowTable>;
 //# sourceMappingURL=parse-parquet-wasm.d.ts.map
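parseParquetWasm now resolves to the ArrowTable wrapper type. A hedged usage sketch; the .data unwrap matches encode-parquet-wasm.js above, where table.data is the underlying apache-arrow Table:

    import type {ArrowTable} from '@loaders.gl/arrow';

    const table: ArrowTable = await parseParquetWasm(arrayBuffer, {
      parquet: {type: 'arrow-table'}
    });
    const numRows = table.data.numRows; // apache-arrow Table accessor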
package/dist/lib/wasm/parse-parquet-wasm.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"parse-parquet-wasm.d.ts","sourceRoot":"","sources":["../../../src/lib/wasm/parse-parquet-wasm.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,0BAA0B,CAAC;AAC5D,OAAO,KAAK,
+
{"version":3,"file":"parse-parquet-wasm.d.ts","sourceRoot":"","sources":["../../../src/lib/wasm/parse-parquet-wasm.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,0BAA0B,CAAC;AAC5D,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAKlD,MAAM,MAAM,wBAAwB,GAAG,aAAa,GAAG;IACrD,OAAO,CAAC,EAAE;QACR,IAAI,CAAC,EAAE,aAAa,CAAC;QACrB,OAAO,CAAC,EAAE,MAAM,CAAC;KAClB,CAAC;CACH,CAAC;AAEF,wBAAsB,gBAAgB,CACpC,WAAW,EAAE,WAAW,EACxB,OAAO,CAAC,EAAE,wBAAwB,GACjC,OAAO,CAAC,UAAU,CAAC,CAuBrB"}
|