@loaders.gl/parquet 4.2.0-alpha.6 → 4.2.0-beta.1

This diff shows the content of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
package/dist/index.cjs CHANGED
@@ -1743,7 +1743,7 @@ globalThis.process.env = globalThis.process.env || {};
 var Buffer3 = installBufferPolyfill();
 
 // dist/parquet-loader.js
-var import_loader_utils = require("@loaders.gl/loader-utils");
+var import_loader_utils2 = require("@loaders.gl/loader-utils");
 
 // dist/parquetjs/codecs/plain.js
 var import_int53 = __toESM(require("int53"), 1);
@@ -2102,6 +2102,7 @@ var PARQUET_CODECS = {
 
 // dist/parquetjs/compression.js
 var import_compression = require("@loaders.gl/compression");
+var import_loader_utils = require("@loaders.gl/loader-utils");
 var import_lz4js = __toESM(require("lz4js"), 1);
 function toBuffer(arrayBuffer) {
   return Buffer.from(arrayBuffer);
@@ -2138,8 +2139,9 @@ var PARQUET_COMPRESSION_METHODS = {
   ZSTD: new import_compression.ZstdCompression({ modules })
 };
 async function preloadCompressions(options) {
+  (0, import_loader_utils.registerJSModules)(options == null ? void 0 : options.modules);
   const compressions = Object.values(PARQUET_COMPRESSION_METHODS);
-  return await Promise.all(compressions.map((compression) => compression.preload()));
+  return await Promise.all(compressions.map((compression) => compression.preload(options == null ? void 0 : options.modules)));
 }
 async function deflate(method, value) {
   const compression = PARQUET_COMPRESSION_METHODS[method];
@@ -6314,7 +6316,7 @@ var FileMetaData = class {
 };
 
 // dist/lib/constants.js
-var VERSION = true ? "4.2.0-alpha.5" : "latest";
+var VERSION = true ? "4.2.0-alpha.6" : "latest";
 var PARQUET_WASM_URL = "https://unpkg.com/parquet-wasm@0.6.0-beta.1/esm/arrow1_bg.wasm";
 var PARQUET_MAGIC = "PAR1";
 var PARQUET_MAGIC_ENCRYPTED = "PARE";
@@ -6945,6 +6947,7 @@ async function getSchemaFromParquetReader(reader) {
 async function parseParquetFile(file, options) {
   var _a, _b;
   installBufferPolyfill();
+  await preloadCompressions(options);
   const reader = new ParquetReader(file, {
     preserveBinary: (_a = options == null ? void 0 : options.parquet) == null ? void 0 : _a.preserveBinary
   });
@@ -6966,6 +6969,8 @@ async function parseParquetFile(file, options) {
 }
 async function* parseParquetFileInBatches(file, options) {
   var _a, _b;
+  installBufferPolyfill();
+  await preloadCompressions(options);
   const reader = new ParquetReader(file, {
     preserveBinary: (_a = options == null ? void 0 : options.parquet) == null ? void 0 : _a.preserveBinary
   });
@@ -7057,6 +7062,7 @@ function convertBatch(objectRowBatch, shape) {
 // dist/lib/parsers/parse-parquet-to-columns.js
 async function parseParquetFileInColumns(file, options) {
   installBufferPolyfill();
+  await preloadCompressions(options);
   for await (const batch of parseParquetFileInColumnarBatches(file, options)) {
     return {
       shape: "columnar-table",
@@ -7067,6 +7073,8 @@ async function parseParquetFileInColumns(file, options) {
   throw new Error("empty table");
 }
 async function* parseParquetFileInColumnarBatches(file, options) {
+  installBufferPolyfill();
+  await preloadCompressions(options);
   const reader = new ParquetReader(file);
   const schema = await getSchemaFromParquetReader(reader);
   const parquetSchema = await reader.getSchema();
@@ -7087,8 +7095,10 @@ function convertRowGroupToTableBatch(rowGroup, parquetSchema, schema) {
 }
 
 // dist/parquet-loader.js
-var VERSION2 = true ? "4.2.0-alpha.5" : "latest";
+var VERSION2 = true ? "4.2.0-alpha.6" : "latest";
 var ParquetWorkerLoader = {
+  dataType: null,
+  batchType: null,
   name: "Apache Parquet",
   id: "parquet",
   module: "parquet",
@@ -7111,12 +7121,16 @@ var ParquetWorkerLoader = {
 };
 var ParquetLoader = {
   ...ParquetWorkerLoader,
-  parse: (arrayBuffer, options) => parseParquetFile(new import_loader_utils.BlobFile(arrayBuffer), options),
+  dataType: null,
+  batchType: null,
+  parse: (arrayBuffer, options) => parseParquetFile(new import_loader_utils2.BlobFile(arrayBuffer), options),
   parseFile: parseParquetFile,
   parseFileInBatches: parseParquetFileInBatches
 };
 ParquetLoader.Buffer = Buffer;
 var GeoParquetWorkerLoader = {
+  dataType: null,
+  batchType: null,
   name: "Apache Parquet",
   id: "parquet",
   module: "parquet",
@@ -7140,12 +7154,14 @@ var GeoParquetWorkerLoader = {
 var GeoParquetLoader = {
   ...GeoParquetWorkerLoader,
   parse(arrayBuffer, options) {
-    return parseGeoParquetFile(new import_loader_utils.BlobFile(arrayBuffer), options);
+    return parseGeoParquetFile(new import_loader_utils2.BlobFile(arrayBuffer), options);
   },
   parseFile: parseGeoParquetFile,
   parseFileInBatches: parseGeoParquetFileInBatches
 };
 var ParquetColumnarWorkerLoader = {
+  dataType: null,
+  batchType: null,
   name: "Apache Parquet",
   id: "parquet",
   module: "parquet",
@@ -7161,14 +7177,14 @@ var ParquetColumnarWorkerLoader = {
 var ParquetColumnarLoader = {
   ...ParquetColumnarWorkerLoader,
   parse(arrayBuffer, options) {
-    return parseParquetFileInColumns(new import_loader_utils.BlobFile(arrayBuffer), options);
+    return parseParquetFileInColumns(new import_loader_utils2.BlobFile(arrayBuffer), options);
   },
   parseFile: parseParquetFileInColumns,
   parseFileInBatches: parseParquetFileInColumnarBatches
 };
 
 // dist/parquet-writer.js
-var VERSION3 = true ? "4.2.0-alpha.5" : "latest";
+var VERSION3 = true ? "4.2.0-alpha.6" : "latest";
 var ParquetWriter = {
   name: "Apache Parquet",
   id: "parquet",
@@ -7225,6 +7241,8 @@ async function parseParquetWasm(arrayBuffer, options) {
 
 // dist/parquet-wasm-loader.js
 var ParquetWasmWorkerLoader = {
+  dataType: null,
+  batchType: null,
   name: "Apache Parquet",
   id: "parquet-wasm",
   module: "parquet",