@loaders.gl/parquet 3.4.13 → 3.4.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +15 -24
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/bundle.js +1 -1
- package/dist/es5/bundle.js.map +1 -1
- package/dist/es5/constants.js +5 -5
- package/dist/es5/constants.js.map +1 -1
- package/dist/es5/index.js +24 -24
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js +2 -7
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -1
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js +22 -33
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -1
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js +2 -2
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -1
- package/dist/es5/lib/geo/decode-geo-metadata.js +16 -27
- package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -1
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js +20 -151
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -1
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js +13 -138
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -1
- package/dist/es5/lib/wasm/encode-parquet-wasm.js +8 -29
- package/dist/es5/lib/wasm/encode-parquet-wasm.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/index.js +1 -1
- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js +10 -33
- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js +4 -22
- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
- package/dist/es5/lib/wasm/parse-parquet-wasm.js +13 -46
- package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/es5/parquet-loader.js +4 -4
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +4 -4
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +3 -3
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +4 -4
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/codecs/dictionary.js +3 -6
- package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
- package/dist/es5/parquetjs/codecs/index.js +4 -5
- package/dist/es5/parquetjs/codecs/index.js.map +1 -1
- package/dist/es5/parquetjs/codecs/plain.js +41 -41
- package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
- package/dist/es5/parquetjs/codecs/rle.js +25 -30
- package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +26 -90
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/encoder/parquet-encoder.js +245 -536
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/BsonType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js +123 -133
- package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js +138 -150
- package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js +241 -251
- package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js +58 -70
- package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js +97 -107
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js +136 -146
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DateType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DecimalType.js +58 -68
- package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js +71 -81
- package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Encoding.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/EnumType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js +164 -174
- package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/IntType.js +58 -68
- package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/JsonType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/KeyValue.js +58 -68
- package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ListType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/LogicalType.js +274 -310
- package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MapType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/NullType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js +56 -66
- package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js +71 -81
- package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageHeader.js +136 -146
- package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageLocation.js +71 -81
- package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageType.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/RowGroup.js +105 -115
- package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js +162 -172
- package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js +71 -81
- package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Statistics.js +106 -116
- package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/StringType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeType.js +58 -68
- package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js +76 -90
- package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimestampType.js +58 -68
- package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Type.js +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/UUIDType.js +28 -40
- package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/index.js +43 -43
- package/dist/es5/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/es5/parquetjs/parser/decoders.js +195 -327
- package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
- package/dist/es5/parquetjs/parser/parquet-reader.js +155 -582
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js +10 -11
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/schema.js +65 -82
- package/dist/es5/parquetjs/schema/schema.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +56 -87
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js +40 -40
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/file-utils.js +8 -12
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/es5/parquetjs/utils/read-utils.js +22 -39
- package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
- package/dist/esm/parquet-loader.js +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/parquet-worker.js +15 -24
- package/dist/parquet-worker.js.map +3 -3
- package/package.json +6 -6

package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js.map
@@ -1 +1 @@
-
{"version":3,"file":"load-wasm-node.js","names":["wasmNode","_interopRequireWildcard","require","_getRequireWildcardCache","nodeInterop","WeakMap","cacheBabelInterop","cacheNodeInterop","obj","__esModule","
+
{"version":3,"file":"load-wasm-node.js","names":["wasmNode","_interopRequireWildcard","require","_getRequireWildcardCache","nodeInterop","WeakMap","cacheBabelInterop","cacheNodeInterop","obj","__esModule","default","cache","has","get","newObj","hasPropertyDescriptor","Object","defineProperty","getOwnPropertyDescriptor","key","prototype","hasOwnProperty","call","desc","set","loadWasm","wasmUrl"],"sources":["../../../../../src/lib/wasm/load-wasm/load-wasm-node.ts"],"sourcesContent":["import * as wasmNode from 'parquet-wasm/node/arrow1';\n\nexport async function loadWasm(wasmUrl?: string) {\n return wasmNode;\n}\n"],"mappings":";;;;;;AAAA,IAAAA,QAAA,GAAAC,uBAAA,CAAAC,OAAA;AAAqD,SAAAC,yBAAAC,WAAA,eAAAC,OAAA,kCAAAC,iBAAA,OAAAD,OAAA,QAAAE,gBAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,WAAA,WAAAA,WAAA,GAAAG,gBAAA,GAAAD,iBAAA,KAAAF,WAAA;AAAA,SAAAH,wBAAAO,GAAA,EAAAJ,WAAA,SAAAA,WAAA,IAAAI,GAAA,IAAAA,GAAA,CAAAC,UAAA,WAAAD,GAAA,QAAAA,GAAA,oBAAAA,GAAA,wBAAAA,GAAA,4BAAAE,OAAA,EAAAF,GAAA,UAAAG,KAAA,GAAAR,wBAAA,CAAAC,WAAA,OAAAO,KAAA,IAAAA,KAAA,CAAAC,GAAA,CAAAJ,GAAA,YAAAG,KAAA,CAAAE,GAAA,CAAAL,GAAA,SAAAM,MAAA,WAAAC,qBAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,GAAA,IAAAX,GAAA,QAAAW,GAAA,kBAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAd,GAAA,EAAAW,GAAA,SAAAI,IAAA,GAAAR,qBAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAV,GAAA,EAAAW,GAAA,cAAAI,IAAA,KAAAA,IAAA,CAAAV,GAAA,IAAAU,IAAA,CAAAC,GAAA,KAAAR,MAAA,CAAAC,cAAA,CAAAH,MAAA,EAAAK,GAAA,EAAAI,IAAA,YAAAT,MAAA,CAAAK,GAAA,IAAAX,GAAA,CAAAW,GAAA,SAAAL,MAAA,CAAAJ,OAAA,GAAAF,GAAA,MAAAG,KAAA,IAAAA,KAAA,CAAAa,GAAA,CAAAhB,GAAA,EAAAM,MAAA,YAAAA,MAAA;AAE9C,eAAeW,QAAQA,CAACC,OAAgB,EAAE;EAC/C,OAAO1B,QAAQ;AACjB"}

package/dist/es5/lib/wasm/parse-parquet-wasm.js
@@ -1,59 +1,26 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
 Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.parseParquet = parseParquet;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
 var _apacheArrow = require("apache-arrow");
 var _loadWasmNode = require("./load-wasm/load-wasm-node");
-
-
-
-
-
-
-
-
-
-var wasmUrl, wasm, arr, arrowIPCUint8Arr, arrowIPCBuffer, arrowTable;
-return _regenerator.default.wrap(function _callee$(_context) {
-while (1) switch (_context.prev = _context.next) {
-case 0:
-wasmUrl = options === null || options === void 0 ? void 0 : (_options$parquet = options.parquet) === null || _options$parquet === void 0 ? void 0 : _options$parquet.wasmUrl;
-_context.next = 3;
-return (0, _loadWasmNode.loadWasm)(wasmUrl);
-case 3:
-wasm = _context.sent;
-arr = new Uint8Array(arrayBuffer);
-arrowIPCUint8Arr = wasm.readParquet(arr);
-arrowIPCBuffer = arrowIPCUint8Arr.buffer.slice(arrowIPCUint8Arr.byteOffset, arrowIPCUint8Arr.byteLength + arrowIPCUint8Arr.byteOffset);
-arrowTable = tableFromIPC(arrowIPCBuffer);
-return _context.abrupt("return", arrowTable);
-case 9:
-case "end":
-return _context.stop();
-}
-}, _callee);
-}));
-return _parseParquet.apply(this, arguments);
+async function parseParquet(arrayBuffer, options) {
+var _options$parquet;
+const wasmUrl = options === null || options === void 0 ? void 0 : (_options$parquet = options.parquet) === null || _options$parquet === void 0 ? void 0 : _options$parquet.wasmUrl;
+const wasm = await (0, _loadWasmNode.loadWasm)(wasmUrl);
+const arr = new Uint8Array(arrayBuffer);
+const arrowIPCUint8Arr = wasm.readParquet(arr);
+const arrowIPCBuffer = arrowIPCUint8Arr.buffer.slice(arrowIPCUint8Arr.byteOffset, arrowIPCUint8Arr.byteLength + arrowIPCUint8Arr.byteOffset);
+const arrowTable = tableFromIPC(arrowIPCBuffer);
+return arrowTable;
 }
 function tableFromIPC(input) {
-
-
-
-
-try {
-for (_iterator.s(); !(_step = _iterator.n()).done;) {
-var recordBatch = _step.value;
-recordBatches.push(recordBatch);
-}
-} catch (err) {
-_iterator.e(err);
-} finally {
-_iterator.f();
+const reader = _apacheArrow.RecordBatchStreamReader.from(input);
+const recordBatches = [];
+for (const recordBatch of reader) {
+recordBatches.push(recordBatch);
 }
 return new _apacheArrow.Table(recordBatches);
 }

package/dist/es5/lib/wasm/parse-parquet-wasm.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parse-parquet-wasm.js","names":["_apacheArrow","require","_loadWasmNode","
+
{"version":3,"file":"parse-parquet-wasm.js","names":["_apacheArrow","require","_loadWasmNode","parseParquet","arrayBuffer","options","_options$parquet","wasmUrl","parquet","wasm","loadWasm","arr","Uint8Array","arrowIPCUint8Arr","readParquet","arrowIPCBuffer","buffer","slice","byteOffset","byteLength","arrowTable","tableFromIPC","input","reader","RecordBatchStreamReader","from","recordBatches","recordBatch","push","Table"],"sources":["../../../../src/lib/wasm/parse-parquet-wasm.ts"],"sourcesContent":["// eslint-disable\nimport type {RecordBatch} from 'apache-arrow';\nimport type {LoaderOptions} from '@loaders.gl/loader-utils';\nimport {Table, RecordBatchStreamReader} from 'apache-arrow';\nimport {loadWasm} from './load-wasm/load-wasm-node';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'arrow-table';\n wasmUrl?: string;\n };\n};\n\nexport async function parseParquet(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n): Promise<Table> {\n const wasmUrl = options?.parquet?.wasmUrl;\n const wasm = await loadWasm(wasmUrl);\n\n const arr = new Uint8Array(arrayBuffer);\n const arrowIPCUint8Arr = wasm.readParquet(arr);\n const arrowIPCBuffer = arrowIPCUint8Arr.buffer.slice(\n arrowIPCUint8Arr.byteOffset,\n arrowIPCUint8Arr.byteLength + arrowIPCUint8Arr.byteOffset\n );\n const arrowTable = tableFromIPC(arrowIPCBuffer);\n return arrowTable;\n}\n\n/**\n * Deserialize the IPC format into a {@link Table}. This function is a\n * convenience wrapper for {@link RecordBatchReader}. Opposite of {@link tableToIPC}.\n */\nfunction tableFromIPC(input: ArrayBuffer): Table {\n const reader = RecordBatchStreamReader.from(input);\n const recordBatches: RecordBatch[] = [];\n for (const recordBatch of reader) {\n recordBatches.push(recordBatch);\n }\n return new Table(recordBatches);\n}\n"],"mappings":";;;;;;AAGA,IAAAA,YAAA,GAAAC,OAAA;AACA,IAAAC,aAAA,GAAAD,OAAA;AASO,eAAeE,YAAYA,CAChCC,WAAwB,EACxBC,OAA8B,EACd;EAAA,IAAAC,gBAAA;EAChB,MAAMC,OAAO,GAAGF,OAAO,aAAPA,OAAO,wBAAAC,gBAAA,GAAPD,OAAO,CAAEG,OAAO,cAAAF,gBAAA,uBAAhBA,gBAAA,CAAkBC,OAAO;EACzC,MAAME,IAAI,GAAG,MAAM,IAAAC,sBAAQ,EAACH,OAAO,CAAC;EAEpC,MAAMI,GAAG,GAAG,IAAIC,UAAU,CAACR,WAAW,CAAC;EACvC,MAAMS,gBAAgB,GAAGJ,IAAI,CAACK,WAAW,CAACH,GAAG,CAAC;EAC9C,MAAMI,cAAc,GAAGF,gBAAgB,CAACG,MAAM,CAACC,KAAK,CAClDJ,gBAAgB,CAACK,UAAU,EAC3BL,gBAAgB,CAACM,UAAU,GAAGN,gBAAgB,CAACK,UACjD,CAAC;EACD,MAAME,UAAU,GAAGC,YAAY,CAACN,cAAc,CAAC;EAC/C,OAAOK,UAAU;AACnB;AAMA,SAASC,YAAYA,CAACC,KAAkB,EAAS;EAC/C,MAAMC,MAAM,GAAGC,oCAAuB,CAACC,IAAI,CAACH,KAAK,CAAC;EAClD,MAAMI,aAA4B,GAAG,EAAE;EACvC,KAAK,MAAMC,WAAW,IAAIJ,MAAM,EAAE;IAChCG,aAAa,CAACE,IAAI,CAACD,WAAW,CAAC;EACjC;EACA,OAAO,IAAIE,kBAAK,CAACH,aAAa,CAAC;AACjC"}

package/dist/es5/parquet-loader.js
@@ -4,8 +4,8 @@ Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports._typecheckParquetLoader = exports.ParquetLoader = void 0;
-
-
+const VERSION = typeof "3.4.15" !== 'undefined' ? "3.4.15" : 'latest';
+const DEFAULT_PARQUET_LOADER_OPTIONS = {
 parquet: {
 type: 'object-row-table',
 url: undefined,
@@ -13,7 +13,7 @@ var DEFAULT_PARQUET_LOADER_OPTIONS = {
 geoparquet: true
 }
 };
-
+const ParquetLoader = {
 name: 'Apache Parquet',
 id: 'parquet',
 module: 'parquet',
@@ -27,6 +27,6 @@ var ParquetLoader = {
 options: DEFAULT_PARQUET_LOADER_OPTIONS
 };
 exports.ParquetLoader = ParquetLoader;
-
+const _typecheckParquetLoader = ParquetLoader;
 exports._typecheckParquetLoader = _typecheckParquetLoader;
 //# sourceMappingURL=parquet-loader.js.map

package/dist/es5/parquet-loader.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parquet-loader.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","url","undefined","columnList","geoparquet","ParquetLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","exports","_typecheckParquetLoader"],"sources":["../../src/parquet-loader.ts"],"sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'object-row-table';\n url?: string;\n columnList?: string[] | string[][];\n geoparquet?: boolean;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'object-row-table',\n url: undefined,\n columnList: [],\n geoparquet: true\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetLoader;\n"],"mappings":";;;;;;AAIA,
+
{"version":3,"file":"parquet-loader.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","url","undefined","columnList","geoparquet","ParquetLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","exports","_typecheckParquetLoader"],"sources":["../../src/parquet-loader.ts"],"sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'object-row-table';\n url?: string;\n columnList?: string[] | string[][];\n geoparquet?: boolean;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'object-row-table',\n url: undefined,\n columnList: [],\n geoparquet: true\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetLoader;\n"],"mappings":";;;;;;AAIA,MAAMA,OAAO,GAAG,eAAkB,KAAK,WAAW,cAAiB,QAAQ;AAW3E,MAAMC,8BAAoD,GAAG;EAC3DC,OAAO,EAAE;IACPC,IAAI,EAAE,kBAAkB;IACxBC,GAAG,EAAEC,SAAS;IACdC,UAAU,EAAE,EAAE;IACdC,UAAU,EAAE;EACd;AACF,CAAC;AAGM,MAAMC,aAAa,GAAG;EAC3BC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,SAAS;EACbC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAEZ,OAAO;EAChBa,MAAM,EAAE,IAAI;EACZC,QAAQ,EAAE,OAAO;EACjBC,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,MAAM,EAAE,IAAI;EACZC,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;EACvBC,OAAO,EAAElB;AACX,CAAC;AAACmB,OAAA,CAAAZ,aAAA,GAAAA,aAAA;AAEK,MAAMa,uBAA+B,GAAGb,aAAa;AAACY,OAAA,CAAAC,uBAAA,GAAAA,uBAAA"}

package/dist/es5/parquet-wasm-loader.js
@@ -4,14 +4,14 @@ Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports._typecheckParquetLoader = exports.ParquetWasmLoader = void 0;
-
-
+const VERSION = typeof "3.4.15" !== 'undefined' ? "3.4.15" : 'latest';
+const DEFAULT_PARQUET_LOADER_OPTIONS = {
 parquet: {
 type: 'arrow-table',
 wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'
 }
 };
-
+const ParquetWasmLoader = {
 name: 'Apache Parquet',
 id: 'parquet-wasm',
 module: 'parquet',
@@ -25,6 +25,6 @@ var ParquetWasmLoader = {
 options: DEFAULT_PARQUET_LOADER_OPTIONS
 };
 exports.ParquetWasmLoader = ParquetWasmLoader;
-
+const _typecheckParquetLoader = ParquetWasmLoader;
 exports._typecheckParquetLoader = _typecheckParquetLoader;
 //# sourceMappingURL=parquet-wasm-loader.js.map

package/dist/es5/parquet-wasm-loader.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parquet-wasm-loader.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","wasmUrl","ParquetWasmLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","exports","_typecheckParquetLoader"],"sources":["../../src/parquet-wasm-loader.ts"],"sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'arrow-table';\n wasmUrl?: string;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'arrow-table',\n wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetWasmLoader = {\n name: 'Apache Parquet',\n id: 'parquet-wasm',\n module: 'parquet',\n version: VERSION,\n worker: false,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetWasmLoader;\n"],"mappings":";;;;;;AAIA,
+
{"version":3,"file":"parquet-wasm-loader.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","wasmUrl","ParquetWasmLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","exports","_typecheckParquetLoader"],"sources":["../../src/parquet-wasm-loader.ts"],"sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'arrow-table';\n wasmUrl?: string;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'arrow-table',\n wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetWasmLoader = {\n name: 'Apache Parquet',\n id: 'parquet-wasm',\n module: 'parquet',\n version: VERSION,\n worker: false,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetWasmLoader;\n"],"mappings":";;;;;;AAIA,MAAMA,OAAO,GAAG,eAAkB,KAAK,WAAW,cAAiB,QAAQ;AAS3E,MAAMC,8BAAoD,GAAG;EAC3DC,OAAO,EAAE;IACPC,IAAI,EAAE,aAAa;IACnBC,OAAO,EAAE;EACX;AACF,CAAC;AAGM,MAAMC,iBAAiB,GAAG;EAC/BC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,cAAc;EAClBC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAET,OAAO;EAChBU,MAAM,EAAE,KAAK;EACbC,QAAQ,EAAE,OAAO;EACjBC,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,MAAM,EAAE,IAAI;EACZC,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;EACvBC,OAAO,EAAEf;AACX,CAAC;AAACgB,OAAA,CAAAZ,iBAAA,GAAAA,iBAAA;AAEK,MAAMa,uBAA+B,GAAGb,iBAAiB;AAACY,OAAA,CAAAC,uBAAA,GAAAA,uBAAA"}

package/dist/es5/parquet-wasm-writer.js
@@ -5,13 +5,13 @@ Object.defineProperty(exports, "__esModule", {
 });
 exports.ParquetWasmWriter = void 0;
 var _encodeParquetWasm = require("./lib/wasm/encode-parquet-wasm");
-
-
+const VERSION = typeof "3.4.15" !== 'undefined' ? "3.4.15" : 'latest';
+const DEFAULT_PARQUET_WRITER_OPTIONS = {
 parquet: {
 wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'
 }
 };
-
+const ParquetWasmWriter = {
 name: 'Apache Parquet',
 id: 'parquet-wasm',
 module: 'parquet',

package/dist/es5/parquet-wasm-writer.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parquet-wasm-writer.js","names":["_encodeParquetWasm","require","VERSION","DEFAULT_PARQUET_WRITER_OPTIONS","parquet","wasmUrl","ParquetWasmWriter","name","id","module","version","extensions","mimeTypes","encode","binary","options","exports"],"sources":["../../src/parquet-wasm-writer.ts"],"sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\nimport {encode, ParquetWriterOptions} from './lib/wasm/encode-parquet-wasm';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nconst DEFAULT_PARQUET_WRITER_OPTIONS: ParquetWriterOptions = {\n parquet: {\n wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'\n }\n};\n\nexport const ParquetWasmWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet-wasm',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encode,\n binary: true,\n options: DEFAULT_PARQUET_WRITER_OPTIONS\n};\n"],"mappings":";;;;;;AACA,IAAAA,kBAAA,GAAAC,OAAA;AAIA,
+
{"version":3,"file":"parquet-wasm-writer.js","names":["_encodeParquetWasm","require","VERSION","DEFAULT_PARQUET_WRITER_OPTIONS","parquet","wasmUrl","ParquetWasmWriter","name","id","module","version","extensions","mimeTypes","encode","binary","options","exports"],"sources":["../../src/parquet-wasm-writer.ts"],"sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\nimport {encode, ParquetWriterOptions} from './lib/wasm/encode-parquet-wasm';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nconst DEFAULT_PARQUET_WRITER_OPTIONS: ParquetWriterOptions = {\n parquet: {\n wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'\n }\n};\n\nexport const ParquetWasmWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet-wasm',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encode,\n binary: true,\n options: DEFAULT_PARQUET_WRITER_OPTIONS\n};\n"],"mappings":";;;;;;AACA,IAAAA,kBAAA,GAAAC,OAAA;AAIA,MAAMC,OAAO,GAAG,eAAkB,KAAK,WAAW,cAAiB,QAAQ;AAE3E,MAAMC,8BAAoD,GAAG;EAC3DC,OAAO,EAAE;IACPC,OAAO,EAAE;EACX;AACF,CAAC;AAEM,MAAMC,iBAAyB,GAAG;EACvCC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,cAAc;EAClBC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAER,OAAO;EAChBS,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,MAAM,EAANA,yBAAM;EACNC,MAAM,EAAE,IAAI;EACZC,OAAO,EAAEZ;AACX,CAAC;AAACa,OAAA,CAAAV,iBAAA,GAAAA,iBAAA"}

package/dist/es5/parquet-writer.js
@@ -4,16 +4,16 @@ Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.ParquetWriter = void 0;
-
-
-
+const VERSION = typeof "3.4.15" !== 'undefined' ? "3.4.15" : 'latest';
+const DEFAULT_PARQUET_LOADER_OPTIONS = {};
+const ParquetWriter = {
 name: 'Apache Parquet',
 id: 'parquet',
 module: 'parquet',
 version: VERSION,
 extensions: ['parquet'],
 mimeTypes: ['application/octet-stream'],
-encodeSync
+encodeSync,
 binary: true,
 options: DEFAULT_PARQUET_LOADER_OPTIONS
 };

package/dist/es5/parquet-writer.js.map
@@ -1 +1 @@
-
{"version":3,"file":"parquet-writer.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","ParquetWriter","name","id","module","version","extensions","mimeTypes","encodeSync","binary","options","exports","data","ArrayBuffer"],"sources":["../../src/parquet-writer.ts"],"sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetWriterOptions = {};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS = {};\n\nexport const ParquetWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encodeSync,\n binary: true,\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nfunction encodeSync(data, options?: ParquetWriterOptions) {\n return new ArrayBuffer(0);\n}\n"],"mappings":";;;;;;AAIA,
+
{"version":3,"file":"parquet-writer.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","ParquetWriter","name","id","module","version","extensions","mimeTypes","encodeSync","binary","options","exports","data","ArrayBuffer"],"sources":["../../src/parquet-writer.ts"],"sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetWriterOptions = {};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS = {};\n\nexport const ParquetWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encodeSync,\n binary: true,\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nfunction encodeSync(data, options?: ParquetWriterOptions) {\n return new ArrayBuffer(0);\n}\n"],"mappings":";;;;;;AAIA,MAAMA,OAAO,GAAG,eAAkB,KAAK,WAAW,cAAiB,QAAQ;AAI3E,MAAMC,8BAA8B,GAAG,CAAC,CAAC;AAElC,MAAMC,aAAqB,GAAG;EACnCC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,SAAS;EACbC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAEN,OAAO;EAChBO,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,UAAU;EACVC,MAAM,EAAE,IAAI;EACZC,OAAO,EAAEV;AACX,CAAC;AAACW,OAAA,CAAAV,aAAA,GAAAA,aAAA;AAEF,SAASO,UAAUA,CAACI,IAAI,EAAEF,OAA8B,EAAE;EACxD,OAAO,IAAIG,WAAW,CAAC,CAAC,CAAC;AAC3B"}

package/dist/es5/parquetjs/codecs/dictionary.js
@@ -1,21 +1,18 @@
 "use strict";
 
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
 Object.defineProperty(exports, "__esModule", {
 value: true
 });
 exports.decodeValues = decodeValues;
 exports.encodeValues = encodeValues;
-var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
 var _rle = require("./rle");
-function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
-function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
 function decodeValues(type, cursor, count, opts) {
 opts.bitWidth = cursor.buffer.slice(cursor.offset, cursor.offset + 1).readInt8(0);
 cursor.offset += 1;
-return (0, _rle.decodeValues)(type, cursor, count,
+return (0, _rle.decodeValues)(type, cursor, count, {
+...opts,
 disableEnvelope: true
-})
+});
 }
 function encodeValues(type, cursor, count, opts) {
 throw new Error('Encode dictionary functionality is not supported');

package/dist/es5/parquetjs/codecs/dictionary.js.map
@@ -1 +1 @@
-
{"version":3,"file":"dictionary.js","names":["_rle","require","
+
{"version":3,"file":"dictionary.js","names":["_rle","require","decodeValues","type","cursor","count","opts","bitWidth","buffer","slice","offset","readInt8","decodeRleValues","disableEnvelope","encodeValues","Error"],"sources":["../../../../src/parquetjs/codecs/dictionary.ts"],"sourcesContent":["import {decodeValues as decodeRleValues} from './rle';\n\nexport function decodeValues(type, cursor, count, opts) {\n opts.bitWidth = cursor.buffer.slice(cursor.offset, cursor.offset + 1).readInt8(0);\n cursor.offset += 1;\n return decodeRleValues(type, cursor, count, {...opts, disableEnvelope: true});\n}\n\nexport function encodeValues(type, cursor, count, opts) {\n throw new Error('Encode dictionary functionality is not supported');\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,IAAA,GAAAC,OAAA;AAEO,SAASC,YAAYA,CAACC,IAAI,EAAEC,MAAM,EAAEC,KAAK,EAAEC,IAAI,EAAE;EACtDA,IAAI,CAACC,QAAQ,GAAGH,MAAM,CAACI,MAAM,CAACC,KAAK,CAACL,MAAM,CAACM,MAAM,EAAEN,MAAM,CAACM,MAAM,GAAG,CAAC,CAAC,CAACC,QAAQ,CAAC,CAAC,CAAC;EACjFP,MAAM,CAACM,MAAM,IAAI,CAAC;EAClB,OAAO,IAAAE,iBAAe,EAACT,IAAI,EAAEC,MAAM,EAAEC,KAAK,EAAE;IAAC,GAAGC,IAAI;IAAEO,eAAe,EAAE;EAAI,CAAC,CAAC;AAC/E;AAEO,SAASC,YAAYA,CAACX,IAAI,EAAEC,MAAM,EAAEC,KAAK,EAAEC,IAAI,EAAE;EACtD,MAAM,IAAIS,KAAK,CAAC,kDAAkD,CAAC;AACrE"}

package/dist/es5/parquetjs/codecs/index.js
@@ -1,6 +1,5 @@
 "use strict";
 
-var _typeof = require("@babel/runtime/helpers/typeof");
 Object.defineProperty(exports, "__esModule", {
 value: true
 });
@@ -18,14 +17,14 @@ Object.keys(_declare).forEach(function (key) {
 if (key in exports && exports[key] === _declare[key]) return;
 Object.defineProperty(exports, key, {
 enumerable: true,
-get: function
+get: function () {
 return _declare[key];
 }
 });
 });
-function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function
-function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null ||
-
+function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
+function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
+const PARQUET_CODECS = {
 PLAIN: {
 encodeValues: PLAIN.encodeValues,
 decodeValues: PLAIN.decodeValues

package/dist/es5/parquetjs/codecs/index.js.map
@@ -1 +1 @@
-
{"version":3,"file":"index.js","names":["PLAIN","_interopRequireWildcard","require","RLE","DICTIONARY","_declare","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_getRequireWildcardCache","nodeInterop","WeakMap","cacheBabelInterop","cacheNodeInterop","obj","__esModule","
+
{"version":3,"file":"index.js","names":["PLAIN","_interopRequireWildcard","require","RLE","DICTIONARY","_declare","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_getRequireWildcardCache","nodeInterop","WeakMap","cacheBabelInterop","cacheNodeInterop","obj","__esModule","default","cache","has","newObj","hasPropertyDescriptor","getOwnPropertyDescriptor","desc","set","PARQUET_CODECS","encodeValues","decodeValues","PLAIN_DICTIONARY","RLE_DICTIONARY"],"sources":["../../../../src/parquetjs/codecs/index.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport type {ParquetCodec} from '../schema/declare';\nimport type {ParquetCodecKit} from './declare';\nimport * as PLAIN from './plain';\nimport * as RLE from './rle';\nimport * as DICTIONARY from './dictionary';\n\nexport * from './declare';\n\nexport const PARQUET_CODECS: Record<ParquetCodec, ParquetCodecKit> = {\n PLAIN: {\n encodeValues: PLAIN.encodeValues,\n decodeValues: PLAIN.decodeValues\n },\n RLE: {\n encodeValues: RLE.encodeValues,\n decodeValues: RLE.decodeValues\n },\n // Using the PLAIN_DICTIONARY enum value is deprecated in the Parquet 2.0 specification.\n PLAIN_DICTIONARY: {\n // @ts-ignore\n encodeValues: DICTIONARY.encodeValues,\n decodeValues: DICTIONARY.decodeValues\n },\n // Prefer using RLE_DICTIONARY in a data page and PLAIN in a dictionary page for Parquet 2.0+ files.\n RLE_DICTIONARY: {\n // @ts-ignore\n encodeValues: DICTIONARY.encodeValues,\n decodeValues: DICTIONARY.decodeValues\n }\n};\n"],"mappings":";;;;;;;;;AAGA,IAAAA,KAAA,GAAAC,uBAAA,CAAAC,OAAA;AACA,IAAAC,GAAA,GAAAF,uBAAA,CAAAC,OAAA;AACA,IAAAE,UAAA,GAAAH,uBAAA,CAAAC,OAAA;AAEA,IAAAG,QAAA,GAAAH,OAAA;AAAAI,MAAA,CAAAC,IAAA,CAAAF,QAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,QAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,QAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAA0B,SAAAS,yBAAAC,WAAA,eAAAC,OAAA,kCAAAC,iBAAA,OAAAD,OAAA,QAAAE,gBAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,WAAA,WAAAA,WAAA,GAAAG,gBAAA,GAAAD,iBAAA,KAAAF,WAAA;AAAA,SAAAlB,wBAAAsB,GAAA,EAAAJ,WAAA,SAAAA,WAAA,IAAAI,GAAA,IAAAA,GAAA,CAAAC,UAAA,WAAAD,GAAA,QAAAA,GAAA,oBAAAA,GAAA,wBAAAA,GAAA,4BAAAE,OAAA,EAAAF,GAAA,UAAAG,KAAA,GAAAR,wBAAA,CAAAC,WAAA,OAAAO,KAAA,IAAAA,KAAA,CAAAC,GAAA,CAAAJ,GAAA,YAAAG,KAAA,CAAAT,GAAA,CAAAM,GAAA,SAAAK,MAAA,WAAAC,qBAAA,GAAAvB,MAAA,CAAAS,cAAA,IAAAT,MAAA,CAAAwB,wBAAA,WAAArB,GAAA,IAAAc,GAAA,QAAAd,GAAA,kBAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAW,GAAA,EAAAd,GAAA,SAAAsB,IAAA,GAAAF,qBAAA,GAAAvB,MAAA,CAAAwB,wBAAA,CAAAP,GAAA,EAAAd,GAAA,cAAAsB,IAAA,KAAAA,IAAA,CAAAd,GAAA,IAAAc,IAAA,CAAAC,GAAA,KAAA1B,MAAA,CAAAS,cAAA,CAAAa,MAAA,EAAAnB,GAAA,EAAAsB,IAAA,YAAAH,MAAA,CAAAnB,GAAA,IAAAc,GAAA,CAAAd,GAAA,SAAAmB,MAAA,CAAAH,OAAA,GAAAF,GAAA,MAAAG,KAAA,IAAAA,KAAA,CAAAM,GAAA,CAAAT,GAAA,EAAAK,MAAA,YAAAA,MAAA;AAEnB,MAAMK,cAAqD,GAAG;EACnEjC,KAAK,EAAE;IACLkC,YAAY,EAAElC,KAAK,CAACkC,YAAY;IAChCC,YAAY,EAAEnC,KAAK,CAACmC;EACtB,CAAC;EACDhC,GAAG,EAAE;IACH+B,YAAY,EAAE/B,GAAG,CAAC+B,YAAY;IAC9BC,YAAY,EAAEhC,GAAG,CAACgC;EACpB,CAAC;EAEDC,gBAAgB,EAAE;IAEhBF,YAAY,EAAE9B,UAAU,CAAC8B,YAAY;IACrCC,YAAY,EAAE/B,UAAU,CAAC+B;EAC3B,CAAC;EAEDE,cAAc,EAAE;IAEdH,YAAY,EAAE9B,UAAU,CAAC8B,YAAY;IACrCC,YAAY,EAAE/B,UAAU,CAAC+B;EAC3B;AACF,CAAC;AAACrB,OAAA,CAAAmB,cAAA,GAAAA,cAAA"}

package/dist/es5/parquetjs/codecs/plain.js
@@ -52,9 +52,9 @@ function decodeValues(type, cursor, count, opts) {
 }
 }
 function encodeValues_BOOLEAN(values) {
-
+const buf = Buffer.alloc(Math.ceil(values.length / 8));
 buf.fill(0);
-for (
+for (let i = 0; i < values.length; i++) {
 if (values[i]) {
 buf[Math.floor(i / 8)] |= 1 << i % 8;
 }
@@ -62,47 +62,47 @@ function encodeValues_BOOLEAN(values) {
 return buf;
 }
 function decodeValues_BOOLEAN(cursor, count) {
-
-for (
-
+const values = [];
+for (let i = 0; i < count; i++) {
+const b = cursor.buffer[cursor.offset + Math.floor(i / 8)];
 values.push((b & 1 << i % 8) > 0);
 }
 cursor.offset += Math.ceil(count / 8);
 return values;
 }
 function encodeValues_INT32(values) {
-
-for (
+const buf = Buffer.alloc(4 * values.length);
+for (let i = 0; i < values.length; i++) {
 buf.writeInt32LE(values[i], i * 4);
 }
 return buf;
 }
 function decodeValues_INT32(cursor, count) {
-
-for (
+const values = [];
+for (let i = 0; i < count; i++) {
 values.push(cursor.buffer.readInt32LE(cursor.offset));
 cursor.offset += 4;
 }
 return values;
 }
 function encodeValues_INT64(values) {
-
-for (
+const buf = Buffer.alloc(8 * values.length);
+for (let i = 0; i < values.length; i++) {
 _int.default.writeInt64LE(values[i], buf, i * 8);
 }
 return buf;
 }
 function decodeValues_INT64(cursor, count) {
-
-for (
+const values = [];
+for (let i = 0; i < count; i++) {
 values.push(_int.default.readInt64LE(cursor.buffer, cursor.offset));
 cursor.offset += 8;
 }
 return values;
 }
 function encodeValues_INT96(values) {
-
-for (
+const buf = Buffer.alloc(12 * values.length);
+for (let i = 0; i < values.length; i++) {
 if (values[i] >= 0) {
 _int.default.writeInt64LE(values[i], buf, i * 12);
 buf.writeUInt32LE(0, i * 12 + 8);
@@ -114,10 +114,10 @@ function encodeValues_INT96(values) {
 return buf;
 }
 function decodeValues_INT96(cursor, count) {
-
-for (
-
-
+const values = [];
+for (let i = 0; i < count; i++) {
+const low = _int.default.readInt64LE(cursor.buffer, cursor.offset);
+const high = cursor.buffer.readUInt32LE(cursor.offset + 8);
 if (high === 0xffffffff) {
 values.push(~-low + 1);
 } else {
@@ -128,54 +128,54 @@ function decodeValues_INT96(cursor, count) {
 return values;
 }
 function encodeValues_FLOAT(values) {
-
-for (
+const buf = Buffer.alloc(4 * values.length);
+for (let i = 0; i < values.length; i++) {
 buf.writeFloatLE(values[i], i * 4);
 }
 return buf;
 }
 function decodeValues_FLOAT(cursor, count) {
-
-for (
+const values = [];
+for (let i = 0; i < count; i++) {
 values.push(cursor.buffer.readFloatLE(cursor.offset));
 cursor.offset += 4;
 }
 return values;
 }
 function encodeValues_DOUBLE(values) {
-
-for (
+const buf = Buffer.alloc(8 * values.length);
+for (let i = 0; i < values.length; i++) {
 buf.writeDoubleLE(values[i], i * 8);
 }
 return buf;
 }
 function decodeValues_DOUBLE(cursor, count) {
-
-for (
+const values = [];
+for (let i = 0; i < count; i++) {
 values.push(cursor.buffer.readDoubleLE(cursor.offset));
 cursor.offset += 8;
 }
 return values;
 }
 function encodeValues_BYTE_ARRAY(values) {
-
-for (
+let buf_len = 0;
+for (let i = 0; i < values.length; i++) {
 values[i] = Buffer.from(values[i]);
 buf_len += 4 + values[i].length;
 }
-
-
-for (
-buf.writeUInt32LE(values[
-values[
-buf_pos += 4 + values[
+const buf = Buffer.alloc(buf_len);
+let buf_pos = 0;
+for (let i = 0; i < values.length; i++) {
+buf.writeUInt32LE(values[i].length, buf_pos);
+values[i].copy(buf, buf_pos + 4);
+buf_pos += 4 + values[i].length;
 }
 return buf;
 }
 function decodeValues_BYTE_ARRAY(cursor, count) {
-
-for (
-
+const values = [];
+for (let i = 0; i < count; i++) {
+const len = cursor.buffer.readUInt32LE(cursor.offset);
 cursor.offset += 4;
 values.push(cursor.buffer.slice(cursor.offset, cursor.offset + len));
 cursor.offset += len;
@@ -186,7 +186,7 @@ function encodeValues_FIXED_LEN_BYTE_ARRAY(values, opts) {
 if (!opts.typeLength) {
 throw new Error('missing option: typeLength (required for FIXED_LEN_BYTE_ARRAY)');
 }
-for (
+for (let i = 0; i < values.length; i++) {
 values[i] = Buffer.from(values[i]);
 if (values[i].length !== opts.typeLength) {
 throw new Error("invalid value for FIXED_LEN_BYTE_ARRAY: ".concat(values[i]));
@@ -195,11 +195,11 @@ function encodeValues_FIXED_LEN_BYTE_ARRAY(values, opts) {
 return Buffer.concat(values);
 }
 function decodeValues_FIXED_LEN_BYTE_ARRAY(cursor, count, opts) {
-
+const values = [];
 if (!opts.typeLength) {
 throw new Error('missing option: typeLength (required for FIXED_LEN_BYTE_ARRAY)');
 }
-for (
+for (let i = 0; i < count; i++) {
 values.push(cursor.buffer.slice(cursor.offset, cursor.offset + opts.typeLength));
 cursor.offset += opts.typeLength;
 }
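
All of the plain.js changes are mechanical (const/let declarations and fully expanded for loops replacing the previous var-hoisted output), so the PLAIN codec's wire behavior is unchanged. As a self-contained illustration of the BOOLEAN case above, which packs bit i of the stream into byte floor(i / 8) at bit position i % 8 (least significant bit first); this mirrors encodeValues_BOOLEAN/decodeValues_BOOLEAN but is not an export of the package:

    import {Buffer} from 'buffer';

    function packBooleans(values: boolean[]): Buffer {
      const buf = Buffer.alloc(Math.ceil(values.length / 8)); // zero-filled by alloc
      for (let i = 0; i < values.length; i++) {
        if (values[i]) {
          buf[Math.floor(i / 8)] |= 1 << i % 8;
        }
      }
      return buf;
    }

    function unpackBooleans(buf: Buffer, count: number): boolean[] {
      const values: boolean[] = [];
      for (let i = 0; i < count; i++) {
        values.push((buf[Math.floor(i / 8)] & (1 << i % 8)) > 0);
      }
      return values;
    }

    // packBooleans([true, false, true]) yields <Buffer 05>; unpackBooleans(buf, 3) round-trips it.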