@loaders.gl/parquet 3.3.0-alpha.8 → 3.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +17 -26
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/index.js +3 -3
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/convert-schema-deep.ts.disabled +910 -0
- package/dist/es5/lib/parse-parquet.js +49 -25
- package/dist/es5/lib/parse-parquet.js.map +1 -1
- package/dist/es5/parquet-loader.js +3 -2
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -1
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +15 -5
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/encoder/{writer.js → parquet-encoder.js} +70 -158
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/es5/parquetjs/parser/parquet-reader.js +553 -222
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js +3 -1
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +39 -33
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/file-utils.js +2 -3
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/esm/index.js +1 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/convert-schema-deep.ts.disabled +910 -0
- package/dist/esm/lib/parse-parquet.js +6 -12
- package/dist/esm/lib/parse-parquet.js.map +1 -1
- package/dist/esm/parquet-loader.js +3 -2
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -1
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -1
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/compression.js +10 -1
- package/dist/esm/parquetjs/compression.js.map +1 -1
- package/dist/esm/parquetjs/encoder/{writer.js → parquet-encoder.js} +7 -37
- package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/esm/parquetjs/parser/parquet-reader.js +158 -72
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/esm/parquetjs/schema/declare.js +1 -0
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/schema/shred.js +42 -34
- package/dist/esm/parquetjs/schema/shred.js.map +1 -1
- package/dist/esm/parquetjs/schema/types.js.map +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +3 -4
- package/dist/lib/parse-parquet.d.ts +2 -2
- package/dist/lib/parse-parquet.d.ts.map +1 -1
- package/dist/lib/parse-parquet.js +24 -12
- package/dist/parquet-loader.d.ts +1 -0
- package/dist/parquet-loader.d.ts.map +1 -1
- package/dist/parquet-loader.js +2 -1
- package/dist/parquet-worker.js +15 -24
- package/dist/parquet-worker.js.map +3 -3
- package/dist/parquetjs/compression.d.ts.map +1 -1
- package/dist/parquetjs/compression.js +16 -5
- package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +10 -19
- package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
- package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js} +39 -37
- package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.js +168 -102
- package/dist/parquetjs/schema/declare.d.ts +14 -7
- package/dist/parquetjs/schema/declare.d.ts.map +1 -1
- package/dist/parquetjs/schema/declare.js +2 -0
- package/dist/parquetjs/schema/shred.d.ts +115 -0
- package/dist/parquetjs/schema/shred.d.ts.map +1 -1
- package/dist/parquetjs/schema/shred.js +161 -43
- package/dist/parquetjs/schema/types.d.ts +2 -2
- package/dist/parquetjs/schema/types.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.d.ts +3 -4
- package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.js +2 -5
- package/package.json +7 -5
- package/src/index.ts +2 -2
- package/src/lib/convert-schema-deep.ts.disabled +910 -0
- package/src/lib/parse-parquet.ts +25 -12
- package/src/parquet-loader.ts +3 -1
- package/src/parquetjs/compression.ts +14 -1
- package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +22 -28
- package/src/parquetjs/parser/parquet-reader.ts +239 -122
- package/src/parquetjs/schema/declare.ts +17 -9
- package/src/parquetjs/schema/shred.ts +157 -28
- package/src/parquetjs/schema/types.ts +21 -27
- package/src/parquetjs/utils/file-utils.ts +3 -4
- package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
- package/dist/es5/parquetjs/file.js +0 -94
- package/dist/es5/parquetjs/file.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -183
- package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -327
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/es5/parquetjs/utils/buffer-utils.js +0 -19
- package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
- package/dist/esm/parquetjs/file.js +0 -81
- package/dist/esm/parquetjs/file.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -78
- package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -129
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/esm/parquetjs/utils/buffer-utils.js +0 -13
- package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
- package/dist/parquetjs/file.d.ts +0 -10
- package/dist/parquetjs/file.d.ts.map +0 -1
- package/dist/parquetjs/file.js +0 -99
- package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
- package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-cursor.js +0 -74
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -136
- package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
- package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
- package/dist/parquetjs/utils/buffer-utils.js +0 -22
- package/src/parquetjs/file.ts +0 -90
- package/src/parquetjs/parser/parquet-cursor.ts +0 -94
- package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
- package/src/parquetjs/utils/buffer-utils.ts +0 -18

@@ -1,134 +1,200 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ParquetReader = void 0;
-// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
-const parquet_envelope_reader_1 = require("./parquet-envelope-reader");
 const schema_1 = require("../schema/schema");
-const parquet_cursor_1 = require("./parquet-cursor");
-const constants_1 = require("../../constants");
 const decoders_1 = require("./decoders");
+const shred_1 = require("../schema/shred");
+const constants_1 = require("../../constants");
+const parquet_thrift_1 = require("../parquet-thrift");
+const read_utils_1 = require("../utils/read-utils");
+const decoders_2 = require("./decoders");
+const DEFAULT_PROPS = {
+  defaultDictionarySize: 1e6
+};
 /**
- *
- *
- *
- *
- * avoid leaking file descriptors.
+ * The parquet envelope reader allows direct, unbuffered access to the individual
+ * sections of the parquet file, namely the header, footer and the row groups.
+ * This class is intended for advanced/internal users; if you just want to retrieve
+ * rows from a parquet file use the ParquetReader instead
  */
 class ParquetReader {
-
-
-
-
-    const readFn = async (start, length) => {
-      const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();
-      return Buffer.from(arrayBuffer);
-    };
-    const closeFn = async () => { };
-    const size = blob.size;
-    const envelopeReader = new parquet_envelope_reader_1.ParquetEnvelopeReader(readFn, closeFn, size);
-    try {
-      await envelopeReader.readHeader();
-      const metadata = await envelopeReader.readFooter();
-      return new ParquetReader(metadata, envelopeReader);
-    }
-    catch (err) {
-      await envelopeReader.close();
-      throw err;
-    }
+  constructor(file, props) {
+    this.metadata = null;
+    this.file = file;
+    this.props = { ...DEFAULT_PROPS, ...props };
   }
-
-
-
-
-
-
-
-    const
-
-
-
-
-    }
-    catch (err) {
-      await envelopeReader.close();
-      throw err;
+  close() {
+    // eslint-disable-next-line @typescript-eslint/no-floating-promises
+    this.file.close();
+  }
+  // HIGH LEVEL METHODS
+  /** Yield one row at a time */
+  async *rowIterator(props) {
+    for await (const rows of this.rowBatchIterator(props)) {
+      // yield *rows
+      for (const row of rows) {
+        yield row;
+      }
     }
   }
-
-
-
-
-
-    return new ParquetReader(metadata, envelopeReader);
+  /** Yield one batch of rows at a time */
+  async *rowBatchIterator(props) {
+    const schema = await this.getSchema();
+    for await (const rowGroup of this.rowGroupIterator(props)) {
+      yield (0, shred_1.materializeRecords)(schema, rowGroup);
     }
-
-
-
+  }
+  /** Iterate over the raw row groups */
+  async *rowGroupIterator(props) {
+    // Ensure strings are nested in arrays
+    const columnList = (props?.columnList || []).map((x) => Array.isArray(x) ? x : [x]);
+    const metadata = await this.getFileMetadata();
+    const schema = await this.getSchema();
+    const rowGroupCount = metadata?.row_groups.length || 0;
+    for (let rowGroupIndex = 0; rowGroupIndex < rowGroupCount; rowGroupIndex++) {
+      const rowGroup = await this.readRowGroup(schema, metadata.row_groups[rowGroupIndex], columnList);
+      yield rowGroup;
     }
   }
+  async getRowCount() {
+    const metadata = await this.getFileMetadata();
+    return Number(metadata.num_rows);
+  }
+  async getSchema() {
+    const metadata = await this.getFileMetadata();
+    const root = metadata.schema[0];
+    const { schema: schemaDefinition } = (0, decoders_1.decodeSchema)(metadata.schema, 1, root.num_children);
+    const schema = new schema_1.ParquetSchema(schemaDefinition);
+    return schema;
+  }
   /**
-   *
-   *
-   * and internal use cases. Consider using one of the open{File,Buffer} methods
-   * instead
+   * Returns the user (key/value) metadata for this file
+   * In parquet this is not stored on the schema like it is in arrow
    */
-
-
-
+  async getSchemaMetadata() {
+    const metadata = await this.getFileMetadata();
+    const md = {};
+    for (const kv of metadata.key_value_metadata) {
+      md[kv.key] = kv.value;
     }
-
-    this.envelopeReader = envelopeReader;
-    const root = this.metadata.schema[0];
-    const { schema } = (0, decoders_1.decodeSchema)(this.metadata.schema, 1, root.num_children);
-    this.schema = new schema_1.ParquetSchema(schema);
+    return md;
   }
-
-
-
-
-
-
-    // this.envelopeReader = null;
-    // this.metadata = null;
+  async getFileMetadata() {
+    if (!this.metadata) {
+      await this.readHeader();
+      this.metadata = this.readFooter();
+    }
+    return this.metadata;
   }
-
-
-
-
+  // LOW LEVEL METHODS
+  /** Metadata is stored in the footer */
+  async readHeader() {
+    const buffer = await this.file.read(0, constants_1.PARQUET_MAGIC.length);
+    const magic = buffer.toString();
+    switch (magic) {
+      case constants_1.PARQUET_MAGIC:
+        break;
+      case constants_1.PARQUET_MAGIC_ENCRYPTED:
+        throw new Error('Encrypted parquet file not supported');
+      default:
+        throw new Error(`Invalid parquet file (magic=${magic})`);
     }
-    // tslint:disable-next-line:no-parameter-reassignment
-    columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));
-    return new parquet_cursor_1.ParquetCursor(this.metadata, this.envelopeReader, this.schema, columnList);
   }
-  /**
-
-
-
-
-
+  /** Metadata is stored in the footer */
+  async readFooter() {
+    const trailerLen = constants_1.PARQUET_MAGIC.length + 4;
+    const trailerBuf = await this.file.read(this.file.size - trailerLen, trailerLen);
+    const magic = trailerBuf.slice(4).toString();
+    if (magic !== constants_1.PARQUET_MAGIC) {
+      throw new Error(`Not a valid parquet file (magic="${magic})`);
+    }
+    const metadataSize = trailerBuf.readUInt32LE(0);
+    const metadataOffset = this.file.size - metadataSize - trailerLen;
+    if (metadataOffset < constants_1.PARQUET_MAGIC.length) {
+      throw new Error(`Invalid metadata size ${metadataOffset}`);
+    }
+    const metadataBuf = await this.file.read(metadataOffset, metadataSize);
+    // let metadata = new parquet_thrift.FileMetaData();
+    // parquet_util.decodeThrift(metadata, metadataBuf);
+    const { metadata } = (0, read_utils_1.decodeFileMetadata)(metadataBuf);
+    return metadata;
   }
-  /**
-
-
-
-
+  /** Data is stored in row groups (similar to Apache Arrow record batches) */
+  async readRowGroup(schema, rowGroup, columnList) {
+    const buffer = {
+      rowCount: Number(rowGroup.num_rows),
+      columnData: {}
+    };
+    for (const colChunk of rowGroup.columns) {
+      const colMetadata = colChunk.meta_data;
+      const colKey = colMetadata?.path_in_schema;
+      if (columnList.length > 0 && (0, read_utils_1.fieldIndexOf)(columnList, colKey) < 0) {
+        continue; // eslint-disable-line no-continue
+      }
+      buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
+    }
+    return buffer;
   }
   /**
-   *
+   * Each row group contains column chunks for all the columns.
    */
-
-
-
-      md[kv.key] = kv.value;
+  async readColumnChunk(schema, colChunk) {
+    if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
+      throw new Error('external references are not supported');
     }
-
+    const field = schema.findField(colChunk.meta_data?.path_in_schema);
+    const type = (0, read_utils_1.getThriftEnum)(parquet_thrift_1.Type, colChunk.meta_data?.type);
+    if (type !== field.primitiveType) {
+      throw new Error(`chunk type not matching schema: ${type}`);
+    }
+    const compression = (0, read_utils_1.getThriftEnum)(parquet_thrift_1.CompressionCodec, colChunk.meta_data?.codec);
+    const pagesOffset = Number(colChunk.meta_data?.data_page_offset);
+    let pagesSize = Number(colChunk.meta_data?.total_compressed_size);
+    if (!colChunk.file_path) {
+      pagesSize = Math.min(this.file.size - pagesOffset, Number(colChunk.meta_data?.total_compressed_size));
+    }
+    const options = {
+      type,
+      rLevelMax: field.rLevelMax,
+      dLevelMax: field.dLevelMax,
+      compression,
+      column: field,
+      numValues: colChunk.meta_data?.num_values,
+      dictionary: []
+    };
+    let dictionary;
+    const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;
+    if (dictionaryPageOffset) {
+      const dictionaryOffset = Number(dictionaryPageOffset);
+      // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.
+      dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
+    }
+    dictionary = options.dictionary?.length ? options.dictionary : dictionary;
+    const pagesBuf = await this.file.read(pagesOffset, pagesSize);
+    return await (0, decoders_2.decodeDataPages)(pagesBuf, { ...options, dictionary });
   }
   /**
-   *
+   * Getting dictionary for allows to flatten values by indices.
+   * @param dictionaryPageOffset
+   * @param options
+   * @param pagesOffset
+   * @returns
    */
-
-
-
+  async getDictionary(dictionaryPageOffset, options, pagesOffset) {
+    if (dictionaryPageOffset === 0) {
+      // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);
+      // pagesBuf = await this.read(pagesOffset, dictionarySize);
+      // In this case we are working with parquet-mr files format. Problem is described below:
+      // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding
+      // We need to get dictionary page from column chunk if it exists.
+      // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.
+      return [];
+    }
+    const dictionarySize = Math.min(this.file.size - dictionaryPageOffset, this.props.defaultDictionarySize);
+    const pagesBuf = await this.file.read(dictionaryPageOffset, dictionarySize);
+    const cursor = { buffer: pagesBuf, offset: 0, size: pagesBuf.length };
+    const decodedPage = await (0, decoders_2.decodePage)(cursor, options);
+    return decodedPage.dictionary;
   }
 }
 exports.ParquetReader = ParquetReader;
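The hunk above (apparently dist/parquetjs/parser/parquet-reader.js, judging by the +168/−102 counts in the file list) folds the removed ParquetEnvelopeReader/ParquetCursor pair into a single ParquetReader class that wraps a file-like object and exposes async iterators. A minimal usage sketch, assuming a handle with the read(offset, length), size and close() members the new code calls; the import path and the handle construction are illustrative and not taken from this diff:

```typescript
// Illustrative deep import; the package's public export surface is not shown in this diff.
import {ParquetReader} from '@loaders.gl/parquet/dist/parquetjs/parser/parquet-reader';

// Hypothetical file handle: the diff only shows that the reader calls
// file.read(offset, length) -> Node Buffer, reads file.size, and calls file.close().
declare const file: {
  read(offset: number, length: number): Promise<Buffer>;
  size: number;
  close(): void;
};

async function logAllRows(): Promise<void> {
  const reader = new ParquetReader(file, {defaultDictionarySize: 1e6});
  try {
    console.log(await reader.getRowCount()); // row count comes from the footer metadata
    // rowIterator() walks row groups and materializes rows via materializeRecords()
    for await (const row of reader.rowIterator({columnList: []})) {
      console.log(row);
    }
  } finally {
    reader.close();
  }
}
```

The lower-level readFooter()/readRowGroup() methods remain on the class for callers that want raw row-group buffers instead of materialized rows.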
@@ -45,6 +45,7 @@ export interface ParquetField {
   fieldCount?: number;
   fields?: Record<string, ParquetField>;
 }
+/** @todo better name, this is an internal type? */
 export interface ParquetOptions {
   type: ParquetType;
   rLevelMax: number;
@@ -54,27 +55,33 @@ export interface ParquetOptions {
   numValues?: Int64;
   dictionary?: ParquetDictionary;
 }
-export interface ParquetData {
-  dlevels: number[];
-  rlevels: number[];
-  values: any[];
-  count: number;
-  pageHeaders: PageHeader[];
-}
 export interface ParquetPageData {
   dlevels: number[];
   rlevels: number[];
+  /** Actual column chunks */
   values: any[];
   count: number;
   dictionary?: ParquetDictionary;
+  /** The "raw" page header from the file */
   pageHeader: PageHeader;
 }
 export interface ParquetRecord {
   [key: string]: any;
 }
+/** @
+ * Holds data for one row group (column chunks) */
 export declare class ParquetBuffer {
+  /** Number of rows in this page */
   rowCount: number;
   columnData: Record<string, ParquetData>;
   constructor(rowCount?: number, columnData?: Record<string, ParquetData>);
 }
+/** Holds the data for one column chunk */
+export interface ParquetData {
+  dlevels: number[];
+  rlevels: number[];
+  values: any[];
+  count: number;
+  pageHeaders: PageHeader[];
+}
 //# sourceMappingURL=declare.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"declare.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/declare.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,MAAM,YAAY,CAAC;AAC/B,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAElD,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,KAAK,GAAG,kBAAkB,CAAC;AAChE,MAAM,MAAM,kBAAkB,GAC1B,cAAc,GACd,MAAM,GACN,QAAQ,GACR,KAAK,GACL,QAAQ,GACR,KAAK,GACL,SAAS,GACT,MAAM,CAAC;AACX,MAAM,MAAM,cAAc,GAAG,UAAU,GAAG,UAAU,GAAG,UAAU,CAAC;AAClE,MAAM,MAAM,WAAW,GAAG,aAAa,GAAG,YAAY,CAAC;AAEvD;;GAEG;AACH,MAAM,MAAM,aAAa,GAErB,SAAS,GACT,OAAO,GACP,OAAO,GACP,OAAO,GACP,OAAO,GACP,QAAQ,GACR,YAAY,GACZ,sBAAsB,CAAC;AAE3B;;GAEG;AACH,MAAM,MAAM,YAAY,GAEpB,MAAM,GAMN,eAAe,GACf,eAAe,GACf,oBAAoB,GACpB,8BAA8B,GAC9B,MAAM,GACN,aAAa,GACb,aAAa,GACb,kBAAkB,GAClB,kBAAkB,GAClB,QAAQ,GACR,SAAS,GACT,SAAS,GACT,SAAS,GACT,OAAO,GACP,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,MAAM,GACN,MAAM,GACN,UAAU,CAAC;AAEf,MAAM,MAAM,iBAAiB,GAAG,MAAM,EAAE,CAAC;AAEzC,MAAM,WAAW,gBAAgB;IAC/B,CAAC,MAAM,EAAE,MAAM,GAAG,eAAe,CAAC;CACnC;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,CAAC,EAAE,WAAW,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,MAAM,CAAC,EAAE,gBAAgB,CAAC;CAC3B;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,GAAG,EAAE,MAAM,CAAC;IACZ,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,cAAc,EAAE,cAAc,CAAC;IAC/B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;CACvC;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,WAAW,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,kBAAkB,CAAC;IAChC,MAAM,EAAE,YAAY,CAAC;IACrB,SAAS,CAAC,EAAE,KAAK,CAAC;IAClB,UAAU,CAAC,EAAE,iBAAiB,CAAC;CAChC;AAED,MAAM,WAAW,
+{"version":3,"file":"declare.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/declare.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,MAAM,YAAY,CAAC;AAC/B,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAElD,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,KAAK,GAAG,kBAAkB,CAAC;AAChE,MAAM,MAAM,kBAAkB,GAC1B,cAAc,GACd,MAAM,GACN,QAAQ,GACR,KAAK,GACL,QAAQ,GACR,KAAK,GACL,SAAS,GACT,MAAM,CAAC;AACX,MAAM,MAAM,cAAc,GAAG,UAAU,GAAG,UAAU,GAAG,UAAU,CAAC;AAClE,MAAM,MAAM,WAAW,GAAG,aAAa,GAAG,YAAY,CAAC;AAEvD;;GAEG;AACH,MAAM,MAAM,aAAa,GAErB,SAAS,GACT,OAAO,GACP,OAAO,GACP,OAAO,GACP,OAAO,GACP,QAAQ,GACR,YAAY,GACZ,sBAAsB,CAAC;AAE3B;;GAEG;AACH,MAAM,MAAM,YAAY,GAEpB,MAAM,GAMN,eAAe,GACf,eAAe,GACf,oBAAoB,GACpB,8BAA8B,GAC9B,MAAM,GACN,aAAa,GACb,aAAa,GACb,kBAAkB,GAClB,kBAAkB,GAClB,QAAQ,GACR,SAAS,GACT,SAAS,GACT,SAAS,GACT,OAAO,GACP,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,MAAM,GACN,MAAM,GACN,UAAU,CAAC;AAEf,MAAM,MAAM,iBAAiB,GAAG,MAAM,EAAE,CAAC;AAEzC,MAAM,WAAW,gBAAgB;IAC/B,CAAC,MAAM,EAAE,MAAM,GAAG,eAAe,CAAC;CACnC;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,CAAC,EAAE,WAAW,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,MAAM,CAAC,EAAE,gBAAgB,CAAC;CAC3B;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,GAAG,EAAE,MAAM,CAAC;IACZ,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,cAAc,EAAE,cAAc,CAAC;IAC/B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;CACvC;AAED,mDAAmD;AACnD,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,WAAW,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,kBAAkB,CAAC;IAChC,MAAM,EAAE,YAAY,CAAC;IACrB,SAAS,CAAC,EAAE,KAAK,CAAC;IAClB,UAAU,CAAC,EAAE,iBAAiB,CAAC;CAChC;AAED,MAAM,WAAW,eAAe;IAC9B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,2BAA2B;IAC3B,MAAM,EAAE,GAAG,EAAE,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,iBAAiB,CAAC;IAC/B,0CAA0C;IAC1C,UAAU,EAAE,UAAU,CAAC;CACxB;AAED,MAAM,WAAW,aAAa;IAC5B,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED;kDACkD;AAClD,qBAAa,aAAa;IACxB,kCAAkC;IAClC,QAAQ,EAAE,MAAM,CAAC;IAEjB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;gBAC5B,QAAQ,GAAE,MAAU,EAAE,UAAU,GAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAM;CAI/E;AAED,0CAA0C;AAC1C,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,MAAM,EAAE,GAAG,EAAE,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,UAAU,EAAE,CAAC;CAC3B"}
@@ -1,6 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ParquetBuffer = void 0;
+/** @
+ * Holds data for one row group (column chunks) */
 class ParquetBuffer {
   constructor(rowCount = 0, columnData = {}) {
     this.rowCount = rowCount;
@@ -45,4 +45,119 @@ export declare function shredRecord(schema: ParquetSchema, record: any, buffer:
  * }
  */
 export declare function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[];
+/**
+ * 'Materialize' a list of <value, repetition_level, definition_level>
+ * tuples back to nested records (objects/arrays) using the Google Dremel
+ * Algorithm..
+ *
+ * The buffer argument must point to an object with the following structure (i.e.
+ * the same structure that is returned by shredRecords):
+ *
+ * buffer = {
+ *   columnData: [
+ *     'my_col': {
+ *       dlevels: [d1, d2, .. dN],
+ *       rlevels: [r1, r2, .. rN],
+ *       values: [v1, v2, .. vN],
+ *     }, ...
+ *   ],
+ *   rowCount: X,
+ * }
+ *
+export function extractColumns(schema: ParquetSchema, buffer: ParquetBuffer): Record<string, unknown> {
+  const columns: ParquetRecord = {};
+  for (const key in buffer.columnData) {
+    const columnData = buffer.columnData[key];
+    if (columnData.count) {
+      extractColumn(schema, columnData, key, columns);
+    }
+  }
+  return columns;
+}
+
+// eslint-disable-next-line max-statements, complexity
+function extractColumn(
+  schema: ParquetSchema,
+  columnData: ParquetData,
+  key: string,
+  columns: Record<string, unknown>
+) {
+  if (columnData.count <= 0) {
+    return;
+  }
+
+  const record = columns;
+
+  const field = schema.findField(key);
+  const branch = schema.findFieldBranch(key);
+
+  // tslint:disable-next-line:prefer-array-literal
+  const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);
+  let vIndex = 0;
+
+  let i = 0;
+  const dLevel = columnData.dlevels[i];
+  const rLevel = columnData.rlevels[i];
+  rLevels[rLevel]++;
+  rLevels.fill(0, rLevel + 1);
+
+  let rIndex = 0;
+  let record = records[rLevels[rIndex++] - 1];
+
+  // Internal nodes
+  for (const step of branch) {
+    if (step === field || dLevel < step.dLevelMax) {
+      break;
+    }
+
+    switch (step.repetitionType) {
+      case 'REPEATED':
+        if (!(step.name in record)) {
+          // eslint-disable max-depth
+          record[step.name] = [];
+        }
+        const ix = rLevels[rIndex++];
+        while (record[step.name].length <= ix) {
+          // eslint-disable max-depth
+          record[step.name].push({});
+        }
+        record = record[step.name][ix];
+        break;
+
+      default:
+        record[step.name] = record[step.name] || {};
+        record = record[step.name];
+    }
+  }
+
+  // Leaf node
+  if (dLevel === field.dLevelMax) {
+    const value = Types.fromPrimitive(
+      // @ts-ignore
+      field.originalType || field.primitiveType,
+      columnData.values[vIndex],
+      field
+    );
+    vIndex++;
+
+    switch (field.repetitionType) {
+      case 'REPEATED':
+        if (!(field.name in record)) {
+          // eslint-disable max-depth
+          record[field.name] = [];
+        }
+        const ix = rLevels[rIndex];
+        while (record[field.name].length <= ix) {
+          // eslint-disable max-depth
+          record[field.name].push(null);
+        }
+        record[field.name][ix] = value;
+        break;
+
+      default:
+        record[field.name] = value;
+    }
+  }
+}
+ */
 //# sourceMappingURL=shred.d.ts.map
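The comment block added to shred.d.ts documents the shredded-buffer shape that materializeRecords consumes, which is the same shape the new ParquetReader.readRowGroup builds. A minimal sketch of that shape for a single required top-level column; the column name and values are hypothetical and the expected result is noted only as a comment:

```typescript
// One row group with three rows of a single required column named "my_col".
// Because the column is neither optional nor repeated, every definition and
// repetition level is 0 (compare the dlevels/rlevels arrays in the comment above).
const buffer = {
  rowCount: 3,
  columnData: {
    my_col: {
      dlevels: [0, 0, 0],
      rlevels: [0, 0, 0],
      values: [10, 20, 30],
      count: 3,
      pageHeaders: []
    }
  }
};

// For a matching schema, materializeRecords(schema, buffer) would return
// [{my_col: 10}, {my_col: 20}, {my_col: 30}].
```

Note that everything from line 48 onward in that .d.ts file sits inside a comment block, so the extractColumns/extractColumn code shown there is not part of the shipped declarations.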
@@ -1 +1 @@
-{"version":3,"file":"shred.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/shred.ts"],"names":[],"mappings":"AAEA,OAAO,EAAC,aAAa,EAA6B,aAAa,EAAC,MAAM,WAAW,CAAC;AAClF,OAAO,EAAC,aAAa,EAAC,MAAM,UAAU,CAAC;AAGvC,OAAO,EAAC,aAAa,EAAC,CAAC;AAEvB,wBAAgB,WAAW,CAAC,MAAM,EAAE,aAAa,GAAG,aAAa,CAYhE;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAgB,WAAW,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,aAAa,GAAG,IAAI,CAmB3F;AAgED;;;;;;;;;;;;;;;;;;GAkBG;AACH,wBAAgB,kBAAkB,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,aAAa,GAAG,aAAa,EAAE,
+{"version":3,"file":"shred.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/shred.ts"],"names":[],"mappings":"AAEA,OAAO,EAAC,aAAa,EAA6B,aAAa,EAAC,MAAM,WAAW,CAAC;AAClF,OAAO,EAAC,aAAa,EAAC,MAAM,UAAU,CAAC;AAGvC,OAAO,EAAC,aAAa,EAAC,CAAC;AAEvB,wBAAgB,WAAW,CAAC,MAAM,EAAE,aAAa,GAAG,aAAa,CAYhE;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAgB,WAAW,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,aAAa,GAAG,IAAI,CAmB3F;AAgED;;;;;;;;;;;;;;;;;;GAkBG;AACH,wBAAgB,kBAAkB,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,aAAa,GAAG,aAAa,EAAE,CAYhG;AAmFD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAkHE"}