@loaders.gl/parquet 3.3.0-alpha.8 → 3.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +17 -26
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/index.js +3 -3
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/convert-schema-deep.ts.disabled +910 -0
- package/dist/es5/lib/parse-parquet.js +49 -25
- package/dist/es5/lib/parse-parquet.js.map +1 -1
- package/dist/es5/parquet-loader.js +3 -2
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -1
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +15 -5
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/encoder/{writer.js → parquet-encoder.js} +70 -158
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/es5/parquetjs/parser/parquet-reader.js +553 -222
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js +3 -1
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +39 -33
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/file-utils.js +2 -3
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/esm/index.js +1 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/convert-schema-deep.ts.disabled +910 -0
- package/dist/esm/lib/parse-parquet.js +6 -12
- package/dist/esm/lib/parse-parquet.js.map +1 -1
- package/dist/esm/parquet-loader.js +3 -2
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -1
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -1
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/compression.js +10 -1
- package/dist/esm/parquetjs/compression.js.map +1 -1
- package/dist/esm/parquetjs/encoder/{writer.js → parquet-encoder.js} +7 -37
- package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/esm/parquetjs/parser/parquet-reader.js +158 -72
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/esm/parquetjs/schema/declare.js +1 -0
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/schema/shred.js +42 -34
- package/dist/esm/parquetjs/schema/shred.js.map +1 -1
- package/dist/esm/parquetjs/schema/types.js.map +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +3 -4
- package/dist/lib/parse-parquet.d.ts +2 -2
- package/dist/lib/parse-parquet.d.ts.map +1 -1
- package/dist/lib/parse-parquet.js +24 -12
- package/dist/parquet-loader.d.ts +1 -0
- package/dist/parquet-loader.d.ts.map +1 -1
- package/dist/parquet-loader.js +2 -1
- package/dist/parquet-worker.js +15 -24
- package/dist/parquet-worker.js.map +3 -3
- package/dist/parquetjs/compression.d.ts.map +1 -1
- package/dist/parquetjs/compression.js +16 -5
- package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +10 -19
- package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
- package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js} +39 -37
- package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.js +168 -102
- package/dist/parquetjs/schema/declare.d.ts +14 -7
- package/dist/parquetjs/schema/declare.d.ts.map +1 -1
- package/dist/parquetjs/schema/declare.js +2 -0
- package/dist/parquetjs/schema/shred.d.ts +115 -0
- package/dist/parquetjs/schema/shred.d.ts.map +1 -1
- package/dist/parquetjs/schema/shred.js +161 -43
- package/dist/parquetjs/schema/types.d.ts +2 -2
- package/dist/parquetjs/schema/types.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.d.ts +3 -4
- package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.js +2 -5
- package/package.json +7 -5
- package/src/index.ts +2 -2
- package/src/lib/convert-schema-deep.ts.disabled +910 -0
- package/src/lib/parse-parquet.ts +25 -12
- package/src/parquet-loader.ts +3 -1
- package/src/parquetjs/compression.ts +14 -1
- package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +22 -28
- package/src/parquetjs/parser/parquet-reader.ts +239 -122
- package/src/parquetjs/schema/declare.ts +17 -9
- package/src/parquetjs/schema/shred.ts +157 -28
- package/src/parquetjs/schema/types.ts +21 -27
- package/src/parquetjs/utils/file-utils.ts +3 -4
- package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
- package/dist/es5/parquetjs/file.js +0 -94
- package/dist/es5/parquetjs/file.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -183
- package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -327
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/es5/parquetjs/utils/buffer-utils.js +0 -19
- package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
- package/dist/esm/parquetjs/file.js +0 -81
- package/dist/esm/parquetjs/file.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -78
- package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -129
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/esm/parquetjs/utils/buffer-utils.js +0 -13
- package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
- package/dist/parquetjs/file.d.ts +0 -10
- package/dist/parquetjs/file.d.ts.map +0 -1
- package/dist/parquetjs/file.js +0 -99
- package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
- package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-cursor.js +0 -74
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -136
- package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
- package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
- package/dist/parquetjs/utils/buffer-utils.js +0 -22
- package/src/parquetjs/file.ts +0 -90
- package/src/parquetjs/parser/parquet-cursor.ts +0 -94
- package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
- package/src/parquetjs/utils/buffer-utils.ts +0 -18
package/src/lib/parse-parquet.ts
CHANGED
@@ -1,6 +1,7 @@
 // import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';
+// import {ColumnarTableBatch} from '@loaders.gl/schema';
+import {makeReadableFile} from '@loaders.gl/loader-utils';
 import type {ParquetLoaderOptions} from '../parquet-loader';
-
 import {ParquetReader} from '../parquetjs/parser/parquet-reader';

 export async function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {
@@ -12,16 +13,28 @@ export async function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLo
 }

 export async function* parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions) {
-  const
-  const
-
-
-
-    while ((record = await cursor.next())) {
-      rows.push(record);
-    }
-  } finally {
-    await reader.close();
+  const file = makeReadableFile(blob);
+  const reader = new ParquetReader(file);
+  const rowBatches = reader.rowBatchIterator(options?.parquet);
+  for await (const rows of rowBatches) {
+    yield rows;
   }
-  yield rows;
 }
+
+// export async function* parseParquetFileInColumnarBatches(blob: Blob, options?: {columnList?: string[][]}): AsyncIterable<ColumnarTableBatch> {
+//   const rowGroupReader = new ParquetRowGroupReader({data: blob, columnList: options?.columnList});
+//   try {
+//     for await (const rowGroup of rowGroupReader) {
+//       yield convertRowGroupToTableBatch(rowGroup);
+//     }
+//   } finally {
+//     await rowGroupReader.close();
+//   }
+// }
+
+// function convertRowGroupToTableBatch(rowGroup): ColumnarTableBatch {
+//   // @ts-expect-error
+//   return {
+//     data: rowGroup
+//   };
+// }
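
The rewritten parseParquetFileInBatches drops the old cursor-based row loop in favor of ParquetReader.rowBatchIterator. A minimal consumption sketch, assuming the function is re-exported from the package entry point (the src/index.ts change is not shown here), that each yielded batch is an array of row objects, and with 'id' as a placeholder column name:

import {parseParquetFileInBatches} from '@loaders.gl/parquet';

async function countRows(blob: Blob): Promise<number> {
  let rowCount = 0;
  // Each iteration yields the materialized rows of one row group
  for await (const rows of parseParquetFileInBatches(blob, {parquet: {columnList: ['id']}})) {
    rowCount += rows.length;
  }
  return rowCount;
}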
package/src/parquet-loader.ts
CHANGED
@@ -8,13 +8,15 @@ export type ParquetLoaderOptions = LoaderOptions & {
   parquet?: {
     type?: 'object-row-table';
     url?: string;
+    columnList?: string[] | string[][];
   };
 };

 const DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {
   parquet: {
     type: 'object-row-table',
-    url: undefined
+    url: undefined,
+    columnList: []
   }
 };

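With the new columnList option wired into ParquetLoaderOptions, a column filter can be passed straight through loader options. A sketch assuming @loaders.gl/core's load() and the package's ParquetLoader export; the file name and column names are placeholders:

import {load} from '@loaders.gl/core';
import {ParquetLoader} from '@loaders.gl/parquet';

async function loadSelectedColumns() {
  // An empty columnList (the new default) reads every column
  const rows = await load('example.parquet', ParquetLoader, {
    parquet: {
      type: 'object-row-table',
      columnList: ['id', 'name']
    }
  });
  return rows;
}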
package/src/parquetjs/compression.ts
CHANGED
@@ -14,7 +14,20 @@ import {
 } from '@loaders.gl/compression';

 import {ParquetCompression} from './schema/declare';
-
+
+/** We can't use loaders-util buffer handling since we are dependent on buffers even in the browser */
+function toBuffer(arrayBuffer: ArrayBuffer): Buffer {
+  return Buffer.from(arrayBuffer);
+}
+
+function toArrayBuffer(buffer: Buffer): ArrayBuffer {
+  // TODO - per docs we should just be able to call buffer.buffer, but there are issues
+  if (Buffer.isBuffer(buffer)) {
+    const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
+    return typedArray.slice().buffer;
+  }
+  return buffer;
+}

 // TODO switch to worker compression to avoid bundling...

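The toArrayBuffer helper copies the Buffer's exact byte window rather than returning buffer.buffer directly, because Node.js Buffers are frequently views into a larger shared allocation. A small standalone illustration (not part of the package):

// Small Buffers are usually carved out of Node's internal pool, so the backing
// ArrayBuffer is larger than the Buffer and starts at a non-zero byteOffset.
const buf = Buffer.from('PAR1');
console.log(buf.length, buf.buffer.byteLength, buf.byteOffset); // e.g. 4 8192 <non-zero offset>

// Copying only [byteOffset, byteOffset + length) yields a correctly sized ArrayBuffer
const arrayBuffer = new Uint8Array(buf.buffer, buf.byteOffset, buf.length).slice().buffer;
console.log(arrayBuffer.byteLength); // 4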
package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts}
CHANGED
@@ -1,6 +1,6 @@
 // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
 /* eslint-disable camelcase */
-import {
+import {stream} from '@loaders.gl/loader-utils';
 import {ParquetCodecOptions, PARQUET_CODECS} from '../codecs';
 import * as Compression from '../compression';
 import {
@@ -55,7 +55,7 @@ const PARQUET_DEFAULT_ROW_GROUP_SIZE = 4096;
 const PARQUET_RDLVL_TYPE = 'INT32';
 const PARQUET_RDLVL_ENCODING = 'RLE';

-export interface ParquetWriterOptions {
+export interface ParquetEncoderOptions {
   baseOffset?: number;
   rowGroupSize?: number;
   pageSize?: number;
@@ -71,12 +71,12 @@ export interface ParquetWriterOptions {
 }

 /**
- * Write a parquet file to an output stream. The
+ * Write a parquet file to an output stream. The ParquetEncoder will perform
  * buffering/batching for performance, so close() must be called after all rows
  * are written.
  */
 // eslint-disable-next-line @typescript-eslint/no-unused-vars
-export class ParquetWriter<T> {
+export class ParquetEncoder<T> {
   /**
    * Convenience method to create a new buffered parquet writer that writes to
    * the specified file
@@ -84,10 +84,10 @@ export class ParquetWriter<T> {
   static async openFile<T>(
     schema: ParquetSchema,
     path: string,
-    opts?:
-  ): Promise<
+    opts?: ParquetEncoderOptions
+  ): Promise<ParquetEncoder<T>> {
     const outputStream = await osopen(path, opts);
-    return
+    return ParquetEncoder.openStream(schema, outputStream, opts);
   }

   /**
@@ -96,17 +96,11 @@ export class ParquetWriter<T> {
    */
   static async openStream<T>(
     schema: ParquetSchema,
-    outputStream: Writable,
-    opts
-  ): Promise<
-    if (!opts) {
-      // tslint:disable-next-line:no-parameter-reassignment
-      opts = {};
-    }
-
+    outputStream: stream.Writable,
+    opts: ParquetEncoderOptions = {}
+  ): Promise<ParquetEncoder<T>> {
     const envelopeWriter = await ParquetEnvelopeWriter.openStream(schema, outputStream, opts);
-
-    return new ParquetWriter(schema, envelopeWriter, opts);
+    return new ParquetEncoder(schema, envelopeWriter, opts);
   }

   public schema: ParquetSchema;
@@ -122,7 +116,7 @@ export class ParquetWriter<T> {
   constructor(
     schema: ParquetSchema,
     envelopeWriter: ParquetEnvelopeWriter,
-    opts:
+    opts: ParquetEncoderOptions
   ) {
     this.schema = schema;
     this.envelopeWriter = envelopeWriter;
@@ -227,8 +221,8 @@ export class ParquetEnvelopeWriter {
    */
   static async openStream(
     schema: ParquetSchema,
-    outputStream: Writable,
-    opts:
+    outputStream: stream.Writable,
+    opts: ParquetEncoderOptions
   ): Promise<ParquetEnvelopeWriter> {
     const writeFn = oswrite.bind(undefined, outputStream);
     const closeFn = osclose.bind(undefined, outputStream);
@@ -249,7 +243,7 @@ export class ParquetEnvelopeWriter {
     writeFn: (buf: Buffer) => Promise<void>,
     closeFn: () => Promise<void>,
     fileOffset: number,
-    opts:
+    opts: ParquetEncoderOptions
   ) {
     this.schema = schema;
     this.write = writeFn;
@@ -314,11 +308,10 @@ export class ParquetEnvelopeWriter {

 /**
  * Create a parquet transform stream
-
-
-  public writer: ParquetWriter<T>;
+export class ParquetTransformer<T> extends stream.Transform {
+  public writer: ParquetEncoder<T>;

-  constructor(schema: ParquetSchema, opts:
+  constructor(schema: ParquetSchema, opts: ParquetEncoderOptions = {}) {
     super({objectMode: true});

     const writeProxy = (function (t: ParquetTransformer<any>) {
@@ -327,7 +320,7 @@ export class ParquetTransformer<T> extends Transform {
     };
   })(this);

-    this.writer = new
+    this.writer = new ParquetEncoder(
       schema,
       new ParquetEnvelopeWriter(schema, writeProxy, async () => {}, 0, opts),
       opts
@@ -348,6 +341,7 @@ export class ParquetTransformer<T> extends Transform {
     await this.writer.close(callback);
   }
 }
+*/

 /**
  * Encode a consecutive array of data using one of the parquet encodings
@@ -490,7 +484,7 @@ async function encodeColumnChunk(
   column: ParquetField,
   buffer: ParquetBuffer,
   offset: number,
-  opts:
+  opts: ParquetEncoderOptions
 ): Promise<{
   body: Buffer;
   metadata: ColumnMetaData;
@@ -546,7 +540,7 @@ async function encodeColumnChunk(
 async function encodeRowGroup(
   schema: ParquetSchema,
   data: ParquetBuffer,
-  opts:
+  opts: ParquetEncoderOptions
 ): Promise<{
   body: Buffer;
   metadata: RowGroup;
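
Apart from the rename and the switch to stream.Writable, the encoder's surface (openStream, the options bag, close) appears unchanged from the parquets fork. A hedged sketch of driving it against an in-memory Writable; the deep import paths are illustrative rather than documented entry points, the schema fields are placeholders, and appendRow is assumed to carry over unchanged from the fork:

import {Writable} from 'stream';
// Illustrative internal paths - not documented public exports
import {ParquetSchema} from '@loaders.gl/parquet/dist/parquetjs/schema/schema';
import {ParquetEncoder} from '@loaders.gl/parquet/dist/parquetjs/encoder/parquet-encoder';

async function encodeRows(rows: {id: number; name: string}[]): Promise<Buffer> {
  const chunks: Buffer[] = [];
  const sink = new Writable({
    write(chunk, _encoding, callback) {
      chunks.push(chunk);
      callback();
    }
  });

  const schema = new ParquetSchema({
    id: {type: 'INT64'},
    name: {type: 'UTF8'}
  });

  // openStream now takes a stream.Writable and a ParquetEncoderOptions object
  const encoder = await ParquetEncoder.openStream(schema, sink, {rowGroupSize: 1024});
  for (const row of rows) {
    await encoder.appendRow(row); // assumed unchanged from the parquets fork
  }
  await encoder.close();
  return Buffer.concat(chunks);
}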
package/src/parquetjs/parser/parquet-reader.ts
CHANGED
@@ -1,163 +1,280 @@
 // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
-import {
-
+import type {ReadableFile} from '@loaders.gl/loader-utils';
+
 import {ParquetSchema} from '../schema/schema';
-import {ParquetCursor} from './parquet-cursor';
-import {PARQUET_VERSION} from '../../constants';
 import {decodeSchema} from './decoders';
+import {materializeRecords} from '../schema/shred';
+
+import {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';
+import {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';
+import {
+  ParquetBuffer,
+  ParquetCompression,
+  ParquetData,
+  PrimitiveType,
+  ParquetOptions
+} from '../schema/declare';
+import {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';
+import {decodeDataPages, decodePage} from './decoders';
+
+export type ParquetReaderProps = {
+  defaultDictionarySize?: number;
+};
+
+/** Properties for initializing a ParquetRowGroupReader */
+export type ParquetIterationProps = {
+  /** Filter allowing some columns to be dropped */
+  columnList?: string[] | string[][];
+};
+
+const DEFAULT_PROPS: Required<ParquetReaderProps> = {
+  defaultDictionarySize: 1e6
+};

 /**
- *
- *
- *
- *
- * avoid leaking file descriptors.
+ * The parquet envelope reader allows direct, unbuffered access to the individual
+ * sections of the parquet file, namely the header, footer and the row groups.
+ * This class is intended for advanced/internal users; if you just want to retrieve
+ * rows from a parquet file use the ParquetReader instead
  */
-export class ParquetReader
-
-
-
-
-
-
-
-  };
-  const closeFn = async () => {};
-  const size = blob.size;
-  const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);
-  try {
-    await envelopeReader.readHeader();
-    const metadata = await envelopeReader.readFooter();
-    return new ParquetReader(metadata, envelopeReader);
-  } catch (err) {
-    await envelopeReader.close();
-    throw err;
-  }
+export class ParquetReader {
+  props: Required<ParquetReaderProps>;
+  file: ReadableFile;
+  metadata: Promise<FileMetaData> | null = null;
+
+  constructor(file: ReadableFile, props?: ParquetReaderProps) {
+    this.file = file;
+    this.props = {...DEFAULT_PROPS, ...props};
   }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    throw err;
+  close(): void {
+    // eslint-disable-next-line @typescript-eslint/no-floating-promises
+    this.file.close();
+  }
+
+  // HIGH LEVEL METHODS
+
+  /** Yield one row at a time */
+  async *rowIterator(props?: ParquetIterationProps) {
+    for await (const rows of this.rowBatchIterator(props)) {
+      // yield *rows
+      for (const row of rows) {
+        yield row;
+      }
     }
   }

-
-
-
-
-
-      return new ParquetReader<T>(metadata, envelopeReader);
-    } catch (err) {
-      await envelopeReader.close();
-      throw err;
+  /** Yield one batch of rows at a time */
+  async *rowBatchIterator(props?: ParquetIterationProps) {
+    const schema = await this.getSchema();
+    for await (const rowGroup of this.rowGroupIterator(props)) {
+      yield materializeRecords(schema, rowGroup);
     }
   }

-
-
-
+  /** Iterate over the raw row groups */
+  async *rowGroupIterator(props?: ParquetIterationProps) {
+    // Ensure strings are nested in arrays
+    const columnList: string[][] = (props?.columnList || []).map((x) =>
+      Array.isArray(x) ? x : [x]
+    );

-
-
-
-
-
-
-
-
-
+    const metadata = await this.getFileMetadata();
+    const schema = await this.getSchema();
+
+    const rowGroupCount = metadata?.row_groups.length || 0;
+
+    for (let rowGroupIndex = 0; rowGroupIndex < rowGroupCount; rowGroupIndex++) {
+      const rowGroup = await this.readRowGroup(
+        schema,
+        metadata.row_groups[rowGroupIndex],
+        columnList
+      );
+      yield rowGroup;
     }
+  }

-
-
-
-    const {schema} = decodeSchema(this.metadata.schema, 1, root.num_children!);
-    this.schema = new ParquetSchema(schema);
+  async getRowCount(): Promise<number> {
+    const metadata = await this.getFileMetadata();
+    return Number(metadata.num_rows);
   }

-
-
-
-
-
-
-    // this.envelopeReader = null;
-    // this.metadata = null;
+  async getSchema(): Promise<ParquetSchema> {
+    const metadata = await this.getFileMetadata();
+    const root = metadata.schema[0];
+    const {schema: schemaDefinition} = decodeSchema(metadata.schema, 1, root.num_children!);
+    const schema = new ParquetSchema(schemaDefinition);
+    return schema;
   }

   /**
-   *
-   *
-   * the reader object.
-   *
-   * The required_columns parameter controls which columns are actually read
-   * from disk. An empty array or no value implies all columns. A list of column
-   * names means that only those columns should be loaded from disk.
+   * Returns the user (key/value) metadata for this file
+   * In parquet this is not stored on the schema like it is in arrow
    */
-
-
-
-
-
-    if (!columnList) {
-      // tslint:disable-next-line:no-parameter-reassignment
-      columnList = [];
+  async getSchemaMetadata(): Promise<Record<string, string>> {
+    const metadata = await this.getFileMetadata();
+    const md: Record<string, string> = {};
+    for (const kv of metadata.key_value_metadata!) {
+      md[kv.key] = kv.value!;
     }
+    return md;
+  }
+
+  async getFileMetadata(): Promise<FileMetaData> {
+    if (!this.metadata) {
+      await this.readHeader();
+      this.metadata = this.readFooter();
+    }
+    return this.metadata;
+  }

-
-    columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));
+  // LOW LEVEL METHODS

-
-
-
-
-
-
+  /** Metadata is stored in the footer */
+  async readHeader(): Promise<void> {
+    const buffer = await this.file.read(0, PARQUET_MAGIC.length);
+    const magic = buffer.toString();
+    switch (magic) {
+      case PARQUET_MAGIC:
+        break;
+      case PARQUET_MAGIC_ENCRYPTED:
+        throw new Error('Encrypted parquet file not supported');
+      default:
+        throw new Error(`Invalid parquet file (magic=${magic})`);
+    }
   }

-  /**
-
-
-
-
-
+  /** Metadata is stored in the footer */
+  async readFooter(): Promise<FileMetaData> {
+    const trailerLen = PARQUET_MAGIC.length + 4;
+    const trailerBuf = await this.file.read(this.file.size - trailerLen, trailerLen);
+
+    const magic = trailerBuf.slice(4).toString();
+    if (magic !== PARQUET_MAGIC) {
+      throw new Error(`Not a valid parquet file (magic="${magic})`);
+    }
+
+    const metadataSize = trailerBuf.readUInt32LE(0);
+    const metadataOffset = this.file.size - metadataSize - trailerLen;
+    if (metadataOffset < PARQUET_MAGIC.length) {
+      throw new Error(`Invalid metadata size ${metadataOffset}`);
+    }
+
+    const metadataBuf = await this.file.read(metadataOffset, metadataSize);
+    // let metadata = new parquet_thrift.FileMetaData();
+    // parquet_util.decodeThrift(metadata, metadataBuf);
+    const {metadata} = decodeFileMetadata(metadataBuf);
+    return metadata;
   }

-  /**
-
-
-
-
+  /** Data is stored in row groups (similar to Apache Arrow record batches) */
+  async readRowGroup(
+    schema: ParquetSchema,
+    rowGroup: RowGroup,
+    columnList: string[][]
+  ): Promise<ParquetBuffer> {
+    const buffer: ParquetBuffer = {
+      rowCount: Number(rowGroup.num_rows),
+      columnData: {}
+    };
+    for (const colChunk of rowGroup.columns) {
+      const colMetadata = colChunk.meta_data;
+      const colKey = colMetadata?.path_in_schema;
+      if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {
+        continue; // eslint-disable-line no-continue
+      }
+      buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);
+    }
+    return buffer;
   }

   /**
-   *
+   * Each row group contains column chunks for all the columns.
    */
-
-
-
-      md[kv.key] = kv.value!;
+  async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {
+    if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
+      throw new Error('external references are not supported');
     }
-
+
+    const field = schema.findField(colChunk.meta_data?.path_in_schema!);
+    const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) as any;
+
+    if (type !== field.primitiveType) {
+      throw new Error(`chunk type not matching schema: ${type}`);
+    }
+
+    const compression: ParquetCompression = getThriftEnum(
+      CompressionCodec,
+      colChunk.meta_data?.codec!
+    ) as any;
+
+    const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);
+    let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);
+
+    if (!colChunk.file_path) {
+      pagesSize = Math.min(
+        this.file.size - pagesOffset,
+        Number(colChunk.meta_data?.total_compressed_size)
+      );
+    }
+
+    const options: ParquetOptions = {
+      type,
+      rLevelMax: field.rLevelMax,
+      dLevelMax: field.dLevelMax,
+      compression,
+      column: field,
+      numValues: colChunk.meta_data?.num_values,
+      dictionary: []
+    };
+
+    let dictionary;
+
+    const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;
+
+    if (dictionaryPageOffset) {
+      const dictionaryOffset = Number(dictionaryPageOffset);
+      // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.
+      dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
+    }
+
+    dictionary = options.dictionary?.length ? options.dictionary : dictionary;
+    const pagesBuf = await this.file.read(pagesOffset, pagesSize);
+    return await decodeDataPages(pagesBuf, {...options, dictionary});
   }

   /**
-   *
+   * Getting dictionary for allows to flatten values by indices.
+   * @param dictionaryPageOffset
+   * @param options
+   * @param pagesOffset
+   * @returns
    */
-
-
-
+  async getDictionary(
+    dictionaryPageOffset: number,
+    options: ParquetOptions,
+    pagesOffset: number
+  ): Promise<string[]> {
+    if (dictionaryPageOffset === 0) {
+      // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);
+      // pagesBuf = await this.read(pagesOffset, dictionarySize);
+
+      // In this case we are working with parquet-mr files format. Problem is described below:
+      // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding
+      // We need to get dictionary page from column chunk if it exists.
+      // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.
+      return [];
+    }
+
+    const dictionarySize = Math.min(
+      this.file.size - dictionaryPageOffset,
+      this.props.defaultDictionarySize
+    );
+    const pagesBuf = await this.file.read(dictionaryPageOffset, dictionarySize);
+
+    const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};
+    const decodedPage = await decodePage(cursor, options);
+
+    return decodedPage.dictionary!;
   }
 }