@loaders.gl/parquet 4.0.0-alpha.4 → 4.0.0-alpha.5
This diff shows the changes between the published contents of these two package versions as they appear in their public registries. It is provided for informational purposes only.
- package/dist/bundle.d.ts +2 -0
- package/dist/bundle.d.ts.map +1 -0
- package/dist/constants.d.ts +15 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/dist.min.js +22 -6
- package/dist/dist.min.js.map +7 -1
- package/dist/index.d.ts +28 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/lib/convert-schema.d.ts +8 -0
- package/dist/lib/convert-schema.d.ts.map +1 -0
- package/dist/lib/parse-parquet.d.ts +4 -0
- package/dist/lib/parse-parquet.d.ts.map +1 -0
- package/dist/lib/read-array-buffer.d.ts +19 -0
- package/dist/lib/read-array-buffer.d.ts.map +1 -0
- package/dist/parquet-loader.d.ts +23 -0
- package/dist/parquet-loader.d.ts.map +1 -0
- package/dist/parquet-loader.js +1 -1
- package/dist/parquet-worker.js +27 -13
- package/dist/parquet-worker.js.map +7 -1
- package/dist/parquet-writer.d.ts +4 -0
- package/dist/parquet-writer.d.ts.map +1 -0
- package/dist/parquet-writer.js +1 -1
- package/dist/parquetjs/codecs/declare.d.ts +17 -0
- package/dist/parquetjs/codecs/declare.d.ts.map +1 -0
- package/dist/parquetjs/codecs/dictionary.d.ts +3 -0
- package/dist/parquetjs/codecs/dictionary.d.ts.map +1 -0
- package/dist/parquetjs/codecs/index.d.ts +5 -0
- package/dist/parquetjs/codecs/index.d.ts.map +1 -0
- package/dist/parquetjs/codecs/plain.d.ts +6 -0
- package/dist/parquetjs/codecs/plain.d.ts.map +1 -0
- package/dist/parquetjs/codecs/rle.d.ts +6 -0
- package/dist/parquetjs/codecs/rle.d.ts.map +1 -0
- package/dist/parquetjs/compression.d.ts +23 -0
- package/dist/parquetjs/compression.d.ts.map +1 -0
- package/dist/parquetjs/compression.js +1 -10
- package/dist/parquetjs/compression.js.map +1 -1
- package/dist/parquetjs/encoder/writer.d.ts +123 -0
- package/dist/parquetjs/encoder/writer.d.ts.map +1 -0
- package/dist/parquetjs/file.d.ts +10 -0
- package/dist/parquetjs/file.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/BoundaryOrder.d.ts +6 -0
- package/dist/parquetjs/parquet-thrift/BoundaryOrder.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/BsonType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/BsonType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnChunk.d.ts +25 -0
- package/dist/parquetjs/parquet-thrift/ColumnChunk.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnIndex.d.ts +22 -0
- package/dist/parquetjs/parquet-thrift/ColumnIndex.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnMetaData.d.ts +42 -0
- package/dist/parquetjs/parquet-thrift/ColumnMetaData.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnOrder.d.ts +13 -0
- package/dist/parquetjs/parquet-thrift/ColumnOrder.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/CompressionCodec.d.ts +11 -0
- package/dist/parquetjs/parquet-thrift/CompressionCodec.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ConvertedType.d.ts +25 -0
- package/dist/parquetjs/parquet-thrift/ConvertedType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/DataPageHeader.d.ts +21 -0
- package/dist/parquetjs/parquet-thrift/DataPageHeader.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts +27 -0
- package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/DateType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/DateType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/DecimalType.d.ts +13 -0
- package/dist/parquetjs/parquet-thrift/DecimalType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.d.ts +16 -0
- package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/Encoding.d.ts +11 -0
- package/dist/parquetjs/parquet-thrift/Encoding.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/EnumType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/EnumType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/FieldRepetitionType.d.ts +6 -0
- package/dist/parquetjs/parquet-thrift/FieldRepetitionType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/FileMetaData.d.ts +28 -0
- package/dist/parquetjs/parquet-thrift/FileMetaData.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/IndexPageHeader.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/IndexPageHeader.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/IntType.d.ts +13 -0
- package/dist/parquetjs/parquet-thrift/IntType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/JsonType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/JsonType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/KeyValue.d.ts +13 -0
- package/dist/parquetjs/parquet-thrift/KeyValue.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ListType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/ListType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/LogicalType.d.ts +61 -0
- package/dist/parquetjs/parquet-thrift/LogicalType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/MapType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/MapType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/MicroSeconds.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/MicroSeconds.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/MilliSeconds.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/MilliSeconds.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/NullType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/NullType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/OffsetIndex.d.ts +12 -0
- package/dist/parquetjs/parquet-thrift/OffsetIndex.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageEncodingStats.d.ts +17 -0
- package/dist/parquetjs/parquet-thrift/PageEncodingStats.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageHeader.d.ts +30 -0
- package/dist/parquetjs/parquet-thrift/PageHeader.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageLocation.d.ts +16 -0
- package/dist/parquetjs/parquet-thrift/PageLocation.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageType.d.ts +7 -0
- package/dist/parquetjs/parquet-thrift/PageType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/RowGroup.d.ts +20 -0
- package/dist/parquetjs/parquet-thrift/RowGroup.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/SchemaElement.d.ts +33 -0
- package/dist/parquetjs/parquet-thrift/SchemaElement.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/SortingColumn.d.ts +15 -0
- package/dist/parquetjs/parquet-thrift/SortingColumn.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/Statistics.d.ts +23 -0
- package/dist/parquetjs/parquet-thrift/Statistics.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/StringType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/StringType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/TimeType.d.ts +14 -0
- package/dist/parquetjs/parquet-thrift/TimeType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/TimeUnit.d.ts +17 -0
- package/dist/parquetjs/parquet-thrift/TimeUnit.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/TimestampType.d.ts +14 -0
- package/dist/parquetjs/parquet-thrift/TimestampType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/Type.d.ts +11 -0
- package/dist/parquetjs/parquet-thrift/Type.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/UUIDType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/UUIDType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/index.d.ts +44 -0
- package/dist/parquetjs/parquet-thrift/index.d.ts.map +1 -0
- package/dist/parquetjs/parser/decoders.d.ts +34 -0
- package/dist/parquetjs/parser/decoders.d.ts.map +1 -0
- package/dist/parquetjs/parser/parquet-cursor.d.ts +36 -0
- package/dist/parquetjs/parser/parquet-cursor.d.ts.map +1 -0
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +40 -0
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +1 -0
- package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -9
- package/dist/parquetjs/parser/parquet-envelope-reader.js.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.d.ts +68 -0
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -0
- package/dist/parquetjs/parser/parquet-reader.js +0 -13
- package/dist/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/parquetjs/schema/declare.d.ts +80 -0
- package/dist/parquetjs/schema/declare.d.ts.map +1 -0
- package/dist/parquetjs/schema/schema.d.ts +26 -0
- package/dist/parquetjs/schema/schema.d.ts.map +1 -0
- package/dist/parquetjs/schema/shred.d.ts +48 -0
- package/dist/parquetjs/schema/shred.d.ts.map +1 -0
- package/dist/parquetjs/schema/types.d.ts +20 -0
- package/dist/parquetjs/schema/types.d.ts.map +1 -0
- package/dist/parquetjs/utils/buffer-utils.d.ts +10 -0
- package/dist/parquetjs/utils/buffer-utils.d.ts.map +1 -0
- package/dist/parquetjs/utils/file-utils.d.ts +16 -0
- package/dist/parquetjs/utils/file-utils.d.ts.map +1 -0
- package/dist/parquetjs/utils/file-utils.js +0 -45
- package/dist/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/parquetjs/utils/read-utils.d.ts +25 -0
- package/dist/parquetjs/utils/read-utils.d.ts.map +1 -0
- package/dist/workers/parquet-worker.d.ts +2 -0
- package/dist/workers/parquet-worker.d.ts.map +1 -0
- package/package.json +8 -8
- package/src/parquetjs/compression.ts +10 -10
- package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -11
- package/src/parquetjs/parser/parquet-reader.ts +0 -16
- package/src/parquetjs/utils/file-utils.ts +0 -49
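
The source-level changes in this release remove the Node-only file-system path: `file-utils` and the `openFile` statics on `ParquetEnvelopeReader`/`ParquetReader` are deleted, leaving the buffer-, blob- and array-buffer-based entry points. For orientation, a minimal read loop against the surviving API might look like the sketch below (it assumes `ParquetReader` is re-exported from the package root and uses a hypothetical `Row` shape):

```ts
import {ParquetReader} from '@loaders.gl/parquet'; // assumed export path

// Hypothetical row shape, for illustration only
type Row = {id: number; name: string};

async function readAllRows(buffer: Buffer): Promise<Row[]> {
  // openBuffer() is the remaining entry point now that openFile() is gone
  const reader = await ParquetReader.openBuffer<Row>(buffer);
  const rows: Row[] = [];
  try {
    // ParquetReader implements AsyncIterable<T>
    for await (const row of reader) {
      rows.push(row);
    }
  } finally {
    // The declaration comments require close() once reading is finished
    await reader.close();
  }
  return rows;
}
```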
package/dist/parquetjs/parquet-thrift/Type.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"Type.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parquet-thrift/Type.ts"],"names":[],"mappings":"AAMA,oBAAY,IAAI;IACd,OAAO,IAAI;IACX,KAAK,IAAI;IACT,KAAK,IAAI;IACT,KAAK,IAAI;IACT,KAAK,IAAI;IACV,MAAM,IAAI;IACV,UAAU,IAAI;IACd,oBAAoB,IAAI;CACzB"}
package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts
@@ -0,0 +1,9 @@
+import * as thrift from 'thrift';
+export interface ITypeDefinedOrderArgs {
+}
+export declare class TypeDefinedOrder {
+    constructor();
+    write(output: thrift.TProtocol): void;
+    static read(input: thrift.TProtocol): TypeDefinedOrder;
+}
+//# sourceMappingURL=TypeDefinedOrder.d.ts.map
package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"TypeDefinedOrder.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parquet-thrift/TypeDefinedOrder.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,MAAM,WAAW,qBAAqB;CAAG;AACzC,qBAAa,gBAAgB;;IAEpB,KAAK,CAAC,MAAM,EAAE,MAAM,CAAC,SAAS,GAAG,IAAI;WAM9B,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,SAAS,GAAG,gBAAgB;CAmB9D"}
package/dist/parquetjs/parquet-thrift/UUIDType.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"UUIDType.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parquet-thrift/UUIDType.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,MAAM,WAAW,aAAa;CAAG;AACjC,qBAAa,QAAQ;;IAEZ,KAAK,CAAC,MAAM,EAAE,MAAM,CAAC,SAAS,GAAG,IAAI;WAM9B,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,SAAS,GAAG,QAAQ;CAmBtD"}
package/dist/parquetjs/parquet-thrift/index.d.ts
@@ -0,0 +1,44 @@
+export * from './Type';
+export * from './ConvertedType';
+export * from './FieldRepetitionType';
+export * from './Encoding';
+export * from './CompressionCodec';
+export * from './PageType';
+export * from './BoundaryOrder';
+export * from './Statistics';
+export * from './StringType';
+export * from './UUIDType';
+export * from './MapType';
+export * from './ListType';
+export * from './EnumType';
+export * from './DateType';
+export * from './NullType';
+export * from './DecimalType';
+export * from './MilliSeconds';
+export * from './MicroSeconds';
+export * from './TimestampType';
+export * from './TimeType';
+export * from './IntType';
+export * from './JsonType';
+export * from './BsonType';
+export * from './SchemaElement';
+export * from './DataPageHeader';
+export * from './IndexPageHeader';
+export * from './DictionaryPageHeader';
+export * from './DataPageHeaderV2';
+export * from './PageHeader';
+export * from './KeyValue';
+export * from './SortingColumn';
+export * from './PageEncodingStats';
+export * from './ColumnMetaData';
+export * from './ColumnChunk';
+export * from './RowGroup';
+export * from './TypeDefinedOrder';
+export * from './PageLocation';
+export * from './OffsetIndex';
+export * from './ColumnIndex';
+export * from './FileMetaData';
+export * from './TimeUnit';
+export * from './LogicalType';
+export * from './ColumnOrder';
+//# sourceMappingURL=index.d.ts.map
package/dist/parquetjs/parquet-thrift/index.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parquet-thrift/index.ts"],"names":[],"mappings":"AAMA,cAAc,QAAQ,CAAC;AACvB,cAAc,iBAAiB,CAAC;AAChC,cAAc,uBAAuB,CAAC;AACtC,cAAc,YAAY,CAAC;AAC3B,cAAc,oBAAoB,CAAC;AACnC,cAAc,YAAY,CAAC;AAC3B,cAAc,iBAAiB,CAAC;AAChC,cAAc,cAAc,CAAC;AAC7B,cAAc,cAAc,CAAC;AAC7B,cAAc,YAAY,CAAC;AAC3B,cAAc,WAAW,CAAC;AAC1B,cAAc,YAAY,CAAC;AAC3B,cAAc,YAAY,CAAC;AAC3B,cAAc,YAAY,CAAC;AAC3B,cAAc,YAAY,CAAC;AAC3B,cAAc,eAAe,CAAC;AAC9B,cAAc,gBAAgB,CAAC;AAC/B,cAAc,gBAAgB,CAAC;AAC/B,cAAc,iBAAiB,CAAC;AAChC,cAAc,YAAY,CAAC;AAC3B,cAAc,WAAW,CAAC;AAC1B,cAAc,YAAY,CAAC;AAC3B,cAAc,YAAY,CAAC;AAC3B,cAAc,iBAAiB,CAAC;AAChC,cAAc,kBAAkB,CAAC;AACjC,cAAc,mBAAmB,CAAC;AAClC,cAAc,wBAAwB,CAAC;AACvC,cAAc,oBAAoB,CAAC;AACnC,cAAc,cAAc,CAAC;AAC7B,cAAc,YAAY,CAAC;AAC3B,cAAc,iBAAiB,CAAC;AAChC,cAAc,qBAAqB,CAAC;AACpC,cAAc,kBAAkB,CAAC;AACjC,cAAc,eAAe,CAAC;AAC9B,cAAc,YAAY,CAAC;AAC3B,cAAc,oBAAoB,CAAC;AACnC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,eAAe,CAAC;AAC9B,cAAc,eAAe,CAAC;AAC9B,cAAc,gBAAgB,CAAC;AAC/B,cAAc,YAAY,CAAC;AAC3B,cAAc,eAAe,CAAC;AAC9B,cAAc,eAAe,CAAC"}
package/dist/parquetjs/parser/decoders.d.ts
@@ -0,0 +1,34 @@
+/// <reference types="node" />
+import { ParquetData, ParquetOptions, ParquetPageData, SchemaDefinition } from '../schema/declare';
+import { CursorBuffer } from '../codecs';
+import { SchemaElement } from '../parquet-thrift';
+/**
+ * Decode data pages
+ * @param buffer - input data
+ * @param column - parquet column
+ * @param compression - compression type
+ * @returns parquet data page data
+ */
+export declare function decodeDataPages(buffer: Buffer, options: ParquetOptions): Promise<ParquetData>;
+/**
+ * Decode parquet page based on page type
+ * @param cursor
+ * @param options
+ */
+export declare function decodePage(cursor: CursorBuffer, options: ParquetOptions): Promise<ParquetPageData>;
+/**
+ * Decode parquet schema
+ * @param schemaElements input schema elements data
+ * @param offset offset to read from
+ * @param len length of data
+ * @returns result.offset
+ *   result.next - offset at the end of function
+ *   result.schema - schema read from the input data
+ * @todo output offset is the same as input - possibly excess output field
+ */
+export declare function decodeSchema(schemaElements: SchemaElement[], offset: number, len: number): {
+    offset: number;
+    next: number;
+    schema: SchemaDefinition;
+};
+//# sourceMappingURL=decoders.d.ts.map
package/dist/parquetjs/parser/decoders.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"decoders.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/decoders.ts"],"names":[],"mappings":";AACA,OAAO,EAEL,WAAW,EACX,cAAc,EACd,eAAe,EAGf,gBAAgB,EACjB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAC,YAAY,EAAsC,MAAM,WAAW,CAAC;AAC5E,OAAO,EAML,aAAa,EAEd,MAAM,mBAAmB,CAAC;AAK3B;;;;;;GAMG;AACH,wBAAsB,eAAe,CACnC,MAAM,EAAE,MAAM,EACd,OAAO,EAAE,cAAc,GACtB,OAAO,CAAC,WAAW,CAAC,CAmDtB;AAED;;;;GAIG;AACH,wBAAsB,UAAU,CAC9B,MAAM,EAAE,YAAY,EACpB,OAAO,EAAE,cAAc,GACtB,OAAO,CAAC,eAAe,CAAC,CAyB1B;AAED;;;;;;;;;GASG;AACH,wBAAgB,YAAY,CAC1B,cAAc,EAAE,aAAa,EAAE,EAC/B,MAAM,EAAE,MAAM,EACd,GAAG,EAAE,MAAM,GACV;IACD,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,gBAAgB,CAAC;CAC1B,CA4DA"}
package/dist/parquetjs/parser/parquet-cursor.d.ts
@@ -0,0 +1,36 @@
+import { FileMetaData } from '../parquet-thrift';
+import { ParquetEnvelopeReader } from './parquet-envelope-reader';
+import { ParquetSchema } from '../schema/schema';
+import { ParquetRecord } from '../schema/declare';
+/**
+ * A parquet cursor is used to retrieve rows from a parquet file in order
+ */
+export declare class ParquetCursor<T> implements AsyncIterable<T> {
+    metadata: FileMetaData;
+    envelopeReader: ParquetEnvelopeReader;
+    schema: ParquetSchema;
+    columnList: string[][];
+    rowGroup: ParquetRecord[];
+    rowGroupIndex: number;
+    /**
+     * Create a new parquet reader from the file metadata and an envelope reader.
+     * It is usually not recommended to call this constructor directly except for
+     * advanced and internal use cases. Consider using getCursor() on the
+     * ParquetReader instead
+     */
+    constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader, schema: ParquetSchema, columnList: string[][]);
+    /**
+     * Retrieve the next row from the cursor. Returns a row or NULL if the end
+     * of the file was reached
+     */
+    next<T = any>(): Promise<T>;
+    /**
+     * Rewind the cursor the the beginning of the file
+     */
+    rewind(): void;
+    /**
+     * Implement AsyncIterable
+     */
+    [Symbol.asyncIterator](): AsyncIterator<T>;
+}
+//# sourceMappingURL=parquet-cursor.d.ts.map
package/dist/parquetjs/parser/parquet-cursor.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"parquet-cursor.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/parquet-cursor.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,YAAY,EAAC,MAAM,mBAAmB,CAAC;AAC/C,OAAO,EAAC,qBAAqB,EAAC,MAAM,2BAA2B,CAAC;AAChE,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAC,aAAa,EAAC,MAAM,mBAAmB,CAAC;AAGhD;;GAEG;AACH,qBAAa,aAAa,CAAC,CAAC,CAAE,YAAW,aAAa,CAAC,CAAC,CAAC;IAChD,QAAQ,EAAE,YAAY,CAAC;IACvB,cAAc,EAAE,qBAAqB,CAAC;IACtC,MAAM,EAAE,aAAa,CAAC;IACtB,UAAU,EAAE,MAAM,EAAE,EAAE,CAAC;IACvB,QAAQ,EAAE,aAAa,EAAE,CAAM;IAC/B,aAAa,EAAE,MAAM,CAAC;IAE7B;;;;;OAKG;gBAED,QAAQ,EAAE,YAAY,EACtB,cAAc,EAAE,qBAAqB,EACrC,MAAM,EAAE,aAAa,EACrB,UAAU,EAAE,MAAM,EAAE,EAAE;IASxB;;;OAGG;IACG,IAAI,CAAC,CAAC,GAAG,GAAG,KAAK,OAAO,CAAC,CAAC,CAAC;IAiBjC;;OAEG;IACH,MAAM,IAAI,IAAI;IAKd;;OAEG;IAEH,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,CAAC,CAAC;CAuB3C"}
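
The cursor declaration above pairs a pull-style `next()` (which resolves to NULL at the end of the file) with an `AsyncIterable` implementation, and `rewind()` resets it to the first row. A usage sketch, assuming the cursor comes from `ParquetReader.getCursor()` and the same root export as above:

```ts
import {ParquetReader} from '@loaders.gl/parquet'; // assumed export path

async function twoPasses(buffer: Buffer): Promise<void> {
  const reader = await ParquetReader.openBuffer<{id: number}>(buffer);
  const cursor = reader.getCursor();

  // First pass: pull rows one at a time until next() returns null
  let row = await cursor.next();
  while (row) {
    console.log(row);
    row = await cursor.next();
  }

  // rewind() moves the cursor back to the start, so the same cursor can be
  // consumed again, this time through its Symbol.asyncIterator implementation
  cursor.rewind();
  for await (const r of cursor) {
    console.log(r);
  }

  await reader.close(); // all cursors become invalid after this
}
```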
package/dist/parquetjs/parser/parquet-envelope-reader.d.ts
@@ -0,0 +1,40 @@
+/// <reference types="node" />
+import { ParquetSchema } from '../schema/schema';
+import { ColumnChunk, FileMetaData, RowGroup } from '../parquet-thrift';
+import { ParquetBuffer, ParquetData, ParquetOptions } from '../schema/declare';
+/**
+ * The parquet envelope reader allows direct, unbuffered access to the individual
+ * sections of the parquet file, namely the header, footer and the row groups.
+ * This class is intended for advanced/internal users; if you just want to retrieve
+ * rows from a parquet file use the ParquetReader instead
+ */
+export declare class ParquetEnvelopeReader {
+    read: (position: number, length: number) => Promise<Buffer>;
+    /**
+     * Close this parquet reader. You MUST call this method once you're finished
+     * reading rows
+     */
+    close: () => Promise<void>;
+    fileSize: number;
+    defaultDictionarySize: number;
+    static openBuffer(buffer: Buffer): Promise<ParquetEnvelopeReader>;
+    constructor(read: (position: number, length: number) => Promise<Buffer>, close: () => Promise<void>, fileSize: number, options?: any);
+    readHeader(): Promise<void>;
+    readRowGroup(schema: ParquetSchema, rowGroup: RowGroup, columnList: string[][]): Promise<ParquetBuffer>;
+    /**
+     * Do reading of parquet file's column chunk
+     * @param schema
+     * @param colChunk
+     */
+    readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData>;
+    /**
+     * Getting dictionary for allows to flatten values by indices.
+     * @param dictionaryPageOffset
+     * @param options
+     * @param pagesOffset
+     * @returns
+     */
+    getDictionary(dictionaryPageOffset: number, options: ParquetOptions, pagesOffset: number): Promise<string[]>;
+    readFooter(): Promise<FileMetaData>;
+}
+//# sourceMappingURL=parquet-envelope-reader.d.ts.map
package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"parquet-envelope-reader.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/parquet-envelope-reader.ts"],"names":[],"mappings":";AACA,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAE/C,OAAO,EAAC,WAAW,EAAoB,YAAY,EAAE,QAAQ,EAAO,MAAM,mBAAmB,CAAC;AAC9F,OAAO,EACL,aAAa,EAEb,WAAW,EAEX,cAAc,EACf,MAAM,mBAAmB,CAAC;AAM3B;;;;;GAKG;AACH,qBAAa,qBAAqB;IACzB,IAAI,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;IACnE;;;OAGG;IACI,KAAK,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAC3B,QAAQ,EAAE,MAAM,CAAC;IACjB,qBAAqB,EAAE,MAAM,CAAC;WAExB,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,qBAAqB,CAAC;gBAQrE,IAAI,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC,EAC3D,KAAK,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,EAC1B,QAAQ,EAAE,MAAM,EAChB,OAAO,CAAC,EAAE,GAAG;IAQT,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAc3B,YAAY,CAChB,MAAM,EAAE,aAAa,EACrB,QAAQ,EAAE,QAAQ,EAClB,UAAU,EAAE,MAAM,EAAE,EAAE,GACrB,OAAO,CAAC,aAAa,CAAC;IAgBzB;;;;OAIG;IACG,eAAe,CAAC,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IAoDzF;;;;;;OAMG;IACG,aAAa,CACjB,oBAAoB,EAAE,MAAM,EAC5B,OAAO,EAAE,cAAc,EACvB,WAAW,EAAE,MAAM,GAClB,OAAO,CAAC,MAAM,EAAE,CAAC;IAwBd,UAAU,IAAI,OAAO,CAAC,YAAY,CAAC;CAqB1C"}
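
Because the envelope reader is constructed purely from a `read(position, length)` callback, a `close` callback and a byte count, any random-access byte source can back it. The sketch below wires one up by hand over an in-memory Buffer, mirroring what the `openBuffer()` static does internally; the deep `dist` import path is an assumption, and applications would normally go through `ParquetReader` instead:

```ts
// Assumed deep import; the class is published under dist/parquetjs/parser
import {ParquetEnvelopeReader} from '@loaders.gl/parquet/dist/parquetjs/parser/parquet-envelope-reader';

async function inspectFooter(data: Buffer): Promise<void> {
  // Random access over an in-memory buffer
  const read = (position: number, length: number) =>
    Promise.resolve(data.slice(position, position + length));
  const close = () => Promise.resolve();

  const envelopeReader = new ParquetEnvelopeReader(read, close, data.length);
  await envelopeReader.readHeader(); // throws unless the PAR1 magic is present
  const metadata = await envelopeReader.readFooter(); // thrift FileMetaData
  console.log(`rows: ${Number(metadata.num_rows)}`);
  await envelopeReader.close();
}
```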
package/dist/parquetjs/parser/parquet-envelope-reader.js
@@ -1,19 +1,10 @@
 import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
 import { PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED } from '../../constants';
 import { CompressionCodec, Type } from '../parquet-thrift';
-import { fstat, fopen, fread, fclose } from '../utils/file-utils';
 import { decodeFileMetadata, getThriftEnum, fieldIndexOf } from '../utils/read-utils';
 import { decodeDataPages, decodePage } from './decoders';
 const DEFAULT_DICTIONARY_SIZE = 1e6;
 export class ParquetEnvelopeReader {
-  static async openFile(filePath) {
-    const fileStat = await fstat(filePath);
-    const fileDescriptor = await fopen(filePath);
-    const readFn = fread.bind(undefined, fileDescriptor);
-    const closeFn = fclose.bind(undefined, fileDescriptor);
-    return new ParquetEnvelopeReader(readFn, closeFn, fileStat.size);
-  }
-
   static async openBuffer(buffer) {
     const readFn = (position, length) => Promise.resolve(buffer.slice(position, position + length));
 
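
The deleted `openFile` static above was the only place this class touched Node's file system; the public constructor it delegated to is unchanged. Node users who still need file-based reading can recreate the old behavior on the caller side, roughly as in this sketch built on `fs/promises` (not an API this package ships):

```ts
import {open, stat} from 'fs/promises';
// Assumed deep import, as in the previous sketch
import {ParquetEnvelopeReader} from '@loaders.gl/parquet/dist/parquetjs/parser/parquet-envelope-reader';

async function openFile(filePath: string): Promise<ParquetEnvelopeReader> {
  const {size} = await stat(filePath);
  const handle = await open(filePath, 'r');

  // Positioned reads against the file handle; a full implementation would
  // also check bytesRead against the requested length
  const read = async (position: number, length: number): Promise<Buffer> => {
    const buffer = Buffer.alloc(length);
    await handle.read(buffer, 0, length, position);
    return buffer;
  };
  const close = () => handle.close();

  return new ParquetEnvelopeReader(read, close, size);
}
```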
package/dist/parquetjs/parser/parquet-envelope-reader.js.map
@@ -1 +1 @@
-{"version":3,"sources":["../../../src/parquetjs/parser/parquet-envelope-reader.ts"],"names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","CompressionCodec","Type","fstat","fopen","fread","fclose","decodeFileMetadata","getThriftEnum","fieldIndexOf","decodeDataPages","decodePage","DEFAULT_DICTIONARY_SIZE","ParquetEnvelopeReader","openFile","filePath","fileStat","fileDescriptor","readFn","bind","undefined","closeFn","size","openBuffer","buffer","position","length","Promise","resolve","slice","constructor","read","close","fileSize","options","defaultDictionarySize","readHeader","magic","toString","Error","readRowGroup","schema","rowGroup","columnList","rowCount","Number","num_rows","columnData","colChunk","columns","colMetadata","meta_data","colKey","path_in_schema","join","readColumnChunk","file_path","field","findField","type","primitiveType","compression","codec","pagesOffset","data_page_offset","pagesSize","total_compressed_size","Math","min","rLevelMax","dLevelMax","column","numValues","num_values","dictionary","dictionaryPageOffset","dictionary_page_offset","dictionaryOffset","getDictionary","pagesBuf","dictionarySize","cursor","offset","decodedPage","readFooter","trailerLen","trailerBuf","metadataSize","readUInt32LE","metadataOffset","metadataBuf","metadata"],"mappings":";AAEA,SAAQA,aAAR,EAAuBC,uBAAvB,QAAqD,iBAArD;AACA,SAAqBC,gBAArB,EAA+DC,IAA/D,QAA0E,mBAA1E;AAQA,SAAQC,KAAR,EAAeC,KAAf,EAAsBC,KAAtB,EAA6BC,MAA7B,QAA0C,qBAA1C;AACA,SAAQC,kBAAR,EAA4BC,aAA5B,EAA2CC,YAA3C,QAA8D,qBAA9D;AACA,SAAQC,eAAR,EAAyBC,UAAzB,QAA0C,YAA1C;AAEA,MAAMC,uBAAuB,GAAG,GAAhC;AAQA,OAAO,MAAMC,qBAAN,CAA4B;AAUZ,eAARC,QAAQ,CAACC,QAAD,EAAmD;AACtE,UAAMC,QAAQ,GAAG,MAAMb,KAAK,CAACY,QAAD,CAA5B;AACA,UAAME,cAAc,GAAG,MAAMb,KAAK,CAACW,QAAD,CAAlC;AAEA,UAAMG,MAAM,GAAGb,KAAK,CAACc,IAAN,CAAWC,SAAX,EAAsBH,cAAtB,CAAf;AACA,UAAMI,OAAO,GAAGf,MAAM,CAACa,IAAP,CAAYC,SAAZ,EAAuBH,cAAvB,CAAhB;AAEA,WAAO,IAAIJ,qBAAJ,CAA0BK,MAA1B,EAAkCG,OAAlC,EAA2CL,QAAQ,CAACM,IAApD,CAAP;AACD;;AAEsB,eAAVC,UAAU,CAACC,MAAD,EAAiD;AACtE,UAAMN,MAAM,GAAG,CAACO,QAAD,EAAmBC,MAAnB,KACbC,OAAO,CAACC,OAAR,CAAgBJ,MAAM,CAACK,KAAP,CAAaJ,QAAb,EAAuBA,QAAQ,GAAGC,MAAlC,CAAhB,CADF;;AAEA,UAAML,OAAO,GAAG,MAAMM,OAAO,CAACC,OAAR,EAAtB;;AACA,WAAO,IAAIf,qBAAJ,CAA0BK,MAA1B,EAAkCG,OAAlC,EAA2CG,MAAM,CAACE,MAAlD,CAAP;AACD;;AAEDI,EAAAA,WAAW,CACTC,IADS,EAETC,KAFS,EAGTC,QAHS,EAITC,OAJS,EAKT;AAAA;;AAAA;;AAAA;;AAAA;;AACA,SAAKH,IAAL,GAAYA,IAAZ;AACA,SAAKC,KAAL,GAAaA,KAAb;AACA,SAAKC,QAAL,GAAgBA,QAAhB;AACA,SAAKE,qBAAL,GAA6B,CAAAD,OAAO,SAAP,IAAAA,OAAO,WAAP,YAAAA,OAAO,CAAEC,qBAAT,KAAkCvB,uBAA/D;AACD;;AAEe,QAAVwB,UAAU,GAAkB;AAChC,UAAMZ,MAAM,GAAG,MAAM,KAAKO,IAAL,CAAU,CAAV,EAAahC,aAAa,CAAC2B,MAA3B,CAArB;AAEA,UAAMW,KAAK,GAAGb,MAAM,CAACc,QAAP,EAAd;;AACA,YAAQD,KAAR;AACE,WAAKtC,aAAL;AACE;;AACF,WAAKC,uBAAL;AACE,cAAM,IAAIuC,KAAJ,CAAU,sCAAV,CAAN;;AACF;AACE,cAAM,IAAIA,KAAJ,uCAAyCF,KAAzC,OAAN;AANJ;AAQD;;AAEiB,QAAZG,YAAY,CAChBC,MADgB,EAEhBC,QAFgB,EAGhBC,UAHgB,EAIQ;AACxB,UAAMnB,MAAqB,GAAG;AAC5BoB,MAAAA,QAAQ,EAAEC,MAAM,CAACH,QAAQ,CAACI,QAAV,CADY;AAE5BC,MAAAA,UAAU,EAAE;AAFgB,KAA9B;;AAIA,SAAK,MAAMC,QAAX,IAAuBN,QAAQ,CAACO,OAAhC,EAAyC;AACvC,YAAMC,WAAW,GAAGF,QAAQ,CAACG,SAA7B;AACA,YAAMC,MAAM,GAAGF,WAAH,aAAGA,WAAH,uBAAGA,WAAW,CAAEG,cAA5B;;AACA,UAAIV,UAAU,CAACjB,MAAX,GAAoB,CAApB,IAAyBjB,YAAY,CAACkC,UAAD,EAAaS,MAAb,CAAZ,GAAoC,CAAjE,EAAoE;AAClE;AACD;;AACD5B,MAAAA,MAAM,CAACuB,UAAP,CAAkBK,MAAM,CAAEE,IAAR,EAAlB,IAAoC,MAAM,KAAKC,eAAL,CAAqBd,MAArB,EAA6BO,QAA7B,CAA1C;AACD;;AACD,WAAOxB,MAAP;AACD;;AAOoB,QAAf+B,eAAe,CAACd,MAAD,EAAwBO,QAAxB,EAAqE;AAAA;;AACxF,QAAIA,QAAQ,CAACQ,SAAT,KAAuBpC,SAAvB,IAAoC4B,QAAQ,CAACQ,SAAT,KAAuB,IAA/D,EAAqE;AACnE,YAAM,IAAIjB,KAAJ,CAAU,uCAAV,CAAN;AACD;;AAED,UAAMkB,KAAK,GAAGhB,MAAM,CAACiB,SAAP,wBAAiBV,QAAQ,CAACG,SAA1B,wDAAiB,oBAAoBE,cAArC,CAAd;AACA,UAAMM,IAAmB,GAAGnD,aAAa,CAACN,IAAD,0BAAO8C,QAAQ,CAACG,SAAhB,yDAAO,qBAAoBQ,IAA3B,CAAzC;;AAEA,QAAIA,IAAI,KAAKF,KAAK,CAACG,aAAnB,EAAkC;AAChC,YAAM,IAAIrB,KAAJ,2CAA6CoB,IAA7C,EAAN;AACD;;AAED,UAAME,WAA+B,GAAGrD,aAAa,CACnDP,gBADmD,0BAEnD+C,QAAQ,CAACG,SAF0C,yDAEnD,qBAAoBW,KAF+B,CAArD;AAKA,UAAMC,WAAW,GAAGlB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBa,gBAArB,CAA1B;AACA,QAAIC,SAAS,GAAGpB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBe,qBAArB,CAAtB;;AAEA,QAAI,CAAClB,QAAQ,CAACQ,SAAd,EAAyB;AAAA;;AACvBS,MAAAA,SAAS,GAAGE,IAAI,CAACC,GAAL,CACV,KAAKnC,QAAL,GAAgB8B,WADN,EAEVlB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBe,qBAArB,CAFI,CAAZ;AAID;;AAED,UAAMhC,OAAuB,GAAG;AAC9ByB,MAAAA,IAD8B;AAE9BU,MAAAA,SAAS,EAAEZ,KAAK,CAACY,SAFa;AAG9BC,MAAAA,SAAS,EAAEb,KAAK,CAACa,SAHa;AAI9BT,MAAAA,WAJ8B;AAK9BU,MAAAA,MAAM,EAAEd,KALsB;AAM9Be,MAAAA,SAAS,0BAAExB,QAAQ,CAACG,SAAX,yDAAE,qBAAoBsB,UAND;AAO9BC,MAAAA,UAAU,EAAE;AAPkB,KAAhC;AAUA,QAAIA,UAAJ;AAEA,UAAMC,oBAAoB,GAAG3B,QAAH,aAAGA,QAAH,+CAAGA,QAAQ,CAAEG,SAAb,yDAAG,qBAAqByB,sBAAlD;;AAEA,QAAID,oBAAJ,EAA0B;AACxB,YAAME,gBAAgB,GAAGhC,MAAM,CAAC8B,oBAAD,CAA/B;AAEAD,MAAAA,UAAU,GAAG,MAAM,KAAKI,aAAL,CAAmBD,gBAAnB,EAAqC3C,OAArC,EAA8C6B,WAA9C,CAAnB;AACD;;AAEDW,IAAAA,UAAU,GAAG,uBAAAxC,OAAO,CAACwC,UAAR,oEAAoBhD,MAApB,GAA6BQ,OAAO,CAACwC,UAArC,GAAkDA,UAA/D;AACA,UAAMK,QAAQ,GAAG,MAAM,KAAKhD,IAAL,CAAUgC,WAAV,EAAuBE,SAAvB,CAAvB;AACA,WAAO,MAAMvD,eAAe,CAACqE,QAAD,EAAW,EAAC,GAAG7C,OAAJ;AAAawC,MAAAA;AAAb,KAAX,CAA5B;AACD;;AASkB,QAAbI,aAAa,CACjBH,oBADiB,EAEjBzC,OAFiB,EAGjB6B,WAHiB,EAIE;AACnB,QAAIY,oBAAoB,KAAK,CAA7B,EAAgC;AAQ9B,aAAO,EAAP;AACD;;AAED,UAAMK,cAAc,GAAGb,IAAI,CAACC,GAAL,CACrB,KAAKnC,QAAL,GAAgB0C,oBADK,EAErB,KAAKxC,qBAFgB,CAAvB;AAIA,UAAM4C,QAAQ,GAAG,MAAM,KAAKhD,IAAL,CAAU4C,oBAAV,EAAgCK,cAAhC,CAAvB;AAEA,UAAMC,MAAM,GAAG;AAACzD,MAAAA,MAAM,EAAEuD,QAAT;AAAmBG,MAAAA,MAAM,EAAE,CAA3B;AAA8B5D,MAAAA,IAAI,EAAEyD,QAAQ,CAACrD;AAA7C,KAAf;AACA,UAAMyD,WAAW,GAAG,MAAMxE,UAAU,CAACsE,MAAD,EAAS/C,OAAT,CAApC;AAEA,WAAOiD,WAAW,CAACT,UAAnB;AACD;;AAEe,QAAVU,UAAU,GAA0B;AACxC,UAAMC,UAAU,GAAGtF,aAAa,CAAC2B,MAAd,GAAuB,CAA1C;AACA,UAAM4D,UAAU,GAAG,MAAM,KAAKvD,IAAL,CAAU,KAAKE,QAAL,GAAgBoD,UAA1B,EAAsCA,UAAtC,CAAzB;AAEA,UAAMhD,KAAK,GAAGiD,UAAU,CAACzD,KAAX,CAAiB,CAAjB,EAAoBS,QAApB,EAAd;;AACA,QAAID,KAAK,KAAKtC,aAAd,EAA6B;AAC3B,YAAM,IAAIwC,KAAJ,6CAA8CF,KAA9C,OAAN;AACD;;AAED,UAAMkD,YAAY,GAAGD,UAAU,CAACE,YAAX,CAAwB,CAAxB,CAArB;AACA,UAAMC,cAAc,GAAG,KAAKxD,QAAL,GAAgBsD,YAAhB,GAA+BF,UAAtD;;AACA,QAAII,cAAc,GAAG1F,aAAa,CAAC2B,MAAnC,EAA2C;AACzC,YAAM,IAAIa,KAAJ,iCAAmCkD,cAAnC,EAAN;AACD;;AAED,UAAMC,WAAW,GAAG,MAAM,KAAK3D,IAAL,CAAU0D,cAAV,EAA0BF,YAA1B,CAA1B;AAGA,UAAM;AAACI,MAAAA;AAAD,QAAapF,kBAAkB,CAACmF,WAAD,CAArC;AACA,WAAOC,QAAP;AACD;;AAzLgC","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetSchema} from '../schema/schema';\nimport {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';\nimport {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';\nimport {\n ParquetBuffer,\n ParquetCompression,\n ParquetData,\n PrimitiveType,\n ParquetOptions\n} from '../schema/declare';\nimport {fstat, fopen, fread, fclose} from '../utils/file-utils';\nimport {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';\nimport {decodeDataPages, decodePage} from './decoders';\n\nconst DEFAULT_DICTIONARY_SIZE = 1e6;\n\n/**\n * The parquet envelope reader allows direct, unbuffered access to the individual\n * sections of the parquet file, namely the header, footer and the row groups.\n * This class is intended for advanced/internal users; if you just want to retrieve\n * rows from a parquet file use the ParquetReader instead\n */\nexport class ParquetEnvelopeReader {\n public read: (position: number, length: number) => Promise<Buffer>;\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n public close: () => Promise<void>;\n public fileSize: number;\n public defaultDictionarySize: number;\n\n static async openFile(filePath: string): Promise<ParquetEnvelopeReader> {\n const fileStat = await fstat(filePath);\n const fileDescriptor = await fopen(filePath);\n\n const readFn = fread.bind(undefined, fileDescriptor);\n const closeFn = fclose.bind(undefined, fileDescriptor);\n\n return new ParquetEnvelopeReader(readFn, closeFn, fileStat.size);\n }\n\n static async openBuffer(buffer: Buffer): Promise<ParquetEnvelopeReader> {\n const readFn = (position: number, length: number) =>\n Promise.resolve(buffer.slice(position, position + length));\n const closeFn = () => Promise.resolve();\n return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);\n }\n\n constructor(\n read: (position: number, length: number) => Promise<Buffer>,\n close: () => Promise<void>,\n fileSize: number,\n options?: any\n ) {\n this.read = read;\n this.close = close;\n this.fileSize = fileSize;\n this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;\n }\n\n async readHeader(): Promise<void> {\n const buffer = await this.read(0, PARQUET_MAGIC.length);\n\n const magic = buffer.toString();\n switch (magic) {\n case PARQUET_MAGIC:\n break;\n case PARQUET_MAGIC_ENCRYPTED:\n throw new Error('Encrypted parquet file not supported');\n default:\n throw new Error(`Invalid parquet file (magic=${magic})`);\n }\n }\n\n async readRowGroup(\n schema: ParquetSchema,\n rowGroup: RowGroup,\n columnList: string[][]\n ): Promise<ParquetBuffer> {\n const buffer: ParquetBuffer = {\n rowCount: Number(rowGroup.num_rows),\n columnData: {}\n };\n for (const colChunk of rowGroup.columns) {\n const colMetadata = colChunk.meta_data;\n const colKey = colMetadata?.path_in_schema;\n if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {\n continue; // eslint-disable-line no-continue\n }\n buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);\n }\n return buffer;\n }\n\n /**\n * Do reading of parquet file's column chunk\n * @param schema\n * @param colChunk\n */\n async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {\n if (colChunk.file_path !== undefined && colChunk.file_path !== null) {\n throw new Error('external references are not supported');\n }\n\n const field = schema.findField(colChunk.meta_data?.path_in_schema!);\n const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) as any;\n\n if (type !== field.primitiveType) {\n throw new Error(`chunk type not matching schema: ${type}`);\n }\n\n const compression: ParquetCompression = getThriftEnum(\n CompressionCodec,\n colChunk.meta_data?.codec!\n ) as any;\n\n const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);\n let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);\n\n if (!colChunk.file_path) {\n pagesSize = Math.min(\n this.fileSize - pagesOffset,\n Number(colChunk.meta_data?.total_compressed_size)\n );\n }\n\n const options: ParquetOptions = {\n type,\n rLevelMax: field.rLevelMax,\n dLevelMax: field.dLevelMax,\n compression,\n column: field,\n numValues: colChunk.meta_data?.num_values,\n dictionary: []\n };\n\n let dictionary;\n\n const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;\n\n if (dictionaryPageOffset) {\n const dictionaryOffset = Number(dictionaryPageOffset);\n // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.\n dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);\n }\n\n dictionary = options.dictionary?.length ? options.dictionary : dictionary;\n const pagesBuf = await this.read(pagesOffset, pagesSize);\n return await decodeDataPages(pagesBuf, {...options, dictionary});\n }\n\n /**\n * Getting dictionary for allows to flatten values by indices.\n * @param dictionaryPageOffset\n * @param options\n * @param pagesOffset\n * @returns\n */\n async getDictionary(\n dictionaryPageOffset: number,\n options: ParquetOptions,\n pagesOffset: number\n ): Promise<string[]> {\n if (dictionaryPageOffset === 0) {\n // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);\n // pagesBuf = await this.read(pagesOffset, dictionarySize);\n\n // In this case we are working with parquet-mr files format. Problem is described below:\n // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding\n // We need to get dictionary page from column chunk if it exists.\n // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.\n return [];\n }\n\n const dictionarySize = Math.min(\n this.fileSize - dictionaryPageOffset,\n this.defaultDictionarySize\n );\n const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);\n\n const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};\n const decodedPage = await decodePage(cursor, options);\n\n return decodedPage.dictionary!;\n }\n\n async readFooter(): Promise<FileMetaData> {\n const trailerLen = PARQUET_MAGIC.length + 4;\n const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);\n\n const magic = trailerBuf.slice(4).toString();\n if (magic !== PARQUET_MAGIC) {\n throw new Error(`Not a valid parquet file (magic=\"${magic})`);\n }\n\n const metadataSize = trailerBuf.readUInt32LE(0);\n const metadataOffset = this.fileSize - metadataSize - trailerLen;\n if (metadataOffset < PARQUET_MAGIC.length) {\n throw new Error(`Invalid metadata size ${metadataOffset}`);\n }\n\n const metadataBuf = await this.read(metadataOffset, metadataSize);\n // let metadata = new parquet_thrift.FileMetaData();\n // parquet_util.decodeThrift(metadata, metadataBuf);\n const {metadata} = decodeFileMetadata(metadataBuf);\n return metadata;\n }\n}\n"],"file":"parquet-envelope-reader.js"}
+{"version":3,"sources":["../../../src/parquetjs/parser/parquet-envelope-reader.ts"],"names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","CompressionCodec","Type","decodeFileMetadata","getThriftEnum","fieldIndexOf","decodeDataPages","decodePage","DEFAULT_DICTIONARY_SIZE","ParquetEnvelopeReader","openBuffer","buffer","readFn","position","length","Promise","resolve","slice","closeFn","constructor","read","close","fileSize","options","defaultDictionarySize","readHeader","magic","toString","Error","readRowGroup","schema","rowGroup","columnList","rowCount","Number","num_rows","columnData","colChunk","columns","colMetadata","meta_data","colKey","path_in_schema","join","readColumnChunk","file_path","undefined","field","findField","type","primitiveType","compression","codec","pagesOffset","data_page_offset","pagesSize","total_compressed_size","Math","min","rLevelMax","dLevelMax","column","numValues","num_values","dictionary","dictionaryPageOffset","dictionary_page_offset","dictionaryOffset","getDictionary","pagesBuf","dictionarySize","cursor","offset","size","decodedPage","readFooter","trailerLen","trailerBuf","metadataSize","readUInt32LE","metadataOffset","metadataBuf","metadata"],"mappings":";AAEA,SAAQA,aAAR,EAAuBC,uBAAvB,QAAqD,iBAArD;AACA,SAAqBC,gBAArB,EAA+DC,IAA/D,QAA0E,mBAA1E;AAQA,SAAQC,kBAAR,EAA4BC,aAA5B,EAA2CC,YAA3C,QAA8D,qBAA9D;AACA,SAAQC,eAAR,EAAyBC,UAAzB,QAA0C,YAA1C;AAEA,MAAMC,uBAAuB,GAAG,GAAhC;AAQA,OAAO,MAAMC,qBAAN,CAA4B;AAUV,eAAVC,UAAU,CAACC,MAAD,EAAiD;AACtE,UAAMC,MAAM,GAAG,CAACC,QAAD,EAAmBC,MAAnB,KACbC,OAAO,CAACC,OAAR,CAAgBL,MAAM,CAACM,KAAP,CAAaJ,QAAb,EAAuBA,QAAQ,GAAGC,MAAlC,CAAhB,CADF;;AAEA,UAAMI,OAAO,GAAG,MAAMH,OAAO,CAACC,OAAR,EAAtB;;AACA,WAAO,IAAIP,qBAAJ,CAA0BG,MAA1B,EAAkCM,OAAlC,EAA2CP,MAAM,CAACG,MAAlD,CAAP;AACD;;AAEDK,EAAAA,WAAW,CACTC,IADS,EAETC,KAFS,EAGTC,QAHS,EAITC,OAJS,EAKT;AAAA;;AAAA;;AAAA;;AAAA;;AACA,SAAKH,IAAL,GAAYA,IAAZ;AACA,SAAKC,KAAL,GAAaA,KAAb;AACA,SAAKC,QAAL,GAAgBA,QAAhB;AACA,SAAKE,qBAAL,GAA6B,CAAAD,OAAO,SAAP,IAAAA,OAAO,WAAP,YAAAA,OAAO,CAAEC,qBAAT,KAAkChB,uBAA/D;AACD;;AAEe,QAAViB,UAAU,GAAkB;AAChC,UAAMd,MAAM,GAAG,MAAM,KAAKS,IAAL,CAAU,CAAV,EAAarB,aAAa,CAACe,MAA3B,CAArB;AAEA,UAAMY,KAAK,GAAGf,MAAM,CAACgB,QAAP,EAAd;;AACA,YAAQD,KAAR;AACE,WAAK3B,aAAL;AACE;;AACF,WAAKC,uBAAL;AACE,cAAM,IAAI4B,KAAJ,CAAU,sCAAV,CAAN;;AACF;AACE,cAAM,IAAIA,KAAJ,uCAAyCF,KAAzC,OAAN;AANJ;AAQD;;AAEiB,QAAZG,YAAY,CAChBC,MADgB,EAEhBC,QAFgB,EAGhBC,UAHgB,EAIQ;AACxB,UAAMrB,MAAqB,GAAG;AAC5BsB,MAAAA,QAAQ,EAAEC,MAAM,CAACH,QAAQ,CAACI,QAAV,CADY;AAE5BC,MAAAA,UAAU,EAAE;AAFgB,KAA9B;;AAIA,SAAK,MAAMC,QAAX,IAAuBN,QAAQ,CAACO,OAAhC,EAAyC;AACvC,YAAMC,WAAW,GAAGF,QAAQ,CAACG,SAA7B;AACA,YAAMC,MAAM,GAAGF,WAAH,aAAGA,WAAH,uBAAGA,WAAW,CAAEG,cAA5B;;AACA,UAAIV,UAAU,CAAClB,MAAX,GAAoB,CAApB,IAAyBT,YAAY,CAAC2B,UAAD,EAAaS,MAAb,CAAZ,GAAoC,CAAjE,EAAoE;AAClE;AACD;;AACD9B,MAAAA,MAAM,CAACyB,UAAP,CAAkBK,MAAM,CAAEE,IAAR,EAAlB,IAAoC,MAAM,KAAKC,eAAL,CAAqBd,MAArB,EAA6BO,QAA7B,CAA1C;AACD;;AACD,WAAO1B,MAAP;AACD;;AAOoB,QAAfiC,eAAe,CAACd,MAAD,EAAwBO,QAAxB,EAAqE;AAAA;;AACxF,QAAIA,QAAQ,CAACQ,SAAT,KAAuBC,SAAvB,IAAoCT,QAAQ,CAACQ,SAAT,KAAuB,IAA/D,EAAqE;AACnE,YAAM,IAAIjB,KAAJ,CAAU,uCAAV,CAAN;AACD;;AAED,UAAMmB,KAAK,GAAGjB,MAAM,CAACkB,SAAP,wBAAiBX,QAAQ,CAACG,SAA1B,wDAAiB,oBAAoBE,cAArC,CAAd;AACA,UAAMO,IAAmB,GAAG7C,aAAa,CAACF,IAAD,0BAAOmC,QAAQ,CAACG,SAAhB,yDAAO,qBAAoBS,IAA3B,CAAzC;;AAEA,QAAIA,IAAI,KAAKF,KAAK,CAACG,aAAnB,EAAkC;AAChC,YAAM,IAAItB,KAAJ,2CAA6CqB,IAA7C,EAAN;AACD;;AAED,UAAME,WAA+B,GAAG/C,aAAa,CACnDH,gBADmD,0BAEnDoC,QAAQ,CAACG,SAF0C,yDAEnD,qBAAoBY,KAF+B,CAArD;AAKA,UAAMC,WAAW,GAAGnB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBc,gBAArB,CAA1B;AACA,QAAIC,SAAS,GAAGrB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBgB,qBAArB,CAAtB;;AAEA,QAAI,CAACnB,QAAQ,CAACQ,SAAd,EAAyB;AAAA;;AACvBU,MAAAA,SAAS,GAAGE,IAAI,CAACC,GAAL,CACV,KAAKpC,QAAL,GAAgB+B,WADN,EAEVnB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBgB,qBAArB,CAFI,CAAZ;AAID;;AAED,UAAMjC,OAAuB,GAAG;AAC9B0B,MAAAA,IAD8B;AAE9BU,MAAAA,SAAS,EAAEZ,KAAK,CAACY,SAFa;AAG9BC,MAAAA,SAAS,EAAEb,KAAK,CAACa,SAHa;AAI9BT,MAAAA,WAJ8B;AAK9BU,MAAAA,MAAM,EAAEd,KALsB;AAM9Be,MAAAA,SAAS,0BAAEzB,QAAQ,CAACG,SAAX,yDAAE,qBAAoBuB,UAND;AAO9BC,MAAAA,UAAU,EAAE;AAPkB,KAAhC;AAUA,QAAIA,UAAJ;AAEA,UAAMC,oBAAoB,GAAG5B,QAAH,aAAGA,QAAH,+CAAGA,QAAQ,CAAEG,SAAb,yDAAG,qBAAqB0B,sBAAlD;;AAEA,QAAID,oBAAJ,EAA0B;AACxB,YAAME,gBAAgB,GAAGjC,MAAM,CAAC+B,oBAAD,CAA/B;AAEAD,MAAAA,UAAU,GAAG,MAAM,KAAKI,aAAL,CAAmBD,gBAAnB,EAAqC5C,OAArC,EAA8C8B,WAA9C,CAAnB;AACD;;AAEDW,IAAAA,UAAU,GAAG,uBAAAzC,OAAO,CAACyC,UAAR,oEAAoBlD,MAApB,GAA6BS,OAAO,CAACyC,UAArC,GAAkDA,UAA/D;AACA,UAAMK,QAAQ,GAAG,MAAM,KAAKjD,IAAL,CAAUiC,WAAV,EAAuBE,SAAvB,CAAvB;AACA,WAAO,MAAMjD,eAAe,CAAC+D,QAAD,EAAW,EAAC,GAAG9C,OAAJ;AAAayC,MAAAA;AAAb,KAAX,CAA5B;AACD;;AASkB,QAAbI,aAAa,CACjBH,oBADiB,EAEjB1C,OAFiB,EAGjB8B,WAHiB,EAIE;AACnB,QAAIY,oBAAoB,KAAK,CAA7B,EAAgC;AAQ9B,aAAO,EAAP;AACD;;AAED,UAAMK,cAAc,GAAGb,IAAI,CAACC,GAAL,CACrB,KAAKpC,QAAL,GAAgB2C,oBADK,EAErB,KAAKzC,qBAFgB,CAAvB;AAIA,UAAM6C,QAAQ,GAAG,MAAM,KAAKjD,IAAL,CAAU6C,oBAAV,EAAgCK,cAAhC,CAAvB;AAEA,UAAMC,MAAM,GAAG;AAAC5D,MAAAA,MAAM,EAAE0D,QAAT;AAAmBG,MAAAA,MAAM,EAAE,CAA3B;AAA8BC,MAAAA,IAAI,EAAEJ,QAAQ,CAACvD;AAA7C,KAAf;AACA,UAAM4D,WAAW,GAAG,MAAMnE,UAAU,CAACgE,MAAD,EAAShD,OAAT,CAApC;AAEA,WAAOmD,WAAW,CAACV,UAAnB;AACD;;AAEe,QAAVW,UAAU,GAA0B;AACxC,UAAMC,UAAU,GAAG7E,aAAa,CAACe,MAAd,GAAuB,CAA1C;AACA,UAAM+D,UAAU,GAAG,MAAM,KAAKzD,IAAL,CAAU,KAAKE,QAAL,GAAgBsD,UAA1B,EAAsCA,UAAtC,CAAzB;AAEA,UAAMlD,KAAK,GAAGmD,UAAU,CAAC5D,KAAX,CAAiB,CAAjB,EAAoBU,QAApB,EAAd;;AACA,QAAID,KAAK,KAAK3B,aAAd,EAA6B;AAC3B,YAAM,IAAI6B,KAAJ,6CAA8CF,KAA9C,OAAN;AACD;;AAED,UAAMoD,YAAY,GAAGD,UAAU,CAACE,YAAX,CAAwB,CAAxB,CAArB;AACA,UAAMC,cAAc,GAAG,KAAK1D,QAAL,GAAgBwD,YAAhB,GAA+BF,UAAtD;;AACA,QAAII,cAAc,GAAGjF,aAAa,CAACe,MAAnC,EAA2C;AACzC,YAAM,IAAIc,KAAJ,iCAAmCoD,cAAnC,EAAN;AACD;;AAED,UAAMC,WAAW,GAAG,MAAM,KAAK7D,IAAL,CAAU4D,cAAV,EAA0BF,YAA1B,CAA1B;AAGA,UAAM;AAACI,MAAAA;AAAD,QAAa/E,kBAAkB,CAAC8E,WAAD,CAArC;AACA,WAAOC,QAAP;AACD;;AA/KgC","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetSchema} from '../schema/schema';\nimport {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';\nimport {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';\nimport {\n ParquetBuffer,\n ParquetCompression,\n ParquetData,\n PrimitiveType,\n ParquetOptions\n} from '../schema/declare';\nimport {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';\nimport {decodeDataPages, decodePage} from './decoders';\n\nconst DEFAULT_DICTIONARY_SIZE = 1e6;\n\n/**\n * The parquet envelope reader allows direct, unbuffered access to the individual\n * sections of the parquet file, namely the header, footer and the row groups.\n * This class is intended for advanced/internal users; if you just want to retrieve\n * rows from a parquet file use the ParquetReader instead\n */\nexport class ParquetEnvelopeReader {\n public read: (position: number, length: number) => Promise<Buffer>;\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n public close: () => Promise<void>;\n public fileSize: number;\n public defaultDictionarySize: number;\n\n static async openBuffer(buffer: Buffer): Promise<ParquetEnvelopeReader> {\n const readFn = (position: number, length: number) =>\n Promise.resolve(buffer.slice(position, position + length));\n const closeFn = () => Promise.resolve();\n return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);\n }\n\n constructor(\n read: (position: number, length: number) => Promise<Buffer>,\n close: () => Promise<void>,\n fileSize: number,\n options?: any\n ) {\n this.read = read;\n this.close = close;\n this.fileSize = fileSize;\n this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;\n }\n\n async readHeader(): Promise<void> {\n const buffer = await this.read(0, PARQUET_MAGIC.length);\n\n const magic = buffer.toString();\n switch (magic) {\n case PARQUET_MAGIC:\n break;\n case PARQUET_MAGIC_ENCRYPTED:\n throw new Error('Encrypted parquet file not supported');\n default:\n throw new Error(`Invalid parquet file (magic=${magic})`);\n }\n }\n\n async readRowGroup(\n schema: ParquetSchema,\n rowGroup: RowGroup,\n columnList: string[][]\n ): Promise<ParquetBuffer> {\n const buffer: ParquetBuffer = {\n rowCount: Number(rowGroup.num_rows),\n columnData: {}\n };\n for (const colChunk of rowGroup.columns) {\n const colMetadata = colChunk.meta_data;\n const colKey = colMetadata?.path_in_schema;\n if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {\n continue; // eslint-disable-line no-continue\n }\n buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);\n }\n return buffer;\n }\n\n /**\n * Do reading of parquet file's column chunk\n * @param schema\n * @param colChunk\n */\n async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {\n if (colChunk.file_path !== undefined && colChunk.file_path !== null) {\n throw new Error('external references are not supported');\n }\n\n const field = schema.findField(colChunk.meta_data?.path_in_schema!);\n const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) as any;\n\n if (type !== field.primitiveType) {\n throw new Error(`chunk type not matching schema: ${type}`);\n }\n\n const compression: ParquetCompression = getThriftEnum(\n CompressionCodec,\n colChunk.meta_data?.codec!\n ) as any;\n\n const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);\n let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);\n\n if (!colChunk.file_path) {\n pagesSize = Math.min(\n this.fileSize - pagesOffset,\n Number(colChunk.meta_data?.total_compressed_size)\n );\n }\n\n const options: ParquetOptions = {\n type,\n rLevelMax: field.rLevelMax,\n dLevelMax: field.dLevelMax,\n compression,\n column: field,\n numValues: colChunk.meta_data?.num_values,\n dictionary: []\n };\n\n let dictionary;\n\n const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;\n\n if (dictionaryPageOffset) {\n const dictionaryOffset = Number(dictionaryPageOffset);\n // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.\n dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);\n }\n\n dictionary = options.dictionary?.length ? options.dictionary : dictionary;\n const pagesBuf = await this.read(pagesOffset, pagesSize);\n return await decodeDataPages(pagesBuf, {...options, dictionary});\n }\n\n /**\n * Getting dictionary for allows to flatten values by indices.\n * @param dictionaryPageOffset\n * @param options\n * @param pagesOffset\n * @returns\n */\n async getDictionary(\n dictionaryPageOffset: number,\n options: ParquetOptions,\n pagesOffset: number\n ): Promise<string[]> {\n if (dictionaryPageOffset === 0) {\n // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);\n // pagesBuf = await this.read(pagesOffset, dictionarySize);\n\n // In this case we are working with parquet-mr files format. Problem is described below:\n // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding\n // We need to get dictionary page from column chunk if it exists.\n // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.\n return [];\n }\n\n const dictionarySize = Math.min(\n this.fileSize - dictionaryPageOffset,\n this.defaultDictionarySize\n );\n const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);\n\n const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};\n const decodedPage = await decodePage(cursor, options);\n\n return decodedPage.dictionary!;\n }\n\n async readFooter(): Promise<FileMetaData> {\n const trailerLen = PARQUET_MAGIC.length + 4;\n const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);\n\n const magic = trailerBuf.slice(4).toString();\n if (magic !== PARQUET_MAGIC) {\n throw new Error(`Not a valid parquet file (magic=\"${magic})`);\n }\n\n const metadataSize = trailerBuf.readUInt32LE(0);\n const metadataOffset = this.fileSize - metadataSize - trailerLen;\n if (metadataOffset < PARQUET_MAGIC.length) {\n throw new Error(`Invalid metadata size ${metadataOffset}`);\n }\n\n const metadataBuf = await this.read(metadataOffset, metadataSize);\n // let metadata = new parquet_thrift.FileMetaData();\n // parquet_util.decodeThrift(metadata, metadataBuf);\n const {metadata} = decodeFileMetadata(metadataBuf);\n return metadata;\n }\n}\n"],"file":"parquet-envelope-reader.js"}
package/dist/parquetjs/parser/parquet-reader.d.ts
@@ -0,0 +1,68 @@
+/// <reference types="node" />
+import { ParquetEnvelopeReader } from './parquet-envelope-reader';
+import { FileMetaData } from '../parquet-thrift';
+import { ParquetSchema } from '../schema/schema';
+import { ParquetCursor } from './parquet-cursor';
+/**
+ * A parquet reader allows retrieving the rows from a parquet file in order.
+ * The basic usage is to create a reader and then retrieve a cursor/iterator
+ * which allows you to consume row after row until all rows have been read. It is
+ * important that you call close() after you are finished reading the file to
+ * avoid leaking file descriptors.
+ */
+export declare class ParquetReader<T> implements AsyncIterable<T> {
+    /**
+     * return a new parquet reader initialized with a read function
+     */
+    static openBlob<T>(blob: Blob): Promise<ParquetReader<T>>;
+    /**
+     * return a new parquet reader initialized with a read function
+     */
+    static openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>>;
+    static openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>>;
+    metadata: FileMetaData;
+    envelopeReader: ParquetEnvelopeReader;
+    schema: ParquetSchema;
+    /**
+     * Create a new parquet reader from the file metadata and an envelope reader.
+     * It is not recommended to call this constructor directly except for advanced
+     * and internal use cases. Consider using one of the open{File,Buffer} methods
+     * instead
+     */
+    constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader);
+    /**
+     * Close this parquet reader. You MUST call this method once you're finished
+     * reading rows
+     */
+    close(): Promise<void>;
+    /**
+     * Return a cursor to the file. You may open more than one cursor and use
+     * them concurrently. All cursors become invalid once close() is called on
+     * the reader object.
+     *
+     * The required_columns parameter controls which columns are actually read
+     * from disk. An empty array or no value implies all columns. A list of column
+     * names means that only those columns should be loaded from disk.
+     */
+    getCursor(): ParquetCursor<T>;
+    getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;
+    getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;
+    /**
+     * Return the number of rows in this file. Note that the number of rows is
+     * not neccessarily equal to the number of rows in each column.
+     */
+    getRowCount(): number;
+    /**
+     * Returns the ParquetSchema for this file
+     */
+    getSchema(): ParquetSchema;
+    /**
+     * Returns the user (key/value) metadata for this file
+     */
+    getMetadata(): Record<string, string>;
+    /**
+     * Implement AsyncIterable
+     */
+    [Symbol.asyncIterator](): AsyncIterator<T>;
+}
+//# sourceMappingURL=parquet-reader.d.ts.map
package/dist/parquetjs/parser/parquet-reader.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"parquet-reader.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/parquet-reader.ts"],"names":[],"mappings":";AACA,OAAO,EAAC,qBAAqB,EAAC,MAAM,2BAA2B,CAAC;AAChE,OAAO,EAAC,YAAY,EAAC,MAAM,mBAAmB,CAAC;AAC/C,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAI/C;;;;;;GAMG;AACH,qBAAa,aAAa,CAAC,CAAC,CAAE,YAAW,aAAa,CAAC,CAAC,CAAC;IACvD;;OAEG;WACU,QAAQ,CAAC,CAAC,EAAE,IAAI,EAAE,IAAI,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;IAkB/D;;OAEG;WACU,eAAe,CAAC,CAAC,EAAE,WAAW,EAAE,WAAW,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;WAevE,UAAU,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;IAY9D,QAAQ,EAAE,YAAY,CAAC;IACvB,cAAc,EAAE,qBAAqB,CAAC;IACtC,MAAM,EAAE,aAAa,CAAC;IAE7B;;;;;OAKG;gBACS,QAAQ,EAAE,YAAY,EAAE,cAAc,EAAE,qBAAqB;IAYzE;;;OAGG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAM5B;;;;;;;;OAQG;IACH,SAAS,IAAI,aAAa,CAAC,CAAC,CAAC;IAE7B,SAAS,CAAC,CAAC,SAAS,MAAM,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAChF,SAAS,CAAC,UAAU,EAAE,CAAC,MAAM,GAAG,MAAM,EAAE,CAAC,EAAE,GAAG,aAAa,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;IAkBvE;;;OAGG;IACH,WAAW,IAAI,MAAM;IAIrB;;OAEG;IACH,SAAS,IAAI,aAAa;IAI1B;;OAEG;IACH,WAAW,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;IAQrC;;OAEG;IAEH,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,CAAC,CAAC;CAG3C"}
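
The `getCursor` overloads above let the row type narrow with the requested columns: calling it with `['id', 'name']` on a `ParquetReader<T>` yields a `ParquetCursor<Pick<T, 'id' | 'name'>>`, and only those columns are decoded. A browser-oriented sketch using the `openArrayBuffer` entry point (URL and row shape are placeholders):

```ts
import {ParquetReader} from '@loaders.gl/parquet'; // assumed export path

// Placeholder row shape
type Pickup = {id: number; name: string; geometry: unknown};

async function readTwoColumns(url: string): Promise<void> {
  const response = await fetch(url);
  const reader = await ParquetReader.openArrayBuffer<Pickup>(await response.arrayBuffer());

  // Row type is narrowed to Pick<Pickup, 'id' | 'name'>
  const cursor = reader.getCursor(['id', 'name']);
  for await (const row of cursor) {
    console.log(row.id, row.name);
  }

  console.log(reader.getRowCount(), reader.getMetadata());
  await reader.close();
}
```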
package/dist/parquetjs/parser/parquet-reader.js
@@ -48,19 +48,6 @@ export class ParquetReader {
     }
   }
 
-  static async openFile(filePath) {
-    const envelopeReader = await ParquetEnvelopeReader.openFile(filePath);
-
-    try {
-      await envelopeReader.readHeader();
-      const metadata = await envelopeReader.readFooter();
-      return new ParquetReader(metadata, envelopeReader);
-    } catch (err) {
-      await envelopeReader.close();
-      throw err;
-    }
-  }
-
   static async openBuffer(buffer) {
     const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);
 
package/dist/parquetjs/parser/parquet-reader.js.map
@@ -1 +1 @@
-{"version":3,"sources":["../../../src/parquetjs/parser/parquet-reader.ts"],"names":["ParquetEnvelopeReader","ParquetSchema","ParquetCursor","PARQUET_VERSION","decodeSchema","Symbol","asyncIterator","ParquetReader","openBlob","blob","readFn","start","length","arrayBuffer","slice","Buffer","from","closeFn","size","envelopeReader","readHeader","metadata","readFooter","err","close","openArrayBuffer","byteLength","openFile","filePath","openBuffer","buffer","constructor","version","Error","root","schema","num_children","getCursor","columnList","map","x","Array","isArray","getRowCount","Number","num_rows","getSchema","getMetadata","md","kv","key_value_metadata","key","value"],"mappings":";;;;AACA,SAAQA,qBAAR,QAAoC,2BAApC;AAEA,SAAQC,aAAR,QAA4B,kBAA5B;AACA,SAAQC,aAAR,QAA4B,kBAA5B;AACA,SAAQC,eAAR,QAA8B,iBAA9B;AACA,SAAQC,YAAR,QAA2B,YAA3B;wBAyKGC,MAAM,CAACC,a;AAhKV,OAAO,MAAMC,aAAN,CAAmD;AAInC,eAARC,QAAQ,CAAIC,IAAJ,EAA2C;AAC9D,UAAMC,MAAM,GAAG,OAAOC,KAAP,EAAsBC,MAAtB,KAAyC;AACtD,YAAMC,WAAW,GAAG,MAAMJ,IAAI,CAACK,KAAL,CAAWH,KAAX,EAAkBA,KAAK,GAAGC,MAA1B,EAAkCC,WAAlC,EAA1B;AACA,aAAOE,MAAM,CAACC,IAAP,CAAYH,WAAZ,CAAP;AACD,KAHD;;AAIA,UAAMI,OAAO,GAAG,YAAY,CAAE,CAA9B;;AACA,UAAMC,IAAI,GAAGT,IAAI,CAACS,IAAlB;AACA,UAAMC,cAAc,GAAG,IAAInB,qBAAJ,CAA0BU,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAAvB;;AACA,QAAI;AACF,YAAMC,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAkBc,QAAlB,EAA4BF,cAA5B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAK2B,eAAfE,eAAe,CAAIZ,WAAJ,EAAyD;AACnF,UAAMH,MAAM,GAAG,OAAOC,KAAP,EAAsBC,MAAtB,KAAyCG,MAAM,CAACC,IAAP,CAAYH,WAAZ,EAAyBF,KAAzB,EAAgCC,MAAhC,CAAxD;;AACA,UAAMK,OAAO,GAAG,YAAY,CAAE,CAA9B;;AACA,UAAMC,IAAI,GAAGL,WAAW,CAACa,UAAzB;AACA,UAAMP,cAAc,GAAG,IAAInB,qBAAJ,CAA0BU,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAAvB;;AACA,QAAI;AACF,YAAMC,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAkBc,QAAlB,EAA4BF,cAA5B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAMoB,eAARI,QAAQ,CAAIC,QAAJ,EAAiD;AACpE,UAAMT,cAAc,GAAG,MAAMnB,qBAAqB,CAAC2B,QAAtB,CAA+BC,QAA/B,CAA7B;;AACA,QAAI;AACF,YAAMT,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAqBc,QAArB,EAA+BF,cAA/B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAEsB,eAAVM,UAAU,CAAIC,MAAJ,EAA+C;AACpE,UAAMX,cAAc,GAAG,MAAMnB,qBAAqB,CAAC6B,UAAtB,CAAiCC,MAAjC,CAA7B;;AACA,QAAI;AACF,YAAMX,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAqBc,QAArB,EAA+BF,cAA/B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAYDQ,EAAAA,WAAW,CAACV,QAAD,EAAyBF,cAAzB,EAAgE;AAAA;;AAAA;;AAAA;;AACzE,QAAIE,QAAQ,CAACW,OAAT,KAAqB7B,eAAzB,EAA0C;AACxC,YAAM,IAAI8B,KAAJ,CAAU,yBAAV,CAAN;AACD;;AAED,SAAKZ,QAAL,GAAgBA,QAAhB;AACA,SAAKF,cAAL,GAAsBA,cAAtB;AACA,UAAMe,IAAI,GAAG,KAAKb,QAAL,CAAcc,MAAd,CAAqB,CAArB,CAAb;AACA,UAAM;AAACA,MAAAA;AAAD,QAAW/B,YAAY,CAAC,KAAKiB,QAAL,CAAcc,MAAf,EAAuB,CAAvB,EAA0BD,IAAI,CAACE,YAA/B,CAA7B;AACA,SAAKD,MAAL,GAAc,IAAIlC,aAAJ,CAAkBkC,MAAlB,CAAd;AACD;;AAMU,QAALX,KAAK,GAAkB;AAC3B,UAAM,KAAKL,cAAL,CAAoBK,KAApB,EAAN;AAGD;;AAeDa,EAAAA,SAAS,CAACC,UAAD,EAAgE;AACvE,QAAI,CAACA,UAAL,EAAiB;AAEfA,MAAAA,UAAU,GAAG,EAAb;AACD;;AAGDA,IAAAA,UAAU,GAAGA,UAAU,CAACC,GAAX,CAAgBC,CAAD,IAAQC,KAAK,CAACC,OAAN,CAAcF,CAAd,IAAmBA,CAAnB,GAAuB,CAACA,CAAD,CAA9C,CAAb;AAEA,WAAO,IAAItC,aAAJ,CACL,KAAKmB,QADA,EAEL,KAAKF,cAFA,EAGL,KAAKgB,MAHA,EAILG,UAJK,CAAP;AAMD;;AAMDK,EAAAA,WAAW,GAAW;AACpB,WAAOC,MAAM,CAAC,KAAKvB,QAAL,CAAcwB,QAAf,CAAb;AACD;;AAKDC,EAAAA,SAAS,GAAkB;AACzB,WAAO,KAAKX,MAAZ;AACD;;AAKDY,EAAAA,WAAW,GAA2B;AACpC,UAAMC,EAA0B,GAAG,EAAnC;;AACA,SAAK,MAAMC,EAAX,IAAiB,KAAK5B,QAAL,CAAc6B,kBAA/B,EAAoD;AAClDF,MAAAA,EAAE,CAACC,EAAE,CAACE,GAAJ,CAAF,GAAaF,EAAE,CAACG,KAAhB;AACD;;AACD,WAAOJ,EAAP;AACD;;AAMD,4BAA2C;AACzC,WAAO,KAAKX,SAAL,GAAiBhC,MAAM,CAACC,aAAxB,GAAP;AACD;;AAlKuD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetEnvelopeReader} from './parquet-envelope-reader';\nimport {FileMetaData} from '../parquet-thrift';\nimport {ParquetSchema} from '../schema/schema';\nimport {ParquetCursor} from './parquet-cursor';\nimport {PARQUET_VERSION} from '../../constants';\nimport {decodeSchema} from './decoders';\n\n/**\n * A parquet reader allows retrieving the rows from a parquet file in order.\n * The basic usage is to create a reader and then retrieve a cursor/iterator\n * which allows you to consume row after row until all rows have been read. It is\n * important that you call close() after you are finished reading the file to\n * avoid leaking file descriptors.\n */\nexport class ParquetReader<T> implements AsyncIterable<T> {\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openBlob<T>(blob: Blob): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => {\n const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();\n return Buffer.from(arrayBuffer);\n };\n const closeFn = async () => {};\n const size = blob.size;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => Buffer.from(arrayBuffer, start, length);\n const closeFn = async () => {};\n const size = arrayBuffer.byteLength;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * Open the parquet file pointed to by the specified path and return a new\n * parquet reader\n */\n static async openFile<T>(filePath: string): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openFile(filePath);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n static async openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n public metadata: FileMetaData;\n public envelopeReader: ParquetEnvelopeReader;\n public schema: ParquetSchema;\n\n /**\n * Create a new parquet reader from the file metadata and an envelope reader.\n * It is not recommended to call this constructor directly except for advanced\n * and internal use cases. Consider using one of the open{File,Buffer} methods\n * instead\n */\n constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader) {\n if (metadata.version !== PARQUET_VERSION) {\n throw new Error('invalid parquet version');\n }\n\n this.metadata = metadata;\n this.envelopeReader = envelopeReader;\n const root = this.metadata.schema[0];\n const {schema} = decodeSchema(this.metadata.schema, 1, root.num_children!);\n this.schema = new ParquetSchema(schema);\n }\n\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n async close(): Promise<void> {\n await this.envelopeReader.close();\n // this.envelopeReader = null;\n // this.metadata = null;\n }\n\n /**\n * Return a cursor to the file. You may open more than one cursor and use\n * them concurrently. All cursors become invalid once close() is called on\n * the reader object.\n *\n * The required_columns parameter controls which columns are actually read\n * from disk. An empty array or no value implies all columns. A list of column\n * names means that only those columns should be loaded from disk.\n */\n getCursor(): ParquetCursor<T>;\n // @ts-ignore\n getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;\n getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;\n getCursor(columnList?: (string | string[])[]): ParquetCursor<Partial<T>> {\n if (!columnList) {\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = [];\n }\n\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));\n\n return new ParquetCursor<T>(\n this.metadata,\n this.envelopeReader,\n this.schema,\n columnList as string[][]\n );\n }\n\n /**\n * Return the number of rows in this file. Note that the number of rows is\n * not neccessarily equal to the number of rows in each column.\n */\n getRowCount(): number {\n return Number(this.metadata.num_rows);\n }\n\n /**\n * Returns the ParquetSchema for this file\n */\n getSchema(): ParquetSchema {\n return this.schema;\n }\n\n /**\n * Returns the user (key/value) metadata for this file\n */\n getMetadata(): Record<string, string> {\n const md: Record<string, string> = {};\n for (const kv of this.metadata.key_value_metadata!) {\n md[kv.key] = kv.value!;\n }\n return md;\n }\n\n /**\n * Implement AsyncIterable\n */\n // tslint:disable-next-line:function-name\n [Symbol.asyncIterator](): AsyncIterator<T> {\n return this.getCursor()[Symbol.asyncIterator]();\n }\n}\n"],"file":"parquet-reader.js"}
+
{"version":3,"sources":["../../../src/parquetjs/parser/parquet-reader.ts"],"names":["ParquetEnvelopeReader","ParquetSchema","ParquetCursor","PARQUET_VERSION","decodeSchema","Symbol","asyncIterator","ParquetReader","openBlob","blob","readFn","start","length","arrayBuffer","slice","Buffer","from","closeFn","size","envelopeReader","readHeader","metadata","readFooter","err","close","openArrayBuffer","byteLength","openBuffer","buffer","constructor","version","Error","root","schema","num_children","getCursor","columnList","map","x","Array","isArray","getRowCount","Number","num_rows","getSchema","getMetadata","md","kv","key_value_metadata","key","value"],"mappings":";;;;AACA,SAAQA,qBAAR,QAAoC,2BAApC;AAEA,SAAQC,aAAR,QAA4B,kBAA5B;AACA,SAAQC,aAAR,QAA4B,kBAA5B;AACA,SAAQC,eAAR,QAA8B,iBAA9B;AACA,SAAQC,YAAR,QAA2B,YAA3B;wBAyJGC,MAAM,CAACC,a;AAhJV,OAAO,MAAMC,aAAN,CAAmD;AAInC,eAARC,QAAQ,CAAIC,IAAJ,EAA2C;AAC9D,UAAMC,MAAM,GAAG,OAAOC,KAAP,EAAsBC,MAAtB,KAAyC;AACtD,YAAMC,WAAW,GAAG,MAAMJ,IAAI,CAACK,KAAL,CAAWH,KAAX,EAAkBA,KAAK,GAAGC,MAA1B,EAAkCC,WAAlC,EAA1B;AACA,aAAOE,MAAM,CAACC,IAAP,CAAYH,WAAZ,CAAP;AACD,KAHD;;AAIA,UAAMI,OAAO,GAAG,YAAY,CAAE,CAA9B;;AACA,UAAMC,IAAI,GAAGT,IAAI,CAACS,IAAlB;AACA,UAAMC,cAAc,GAAG,IAAInB,qBAAJ,CAA0BU,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAAvB;;AACA,QAAI;AACF,YAAMC,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAkBc,QAAlB,EAA4BF,cAA5B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAK2B,eAAfE,eAAe,CAAIZ,WAAJ,EAAyD;AACnF,UAAMH,MAAM,GAAG,OAAOC,KAAP,EAAsBC,MAAtB,KAAyCG,MAAM,CAACC,IAAP,CAAYH,WAAZ,EAAyBF,KAAzB,EAAgCC,MAAhC,CAAxD;;AACA,UAAMK,OAAO,GAAG,YAAY,CAAE,CAA9B;;AACA,UAAMC,IAAI,GAAGL,WAAW,CAACa,UAAzB;AACA,UAAMP,cAAc,GAAG,IAAInB,qBAAJ,CAA0BU,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAAvB;;AACA,QAAI;AACF,YAAMC,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAkBc,QAAlB,EAA4BF,cAA5B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAEsB,eAAVI,UAAU,CAAIC,MAAJ,EAA+C;AACpE,UAAMT,cAAc,GAAG,MAAMnB,qBAAqB,CAAC2B,UAAtB,CAAiCC,MAAjC,CAA7B;;AACA,QAAI;AACF,YAAMT,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAqBc,QAArB,EAA+BF,cAA/B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAYDM,EAAAA,WAAW,CAACR,QAAD,EAAyBF,cAAzB,EAAgE;AAAA;;AAAA;;AAAA;;AACzE,QAAIE,QAAQ,CAACS,OAAT,KAAqB3B,eAAzB,EAA0C;AACxC,YAAM,IAAI4B,KAAJ,CAAU,yBAAV,CAAN;AACD;;AAED,SAAKV,QAAL,GAAgBA,QAAhB;AACA,SAAKF,cAAL,GAAsBA,cAAtB;AACA,UAAMa,IAAI,GAAG,KAAKX,QAAL,CAAcY,MAAd,CAAqB,CAArB,CAAb;AACA,UAAM;AAACA,MAAAA;AAAD,QAAW7B,YAAY,CAAC,KAAKiB,QAAL,CAAcY,MAAf,EAAuB,CAAvB,EAA0BD,IAAI,CAACE,YAA/B,CAA7B;AACA,SAAKD,MAAL,GAAc,IAAIhC,aAAJ,CAAkBgC,MAAlB,CAAd;AACD;;AAMU,QAALT,KAAK,GAAkB;AAC3B,UAAM,KAAKL,cAAL,CAAoBK,KAApB,EAAN;AAGD;;AAeDW,EAAAA,SAAS,CAACC,UAAD,EAAgE;AACvE,QAAI,CAACA,UAAL,EAAiB;AAEfA,MAAAA,UAAU,GAAG,EAAb;AACD;;AAGDA,IAAAA,UAAU,GAAGA,UAAU,CAACC,GAAX,CAAgBC,CAAD,IAAQC,KAAK,CAACC,OAAN,CAAcF,CAAd,IAAmBA,CAAnB,GAAuB,CAACA,CAAD,CAA9C,CAAb;AAEA,WAAO,IAAIpC,aAAJ,CACL,KAAKmB,QADA,EAEL,KAAKF,cAFA,EAGL,KAAKc,MAHA,EAILG,UAJK,CAAP;AAMD;;AAMDK,EAAAA,WAAW,GAAW;AACpB,WAAOC,MAAM,CAAC,KAAKrB,QAAL,CAAcsB,QAAf,CAAb;AACD;;AAKDC,EAAAA,SAAS,GAAkB;AACzB,WAAO,KAAKX,MAAZ;AACD;;AAKDY,EAAAA,WAAW,GAA2B;AACpC,UAAMC,EAA0B,GAAG,EAAnC;;AACA,SAAK,MAAMC,EAAX,IAAiB,KAAK1B,QAAL,CAAc2B,kBAA/B,EAAoD;AAClDF,MAAAA,EAAE,CAACC,EAAE,CAACE,GAAJ,CAAF,GAAaF,EAAE,CAACG,KAAhB;AACD;;AACD,WAAOJ,EAAP;AACD;;AAMD,4BAA2C;AACzC,WAAO,KAA
KX,SAAL,GAAiB9B,MAAM,CAACC,aAAxB,GAAP;AACD;;AAlJuD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetEnvelopeReader} from './parquet-envelope-reader';\nimport {FileMetaData} from '../parquet-thrift';\nimport {ParquetSchema} from '../schema/schema';\nimport {ParquetCursor} from './parquet-cursor';\nimport {PARQUET_VERSION} from '../../constants';\nimport {decodeSchema} from './decoders';\n\n/**\n * A parquet reader allows retrieving the rows from a parquet file in order.\n * The basic usage is to create a reader and then retrieve a cursor/iterator\n * which allows you to consume row after row until all rows have been read. It is\n * important that you call close() after you are finished reading the file to\n * avoid leaking file descriptors.\n */\nexport class ParquetReader<T> implements AsyncIterable<T> {\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openBlob<T>(blob: Blob): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => {\n const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();\n return Buffer.from(arrayBuffer);\n };\n const closeFn = async () => {};\n const size = blob.size;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => Buffer.from(arrayBuffer, start, length);\n const closeFn = async () => {};\n const size = arrayBuffer.byteLength;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n static async openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n public metadata: FileMetaData;\n public envelopeReader: ParquetEnvelopeReader;\n public schema: ParquetSchema;\n\n /**\n * Create a new parquet reader from the file metadata and an envelope reader.\n * It is not recommended to call this constructor directly except for advanced\n * and internal use cases. Consider using one of the open{File,Buffer} methods\n * instead\n */\n constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader) {\n if (metadata.version !== PARQUET_VERSION) {\n throw new Error('invalid parquet version');\n }\n\n this.metadata = metadata;\n this.envelopeReader = envelopeReader;\n const root = this.metadata.schema[0];\n const {schema} = decodeSchema(this.metadata.schema, 1, root.num_children!);\n this.schema = new ParquetSchema(schema);\n }\n\n /**\n * Close this parquet reader. 
You MUST call this method once you're finished\n * reading rows\n */\n async close(): Promise<void> {\n await this.envelopeReader.close();\n // this.envelopeReader = null;\n // this.metadata = null;\n }\n\n /**\n * Return a cursor to the file. You may open more than one cursor and use\n * them concurrently. All cursors become invalid once close() is called on\n * the reader object.\n *\n * The required_columns parameter controls which columns are actually read\n * from disk. An empty array or no value implies all columns. A list of column\n * names means that only those columns should be loaded from disk.\n */\n getCursor(): ParquetCursor<T>;\n // @ts-ignore\n getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;\n getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;\n getCursor(columnList?: (string | string[])[]): ParquetCursor<Partial<T>> {\n if (!columnList) {\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = [];\n }\n\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));\n\n return new ParquetCursor<T>(\n this.metadata,\n this.envelopeReader,\n this.schema,\n columnList as string[][]\n );\n }\n\n /**\n * Return the number of rows in this file. Note that the number of rows is\n * not neccessarily equal to the number of rows in each column.\n */\n getRowCount(): number {\n return Number(this.metadata.num_rows);\n }\n\n /**\n * Returns the ParquetSchema for this file\n */\n getSchema(): ParquetSchema {\n return this.schema;\n }\n\n /**\n * Returns the user (key/value) metadata for this file\n */\n getMetadata(): Record<string, string> {\n const md: Record<string, string> = {};\n for (const kv of this.metadata.key_value_metadata!) {\n md[kv.key] = kv.value!;\n }\n return md;\n }\n\n /**\n * Implement AsyncIterable\n */\n // tslint:disable-next-line:function-name\n [Symbol.asyncIterator](): AsyncIterator<T> {\n return this.getCursor()[Symbol.asyncIterator]();\n }\n}\n"],"file":"parquet-reader.js"}
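The sourcesContent embedded in the map above documents the new ParquetReader surface: static open* factories (openBlob/openArrayBuffer/openBuffer) that validate the header and read the footer metadata before returning a reader, and AsyncIterable row access via getCursor(). A minimal usage sketch; the import path and row type are hypothetical, only the ParquetReader API itself comes from the diff:

```ts
import {ParquetReader} from './parquetjs/parser/parquet-reader';

interface Row {
  id: number;
  name: string;
}

async function readAllRows(data: ArrayBuffer): Promise<Row[]> {
  // openArrayBuffer wraps the bytes in an envelope reader and reads the
  // header + footer metadata before returning a ParquetReader.
  const reader = await ParquetReader.openArrayBuffer<Row>(data);
  try {
    const rows: Row[] = [];
    // ParquetReader implements AsyncIterable through getCursor(), so
    // for-await consumes the file row by row.
    for await (const row of reader) {
      rows.push(row);
    }
    return rows;
  } finally {
    // Per the class docs, close() must be called once reading is finished.
    await reader.close();
  }
}
```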
@@ -0,0 +1,80 @@
+import Int64 from 'node-int64';
+import type { PageHeader } from '../parquet-thrift';
+export declare type ParquetCodec = 'PLAIN' | 'RLE' | 'PLAIN_DICTIONARY';
+export declare type ParquetCompression = 'UNCOMPRESSED' | 'GZIP' | 'SNAPPY' | 'LZO' | 'BROTLI' | 'LZ4' | 'LZ4_RAW' | 'ZSTD';
+export declare type RepetitionType = 'REQUIRED' | 'OPTIONAL' | 'REPEATED';
+export declare type ParquetType = PrimitiveType | OriginalType;
+/**
+ * Physical type
+ */
+export declare type PrimitiveType = 'BOOLEAN' | 'INT32' | 'INT64' | 'INT96' | 'FLOAT' | 'DOUBLE' | 'BYTE_ARRAY' | 'FIXED_LEN_BYTE_ARRAY';
+/**
+ * Logical type
+ */
+export declare type OriginalType = 'UTF8' | 'DECIMAL_INT32' | 'DECIMAL_INT64' | 'DECIMAL_BYTE_ARRAY' | 'DECIMAL_FIXED_LEN_BYTE_ARRAY' | 'DATE' | 'TIME_MILLIS' | 'TIME_MICROS' | 'TIMESTAMP_MILLIS' | 'TIMESTAMP_MICROS' | 'UINT_8' | 'UINT_16' | 'UINT_32' | 'UINT_64' | 'INT_8' | 'INT_16' | 'INT_32' | 'INT_64' | 'JSON' | 'BSON' | 'INTERVAL';
+export declare type ParquetDictionary = string[];
+export interface SchemaDefinition {
+    [string: string]: FieldDefinition;
+}
+export interface FieldDefinition {
+    type?: ParquetType;
+    typeLength?: number;
+    presision?: number;
+    scale?: number;
+    encoding?: ParquetCodec;
+    compression?: ParquetCompression;
+    optional?: boolean;
+    repeated?: boolean;
+    fields?: SchemaDefinition;
+}
+export interface ParquetField {
+    name: string;
+    path: string[];
+    key: string;
+    primitiveType?: PrimitiveType;
+    originalType?: OriginalType;
+    repetitionType: RepetitionType;
+    typeLength?: number;
+    presision?: number;
+    scale?: number;
+    encoding?: ParquetCodec;
+    compression?: ParquetCompression;
+    rLevelMax: number;
+    dLevelMax: number;
+    isNested?: boolean;
+    fieldCount?: number;
+    fields?: Record<string, ParquetField>;
+}
+export interface ParquetOptions {
+    type: ParquetType;
+    rLevelMax: number;
+    dLevelMax: number;
+    compression: ParquetCompression;
+    column: ParquetField;
+    numValues?: Int64;
+    dictionary?: ParquetDictionary;
+}
+export interface ParquetData {
+    dlevels: number[];
+    rlevels: number[];
+    values: any[];
+    count: number;
+    pageHeaders: PageHeader[];
+}
+export interface ParquetPageData {
+    dlevels: number[];
+    rlevels: number[];
+    values: any[];
+    count: number;
+    dictionary?: ParquetDictionary;
+    pageHeader: PageHeader;
+}
+export interface ParquetRecord {
+    [key: string]: any;
+}
+export declare class ParquetBuffer {
+    rowCount: number;
+    columnData: Record<string, ParquetData>;
+    constructor(rowCount?: number, columnData?: Record<string, ParquetData>);
+}
+//# sourceMappingURL=declare.d.ts.map
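The declare.d.ts hunk above defines how schemas are described: a SchemaDefinition maps field names to FieldDefinition entries carrying the physical/logical type, codec, compression, optionality, and nesting. A sketch of a schema built against these types; the field names and import path are illustrative, the option names come straight from FieldDefinition:

```ts
import type {SchemaDefinition} from './parquetjs/schema/declare';

// Hypothetical catalog schema exercising the FieldDefinition options.
const catalogSchema: SchemaDefinition = {
  name: {type: 'UTF8', compression: 'GZIP'},
  price: {type: 'DOUBLE'},
  inStock: {type: 'BOOLEAN', optional: true},
  // `repeated` plus nested `fields` expresses a list of structs.
  stock: {
    repeated: true,
    fields: {
      warehouse: {type: 'UTF8'},
      quantity: {type: 'INT64'}
    }
  }
};
```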
@@ -0,0 +1 @@
+
{"version":3,"file":"declare.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/declare.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,MAAM,YAAY,CAAC;AAC/B,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAElD,oBAAY,YAAY,GAAG,OAAO,GAAG,KAAK,GAAG,kBAAkB,CAAC;AAChE,oBAAY,kBAAkB,GAC1B,cAAc,GACd,MAAM,GACN,QAAQ,GACR,KAAK,GACL,QAAQ,GACR,KAAK,GACL,SAAS,GACT,MAAM,CAAC;AACX,oBAAY,cAAc,GAAG,UAAU,GAAG,UAAU,GAAG,UAAU,CAAC;AAClE,oBAAY,WAAW,GAAG,aAAa,GAAG,YAAY,CAAC;AAEvD;;GAEG;AACH,oBAAY,aAAa,GAErB,SAAS,GACT,OAAO,GACP,OAAO,GACP,OAAO,GACP,OAAO,GACP,QAAQ,GACR,YAAY,GACZ,sBAAsB,CAAC;AAE3B;;GAEG;AACH,oBAAY,YAAY,GAEpB,MAAM,GAMN,eAAe,GACf,eAAe,GACf,oBAAoB,GACpB,8BAA8B,GAC9B,MAAM,GACN,aAAa,GACb,aAAa,GACb,kBAAkB,GAClB,kBAAkB,GAClB,QAAQ,GACR,SAAS,GACT,SAAS,GACT,SAAS,GACT,OAAO,GACP,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,MAAM,GACN,MAAM,GACN,UAAU,CAAC;AAEf,oBAAY,iBAAiB,GAAG,MAAM,EAAE,CAAC;AAEzC,MAAM,WAAW,gBAAgB;IAC/B,CAAC,MAAM,EAAE,MAAM,GAAG,eAAe,CAAC;CACnC;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,CAAC,EAAE,WAAW,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,MAAM,CAAC,EAAE,gBAAgB,CAAC;CAC3B;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,GAAG,EAAE,MAAM,CAAC;IACZ,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,cAAc,EAAE,cAAc,CAAC;IAC/B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;CACvC;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,WAAW,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,kBAAkB,CAAC;IAChC,MAAM,EAAE,YAAY,CAAC;IACrB,SAAS,CAAC,EAAE,KAAK,CAAC;IAClB,UAAU,CAAC,EAAE,iBAAiB,CAAC;CAChC;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,MAAM,EAAE,GAAG,EAAE,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,UAAU,EAAE,CAAC;CAC3B;AAED,MAAM,WAAW,eAAe;IAC9B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,MAAM,EAAE,GAAG,EAAE,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,iBAAiB,CAAC;IAC/B,UAAU,EAAE,UAAU,CAAC;CACxB;AAED,MAAM,WAAW,aAAa;IAC5B,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED,qBAAa,aAAa;IACxB,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;gBAC5B,QAAQ,GAAE,MAAU,EAAE,UAAU,GAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAM;CAI/E"}
@@ -0,0 +1,26 @@
+import { FieldDefinition, ParquetBuffer, ParquetCompression, ParquetField, ParquetRecord, SchemaDefinition } from './declare';
+/**
+ * A parquet file schema
+ */
+export declare class ParquetSchema {
+    schema: Record<string, FieldDefinition>;
+    fields: Record<string, ParquetField>;
+    fieldList: ParquetField[];
+    /**
+     * Create a new schema from a JSON schema definition
+     */
+    constructor(schema: SchemaDefinition);
+    /**
+     * Retrieve a field definition
+     */
+    findField(path: string | string[]): ParquetField;
+    /**
+     * Retrieve a field definition and all the field's ancestors
+     */
+    findFieldBranch(path: string | string[]): ParquetField[];
+    shredRecord(record: ParquetRecord, buffer: ParquetBuffer): void;
+    materializeRecords(buffer: ParquetBuffer): ParquetRecord[];
+    compress(type: ParquetCompression): this;
+    buffer(): ParquetBuffer;
+}
+//# sourceMappingURL=schema.d.ts.map
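ParquetSchema wraps a SchemaDefinition and exposes per-column lookup (findField accepts a string or string[] path) along with the rLevelMax/dLevelMax bookkeeping each column needs for shredding. A hedged sketch of that surface, again assuming the import path:

```ts
import {ParquetSchema} from './parquetjs/schema/schema';

const schema = new ParquetSchema({
  id: {type: 'INT64'},
  meta: {fields: {tag: {type: 'UTF8', optional: true}}}
});

// Nested columns resolve through the parent field's path.
const tag = schema.findField(['meta', 'tag']);
console.log(tag.repetitionType, tag.rLevelMax, tag.dLevelMax);

// compress() returns `this`, so switching every column's codec chains.
schema.compress('SNAPPY');
```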
@@ -0,0 +1 @@
+
{"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/schema.ts"],"names":[],"mappings":"AAIA,OAAO,EACL,eAAe,EACf,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,aAAa,EAEb,gBAAgB,EACjB,MAAM,WAAW,CAAC;AAInB;;GAEG;AACH,qBAAa,aAAa;IACjB,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;IACrC,SAAS,EAAE,YAAY,EAAE,CAAC;IAEjC;;OAEG;gBACS,MAAM,EAAE,gBAAgB;IAMpC;;OAEG;IACH,SAAS,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,YAAY;IAiBhD;;OAEG;IACH,eAAe,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,YAAY,EAAE;IAgBxD,WAAW,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,aAAa,GAAG,IAAI;IAI/D,kBAAkB,CAAC,MAAM,EAAE,aAAa,GAAG,aAAa,EAAE;IAI1D,QAAQ,CAAC,IAAI,EAAE,kBAAkB,GAAG,IAAI;IAMxC,MAAM,IAAI,aAAa;CAGxB"}
@@ -0,0 +1,48 @@
+import { ParquetBuffer, ParquetRecord } from './declare';
+import { ParquetSchema } from './schema';
+export { ParquetBuffer };
+export declare function shredBuffer(schema: ParquetSchema): ParquetBuffer;
+/**
+ * 'Shred' a record into a list of <value, repetition_level, definition_level>
+ * tuples per column using the Google Dremel Algorithm..
+ *
+ * The buffer argument must point to an object into which the shredded record
+ * will be returned. You may re-use the buffer for repeated calls to this function
+ * to append to an existing buffer, as long as the schema is unchanged.
+ *
+ * The format in which the shredded records will be stored in the buffer is as
+ * follows:
+ *
+ *   buffer = {
+ *     columnData: [
+ *       'my_col': {
+ *         dlevels: [d1, d2, .. dN],
+ *         rlevels: [r1, r2, .. rN],
+ *         values: [v1, v2, .. vN],
+ *       }, ...
+ *     ],
+ *     rowCount: X,
+ *   }
+ */
+export declare function shredRecord(schema: ParquetSchema, record: any, buffer: ParquetBuffer): void;
+/**
+ * 'Materialize' a list of <value, repetition_level, definition_level>
+ * tuples back to nested records (objects/arrays) using the Google Dremel
+ * Algorithm..
+ *
+ * The buffer argument must point to an object with the following structure (i.e.
+ * the same structure that is returned by shredRecords):
+ *
+ *   buffer = {
+ *     columnData: [
+ *       'my_col': {
+ *         dlevels: [d1, d2, .. dN],
+ *         rlevels: [r1, r2, .. rN],
+ *         values: [v1, v2, .. vN],
+ *       }, ...
+ *     ],
+ *     rowCount: X,
+ *   }
+ */
+export declare function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[];
+//# sourceMappingURL=shred.d.ts.map
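shredRecord and materializeRecords round-trip records through the per-column <value, rlevel, dlevel> layout described in the doc comments above. A sketch under the same assumptions as the earlier examples; import paths and record values are illustrative, and the value types each column accepts are not shown in this hunk:

```ts
import {ParquetSchema} from './parquetjs/schema/schema';
import {shredBuffer, shredRecord, materializeRecords} from './parquetjs/schema/shred';

const schema = new ParquetSchema({
  id: {type: 'INT64'},
  name: {type: 'UTF8'}
});

// shredBuffer allocates an empty ParquetBuffer for the schema; repeated
// shredRecord calls append to it as long as the schema stays the same.
const buffer = shredBuffer(schema);
shredRecord(schema, {id: 1, name: 'alpha'}, buffer);
shredRecord(schema, {id: 2, name: 'beta'}, buffer);

// materializeRecords rebuilds nested records from the per-column
// <value, rlevel, dlevel> tuples.
const records = materializeRecords(schema, buffer);
console.log(buffer.rowCount, records.length); // 2 2
```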
@@ -0,0 +1 @@
+
{"version":3,"file":"shred.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/shred.ts"],"names":[],"mappings":"AAEA,OAAO,EAAC,aAAa,EAA6B,aAAa,EAAC,MAAM,WAAW,CAAC;AAClF,OAAO,EAAC,aAAa,EAAC,MAAM,UAAU,CAAC;AAGvC,OAAO,EAAC,aAAa,EAAC,CAAC;AAEvB,wBAAgB,WAAW,CAAC,MAAM,EAAE,aAAa,GAAG,aAAa,CAYhE;AAED;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAgB,WAAW,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,aAAa,GAAG,IAAI,CAmB3F;AAgED;;;;;;;;;;;;;;;;;;GAkBG;AACH,wBAAgB,kBAAkB,CAAC,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,aAAa,GAAG,aAAa,EAAE,CAOhG"}