@loaders.gl/parquet 4.0.0-alpha.4 → 4.0.0-alpha.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.d.ts +2 -0
- package/dist/bundle.d.ts.map +1 -0
- package/dist/constants.d.ts +15 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/dist.min.js +22 -6
- package/dist/dist.min.js.map +7 -1
- package/dist/index.d.ts +28 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/lib/convert-schema.d.ts +8 -0
- package/dist/lib/convert-schema.d.ts.map +1 -0
- package/dist/lib/parse-parquet.d.ts +4 -0
- package/dist/lib/parse-parquet.d.ts.map +1 -0
- package/dist/lib/read-array-buffer.d.ts +19 -0
- package/dist/lib/read-array-buffer.d.ts.map +1 -0
- package/dist/parquet-loader.d.ts +23 -0
- package/dist/parquet-loader.d.ts.map +1 -0
- package/dist/parquet-loader.js +1 -1
- package/dist/parquet-worker.js +27 -13
- package/dist/parquet-worker.js.map +7 -1
- package/dist/parquet-writer.d.ts +4 -0
- package/dist/parquet-writer.d.ts.map +1 -0
- package/dist/parquet-writer.js +1 -1
- package/dist/parquetjs/codecs/declare.d.ts +17 -0
- package/dist/parquetjs/codecs/declare.d.ts.map +1 -0
- package/dist/parquetjs/codecs/dictionary.d.ts +3 -0
- package/dist/parquetjs/codecs/dictionary.d.ts.map +1 -0
- package/dist/parquetjs/codecs/index.d.ts +5 -0
- package/dist/parquetjs/codecs/index.d.ts.map +1 -0
- package/dist/parquetjs/codecs/plain.d.ts +6 -0
- package/dist/parquetjs/codecs/plain.d.ts.map +1 -0
- package/dist/parquetjs/codecs/rle.d.ts +6 -0
- package/dist/parquetjs/codecs/rle.d.ts.map +1 -0
- package/dist/parquetjs/compression.d.ts +23 -0
- package/dist/parquetjs/compression.d.ts.map +1 -0
- package/dist/parquetjs/compression.js +1 -10
- package/dist/parquetjs/compression.js.map +1 -1
- package/dist/parquetjs/encoder/writer.d.ts +123 -0
- package/dist/parquetjs/encoder/writer.d.ts.map +1 -0
- package/dist/parquetjs/file.d.ts +10 -0
- package/dist/parquetjs/file.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/BoundaryOrder.d.ts +6 -0
- package/dist/parquetjs/parquet-thrift/BoundaryOrder.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/BsonType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/BsonType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnChunk.d.ts +25 -0
- package/dist/parquetjs/parquet-thrift/ColumnChunk.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnIndex.d.ts +22 -0
- package/dist/parquetjs/parquet-thrift/ColumnIndex.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnMetaData.d.ts +42 -0
- package/dist/parquetjs/parquet-thrift/ColumnMetaData.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnOrder.d.ts +13 -0
- package/dist/parquetjs/parquet-thrift/ColumnOrder.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/CompressionCodec.d.ts +11 -0
- package/dist/parquetjs/parquet-thrift/CompressionCodec.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ConvertedType.d.ts +25 -0
- package/dist/parquetjs/parquet-thrift/ConvertedType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/DataPageHeader.d.ts +21 -0
- package/dist/parquetjs/parquet-thrift/DataPageHeader.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts +27 -0
- package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/DateType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/DateType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/DecimalType.d.ts +13 -0
- package/dist/parquetjs/parquet-thrift/DecimalType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.d.ts +16 -0
- package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/Encoding.d.ts +11 -0
- package/dist/parquetjs/parquet-thrift/Encoding.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/EnumType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/EnumType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/FieldRepetitionType.d.ts +6 -0
- package/dist/parquetjs/parquet-thrift/FieldRepetitionType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/FileMetaData.d.ts +28 -0
- package/dist/parquetjs/parquet-thrift/FileMetaData.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/IndexPageHeader.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/IndexPageHeader.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/IntType.d.ts +13 -0
- package/dist/parquetjs/parquet-thrift/IntType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/JsonType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/JsonType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/KeyValue.d.ts +13 -0
- package/dist/parquetjs/parquet-thrift/KeyValue.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/ListType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/ListType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/LogicalType.d.ts +61 -0
- package/dist/parquetjs/parquet-thrift/LogicalType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/MapType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/MapType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/MicroSeconds.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/MicroSeconds.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/MilliSeconds.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/MilliSeconds.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/NullType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/NullType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/OffsetIndex.d.ts +12 -0
- package/dist/parquetjs/parquet-thrift/OffsetIndex.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageEncodingStats.d.ts +17 -0
- package/dist/parquetjs/parquet-thrift/PageEncodingStats.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageHeader.d.ts +30 -0
- package/dist/parquetjs/parquet-thrift/PageHeader.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageLocation.d.ts +16 -0
- package/dist/parquetjs/parquet-thrift/PageLocation.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageType.d.ts +7 -0
- package/dist/parquetjs/parquet-thrift/PageType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/RowGroup.d.ts +20 -0
- package/dist/parquetjs/parquet-thrift/RowGroup.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/SchemaElement.d.ts +33 -0
- package/dist/parquetjs/parquet-thrift/SchemaElement.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/SortingColumn.d.ts +15 -0
- package/dist/parquetjs/parquet-thrift/SortingColumn.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/Statistics.d.ts +23 -0
- package/dist/parquetjs/parquet-thrift/Statistics.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/StringType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/StringType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/TimeType.d.ts +14 -0
- package/dist/parquetjs/parquet-thrift/TimeType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/TimeUnit.d.ts +17 -0
- package/dist/parquetjs/parquet-thrift/TimeUnit.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/TimestampType.d.ts +14 -0
- package/dist/parquetjs/parquet-thrift/TimestampType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/Type.d.ts +11 -0
- package/dist/parquetjs/parquet-thrift/Type.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/UUIDType.d.ts +9 -0
- package/dist/parquetjs/parquet-thrift/UUIDType.d.ts.map +1 -0
- package/dist/parquetjs/parquet-thrift/index.d.ts +44 -0
- package/dist/parquetjs/parquet-thrift/index.d.ts.map +1 -0
- package/dist/parquetjs/parser/decoders.d.ts +34 -0
- package/dist/parquetjs/parser/decoders.d.ts.map +1 -0
- package/dist/parquetjs/parser/parquet-cursor.d.ts +36 -0
- package/dist/parquetjs/parser/parquet-cursor.d.ts.map +1 -0
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +40 -0
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +1 -0
- package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -9
- package/dist/parquetjs/parser/parquet-envelope-reader.js.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.d.ts +68 -0
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -0
- package/dist/parquetjs/parser/parquet-reader.js +0 -13
- package/dist/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/parquetjs/schema/declare.d.ts +80 -0
- package/dist/parquetjs/schema/declare.d.ts.map +1 -0
- package/dist/parquetjs/schema/schema.d.ts +26 -0
- package/dist/parquetjs/schema/schema.d.ts.map +1 -0
- package/dist/parquetjs/schema/shred.d.ts +48 -0
- package/dist/parquetjs/schema/shred.d.ts.map +1 -0
- package/dist/parquetjs/schema/types.d.ts +20 -0
- package/dist/parquetjs/schema/types.d.ts.map +1 -0
- package/dist/parquetjs/utils/buffer-utils.d.ts +10 -0
- package/dist/parquetjs/utils/buffer-utils.d.ts.map +1 -0
- package/dist/parquetjs/utils/file-utils.d.ts +16 -0
- package/dist/parquetjs/utils/file-utils.d.ts.map +1 -0
- package/dist/parquetjs/utils/file-utils.js +0 -45
- package/dist/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/parquetjs/utils/read-utils.d.ts +25 -0
- package/dist/parquetjs/utils/read-utils.d.ts.map +1 -0
- package/dist/workers/parquet-worker.d.ts +2 -0
- package/dist/workers/parquet-worker.d.ts.map +1 -0
- package/package.json +8 -8
- package/src/parquetjs/compression.ts +10 -10
- package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -11
- package/src/parquetjs/parser/parquet-reader.ts +0 -16
- package/src/parquetjs/utils/file-utils.ts +0 -49
package/dist/parquet-writer.js
CHANGED
@@ -1,4 +1,4 @@
-const VERSION = typeof "4.0.0-alpha.4" !== 'undefined' ? "4.0.0-alpha.4" : 'latest';
+const VERSION = typeof "4.0.0-alpha.5" !== 'undefined' ? "4.0.0-alpha.5" : 'latest';
 const DEFAULT_PARQUET_LOADER_OPTIONS = {};
 export const ParquetWriter = {
   name: 'Apache Parquet',
package/dist/parquetjs/codecs/declare.d.ts
ADDED
@@ -0,0 +1,17 @@
+/// <reference types="node" />
+import { PrimitiveType } from '../schema/declare';
+export interface CursorBuffer {
+    buffer: Buffer;
+    offset: number;
+    size?: number;
+}
+export interface ParquetCodecOptions {
+    bitWidth?: number;
+    disableEnvelope?: boolean;
+    typeLength?: number;
+}
+export interface ParquetCodecKit {
+    encodeValues(type: PrimitiveType, values: any[], opts?: ParquetCodecOptions): Buffer;
+    decodeValues(type: PrimitiveType, cursor: CursorBuffer, count: number, opts: ParquetCodecOptions): any[];
+}
+//# sourceMappingURL=declare.d.ts.map
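The CursorBuffer and ParquetCodecKit interfaces above define the contract each codec module (plain, RLE, dictionary) exposes to the reader and writer: encodeValues turns an array of values into a Buffer, and decodeValues reads `count` values starting at cursor.offset and advances the cursor. As a rough sketch only — not the package's actual PLAIN codec — a minimal INT32-only codec satisfying ParquetCodecKit could look like this:

import type {PrimitiveType} from '../schema/declare';
import type {CursorBuffer, ParquetCodecOptions} from './declare';

// Hypothetical codec that only handles 32-bit integers, to illustrate the
// ParquetCodecKit signatures. Real codecs dispatch on `type` and `opts`.
export function encodeValues(type: PrimitiveType, values: any[], opts?: ParquetCodecOptions): Buffer {
  const buffer = Buffer.alloc(4 * values.length);
  values.forEach((value, i) => buffer.writeInt32LE(value, i * 4));
  return buffer;
}

export function decodeValues(
  type: PrimitiveType,
  cursor: CursorBuffer,
  count: number,
  opts: ParquetCodecOptions
): any[] {
  const values: number[] = [];
  for (let i = 0; i < count; i++) {
    values.push(cursor.buffer.readInt32LE(cursor.offset));
    cursor.offset += 4; // the cursor is advanced past the consumed bytes
  }
  return values;
}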
package/dist/parquetjs/codecs/plain.d.ts
ADDED
@@ -0,0 +1,6 @@
+/// <reference types="node" />
+import type { PrimitiveType } from '../schema/declare';
+import type { CursorBuffer, ParquetCodecOptions } from './declare';
+export declare function encodeValues(type: PrimitiveType, values: any[], opts: ParquetCodecOptions): Buffer;
+export declare function decodeValues(type: PrimitiveType, cursor: CursorBuffer, count: number, opts: ParquetCodecOptions): any[];
+//# sourceMappingURL=plain.d.ts.map
package/dist/parquetjs/codecs/rle.d.ts
ADDED
@@ -0,0 +1,6 @@
+/// <reference types="node" />
+import type { PrimitiveType } from '../schema/declare';
+import type { CursorBuffer, ParquetCodecOptions } from './declare';
+export declare function encodeValues(type: PrimitiveType, values: any[], opts: ParquetCodecOptions): Buffer;
+export declare function decodeValues(type: PrimitiveType, cursor: CursorBuffer, count: number, opts: ParquetCodecOptions): number[];
+//# sourceMappingURL=rle.d.ts.map
package/dist/parquetjs/compression.d.ts
ADDED
@@ -0,0 +1,23 @@
+/// <reference types="node" />
+import { Compression } from '@loaders.gl/compression';
+import { ParquetCompression } from './schema/declare';
+export declare const PARQUET_COMPRESSION_METHODS: Record<ParquetCompression, Compression>;
+/**
+ * Register compressions that have big external libraries
+ * @param options.modules External library dependencies
+ */
+export declare function preloadCompressions(options?: {
+    modules: {
+        [key: string]: any;
+    };
+}): Promise<void[]>;
+/**
+ * Deflate a value using compression method `method`
+ */
+export declare function deflate(method: ParquetCompression, value: Buffer): Promise<Buffer>;
+/**
+ * Inflate a value using compression method `method`
+ */
+export declare function decompress(method: ParquetCompression, value: Buffer, size: number): Promise<Buffer>;
+export declare function inflate(method: ParquetCompression, value: Buffer, size: number): Buffer;
+//# sourceMappingURL=compression.d.ts.map
package/dist/parquetjs/compression.js
CHANGED
@@ -1,19 +1,10 @@
 import { NoCompression, GZipCompression, SnappyCompression, BrotliCompression, LZOCompression, LZ4Compression, ZstdCompression } from '@loaders.gl/compression';
 import { toArrayBuffer, toBuffer } from './utils/buffer-utils';
-import brotliDecompress from 'brotli/decompress';
 import lz4js from 'lz4js';
 import lzo from 'lzo';
-import { ZstdCodec } from 'zstd-codec';
 const modules = {
-  brotli: {
-    decompress: brotliDecompress,
-    compress: () => {
-      throw new Error('brotli compress');
-    }
-  },
   lz4js,
-  lzo
-  'zstd-codec': ZstdCodec
+  lzo
 };
 export const PARQUET_COMPRESSION_METHODS = {
   UNCOMPRESSED: new NoCompression(),
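Together with the compression.d.ts declarations above, this source change removes brotli and zstd-codec from the statically injected `modules`, so only lz4js and lzo are bundled by default; the remaining codecs are wired through @loaders.gl/compression. A hedged usage sketch of the declared API (the import path points at a dist-internal module and the sample data is illustrative, not taken from the package):

import {preloadCompressions, deflate, decompress} from '@loaders.gl/parquet/dist/parquetjs/compression';

async function gzipRoundTrip(): Promise<void> {
  // Registers codecs that rely on large external libraries (see the JSDoc above).
  await preloadCompressions({modules: {}});

  const input = Buffer.from('hello parquet');
  // `method` is a ParquetCompression key such as 'GZIP', 'SNAPPY' or 'LZ4'.
  const compressed = await deflate('GZIP', input);
  const restored = await decompress('GZIP', compressed, input.length);
  console.log(restored.toString()); // 'hello parquet'
}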
package/dist/parquetjs/encoder/writer.d.ts
ADDED
@@ -0,0 +1,123 @@
+/// <reference types="node" />
+import { Transform, Writable } from 'stream';
+import { ParquetBuffer } from '../schema/declare';
+import { ParquetSchema } from '../schema/schema';
+import { RowGroup } from '../parquet-thrift';
+export interface ParquetWriterOptions {
+    baseOffset?: number;
+    rowGroupSize?: number;
+    pageSize?: number;
+    useDataPageV2?: boolean;
+    flags?: string;
+    encoding?: string;
+    fd?: number;
+    mode?: number;
+    autoClose?: boolean;
+    start?: number;
+}
+/**
+ * Write a parquet file to an output stream. The ParquetWriter will perform
+ * buffering/batching for performance, so close() must be called after all rows
+ * are written.
+ */
+export declare class ParquetWriter<T> {
+    /**
+     * Convenience method to create a new buffered parquet writer that writes to
+     * the specified file
+     */
+    static openFile<T>(schema: ParquetSchema, path: string, opts?: ParquetWriterOptions): Promise<ParquetWriter<T>>;
+    /**
+     * Convenience method to create a new buffered parquet writer that writes to
+     * the specified stream
+     */
+    static openStream<T>(schema: ParquetSchema, outputStream: Writable, opts?: ParquetWriterOptions): Promise<ParquetWriter<T>>;
+    schema: ParquetSchema;
+    envelopeWriter: ParquetEnvelopeWriter;
+    rowBuffer: ParquetBuffer;
+    rowGroupSize: number;
+    closed: boolean;
+    userMetadata: Record<string, string>;
+    /**
+     * Create a new buffered parquet writer for a given envelope writer
+     */
+    constructor(schema: ParquetSchema, envelopeWriter: ParquetEnvelopeWriter, opts: ParquetWriterOptions);
+    writeHeader(): Promise<void>;
+    /**
+     * Append a single row to the parquet file. Rows are buffered in memory until
+     * rowGroupSize rows are in the buffer or close() is called
+     */
+    appendRow<T>(row: T): Promise<void>;
+    /**
+     * Finish writing the parquet file and commit the footer to disk. This method
+     * MUST be called after you are finished adding rows. You must not call this
+     * method twice on the same object or add any rows after the close() method has
+     * been called
+     */
+    close(callback?: () => void): Promise<void>;
+    /**
+     * Add key<>value metadata to the file
+     */
+    setMetadata(key: string, value: string): void;
+    /**
+     * Set the parquet row group size. This values controls the maximum number
+     * of rows that are buffered in memory at any given time as well as the number
+     * of rows that are co-located on disk. A higher value is generally better for
+     * read-time I/O performance at the tradeoff of write-time memory usage.
+     */
+    setRowGroupSize(cnt: number): void;
+    /**
+     * Set the parquet data page size. The data page size controls the maximum
+     * number of column values that are written to disk as a consecutive array
+     */
+    setPageSize(cnt: number): void;
+}
+/**
+ * Create a parquet file from a schema and a number of row groups. This class
+ * performs direct, unbuffered writes to the underlying output stream and is
+ * intendend for advanced and internal users; the writeXXX methods must be
+ * called in the correct order to produce a valid file.
+ */
+export declare class ParquetEnvelopeWriter {
+    /**
+     * Create a new parquet envelope writer that writes to the specified stream
+     */
+    static openStream(schema: ParquetSchema, outputStream: Writable, opts: ParquetWriterOptions): Promise<ParquetEnvelopeWriter>;
+    schema: ParquetSchema;
+    write: (buf: Buffer) => Promise<void>;
+    close: () => Promise<void>;
+    offset: number;
+    rowCount: number;
+    rowGroups: RowGroup[];
+    pageSize: number;
+    useDataPageV2: boolean;
+    constructor(schema: ParquetSchema, writeFn: (buf: Buffer) => Promise<void>, closeFn: () => Promise<void>, fileOffset: number, opts: ParquetWriterOptions);
+    writeSection(buf: Buffer): Promise<void>;
+    /**
+     * Encode the parquet file header
+     */
+    writeHeader(): Promise<void>;
+    /**
+     * Encode a parquet row group. The records object should be created using the
+     * shredRecord method
+     */
+    writeRowGroup(records: ParquetBuffer): Promise<void>;
+    /**
+     * Write the parquet file footer
+     */
+    writeFooter(userMetadata: Record<string, string>): Promise<void>;
+    /**
+     * Set the parquet data page size. The data page size controls the maximum
+     * number of column values that are written to disk as a consecutive array
+     */
+    setPageSize(cnt: number): void;
+}
+/**
+ * Create a parquet transform stream
+ */
+export declare class ParquetTransformer<T> extends Transform {
+    writer: ParquetWriter<T>;
+    constructor(schema: ParquetSchema, opts?: ParquetWriterOptions);
+    _transform(row: any, encoding: string, callback: (val?: any) => void): Promise<void>;
+    _flush(callback: (val?: any) => void): Promise<void>;
+}
+//# sourceMappingURL=writer.d.ts.map
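The JSDoc in writer.d.ts spells out the intended lifecycle: open a writer, appendRow() until all rows are buffered, then call close() exactly once. A sketch under assumptions — the field-definition object passed to ParquetSchema follows the parquetjs convention and the output path is illustrative; the ParquetSchema constructor itself is declared in schema/schema.d.ts, which is not shown in this extract:

import {ParquetSchema} from './schema/schema';
import {ParquetWriter} from './encoder/writer';

type Row = {name: string; count: number};

async function writeExample(): Promise<void> {
  // Assumed parquetjs-style schema definition; see schema.d.ts for the real signature.
  const schema = new ParquetSchema({
    name: {type: 'UTF8'},
    count: {type: 'INT64'}
  });

  const writer = await ParquetWriter.openFile<Row>(schema, '/tmp/example.parquet');
  writer.setRowGroupSize(4096); // rows buffered in memory before a row group is flushed

  await writer.appendRow({name: 'a', count: 1});
  await writer.appendRow({name: 'b', count: 2});

  // Must be called once, after the last appendRow(), per the close() JSDoc above.
  await writer.close();
}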
package/dist/parquetjs/file.d.ts
ADDED
@@ -0,0 +1,10 @@
+/// <reference types="node" />
+import fs from 'fs';
+export declare function fopen(filePath: any): Promise<unknown>;
+export declare function fstat(filePath: any): Promise<fs.Stats>;
+export declare function fread(fd: any, position: any, length: any): Promise<unknown>;
+export declare function fclose(fd: any): Promise<unknown>;
+export declare function oswrite(os: any, buf: any): Promise<void>;
+export declare function osclose(os: any): Promise<void>;
+export declare function osopen(path: any, opts: any): Promise<unknown>;
+//# sourceMappingURL=file.d.ts.map
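file.d.ts exposes thin promise wrappers over Node's callback-style fs functions. The declarations are loosely typed (any parameters, Promise<unknown> results), so the sketch below has to cast; the assumption that fopen resolves to a numeric file descriptor and fread to a Buffer mirrors the usual parquetjs implementation but is not guaranteed by these signatures:

import {fopen, fstat, fread, fclose} from './file';

// Read the last `length` bytes of a file, e.g. to grab the parquet footer.
async function readTail(path: string, length: number): Promise<Buffer> {
  const stat = await fstat(path);                                       // fs.Stats per the declaration
  const fd = (await fopen(path)) as number;                             // assumed: resolves to a file descriptor
  const buf = (await fread(fd, stat.size - length, length)) as Buffer;  // assumed: resolves to a Buffer
  await fclose(fd);
  return buf;
}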
package/dist/parquetjs/parquet-thrift/ColumnChunk.d.ts
ADDED
@@ -0,0 +1,25 @@
+import Int64 from 'node-int64';
+import * as thrift from 'thrift';
+import * as ColumnMetaData from './ColumnMetaData';
+export interface IColumnChunkArgs {
+    file_path?: string;
+    file_offset: number | Int64;
+    meta_data?: ColumnMetaData.ColumnMetaData;
+    offset_index_offset?: number | Int64;
+    offset_index_length?: number;
+    column_index_offset?: number | Int64;
+    column_index_length?: number;
+}
+export declare class ColumnChunk {
+    file_path?: string;
+    file_offset: Int64;
+    meta_data?: ColumnMetaData.ColumnMetaData;
+    offset_index_offset?: Int64;
+    offset_index_length?: number;
+    column_index_offset?: Int64;
+    column_index_length?: number;
+    constructor(args: IColumnChunkArgs);
+    write(output: thrift.TProtocol): void;
+    static read(input: thrift.TProtocol): ColumnChunk;
+}
+//# sourceMappingURL=ColumnChunk.d.ts.map
package/dist/parquetjs/parquet-thrift/ColumnIndex.d.ts
ADDED
@@ -0,0 +1,22 @@
+/// <reference types="node" />
+import Int64 from 'node-int64';
+import * as thrift from 'thrift';
+import * as BoundaryOrder from './BoundaryOrder';
+export interface IColumnIndexArgs {
+    null_pages: Array<boolean>;
+    min_values: Array<Buffer>;
+    max_values: Array<Buffer>;
+    boundary_order: BoundaryOrder.BoundaryOrder;
+    null_counts?: Array<number | Int64>;
+}
+export declare class ColumnIndex {
+    null_pages: Array<boolean>;
+    min_values: Array<Buffer>;
+    max_values: Array<Buffer>;
+    boundary_order: BoundaryOrder.BoundaryOrder;
+    null_counts?: Array<Int64>;
+    constructor(args: IColumnIndexArgs);
+    write(output: thrift.TProtocol): void;
+    static read(input: thrift.TProtocol): ColumnIndex;
+}
+//# sourceMappingURL=ColumnIndex.d.ts.map
package/dist/parquetjs/parquet-thrift/ColumnMetaData.d.ts
ADDED
@@ -0,0 +1,42 @@
+import Int64 from 'node-int64';
+import * as thrift from 'thrift';
+import * as CompressionCodec from './CompressionCodec';
+import * as Encoding from './Encoding';
+import * as KeyValue from './KeyValue';
+import * as PageEncodingStats from './PageEncodingStats';
+import * as Statistics from './Statistics';
+import * as Type from './Type';
+export interface IColumnMetaDataArgs {
+    type: Type.Type;
+    encodings: Array<Encoding.Encoding>;
+    path_in_schema: Array<string>;
+    codec: CompressionCodec.CompressionCodec;
+    num_values: number | Int64;
+    total_uncompressed_size: number | Int64;
+    total_compressed_size: number | Int64;
+    key_value_metadata?: Array<KeyValue.KeyValue>;
+    data_page_offset: number | Int64;
+    index_page_offset?: number | Int64;
+    dictionary_page_offset?: number | Int64;
+    statistics?: Statistics.Statistics;
+    encoding_stats?: Array<PageEncodingStats.PageEncodingStats>;
+}
+export declare class ColumnMetaData {
+    type: Type.Type;
+    encodings: Array<Encoding.Encoding>;
+    path_in_schema: Array<string>;
+    codec: CompressionCodec.CompressionCodec;
+    num_values: Int64;
+    total_uncompressed_size: Int64;
+    total_compressed_size: Int64;
+    key_value_metadata?: Array<KeyValue.KeyValue>;
+    data_page_offset: Int64;
+    index_page_offset?: Int64;
+    dictionary_page_offset?: Int64;
+    statistics?: Statistics.Statistics;
+    encoding_stats?: Array<PageEncodingStats.PageEncodingStats>;
+    constructor(args: IColumnMetaDataArgs);
+    write(output: thrift.TProtocol): void;
+    static read(input: thrift.TProtocol): ColumnMetaData;
+}
+//# sourceMappingURL=ColumnMetaData.d.ts.map
package/dist/parquetjs/parquet-thrift/ColumnOrder.d.ts
ADDED
@@ -0,0 +1,13 @@
+import * as thrift from 'thrift';
+import * as TypeDefinedOrder from './TypeDefinedOrder';
+export interface IColumnOrderArgs {
+    TYPE_ORDER?: TypeDefinedOrder.TypeDefinedOrder;
+}
+export declare class ColumnOrder {
+    TYPE_ORDER?: TypeDefinedOrder.TypeDefinedOrder;
+    constructor(args?: IColumnOrderArgs);
+    static fromTYPE_ORDER(TYPE_ORDER: TypeDefinedOrder.TypeDefinedOrder): ColumnOrder;
+    write(output: thrift.TProtocol): void;
+    static read(input: thrift.TProtocol): ColumnOrder;
+}
+//# sourceMappingURL=ColumnOrder.d.ts.map
package/dist/parquetjs/parquet-thrift/ConvertedType.d.ts
ADDED
@@ -0,0 +1,25 @@
+export declare enum ConvertedType {
+    UTF8 = 0,
+    MAP = 1,
+    MAP_KEY_VALUE = 2,
+    LIST = 3,
+    ENUM = 4,
+    DECIMAL = 5,
+    DATE = 6,
+    TIME_MILLIS = 7,
+    TIME_MICROS = 8,
+    TIMESTAMP_MILLIS = 9,
+    TIMESTAMP_MICROS = 10,
+    UINT_8 = 11,
+    UINT_16 = 12,
+    UINT_32 = 13,
+    UINT_64 = 14,
+    INT_8 = 15,
+    INT_16 = 16,
+    INT_32 = 17,
+    INT_64 = 18,
+    JSON = 19,
+    BSON = 20,
+    INTERVAL = 21
+}
+//# sourceMappingURL=ConvertedType.d.ts.map
package/dist/parquetjs/parquet-thrift/DataPageHeader.d.ts
ADDED
@@ -0,0 +1,21 @@
+import * as thrift from 'thrift';
+import * as Encoding from './Encoding';
+import * as Statistics from './Statistics';
+export interface IDataPageHeaderArgs {
+    num_values: number;
+    encoding: Encoding.Encoding;
+    definition_level_encoding: Encoding.Encoding;
+    repetition_level_encoding: Encoding.Encoding;
+    statistics?: Statistics.Statistics;
+}
+export declare class DataPageHeader {
+    num_values: number;
+    encoding: Encoding.Encoding;
+    definition_level_encoding: Encoding.Encoding;
+    repetition_level_encoding: Encoding.Encoding;
+    statistics?: Statistics.Statistics;
+    constructor(args: IDataPageHeaderArgs);
+    write(output: thrift.TProtocol): void;
+    static read(input: thrift.TProtocol): DataPageHeader;
+}
+//# sourceMappingURL=DataPageHeader.d.ts.map
package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts
ADDED
@@ -0,0 +1,27 @@
+import * as thrift from 'thrift';
+import * as Encoding from './Encoding';
+import * as Statistics from './Statistics';
+export interface IDataPageHeaderV2Args {
+    num_values: number;
+    num_nulls: number;
+    num_rows: number;
+    encoding: Encoding.Encoding;
+    definition_levels_byte_length: number;
+    repetition_levels_byte_length: number;
+    is_compressed?: boolean;
+    statistics?: Statistics.Statistics;
+}
+export declare class DataPageHeaderV2 {
+    num_values: number;
+    num_nulls: number;
+    num_rows: number;
+    encoding: Encoding.Encoding;
+    definition_levels_byte_length: number;
+    repetition_levels_byte_length: number;
+    is_compressed?: boolean;
+    statistics?: Statistics.Statistics;
+    constructor(args: IDataPageHeaderV2Args);
+    write(output: thrift.TProtocol): void;
+    static read(input: thrift.TProtocol): DataPageHeaderV2;
+}
+//# sourceMappingURL=DataPageHeaderV2.d.ts.map
package/dist/parquetjs/parquet-thrift/DecimalType.d.ts
ADDED
@@ -0,0 +1,13 @@
+import * as thrift from 'thrift';
+export interface IDecimalTypeArgs {
+    scale: number;
+    precision: number;
+}
+export declare class DecimalType {
+    scale: number;
+    precision: number;
+    constructor(args: IDecimalTypeArgs);
+    write(output: thrift.TProtocol): void;
+    static read(input: thrift.TProtocol): DecimalType;
+}
+//# sourceMappingURL=DecimalType.d.ts.map
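All of the parquet-thrift classes above (ColumnChunk, ColumnIndex, ColumnMetaData, DataPageHeader, DecimalType, …) follow the same generated pattern: construct from an args interface, call write(protocol) to serialize, and use the static read(protocol) to parse. The sketch below shows the conventional way such a struct is turned into bytes with the thrift npm package's TBufferedTransport and TCompactProtocol, as done in parquetjs-style code; it is an illustration, not code from this diff:

import * as thrift from 'thrift';
import {DecimalType} from './parquet-thrift/DecimalType';

// Serialize any generated thrift struct (anything with write(TProtocol)) into a Buffer.
function serializeThrift(obj: {write(output: thrift.TProtocol): void}): Buffer {
  const chunks: Buffer[] = [];
  const transport = new thrift.TBufferedTransport(undefined, (buf?: Buffer) => {
    if (buf) chunks.push(buf);
  });
  const protocol = new thrift.TCompactProtocol(transport);
  obj.write(protocol);
  transport.flush();
  return Buffer.concat(chunks);
}

const decimal = new DecimalType({scale: 2, precision: 10});
const bytes = serializeThrift(decimal); // thrift-compact-encoded DecimalType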