@loaders.gl/parquet 3.3.0 → 3.4.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. package/dist/dist.min.js +26 -17
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/index.js +3 -3
  4. package/dist/es5/index.js.map +1 -1
  5. package/dist/es5/lib/parse-parquet.js +25 -49
  6. package/dist/es5/lib/parse-parquet.js.map +1 -1
  7. package/dist/es5/parquet-loader.js +2 -3
  8. package/dist/es5/parquet-loader.js.map +1 -1
  9. package/dist/es5/parquet-wasm-loader.js +1 -1
  10. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  11. package/dist/es5/parquet-wasm-writer.js +1 -1
  12. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  13. package/dist/es5/parquet-writer.js +1 -1
  14. package/dist/es5/parquet-writer.js.map +1 -1
  15. package/dist/es5/parquetjs/compression.js +5 -15
  16. package/dist/es5/parquetjs/compression.js.map +1 -1
  17. package/dist/es5/parquetjs/encoder/{parquet-encoder.js → writer.js} +158 -70
  18. package/dist/es5/parquetjs/encoder/writer.js.map +1 -0
  19. package/dist/es5/parquetjs/file.js +94 -0
  20. package/dist/es5/parquetjs/file.js.map +1 -0
  21. package/dist/es5/parquetjs/parser/parquet-cursor.js +183 -0
  22. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +1 -0
  23. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +327 -0
  24. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
  25. package/dist/es5/parquetjs/parser/parquet-reader.js +222 -553
  26. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  27. package/dist/es5/parquetjs/schema/declare.js +1 -3
  28. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  29. package/dist/es5/parquetjs/schema/shred.js +33 -39
  30. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  31. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  32. package/dist/es5/parquetjs/utils/buffer-utils.js +19 -0
  33. package/dist/es5/parquetjs/utils/buffer-utils.js.map +1 -0
  34. package/dist/es5/parquetjs/utils/file-utils.js +3 -2
  35. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  36. package/dist/esm/index.js +1 -1
  37. package/dist/esm/index.js.map +1 -1
  38. package/dist/esm/lib/parse-parquet.js +12 -6
  39. package/dist/esm/lib/parse-parquet.js.map +1 -1
  40. package/dist/esm/parquet-loader.js +2 -3
  41. package/dist/esm/parquet-loader.js.map +1 -1
  42. package/dist/esm/parquet-wasm-loader.js +1 -1
  43. package/dist/esm/parquet-wasm-loader.js.map +1 -1
  44. package/dist/esm/parquet-wasm-writer.js +1 -1
  45. package/dist/esm/parquet-wasm-writer.js.map +1 -1
  46. package/dist/esm/parquet-writer.js +1 -1
  47. package/dist/esm/parquet-writer.js.map +1 -1
  48. package/dist/esm/parquetjs/compression.js +1 -10
  49. package/dist/esm/parquetjs/compression.js.map +1 -1
  50. package/dist/esm/parquetjs/encoder/{parquet-encoder.js → writer.js} +37 -7
  51. package/dist/esm/parquetjs/encoder/writer.js.map +1 -0
  52. package/dist/esm/parquetjs/file.js +81 -0
  53. package/dist/esm/parquetjs/file.js.map +1 -0
  54. package/dist/esm/parquetjs/parser/parquet-cursor.js +78 -0
  55. package/dist/esm/parquetjs/parser/parquet-cursor.js.map +1 -0
  56. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +129 -0
  57. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
  58. package/dist/esm/parquetjs/parser/parquet-reader.js +72 -158
  59. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  60. package/dist/esm/parquetjs/schema/declare.js +0 -1
  61. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  62. package/dist/esm/parquetjs/schema/shred.js +34 -42
  63. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  64. package/dist/esm/parquetjs/schema/types.js.map +1 -1
  65. package/dist/esm/parquetjs/utils/buffer-utils.js +13 -0
  66. package/dist/esm/parquetjs/utils/buffer-utils.js.map +1 -0
  67. package/dist/esm/parquetjs/utils/file-utils.js +1 -1
  68. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
  69. package/dist/index.d.ts +1 -1
  70. package/dist/index.d.ts.map +1 -1
  71. package/dist/index.js +4 -3
  72. package/dist/lib/parse-parquet.d.ts +2 -2
  73. package/dist/lib/parse-parquet.d.ts.map +1 -1
  74. package/dist/lib/parse-parquet.js +12 -24
  75. package/dist/parquet-loader.d.ts +0 -1
  76. package/dist/parquet-loader.d.ts.map +1 -1
  77. package/dist/parquet-loader.js +1 -2
  78. package/dist/parquet-worker.js +24 -15
  79. package/dist/parquet-worker.js.map +3 -3
  80. package/dist/parquetjs/compression.d.ts.map +1 -1
  81. package/dist/parquetjs/compression.js +5 -16
  82. package/dist/parquetjs/encoder/{parquet-encoder.d.ts → writer.d.ts} +19 -10
  83. package/dist/parquetjs/encoder/writer.d.ts.map +1 -0
  84. package/dist/parquetjs/encoder/{parquet-encoder.js → writer.js} +37 -39
  85. package/dist/parquetjs/file.d.ts +10 -0
  86. package/dist/parquetjs/file.d.ts.map +1 -0
  87. package/dist/parquetjs/file.js +99 -0
  88. package/dist/parquetjs/parser/parquet-cursor.d.ts +36 -0
  89. package/dist/parquetjs/parser/parquet-cursor.d.ts.map +1 -0
  90. package/dist/parquetjs/parser/parquet-cursor.js +74 -0
  91. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +40 -0
  92. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +1 -0
  93. package/dist/parquetjs/parser/parquet-envelope-reader.js +136 -0
  94. package/dist/parquetjs/parser/parquet-reader.d.ts +57 -47
  95. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
  96. package/dist/parquetjs/parser/parquet-reader.js +102 -168
  97. package/dist/parquetjs/schema/declare.d.ts +7 -14
  98. package/dist/parquetjs/schema/declare.d.ts.map +1 -1
  99. package/dist/parquetjs/schema/declare.js +0 -2
  100. package/dist/parquetjs/schema/shred.d.ts +0 -115
  101. package/dist/parquetjs/schema/shred.d.ts.map +1 -1
  102. package/dist/parquetjs/schema/shred.js +43 -161
  103. package/dist/parquetjs/schema/types.d.ts +2 -2
  104. package/dist/parquetjs/schema/types.d.ts.map +1 -1
  105. package/dist/parquetjs/utils/buffer-utils.d.ts +10 -0
  106. package/dist/parquetjs/utils/buffer-utils.d.ts.map +1 -0
  107. package/dist/parquetjs/utils/buffer-utils.js +22 -0
  108. package/dist/parquetjs/utils/file-utils.d.ts +4 -3
  109. package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
  110. package/dist/parquetjs/utils/file-utils.js +5 -2
  111. package/package.json +5 -7
  112. package/src/index.ts +2 -2
  113. package/src/lib/parse-parquet.ts +12 -25
  114. package/src/parquet-loader.ts +1 -3
  115. package/src/parquetjs/compression.ts +1 -14
  116. package/src/parquetjs/encoder/{parquet-encoder.ts → writer.ts} +28 -22
  117. package/src/parquetjs/file.ts +90 -0
  118. package/src/parquetjs/parser/parquet-cursor.ts +94 -0
  119. package/src/parquetjs/parser/parquet-envelope-reader.ts +199 -0
  120. package/src/parquetjs/parser/parquet-reader.ts +122 -239
  121. package/src/parquetjs/schema/declare.ts +9 -17
  122. package/src/parquetjs/schema/shred.ts +28 -157
  123. package/src/parquetjs/schema/types.ts +27 -21
  124. package/src/parquetjs/utils/buffer-utils.ts +18 -0
  125. package/src/parquetjs/utils/file-utils.ts +4 -3
  126. package/dist/es5/lib/convert-schema-deep.ts.disabled +0 -910
  127. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +0 -1
  128. package/dist/esm/lib/convert-schema-deep.ts.disabled +0 -910
  129. package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +0 -1
  130. package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +0 -1
  131. package/src/lib/convert-schema-deep.ts.disabled +0 -910
package/dist/parquetjs/parser/parquet-envelope-reader.js +136 -0
@@ -0,0 +1,136 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ParquetEnvelopeReader = void 0;
+ const constants_1 = require("../../constants");
+ const parquet_thrift_1 = require("../parquet-thrift");
+ const read_utils_1 = require("../utils/read-utils");
+ const decoders_1 = require("./decoders");
+ const DEFAULT_DICTIONARY_SIZE = 1e6;
+ /**
+ * The parquet envelope reader allows direct, unbuffered access to the individual
+ * sections of the parquet file, namely the header, footer and the row groups.
+ * This class is intended for advanced/internal users; if you just want to retrieve
+ * rows from a parquet file use the ParquetReader instead
+ */
+ class ParquetEnvelopeReader {
+ static async openBuffer(buffer) {
+ const readFn = (position, length) => Promise.resolve(buffer.slice(position, position + length));
+ const closeFn = () => Promise.resolve();
+ return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);
+ }
+ constructor(read, close, fileSize, options) {
+ this.read = read;
+ this.close = close;
+ this.fileSize = fileSize;
+ this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;
+ }
+ async readHeader() {
+ const buffer = await this.read(0, constants_1.PARQUET_MAGIC.length);
+ const magic = buffer.toString();
+ switch (magic) {
+ case constants_1.PARQUET_MAGIC:
+ break;
+ case constants_1.PARQUET_MAGIC_ENCRYPTED:
+ throw new Error('Encrypted parquet file not supported');
+ default:
+ throw new Error(`Invalid parquet file (magic=${magic})`);
+ }
+ }
+ async readRowGroup(schema, rowGroup, columnList) {
+ const buffer = {
+ rowCount: Number(rowGroup.num_rows),
+ columnData: {}
+ };
+ for (const colChunk of rowGroup.columns) {
+ const colMetadata = colChunk.meta_data;
+ const colKey = colMetadata?.path_in_schema;
+ if (columnList.length > 0 && (0, read_utils_1.fieldIndexOf)(columnList, colKey) < 0) {
+ continue; // eslint-disable-line no-continue
+ }
+ buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
+ }
+ return buffer;
+ }
+ /**
+ * Do reading of parquet file's column chunk
+ * @param schema
+ * @param colChunk
+ */
+ async readColumnChunk(schema, colChunk) {
+ if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
+ throw new Error('external references are not supported');
+ }
+ const field = schema.findField(colChunk.meta_data?.path_in_schema);
+ const type = (0, read_utils_1.getThriftEnum)(parquet_thrift_1.Type, colChunk.meta_data?.type);
+ if (type !== field.primitiveType) {
+ throw new Error(`chunk type not matching schema: ${type}`);
+ }
+ const compression = (0, read_utils_1.getThriftEnum)(parquet_thrift_1.CompressionCodec, colChunk.meta_data?.codec);
+ const pagesOffset = Number(colChunk.meta_data?.data_page_offset);
+ let pagesSize = Number(colChunk.meta_data?.total_compressed_size);
+ if (!colChunk.file_path) {
+ pagesSize = Math.min(this.fileSize - pagesOffset, Number(colChunk.meta_data?.total_compressed_size));
+ }
+ const options = {
+ type,
+ rLevelMax: field.rLevelMax,
+ dLevelMax: field.dLevelMax,
+ compression,
+ column: field,
+ numValues: colChunk.meta_data?.num_values,
+ dictionary: []
+ };
+ let dictionary;
+ const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;
+ if (dictionaryPageOffset) {
+ const dictionaryOffset = Number(dictionaryPageOffset);
+ // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.
+ dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
+ }
+ dictionary = options.dictionary?.length ? options.dictionary : dictionary;
+ const pagesBuf = await this.read(pagesOffset, pagesSize);
+ return await (0, decoders_1.decodeDataPages)(pagesBuf, { ...options, dictionary });
+ }
+ /**
+ * Getting dictionary for allows to flatten values by indices.
+ * @param dictionaryPageOffset
+ * @param options
+ * @param pagesOffset
+ * @returns
+ */
+ async getDictionary(dictionaryPageOffset, options, pagesOffset) {
+ if (dictionaryPageOffset === 0) {
+ // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);
+ // pagesBuf = await this.read(pagesOffset, dictionarySize);
+ // In this case we are working with parquet-mr files format. Problem is described below:
+ // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding
+ // We need to get dictionary page from column chunk if it exists.
+ // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.
+ return [];
+ }
+ const dictionarySize = Math.min(this.fileSize - dictionaryPageOffset, this.defaultDictionarySize);
+ const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);
+ const cursor = { buffer: pagesBuf, offset: 0, size: pagesBuf.length };
+ const decodedPage = await (0, decoders_1.decodePage)(cursor, options);
+ return decodedPage.dictionary;
+ }
+ async readFooter() {
+ const trailerLen = constants_1.PARQUET_MAGIC.length + 4;
+ const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);
+ const magic = trailerBuf.slice(4).toString();
+ if (magic !== constants_1.PARQUET_MAGIC) {
+ throw new Error(`Not a valid parquet file (magic="${magic})`);
+ }
+ const metadataSize = trailerBuf.readUInt32LE(0);
+ const metadataOffset = this.fileSize - metadataSize - trailerLen;
+ if (metadataOffset < constants_1.PARQUET_MAGIC.length) {
+ throw new Error(`Invalid metadata size ${metadataOffset}`);
+ }
+ const metadataBuf = await this.read(metadataOffset, metadataSize);
+ // let metadata = new parquet_thrift.FileMetaData();
+ // parquet_util.decodeThrift(metadata, metadataBuf);
+ const { metadata } = (0, read_utils_1.decodeFileMetadata)(metadataBuf);
+ return metadata;
+ }
+ }
+ exports.ParquetEnvelopeReader = ParquetEnvelopeReader;
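
For orientation, everything in this new class is driven by the read(position, length) callback passed to the constructor, so an envelope reader can sit on top of any random-access byte source, not just a Buffer. A minimal sketch, assuming Node's fs/promises and a deep import of the internal module shown above (the import path and helper name are illustrative, not public loader API):

import { open } from 'fs/promises';
// Hypothetical deep import of the internal class added in this release
import { ParquetEnvelopeReader } from '@loaders.gl/parquet/dist/parquetjs/parser/parquet-envelope-reader';

// Illustrative helper: wrap a file handle in the read/close/fileSize contract
async function openFileEnvelopeReader(path: string): Promise<ParquetEnvelopeReader> {
  const handle = await open(path, 'r');
  const { size } = await handle.stat();
  // read(position, length) must resolve to a Buffer holding exactly that byte range
  const read = async (position: number, length: number): Promise<Buffer> => {
    const buffer = Buffer.alloc(length);
    await handle.read(buffer, 0, length, position);
    return buffer;
  };
  const close = () => handle.close();
  return new ParquetEnvelopeReader(read, close, size);
}

// Usage sketch: check the PAR1 magic, then decode the thrift footer
// const envelopeReader = await openFileEnvelopeReader('data.parquet');
// await envelopeReader.readHeader();
// const metadata = await envelopeReader.readFooter();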
package/dist/parquetjs/parser/parquet-reader.d.ts +57 -47
@@ -1,58 +1,68 @@
- import type { ReadableFile } from '@loaders.gl/loader-utils';
+ /// <reference types="node" />
+ import { ParquetEnvelopeReader } from './parquet-envelope-reader';
+ import { FileMetaData } from '../parquet-thrift';
  import { ParquetSchema } from '../schema/schema';
- import { ColumnChunk, FileMetaData, RowGroup } from '../parquet-thrift';
- import { ParquetBuffer, ParquetData, ParquetOptions } from '../schema/declare';
- export type ParquetReaderProps = {
- defaultDictionarySize?: number;
- };
- /** Properties for initializing a ParquetRowGroupReader */
- export type ParquetIterationProps = {
- /** Filter allowing some columns to be dropped */
- columnList?: string[] | string[][];
- };
+ import { ParquetCursor } from './parquet-cursor';
  /**
- * The parquet envelope reader allows direct, unbuffered access to the individual
- * sections of the parquet file, namely the header, footer and the row groups.
- * This class is intended for advanced/internal users; if you just want to retrieve
- * rows from a parquet file use the ParquetReader instead
+ * A parquet reader allows retrieving the rows from a parquet file in order.
+ * The basic usage is to create a reader and then retrieve a cursor/iterator
+ * which allows you to consume row after row until all rows have been read. It is
+ * important that you call close() after you are finished reading the file to
+ * avoid leaking file descriptors.
  */
- export declare class ParquetReader {
- props: Required<ParquetReaderProps>;
- file: ReadableFile;
- metadata: Promise<FileMetaData> | null;
- constructor(file: ReadableFile, props?: ParquetReaderProps);
- close(): void;
- /** Yield one row at a time */
- rowIterator(props?: ParquetIterationProps): AsyncGenerator<import("../schema/declare").ParquetRecord, void, unknown>;
- /** Yield one batch of rows at a time */
- rowBatchIterator(props?: ParquetIterationProps): AsyncGenerator<import("../schema/declare").ParquetRecord[], void, unknown>;
- /** Iterate over the raw row groups */
- rowGroupIterator(props?: ParquetIterationProps): AsyncGenerator<ParquetBuffer, void, unknown>;
- getRowCount(): Promise<number>;
- getSchema(): Promise<ParquetSchema>;
+ export declare class ParquetReader<T> implements AsyncIterable<T> {
  /**
- * Returns the user (key/value) metadata for this file
- * In parquet this is not stored on the schema like it is in arrow
+ * return a new parquet reader initialized with a read function
+ */
+ static openBlob<T>(blob: Blob): Promise<ParquetReader<T>>;
+ /**
+ * return a new parquet reader initialized with a read function
+ */
+ static openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>>;
+ static openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>>;
+ metadata: FileMetaData;
+ envelopeReader: ParquetEnvelopeReader;
+ schema: ParquetSchema;
+ /**
+ * Create a new parquet reader from the file metadata and an envelope reader.
+ * It is not recommended to call this constructor directly except for advanced
+ * and internal use cases. Consider using one of the open{File,Buffer} methods
+ * instead
+ */
+ constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader);
+ /**
+ * Close this parquet reader. You MUST call this method once you're finished
+ * reading rows
  */
- getSchemaMetadata(): Promise<Record<string, string>>;
- getFileMetadata(): Promise<FileMetaData>;
- /** Metadata is stored in the footer */
- readHeader(): Promise<void>;
- /** Metadata is stored in the footer */
- readFooter(): Promise<FileMetaData>;
- /** Data is stored in row groups (similar to Apache Arrow record batches) */
- readRowGroup(schema: ParquetSchema, rowGroup: RowGroup, columnList: string[][]): Promise<ParquetBuffer>;
+ close(): Promise<void>;
  /**
- * Each row group contains column chunks for all the columns.
+ * Return a cursor to the file. You may open more than one cursor and use
+ * them concurrently. All cursors become invalid once close() is called on
+ * the reader object.
+ *
+ * The required_columns parameter controls which columns are actually read
+ * from disk. An empty array or no value implies all columns. A list of column
+ * names means that only those columns should be loaded from disk.
+ */
+ getCursor(): ParquetCursor<T>;
+ getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;
+ getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;
+ /**
+ * Return the number of rows in this file. Note that the number of rows is
+ * not neccessarily equal to the number of rows in each column.
+ */
+ getRowCount(): number;
+ /**
+ * Returns the ParquetSchema for this file
+ */
+ getSchema(): ParquetSchema;
+ /**
+ * Returns the user (key/value) metadata for this file
  */
- readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData>;
+ getMetadata(): Record<string, string>;
  /**
- * Getting dictionary for allows to flatten values by indices.
- * @param dictionaryPageOffset
- * @param options
- * @param pagesOffset
- * @returns
+ * Implement AsyncIterable
  */
- getDictionary(dictionaryPageOffset: number, options: ParquetOptions, pagesOffset: number): Promise<string[]>;
+ [Symbol.asyncIterator](): AsyncIterator<T>;
  }
  //# sourceMappingURL=parquet-reader.d.ts.map
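
Taken together, these declarations define the practical shape of the reworked reader: it is generic over a row type T, async-iterable, and column selection through getCursor narrows the row type. A minimal usage sketch against the signatures above (the Row type and the deep import path are assumptions for illustration):

// Hypothetical deep import; only the method signatures come from the .d.ts above
import { ParquetReader } from '@loaders.gl/parquet/dist/parquetjs/parser/parquet-reader';

// Illustrative row type for a file with these columns
type Row = { name: string; quantity: number; price: number };

async function readNames(arrayBuffer: ArrayBuffer): Promise<string[]> {
  const reader = await ParquetReader.openArrayBuffer<Row>(arrayBuffer);
  const names: string[] = [];
  try {
    // getRowCount()/getSchema() are now synchronous; metadata is decoded up front
    console.log(reader.getRowCount(), reader.getMetadata());
    // Passing a column list narrows the cursor's row type to Pick<Row, 'name'>
    for await (const row of reader.getCursor(['name'])) {
      names.push(row.name);
    }
  } finally {
    // The docblock above is explicit: close() must be called to avoid leaks
    await reader.close();
  }
  return names;
}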
package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
@@ -1 +1 @@
- {"version":3,"file":"parquet-reader.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/parquet-reader.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,0BAA0B,CAAC;AAE3D,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAK/C,OAAO,EAAC,WAAW,EAAoB,YAAY,EAAE,QAAQ,EAAO,MAAM,mBAAmB,CAAC;AAC9F,OAAO,EACL,aAAa,EAEb,WAAW,EAEX,cAAc,EACf,MAAM,mBAAmB,CAAC;AAI3B,MAAM,MAAM,kBAAkB,GAAG;IAC/B,qBAAqB,CAAC,EAAE,MAAM,CAAC;CAChC,CAAC;AAEF,0DAA0D;AAC1D,MAAM,MAAM,qBAAqB,GAAG;IAClC,iDAAiD;IACjD,UAAU,CAAC,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,EAAE,CAAC;CACpC,CAAC;AAMF;;;;;GAKG;AACH,qBAAa,aAAa;IACxB,KAAK,EAAE,QAAQ,CAAC,kBAAkB,CAAC,CAAC;IACpC,IAAI,EAAE,YAAY,CAAC;IACnB,QAAQ,EAAE,OAAO,CAAC,YAAY,CAAC,GAAG,IAAI,CAAQ;gBAElC,IAAI,EAAE,YAAY,EAAE,KAAK,CAAC,EAAE,kBAAkB;IAK1D,KAAK,IAAI,IAAI;IAOb,8BAA8B;IACvB,WAAW,CAAC,KAAK,CAAC,EAAE,qBAAqB;IAShD,wCAAwC;IACjC,gBAAgB,CAAC,KAAK,CAAC,EAAE,qBAAqB;IAOrD,sCAAsC;IAC/B,gBAAgB,CAAC,KAAK,CAAC,EAAE,qBAAqB;IAqB/C,WAAW,IAAI,OAAO,CAAC,MAAM,CAAC;IAK9B,SAAS,IAAI,OAAO,CAAC,aAAa,CAAC;IAQzC;;;OAGG;IACG,iBAAiB,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IASpD,eAAe,IAAI,OAAO,CAAC,YAAY,CAAC;IAU9C,uCAAuC;IACjC,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAajC,uCAAuC;IACjC,UAAU,IAAI,OAAO,CAAC,YAAY,CAAC;IAsBzC,4EAA4E;IACtE,YAAY,CAChB,MAAM,EAAE,aAAa,EACrB,QAAQ,EAAE,QAAQ,EAClB,UAAU,EAAE,MAAM,EAAE,EAAE,GACrB,OAAO,CAAC,aAAa,CAAC;IAgBzB;;OAEG;IACG,eAAe,CAAC,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IAoDzF;;;;;;OAMG;IACG,aAAa,CACjB,oBAAoB,EAAE,MAAM,EAC5B,OAAO,EAAE,cAAc,EACvB,WAAW,EAAE,MAAM,GAClB,OAAO,CAAC,MAAM,EAAE,CAAC;CAuBrB"}
+ {"version":3,"file":"parquet-reader.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/parquet-reader.ts"],"names":[],"mappings":";AACA,OAAO,EAAC,qBAAqB,EAAC,MAAM,2BAA2B,CAAC;AAChE,OAAO,EAAC,YAAY,EAAC,MAAM,mBAAmB,CAAC;AAC/C,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAI/C;;;;;;GAMG;AACH,qBAAa,aAAa,CAAC,CAAC,CAAE,YAAW,aAAa,CAAC,CAAC,CAAC;IACvD;;OAEG;WACU,QAAQ,CAAC,CAAC,EAAE,IAAI,EAAE,IAAI,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;IAkB/D;;OAEG;WACU,eAAe,CAAC,CAAC,EAAE,WAAW,EAAE,WAAW,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;WAevE,UAAU,CAAC,CAAC,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;IAY9D,QAAQ,EAAE,YAAY,CAAC;IACvB,cAAc,EAAE,qBAAqB,CAAC;IACtC,MAAM,EAAE,aAAa,CAAC;IAE7B;;;;;OAKG;gBACS,QAAQ,EAAE,YAAY,EAAE,cAAc,EAAE,qBAAqB;IAYzE;;;OAGG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAM5B;;;;;;;;OAQG;IACH,SAAS,IAAI,aAAa,CAAC,CAAC,CAAC;IAE7B,SAAS,CAAC,CAAC,SAAS,MAAM,CAAC,EAAE,UAAU,EAAE,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAChF,SAAS,CAAC,UAAU,EAAE,CAAC,MAAM,GAAG,MAAM,EAAE,CAAC,EAAE,GAAG,aAAa,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;IAkBvE;;;OAGG;IACH,WAAW,IAAI,MAAM;IAIrB;;OAEG;IACH,SAAS,IAAI,aAAa;IAI1B;;OAEG;IACH,WAAW,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC;IAQrC;;OAEG;IAEH,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,CAAC,CAAC;CAG3C"}
package/dist/parquetjs/parser/parquet-reader.js +102 -168
@@ -1,200 +1,134 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.ParquetReader = void 0;
+ // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
+ const parquet_envelope_reader_1 = require("./parquet-envelope-reader");
  const schema_1 = require("../schema/schema");
- const decoders_1 = require("./decoders");
- const shred_1 = require("../schema/shred");
+ const parquet_cursor_1 = require("./parquet-cursor");
  const constants_1 = require("../../constants");
- const parquet_thrift_1 = require("../parquet-thrift");
- const read_utils_1 = require("../utils/read-utils");
- const decoders_2 = require("./decoders");
- const DEFAULT_PROPS = {
- defaultDictionarySize: 1e6
- };
+ const decoders_1 = require("./decoders");
  /**
- * The parquet envelope reader allows direct, unbuffered access to the individual
- * sections of the parquet file, namely the header, footer and the row groups.
- * This class is intended for advanced/internal users; if you just want to retrieve
- * rows from a parquet file use the ParquetReader instead
+ * A parquet reader allows retrieving the rows from a parquet file in order.
+ * The basic usage is to create a reader and then retrieve a cursor/iterator
+ * which allows you to consume row after row until all rows have been read. It is
+ * important that you call close() after you are finished reading the file to
+ * avoid leaking file descriptors.
  */
  class ParquetReader {
- constructor(file, props) {
- this.metadata = null;
- this.file = file;
- this.props = { ...DEFAULT_PROPS, ...props };
- }
- close() {
- // eslint-disable-next-line @typescript-eslint/no-floating-promises
- this.file.close();
- }
- // HIGH LEVEL METHODS
- /** Yield one row at a time */
- async *rowIterator(props) {
- for await (const rows of this.rowBatchIterator(props)) {
- // yield *rows
- for (const row of rows) {
- yield row;
- }
+ /**
+ * return a new parquet reader initialized with a read function
+ */
+ static async openBlob(blob) {
+ const readFn = async (start, length) => {
+ const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();
+ return Buffer.from(arrayBuffer);
+ };
+ const closeFn = async () => { };
+ const size = blob.size;
+ const envelopeReader = new parquet_envelope_reader_1.ParquetEnvelopeReader(readFn, closeFn, size);
+ try {
+ await envelopeReader.readHeader();
+ const metadata = await envelopeReader.readFooter();
+ return new ParquetReader(metadata, envelopeReader);
  }
- }
- /** Yield one batch of rows at a time */
- async *rowBatchIterator(props) {
- const schema = await this.getSchema();
- for await (const rowGroup of this.rowGroupIterator(props)) {
- yield (0, shred_1.materializeRecords)(schema, rowGroup);
+ catch (err) {
+ await envelopeReader.close();
+ throw err;
  }
  }
- /** Iterate over the raw row groups */
- async *rowGroupIterator(props) {
- // Ensure strings are nested in arrays
- const columnList = (props?.columnList || []).map((x) => Array.isArray(x) ? x : [x]);
- const metadata = await this.getFileMetadata();
- const schema = await this.getSchema();
- const rowGroupCount = metadata?.row_groups.length || 0;
- for (let rowGroupIndex = 0; rowGroupIndex < rowGroupCount; rowGroupIndex++) {
- const rowGroup = await this.readRowGroup(schema, metadata.row_groups[rowGroupIndex], columnList);
- yield rowGroup;
+ /**
+ * return a new parquet reader initialized with a read function
+ */
+ static async openArrayBuffer(arrayBuffer) {
+ const readFn = async (start, length) => Buffer.from(arrayBuffer, start, length);
+ const closeFn = async () => { };
+ const size = arrayBuffer.byteLength;
+ const envelopeReader = new parquet_envelope_reader_1.ParquetEnvelopeReader(readFn, closeFn, size);
+ try {
+ await envelopeReader.readHeader();
+ const metadata = await envelopeReader.readFooter();
+ return new ParquetReader(metadata, envelopeReader);
+ }
+ catch (err) {
+ await envelopeReader.close();
+ throw err;
  }
  }
- async getRowCount() {
- const metadata = await this.getFileMetadata();
- return Number(metadata.num_rows);
- }
- async getSchema() {
- const metadata = await this.getFileMetadata();
- const root = metadata.schema[0];
- const { schema: schemaDefinition } = (0, decoders_1.decodeSchema)(metadata.schema, 1, root.num_children);
- const schema = new schema_1.ParquetSchema(schemaDefinition);
- return schema;
+ static async openBuffer(buffer) {
+ const envelopeReader = await parquet_envelope_reader_1.ParquetEnvelopeReader.openBuffer(buffer);
+ try {
+ await envelopeReader.readHeader();
+ const metadata = await envelopeReader.readFooter();
+ return new ParquetReader(metadata, envelopeReader);
+ }
+ catch (err) {
+ await envelopeReader.close();
+ throw err;
+ }
  }
  /**
- * Returns the user (key/value) metadata for this file
- * In parquet this is not stored on the schema like it is in arrow
+ * Create a new parquet reader from the file metadata and an envelope reader.
+ * It is not recommended to call this constructor directly except for advanced
+ * and internal use cases. Consider using one of the open{File,Buffer} methods
+ * instead
  */
- async getSchemaMetadata() {
- const metadata = await this.getFileMetadata();
- const md = {};
- for (const kv of metadata.key_value_metadata) {
- md[kv.key] = kv.value;
+ constructor(metadata, envelopeReader) {
+ if (metadata.version !== constants_1.PARQUET_VERSION) {
+ throw new Error('invalid parquet version');
  }
- return md;
+ this.metadata = metadata;
+ this.envelopeReader = envelopeReader;
+ const root = this.metadata.schema[0];
+ const { schema } = (0, decoders_1.decodeSchema)(this.metadata.schema, 1, root.num_children);
+ this.schema = new schema_1.ParquetSchema(schema);
  }
- async getFileMetadata() {
- if (!this.metadata) {
- await this.readHeader();
- this.metadata = this.readFooter();
- }
- return this.metadata;
+ /**
+ * Close this parquet reader. You MUST call this method once you're finished
+ * reading rows
+ */
+ async close() {
+ await this.envelopeReader.close();
+ // this.envelopeReader = null;
+ // this.metadata = null;
  }
- // LOW LEVEL METHODS
- /** Metadata is stored in the footer */
- async readHeader() {
- const buffer = await this.file.read(0, constants_1.PARQUET_MAGIC.length);
- const magic = buffer.toString();
- switch (magic) {
- case constants_1.PARQUET_MAGIC:
- break;
- case constants_1.PARQUET_MAGIC_ENCRYPTED:
- throw new Error('Encrypted parquet file not supported');
- default:
- throw new Error(`Invalid parquet file (magic=${magic})`);
+ getCursor(columnList) {
+ if (!columnList) {
+ // tslint:disable-next-line:no-parameter-reassignment
+ columnList = [];
  }
+ // tslint:disable-next-line:no-parameter-reassignment
+ columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));
+ return new parquet_cursor_1.ParquetCursor(this.metadata, this.envelopeReader, this.schema, columnList);
  }
- /** Metadata is stored in the footer */
- async readFooter() {
- const trailerLen = constants_1.PARQUET_MAGIC.length + 4;
- const trailerBuf = await this.file.read(this.file.size - trailerLen, trailerLen);
- const magic = trailerBuf.slice(4).toString();
- if (magic !== constants_1.PARQUET_MAGIC) {
- throw new Error(`Not a valid parquet file (magic="${magic})`);
- }
- const metadataSize = trailerBuf.readUInt32LE(0);
- const metadataOffset = this.file.size - metadataSize - trailerLen;
- if (metadataOffset < constants_1.PARQUET_MAGIC.length) {
- throw new Error(`Invalid metadata size ${metadataOffset}`);
- }
- const metadataBuf = await this.file.read(metadataOffset, metadataSize);
- // let metadata = new parquet_thrift.FileMetaData();
- // parquet_util.decodeThrift(metadata, metadataBuf);
- const { metadata } = (0, read_utils_1.decodeFileMetadata)(metadataBuf);
- return metadata;
+ /**
+ * Return the number of rows in this file. Note that the number of rows is
+ * not neccessarily equal to the number of rows in each column.
+ */
+ getRowCount() {
+ return Number(this.metadata.num_rows);
  }
- /** Data is stored in row groups (similar to Apache Arrow record batches) */
- async readRowGroup(schema, rowGroup, columnList) {
- const buffer = {
- rowCount: Number(rowGroup.num_rows),
- columnData: {}
- };
- for (const colChunk of rowGroup.columns) {
- const colMetadata = colChunk.meta_data;
- const colKey = colMetadata?.path_in_schema;
- if (columnList.length > 0 && (0, read_utils_1.fieldIndexOf)(columnList, colKey) < 0) {
- continue; // eslint-disable-line no-continue
- }
- buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
- }
- return buffer;
+ /**
+ * Returns the ParquetSchema for this file
+ */
+ getSchema() {
+ return this.schema;
  }
  /**
- * Each row group contains column chunks for all the columns.
+ * Returns the user (key/value) metadata for this file
  */
- async readColumnChunk(schema, colChunk) {
- if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
- throw new Error('external references are not supported');
- }
- const field = schema.findField(colChunk.meta_data?.path_in_schema);
- const type = (0, read_utils_1.getThriftEnum)(parquet_thrift_1.Type, colChunk.meta_data?.type);
- if (type !== field.primitiveType) {
- throw new Error(`chunk type not matching schema: ${type}`);
- }
- const compression = (0, read_utils_1.getThriftEnum)(parquet_thrift_1.CompressionCodec, colChunk.meta_data?.codec);
- const pagesOffset = Number(colChunk.meta_data?.data_page_offset);
- let pagesSize = Number(colChunk.meta_data?.total_compressed_size);
- if (!colChunk.file_path) {
- pagesSize = Math.min(this.file.size - pagesOffset, Number(colChunk.meta_data?.total_compressed_size));
- }
- const options = {
- type,
- rLevelMax: field.rLevelMax,
- dLevelMax: field.dLevelMax,
- compression,
- column: field,
- numValues: colChunk.meta_data?.num_values,
- dictionary: []
- };
- let dictionary;
- const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;
- if (dictionaryPageOffset) {
- const dictionaryOffset = Number(dictionaryPageOffset);
- // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.
- dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
+ getMetadata() {
+ const md = {};
+ for (const kv of this.metadata.key_value_metadata) {
+ md[kv.key] = kv.value;
  }
- dictionary = options.dictionary?.length ? options.dictionary : dictionary;
- const pagesBuf = await this.file.read(pagesOffset, pagesSize);
- return await (0, decoders_2.decodeDataPages)(pagesBuf, { ...options, dictionary });
+ return md;
  }
  /**
- * Getting dictionary for allows to flatten values by indices.
- * @param dictionaryPageOffset
- * @param options
- * @param pagesOffset
- * @returns
+ * Implement AsyncIterable
  */
- async getDictionary(dictionaryPageOffset, options, pagesOffset) {
- if (dictionaryPageOffset === 0) {
- // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);
- // pagesBuf = await this.read(pagesOffset, dictionarySize);
- // In this case we are working with parquet-mr files format. Problem is described below:
- // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding
- // We need to get dictionary page from column chunk if it exists.
- // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.
- return [];
- }
- const dictionarySize = Math.min(this.file.size - dictionaryPageOffset, this.props.defaultDictionarySize);
- const pagesBuf = await this.file.read(dictionaryPageOffset, dictionarySize);
- const cursor = { buffer: pagesBuf, offset: 0, size: pagesBuf.length };
- const decodedPage = await (0, decoders_2.decodePage)(cursor, options);
- return decodedPage.dictionary;
+ // tslint:disable-next-line:function-name
+ [Symbol.asyncIterator]() {
+ return this.getCursor()[Symbol.asyncIterator]();
  }
  }
  exports.ParquetReader = ParquetReader;
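
The removed lines show exactly what disappeared in this release: rowIterator, rowBatchIterator, rowGroupIterator, and the props-based constructor are gone, replaced by the open{Blob,ArrayBuffer,Buffer} factories plus cursor iteration. A hedged migration sketch (the import path and column name are illustrative):

// Hypothetical deep import; openBlob and the cursor protocol come from the code above
import { ParquetReader } from '@loaders.gl/parquet/dist/parquetjs/parser/parquet-reader';

async function sumQuantities(blob: Blob): Promise<number> {
  // 3.3.0 style (removed): for await (const row of reader.rowIterator({columnList: ['quantity']}))
  // 3.4.0-alpha.1 style: open a reader, then iterate a (column-filtered) cursor
  const reader = await ParquetReader.openBlob<{ quantity: number }>(blob);
  let total = 0;
  try {
    for await (const row of reader.getCursor(['quantity'])) {
      total += row.quantity;
    }
  } finally {
    await reader.close(); // invalidates all open cursors
  }
  return total;
}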
package/dist/parquetjs/schema/declare.d.ts +7 -14
@@ -45,7 +45,6 @@ export interface ParquetField
  fieldCount?: number;
  fields?: Record<string, ParquetField>;
  }
- /** @todo better name, this is an internal type? */
  export interface ParquetOptions {
  type: ParquetType;
  rLevelMax: number;
@@ -55,33 +54,27 @@ export interface ParquetOptions
  numValues?: Int64;
  dictionary?: ParquetDictionary;
  }
+ export interface ParquetData {
+ dlevels: number[];
+ rlevels: number[];
+ values: any[];
+ count: number;
+ pageHeaders: PageHeader[];
+ }
  export interface ParquetPageData {
  dlevels: number[];
  rlevels: number[];
- /** Actual column chunks */
  values: any[];
  count: number;
  dictionary?: ParquetDictionary;
- /** The "raw" page header from the file */
  pageHeader: PageHeader;
  }
  export interface ParquetRecord {
  [key: string]: any;
  }
- /** @
- * Holds data for one row group (column chunks) */
  export declare class ParquetBuffer {
- /** Number of rows in this page */
  rowCount: number;
  columnData: Record<string, ParquetData>;
  constructor(rowCount?: number, columnData?: Record<string, ParquetData>);
  }
- /** Holds the data for one column chunk */
- export interface ParquetData {
- dlevels: number[];
- rlevels: number[];
- values: any[];
- count: number;
- pageHeaders: PageHeader[];
- }
  //# sourceMappingURL=declare.d.ts.map
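
To make the reshuffled declarations concrete: a ParquetBuffer holds one row group keyed by column path, and each ParquetData entry carries shredded definition/repetition levels next to the non-null values. A hand-written illustration of those shapes (local type copies, not from the package; the sample levels assume one optional, non-repeated column):

// Local copies of the declarations above, for a self-contained example
interface ParquetData {
  dlevels: number[];
  rlevels: number[];
  values: any[];
  count: number;
  pageHeaders: any[]; // PageHeader[] in the real declaration
}

interface ParquetBufferShape {
  rowCount: number;
  columnData: Record<string, ParquetData>;
}

// One row group with an optional column "name" (dLevelMax = 1):
// row 2 is null, so its definition level is 0 and it contributes no value
const rowGroup: ParquetBufferShape = {
  rowCount: 3,
  columnData: {
    name: {
      dlevels: [1, 0, 1],
      rlevels: [0, 0, 0], // all zero for a non-repeated column
      values: ['alice', 'carol'],
      count: 3,
      pageHeaders: []
    }
  }
};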
package/dist/parquetjs/schema/declare.d.ts.map +1 -1
@@ -1 +1 @@
- {"version":3,"file":"declare.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/declare.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,MAAM,YAAY,CAAC;AAC/B,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAElD,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,KAAK,GAAG,kBAAkB,CAAC;AAChE,MAAM,MAAM,kBAAkB,GAC1B,cAAc,GACd,MAAM,GACN,QAAQ,GACR,KAAK,GACL,QAAQ,GACR,KAAK,GACL,SAAS,GACT,MAAM,CAAC;AACX,MAAM,MAAM,cAAc,GAAG,UAAU,GAAG,UAAU,GAAG,UAAU,CAAC;AAClE,MAAM,MAAM,WAAW,GAAG,aAAa,GAAG,YAAY,CAAC;AAEvD;;GAEG;AACH,MAAM,MAAM,aAAa,GAErB,SAAS,GACT,OAAO,GACP,OAAO,GACP,OAAO,GACP,OAAO,GACP,QAAQ,GACR,YAAY,GACZ,sBAAsB,CAAC;AAE3B;;GAEG;AACH,MAAM,MAAM,YAAY,GAEpB,MAAM,GAMN,eAAe,GACf,eAAe,GACf,oBAAoB,GACpB,8BAA8B,GAC9B,MAAM,GACN,aAAa,GACb,aAAa,GACb,kBAAkB,GAClB,kBAAkB,GAClB,QAAQ,GACR,SAAS,GACT,SAAS,GACT,SAAS,GACT,OAAO,GACP,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,MAAM,GACN,MAAM,GACN,UAAU,CAAC;AAEf,MAAM,MAAM,iBAAiB,GAAG,MAAM,EAAE,CAAC;AAEzC,MAAM,WAAW,gBAAgB;IAC/B,CAAC,MAAM,EAAE,MAAM,GAAG,eAAe,CAAC;CACnC;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,CAAC,EAAE,WAAW,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,MAAM,CAAC,EAAE,gBAAgB,CAAC;CAC3B;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,GAAG,EAAE,MAAM,CAAC;IACZ,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,cAAc,EAAE,cAAc,CAAC;IAC/B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;CACvC;AAED,mDAAmD;AACnD,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,WAAW,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,kBAAkB,CAAC;IAChC,MAAM,EAAE,YAAY,CAAC;IACrB,SAAS,CAAC,EAAE,KAAK,CAAC;IAClB,UAAU,CAAC,EAAE,iBAAiB,CAAC;CAChC;AAED,MAAM,WAAW,eAAe;IAC9B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,2BAA2B;IAC3B,MAAM,EAAE,GAAG,EAAE,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,iBAAiB,CAAC;IAC/B,0CAA0C;IAC1C,UAAU,EAAE,UAAU,CAAC;CACxB;AAED,MAAM,WAAW,aAAa;IAC5B,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED;kDACkD;AAClD,qBAAa,aAAa;IACxB,kCAAkC;IAClC,QAAQ,EAAE,MAAM,CAAC;IAEjB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;gBAC5B,QAAQ,GAAE,MAAU,EAAE,UAAU,GAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAM;CAI/E;AAED,0CAA0C;AAC1C,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,MAAM,EAAE,GAAG,EAAE,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,UAAU,EAAE,CAAC;CAC3B"}
+ {"version":3,"file":"declare.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/declare.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,MAAM,YAAY,CAAC;AAC/B,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,mBAAmB,CAAC;AAElD,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,KAAK,GAAG,kBAAkB,CAAC;AAChE,MAAM,MAAM,kBAAkB,GAC1B,cAAc,GACd,MAAM,GACN,QAAQ,GACR,KAAK,GACL,QAAQ,GACR,KAAK,GACL,SAAS,GACT,MAAM,CAAC;AACX,MAAM,MAAM,cAAc,GAAG,UAAU,GAAG,UAAU,GAAG,UAAU,CAAC;AAClE,MAAM,MAAM,WAAW,GAAG,aAAa,GAAG,YAAY,CAAC;AAEvD;;GAEG;AACH,MAAM,MAAM,aAAa,GAErB,SAAS,GACT,OAAO,GACP,OAAO,GACP,OAAO,GACP,OAAO,GACP,QAAQ,GACR,YAAY,GACZ,sBAAsB,CAAC;AAE3B;;GAEG;AACH,MAAM,MAAM,YAAY,GAEpB,MAAM,GAMN,eAAe,GACf,eAAe,GACf,oBAAoB,GACpB,8BAA8B,GAC9B,MAAM,GACN,aAAa,GACb,aAAa,GACb,kBAAkB,GAClB,kBAAkB,GAClB,QAAQ,GACR,SAAS,GACT,SAAS,GACT,SAAS,GACT,OAAO,GACP,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,MAAM,GACN,MAAM,GACN,UAAU,CAAC;AAEf,MAAM,MAAM,iBAAiB,GAAG,MAAM,EAAE,CAAC;AAEzC,MAAM,WAAW,gBAAgB;IAC/B,CAAC,MAAM,EAAE,MAAM,GAAG,eAAe,CAAC;CACnC;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,CAAC,EAAE,WAAW,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,MAAM,CAAC,EAAE,gBAAgB,CAAC;CAC3B;AAED,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,GAAG,EAAE,MAAM,CAAC;IACZ,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,cAAc,EAAE,cAAc,CAAC;IAC/B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,WAAW,CAAC,EAAE,kBAAkB,CAAC;IACjC,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;CACvC;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,WAAW,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,kBAAkB,CAAC;IAChC,MAAM,EAAE,YAAY,CAAC;IACrB,SAAS,CAAC,EAAE,KAAK,CAAC;IAClB,UAAU,CAAC,EAAE,iBAAiB,CAAC;CAChC;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,MAAM,EAAE,GAAG,EAAE,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,UAAU,EAAE,CAAC;CAC3B;AAED,MAAM,WAAW,eAAe;IAC9B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,MAAM,EAAE,GAAG,EAAE,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,iBAAiB,CAAC;IAC/B,UAAU,EAAE,UAAU,CAAC;CACxB;AAED,MAAM,WAAW,aAAa;IAC5B,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED,qBAAa,aAAa;IACxB,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;gBAC5B,QAAQ,GAAE,MAAU,EAAE,UAAU,GAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAM;CAI/E"}
package/dist/parquetjs/schema/declare.js +0 -2
@@ -1,8 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.ParquetBuffer = void 0;
- /** @
- * Holds data for one row group (column chunks) */
  class ParquetBuffer {
  constructor(rowCount = 0, columnData = {}) {
  this.rowCount = rowCount;