@loaders.gl/parquet 3.0.12 → 3.1.0-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (143)
  1. package/dist/dist.min.js +7 -18
  2. package/dist/dist.min.js.map +1 -1
  3. package/dist/es5/bundle.js +2 -4
  4. package/dist/es5/bundle.js.map +1 -1
  5. package/dist/es5/constants.js +17 -0
  6. package/dist/es5/constants.js.map +1 -0
  7. package/dist/es5/index.js +53 -21
  8. package/dist/es5/index.js.map +1 -1
  9. package/dist/es5/lib/convert-schema.js +82 -0
  10. package/dist/es5/lib/convert-schema.js.map +1 -0
  11. package/dist/es5/lib/parse-parquet.js +173 -0
  12. package/dist/es5/lib/parse-parquet.js.map +1 -0
  13. package/dist/es5/lib/read-array-buffer.js +53 -0
  14. package/dist/es5/lib/read-array-buffer.js.map +1 -0
  15. package/dist/es5/parquet-loader.js +6 -79
  16. package/dist/es5/parquet-loader.js.map +1 -1
  17. package/dist/es5/parquet-writer.js +1 -1
  18. package/dist/es5/parquet-writer.js.map +1 -1
  19. package/dist/es5/parquetjs/codecs/dictionary.js +30 -0
  20. package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -0
  21. package/dist/es5/parquetjs/codecs/index.js +10 -0
  22. package/dist/es5/parquetjs/codecs/index.js.map +1 -1
  23. package/dist/es5/parquetjs/codecs/rle.js +2 -2
  24. package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
  25. package/dist/es5/parquetjs/compression.js +138 -104
  26. package/dist/es5/parquetjs/compression.js.map +1 -1
  27. package/dist/es5/parquetjs/{writer.js → encoder/writer.js} +397 -228
  28. package/dist/es5/parquetjs/encoder/writer.js.map +1 -0
  29. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +1 -0
  30. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
  31. package/dist/es5/parquetjs/parser/decoders.js +495 -0
  32. package/dist/es5/parquetjs/parser/decoders.js.map +1 -0
  33. package/dist/es5/parquetjs/parser/parquet-cursor.js +215 -0
  34. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +1 -0
  35. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +452 -0
  36. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
  37. package/dist/es5/parquetjs/parser/parquet-reader.js +413 -0
  38. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -0
  39. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  40. package/dist/es5/parquetjs/schema/schema.js +2 -0
  41. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  42. package/dist/es5/parquetjs/schema/shred.js +2 -1
  43. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  44. package/dist/es5/parquetjs/schema/types.js +79 -4
  45. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  46. package/dist/es5/parquetjs/utils/buffer-utils.js +21 -0
  47. package/dist/es5/parquetjs/utils/buffer-utils.js.map +1 -0
  48. package/dist/es5/parquetjs/utils/file-utils.js +108 -0
  49. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -0
  50. package/dist/es5/parquetjs/{util.js → utils/read-utils.js} +13 -113
  51. package/dist/es5/parquetjs/utils/read-utils.js.map +1 -0
  52. package/dist/esm/bundle.js +2 -4
  53. package/dist/esm/bundle.js.map +1 -1
  54. package/dist/esm/constants.js +6 -0
  55. package/dist/esm/constants.js.map +1 -0
  56. package/dist/esm/index.js +14 -4
  57. package/dist/esm/index.js.map +1 -1
  58. package/dist/esm/lib/convert-schema.js +71 -0
  59. package/dist/esm/lib/convert-schema.js.map +1 -0
  60. package/dist/esm/lib/parse-parquet.js +28 -0
  61. package/dist/esm/lib/parse-parquet.js.map +1 -0
  62. package/dist/esm/lib/read-array-buffer.js +9 -0
  63. package/dist/esm/lib/read-array-buffer.js.map +1 -0
  64. package/dist/esm/parquet-loader.js +4 -24
  65. package/dist/esm/parquet-loader.js.map +1 -1
  66. package/dist/esm/parquet-writer.js +1 -1
  67. package/dist/esm/parquet-writer.js.map +1 -1
  68. package/dist/esm/parquetjs/codecs/dictionary.js +12 -0
  69. package/dist/esm/parquetjs/codecs/dictionary.js.map +1 -0
  70. package/dist/esm/parquetjs/codecs/index.js +9 -0
  71. package/dist/esm/parquetjs/codecs/index.js.map +1 -1
  72. package/dist/esm/parquetjs/codecs/rle.js +2 -2
  73. package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
  74. package/dist/esm/parquetjs/compression.js +54 -105
  75. package/dist/esm/parquetjs/compression.js.map +1 -1
  76. package/dist/esm/parquetjs/{writer.js → encoder/writer.js} +32 -35
  77. package/dist/esm/parquetjs/encoder/writer.js.map +1 -0
  78. package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js +1 -0
  79. package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
  80. package/dist/esm/parquetjs/parser/decoders.js +300 -0
  81. package/dist/esm/parquetjs/parser/decoders.js.map +1 -0
  82. package/dist/esm/parquetjs/parser/parquet-cursor.js +90 -0
  83. package/dist/esm/parquetjs/parser/parquet-cursor.js.map +1 -0
  84. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +164 -0
  85. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
  86. package/dist/esm/parquetjs/parser/parquet-reader.js +133 -0
  87. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -0
  88. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  89. package/dist/esm/parquetjs/schema/schema.js +2 -0
  90. package/dist/esm/parquetjs/schema/schema.js.map +1 -1
  91. package/dist/esm/parquetjs/schema/shred.js +2 -1
  92. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  93. package/dist/esm/parquetjs/schema/types.js +78 -4
  94. package/dist/esm/parquetjs/schema/types.js.map +1 -1
  95. package/dist/esm/parquetjs/utils/buffer-utils.js +12 -0
  96. package/dist/esm/parquetjs/utils/buffer-utils.js.map +1 -0
  97. package/dist/esm/parquetjs/utils/file-utils.js +79 -0
  98. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -0
  99. package/dist/esm/parquetjs/{util.js → utils/read-utils.js} +11 -89
  100. package/dist/esm/parquetjs/utils/read-utils.js.map +1 -0
  101. package/dist/parquet-worker.js +7 -18
  102. package/dist/parquet-worker.js.map +1 -1
  103. package/package.json +10 -10
  104. package/src/bundle.ts +2 -3
  105. package/src/constants.ts +17 -0
  106. package/src/index.ts +30 -4
  107. package/src/lib/convert-schema.ts +95 -0
  108. package/src/lib/parse-parquet.ts +27 -0
  109. package/{dist/es5/libs → src/lib}/read-array-buffer.ts +0 -0
  110. package/src/parquet-loader.ts +4 -24
  111. package/src/parquetjs/codecs/dictionary.ts +11 -0
  112. package/src/parquetjs/codecs/index.ts +13 -0
  113. package/src/parquetjs/codecs/rle.ts +4 -2
  114. package/src/parquetjs/compression.ts +89 -50
  115. package/src/parquetjs/{writer.ts → encoder/writer.ts} +46 -45
  116. package/src/parquetjs/parquet-thrift/CompressionCodec.ts +2 -1
  117. package/src/parquetjs/parser/decoders.ts +448 -0
  118. package/src/parquetjs/parser/parquet-cursor.ts +94 -0
  119. package/src/parquetjs/parser/parquet-envelope-reader.ts +210 -0
  120. package/src/parquetjs/parser/parquet-reader.ts +179 -0
  121. package/src/parquetjs/schema/declare.ts +48 -2
  122. package/src/parquetjs/schema/schema.ts +2 -0
  123. package/src/parquetjs/schema/shred.ts +3 -1
  124. package/src/parquetjs/schema/types.ts +82 -5
  125. package/src/parquetjs/utils/buffer-utils.ts +18 -0
  126. package/src/parquetjs/utils/file-utils.ts +96 -0
  127. package/src/parquetjs/{util.ts → utils/read-utils.ts} +13 -110
  128. package/dist/dist.es5.min.js +0 -51
  129. package/dist/dist.es5.min.js.map +0 -1
  130. package/dist/es5/parquetjs/compression.ts.disabled +0 -105
  131. package/dist/es5/parquetjs/reader.js +0 -1078
  132. package/dist/es5/parquetjs/reader.js.map +0 -1
  133. package/dist/es5/parquetjs/util.js.map +0 -1
  134. package/dist/es5/parquetjs/writer.js.map +0 -1
  135. package/dist/esm/libs/read-array-buffer.ts +0 -31
  136. package/dist/esm/parquetjs/compression.ts.disabled +0 -105
  137. package/dist/esm/parquetjs/reader.js +0 -524
  138. package/dist/esm/parquetjs/reader.js.map +0 -1
  139. package/dist/esm/parquetjs/util.js.map +0 -1
  140. package/dist/esm/parquetjs/writer.js.map +0 -1
  141. package/src/libs/read-array-buffer.ts +0 -31
  142. package/src/parquetjs/compression.ts.disabled +0 -105
  143. package/src/parquetjs/reader.ts +0 -707
package/src/parquetjs/parser/parquet-cursor.ts
@@ -0,0 +1,94 @@
+ // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
+ import {FileMetaData} from '../parquet-thrift';
+ import {ParquetEnvelopeReader} from './parquet-envelope-reader';
+ import {ParquetSchema} from '../schema/schema';
+ import {ParquetRecord} from '../schema/declare';
+ import {materializeRecords} from '../schema/shred';
+
+ /**
+  * A parquet cursor is used to retrieve rows from a parquet file in order
+  */
+ export class ParquetCursor<T> implements AsyncIterable<T> {
+   public metadata: FileMetaData;
+   public envelopeReader: ParquetEnvelopeReader;
+   public schema: ParquetSchema;
+   public columnList: string[][];
+   public rowGroup: ParquetRecord[] = [];
+   public rowGroupIndex: number;
+
+   /**
+    * Create a new parquet cursor from the file metadata and an envelope reader.
+    * It is usually not recommended to call this constructor directly except for
+    * advanced and internal use cases. Consider using getCursor() on the
+    * ParquetReader instead
+    */
+   constructor(
+     metadata: FileMetaData,
+     envelopeReader: ParquetEnvelopeReader,
+     schema: ParquetSchema,
+     columnList: string[][]
+   ) {
+     this.metadata = metadata;
+     this.envelopeReader = envelopeReader;
+     this.schema = schema;
+     this.columnList = columnList;
+     this.rowGroupIndex = 0;
+   }
+
+   /**
+    * Retrieve the next row from the cursor. Returns a row, or null if the end
+    * of the file was reached
+    */
+   async next<T = any>(): Promise<T> {
+     if (this.rowGroup.length === 0) {
+       if (this.rowGroupIndex >= this.metadata.row_groups.length) {
+         // @ts-ignore
+         return null;
+       }
+       const rowBuffer = await this.envelopeReader.readRowGroup(
+         this.schema,
+         this.metadata.row_groups[this.rowGroupIndex],
+         this.columnList
+       );
+       this.rowGroup = materializeRecords(this.schema, rowBuffer);
+       this.rowGroupIndex++;
+     }
+     return this.rowGroup.shift() as any;
+   }
+
+   /**
+    * Rewind the cursor to the beginning of the file
+    */
+   rewind(): void {
+     this.rowGroup = [];
+     this.rowGroupIndex = 0;
+   }
+
+   /**
+    * Implement AsyncIterable
+    */
+   // tslint:disable-next-line:function-name
+   [Symbol.asyncIterator](): AsyncIterator<T> {
+     let done = false;
+     return {
+       next: async () => {
+         if (done) {
+           return {done, value: null};
+         }
+         const value = await this.next();
+         if (value === null) {
+           return {done: true, value};
+         }
+         return {done: false, value};
+       },
+       return: async () => {
+         done = true;
+         return {done, value: null};
+       },
+       throw: async () => {
+         done = true;
+         return {done: true, value: null};
+       }
+     };
+   }
+ }
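
A minimal usage sketch of the cursor API above, assuming the classes are reachable via the package-internal paths shown in this diff (the import specifier and the printRows helper are illustrative assumptions, not part of the package):

import {ParquetReader} from './parquetjs/parser/parquet-reader';

// Hypothetical helper: log every row of an in-memory parquet file.
async function printRows(buffer: Buffer): Promise<void> {
  const reader = await ParquetReader.openBuffer<Record<string, unknown>>(buffer);
  const cursor = reader.getCursor();
  try {
    // next() yields rows in file order, materializing one row group at a
    // time, and returns null once the last row group has been drained
    for (let row = await cursor.next(); row !== null; row = await cursor.next()) {
      console.log(row);
    }
  } finally {
    await reader.close(); // invalidates all open cursors
  }
}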
package/src/parquetjs/parser/parquet-envelope-reader.ts
@@ -0,0 +1,210 @@
+ // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
+ import {ParquetSchema} from '../schema/schema';
+ import {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';
+ import {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';
+ import {
+   ParquetBuffer,
+   ParquetCompression,
+   ParquetData,
+   PrimitiveType,
+   ParquetOptions
+ } from '../schema/declare';
+ import {fstat, fopen, fread, fclose} from '../utils/file-utils';
+ import {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';
+ import {decodeDataPages, decodePage} from './decoders';
+
+ const DEFAULT_DICTIONARY_SIZE = 1e6;
+
+ /**
+  * The parquet envelope reader allows direct, unbuffered access to the individual
+  * sections of the parquet file, namely the header, footer and the row groups.
+  * This class is intended for advanced/internal users; if you just want to retrieve
+  * rows from a parquet file use the ParquetReader instead
+  */
+ export class ParquetEnvelopeReader {
+   public read: (position: number, length: number) => Promise<Buffer>;
+   /**
+    * Close this parquet reader. You MUST call this method once you're finished
+    * reading rows
+    */
+   public close: () => Promise<void>;
+   public fileSize: number;
+   public defaultDictionarySize: number;
+
+   static async openFile(filePath: string): Promise<ParquetEnvelopeReader> {
+     const fileStat = await fstat(filePath);
+     const fileDescriptor = await fopen(filePath);
+
+     const readFn = fread.bind(undefined, fileDescriptor);
+     const closeFn = fclose.bind(undefined, fileDescriptor);
+
+     return new ParquetEnvelopeReader(readFn, closeFn, fileStat.size);
+   }
+
+   static async openBuffer(buffer: Buffer): Promise<ParquetEnvelopeReader> {
+     const readFn = (position: number, length: number) =>
+       Promise.resolve(buffer.slice(position, position + length));
+     const closeFn = () => Promise.resolve();
+     return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);
+   }
+
+   constructor(
+     read: (position: number, length: number) => Promise<Buffer>,
+     close: () => Promise<void>,
+     fileSize: number,
+     options?: any
+   ) {
+     this.read = read;
+     this.close = close;
+     this.fileSize = fileSize;
+     this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;
+   }
+
+   async readHeader(): Promise<void> {
+     const buffer = await this.read(0, PARQUET_MAGIC.length);
+
+     const magic = buffer.toString();
+     switch (magic) {
+       case PARQUET_MAGIC:
+         break;
+       case PARQUET_MAGIC_ENCRYPTED:
+         throw new Error('Encrypted parquet file not supported');
+       default:
+         throw new Error(`Invalid parquet file (magic=${magic})`);
+     }
+   }
+
+   async readRowGroup(
+     schema: ParquetSchema,
+     rowGroup: RowGroup,
+     columnList: string[][]
+   ): Promise<ParquetBuffer> {
+     const buffer: ParquetBuffer = {
+       rowCount: Number(rowGroup.num_rows),
+       columnData: {}
+     };
+     for (const colChunk of rowGroup.columns) {
+       const colMetadata = colChunk.meta_data;
+       const colKey = colMetadata?.path_in_schema;
+       if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {
+         continue; // eslint-disable-line no-continue
+       }
+       buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);
+     }
+     return buffer;
+   }
+
+   /**
+    * Read a column chunk from the parquet file
+    * @param schema
+    * @param colChunk
+    */
+   async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {
+     if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
+       throw new Error('external references are not supported');
+     }
+
+     const field = schema.findField(colChunk.meta_data?.path_in_schema!);
+     const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) as any;
+
+     if (type !== field.primitiveType) {
+       throw new Error(`chunk type not matching schema: ${type}`);
+     }
+
+     const compression: ParquetCompression = getThriftEnum(
+       CompressionCodec,
+       colChunk.meta_data?.codec!
+     ) as any;
+
+     const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);
+     let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);
+
+     if (!colChunk.file_path) {
+       pagesSize = Math.min(
+         this.fileSize - pagesOffset,
+         Number(colChunk.meta_data?.total_compressed_size)
+       );
+     }
+
+     const options: ParquetOptions = {
+       type,
+       rLevelMax: field.rLevelMax,
+       dLevelMax: field.dLevelMax,
+       compression,
+       column: field,
+       numValues: colChunk.meta_data?.num_values,
+       dictionary: []
+     };
+
+     let dictionary;
+
+     const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;
+
+     if (dictionaryPageOffset) {
+       const dictionaryOffset = Number(dictionaryPageOffset);
+       // Read the dictionary from the column chunk so that data page indices can be resolved to values.
+       dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
+     }
+
+     dictionary = options.dictionary?.length ? options.dictionary : dictionary;
+     const pagesBuf = await this.read(pagesOffset, pagesSize);
+     return await decodeDataPages(pagesBuf, {...options, dictionary});
+   }
+
+   /**
+    * Get the dictionary that is used to resolve data page indices into values.
+    * @param dictionaryPageOffset
+    * @param options
+    * @param pagesOffset
+    * @returns
+    */
+   async getDictionary(
+     dictionaryPageOffset: number,
+     options: ParquetOptions,
+     pagesOffset: number
+   ): Promise<string[]> {
+     if (dictionaryPageOffset === 0) {
+       // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);
+       // pagesBuf = await this.read(pagesOffset, dictionarySize);
+
+       // In this case we are working with files in the parquet-mr format. The problem is described here:
+       // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding
+       // We need to get the dictionary page from the column chunk if it exists.
+       // With the commented-out code above we would get a DATA_PAGE instead of a DICTIONARY_PAGE.
+       return [];
+     }
+
+     const dictionarySize = Math.min(
+       this.fileSize - dictionaryPageOffset,
+       this.defaultDictionarySize
+     );
+     const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);
+
+     const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};
+     const decodedPage = await decodePage(cursor, options);
+
+     return decodedPage.dictionary!;
+   }
+
+   async readFooter(): Promise<FileMetaData> {
+     const trailerLen = PARQUET_MAGIC.length + 4;
+     const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);
+
+     const magic = trailerBuf.slice(4).toString();
+     if (magic !== PARQUET_MAGIC) {
+       throw new Error(`Not a valid parquet file (magic="${magic}")`);
+     }
+
+     const metadataSize = trailerBuf.readUInt32LE(0);
+     const metadataOffset = this.fileSize - metadataSize - trailerLen;
+     if (metadataOffset < PARQUET_MAGIC.length) {
+       throw new Error(`Invalid metadata size ${metadataOffset}`);
+     }
+
+     const metadataBuf = await this.read(metadataOffset, metadataSize);
+     // let metadata = new parquet_thrift.FileMetaData();
+     // parquet_util.decodeThrift(metadata, metadataBuf);
+     const {metadata} = decodeFileMetadata(metadataBuf);
+     return metadata;
+   }
+ }
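
A sketch of how the footer path above fits together: readFooter() reads the 8-byte trailer (a 4-byte little-endian metadata length followed by the magic bytes) and then thrift-decodes the FileMetaData block that precedes it. The relative import paths and the readFooterMetadata helper below are illustrative assumptions:

import {ParquetEnvelopeReader} from './parquetjs/parser/parquet-envelope-reader';
import {FileMetaData} from './parquetjs/parquet-thrift';

// Hypothetical helper: decode only the thrift footer of an in-memory parquet file.
async function readFooterMetadata(buffer: Buffer): Promise<FileMetaData> {
  const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);
  try {
    await envelopeReader.readHeader(); // validates the leading magic bytes
    return await envelopeReader.readFooter(); // trailer + thrift-decoded FileMetaData
  } finally {
    await envelopeReader.close();
  }
}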
package/src/parquetjs/parser/parquet-reader.ts
@@ -0,0 +1,179 @@
+ // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
+ import {ParquetEnvelopeReader} from './parquet-envelope-reader';
+ import {FileMetaData} from '../parquet-thrift';
+ import {ParquetSchema} from '../schema/schema';
+ import {ParquetCursor} from './parquet-cursor';
+ import {PARQUET_VERSION} from '../../constants';
+ import {decodeSchema} from './decoders';
+
+ /**
+  * A parquet reader allows retrieving the rows from a parquet file in order.
+  * The basic usage is to create a reader and then retrieve a cursor/iterator
+  * which allows you to consume row after row until all rows have been read. It is
+  * important that you call close() after you are finished reading the file to
+  * avoid leaking file descriptors.
+  */
+ export class ParquetReader<T> implements AsyncIterable<T> {
+   /**
+    * Return a new parquet reader initialized with a read function over a blob
+    */
+   static async openBlob<T>(blob: Blob): Promise<ParquetReader<T>> {
+     const readFn = async (start: number, length: number) => {
+       const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();
+       return Buffer.from(arrayBuffer);
+     };
+     const closeFn = async () => {};
+     const size = blob.size;
+     const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);
+     try {
+       await envelopeReader.readHeader();
+       const metadata = await envelopeReader.readFooter();
+       return new ParquetReader(metadata, envelopeReader);
+     } catch (err) {
+       await envelopeReader.close();
+       throw err;
+     }
+   }
+
+   /**
+    * Return a new parquet reader initialized with a read function over an ArrayBuffer
+    */
+   static async openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>> {
+     const readFn = async (start: number, length: number) => Buffer.from(arrayBuffer, start, length);
+     const closeFn = async () => {};
+     const size = arrayBuffer.byteLength;
+     const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);
+     try {
+       await envelopeReader.readHeader();
+       const metadata = await envelopeReader.readFooter();
+       return new ParquetReader(metadata, envelopeReader);
+     } catch (err) {
+       await envelopeReader.close();
+       throw err;
+     }
+   }
+
+   /**
+    * Open the parquet file pointed to by the specified path and return a new
+    * parquet reader
+    */
+   static async openFile<T>(filePath: string): Promise<ParquetReader<T>> {
+     const envelopeReader = await ParquetEnvelopeReader.openFile(filePath);
+     try {
+       await envelopeReader.readHeader();
+       const metadata = await envelopeReader.readFooter();
+       return new ParquetReader<T>(metadata, envelopeReader);
+     } catch (err) {
+       await envelopeReader.close();
+       throw err;
+     }
+   }
+
+   static async openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>> {
+     const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);
+     try {
+       await envelopeReader.readHeader();
+       const metadata = await envelopeReader.readFooter();
+       return new ParquetReader<T>(metadata, envelopeReader);
+     } catch (err) {
+       await envelopeReader.close();
+       throw err;
+     }
+   }
+
+   public metadata: FileMetaData;
+   public envelopeReader: ParquetEnvelopeReader;
+   public schema: ParquetSchema;
+
+   /**
+    * Create a new parquet reader from the file metadata and an envelope reader.
+    * It is not recommended to call this constructor directly except for advanced
+    * and internal use cases. Consider using one of the open{File,Buffer} methods
+    * instead
+    */
+   constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader) {
+     if (metadata.version !== PARQUET_VERSION) {
+       throw new Error('invalid parquet version');
+     }
+
+     this.metadata = metadata;
+     this.envelopeReader = envelopeReader;
+     const root = this.metadata.schema[0];
+     const {schema} = decodeSchema(this.metadata.schema, 1, root.num_children!);
+     this.schema = new ParquetSchema(schema);
+   }
+
+   /**
+    * Close this parquet reader. You MUST call this method once you're finished
+    * reading rows
+    */
+   async close(): Promise<void> {
+     await this.envelopeReader.close();
+     // this.envelopeReader = null;
+     // this.metadata = null;
+   }
+
+   /**
+    * Return a cursor to the file. You may open more than one cursor and use
+    * them concurrently. All cursors become invalid once close() is called on
+    * the reader object.
+    *
+    * The columnList parameter controls which columns are actually read
+    * from disk. An empty array or no value implies all columns. A list of column
+    * names means that only those columns should be loaded from disk.
+    */
+   getCursor(): ParquetCursor<T>;
+   // @ts-ignore
+   getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;
+   getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;
+   getCursor(columnList?: (string | string[])[]): ParquetCursor<Partial<T>> {
+     if (!columnList) {
+       // tslint:disable-next-line:no-parameter-reassignment
+       columnList = [];
+     }
+
+     // tslint:disable-next-line:no-parameter-reassignment
+     columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));
+
+     return new ParquetCursor<T>(
+       this.metadata,
+       this.envelopeReader,
+       this.schema,
+       columnList as string[][]
+     );
+   }
+
+   /**
+    * Return the number of rows in this file. Note that the number of rows is
+    * not necessarily equal to the number of rows in each column.
+    */
+   getRowCount(): number {
+     return Number(this.metadata.num_rows);
+   }
+
+   /**
+    * Returns the ParquetSchema for this file
+    */
+   getSchema(): ParquetSchema {
+     return this.schema;
+   }
+
+   /**
+    * Returns the user (key/value) metadata for this file
+    */
+   getMetadata(): Record<string, string> {
+     const md: Record<string, string> = {};
+     for (const kv of this.metadata.key_value_metadata!) {
+       md[kv.key] = kv.value!;
+     }
+     return md;
+   }
+
+   /**
+    * Implement AsyncIterable
+    */
+   // tslint:disable-next-line:function-name
+   [Symbol.asyncIterator](): AsyncIterator<T> {
+     return this.getCursor()[Symbol.asyncIterator]();
+   }
+ }
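
Since ParquetReader implements AsyncIterable, rows can be consumed with for-await, and getCursor(columnList) restricts which column chunks are read from disk. A minimal sketch; the Row shape, the listNames helper and the import path are illustrative assumptions:

import {ParquetReader} from './parquetjs/parser/parquet-reader';

// Hypothetical row shape for the example file.
interface Row {
  id: number;
  name: string;
}

async function listNames(arrayBuffer: ArrayBuffer): Promise<string[]> {
  const reader = await ParquetReader.openArrayBuffer<Row>(arrayBuffer);
  const names: string[] = [];
  try {
    // Project only the 'name' column; other column chunks are skipped on read
    for await (const row of reader.getCursor(['name'])) {
      names.push(row.name);
    }
  } finally {
    await reader.close();
  }
  return names;
}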