@loaders.gl/parquet 3.3.0-alpha.7 → 3.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +17 -26
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/index.js +3 -3
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/convert-schema-deep.ts.disabled +910 -0
- package/dist/es5/lib/parse-parquet.js +49 -25
- package/dist/es5/lib/parse-parquet.js.map +1 -1
- package/dist/es5/parquet-loader.js +3 -2
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -1
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +15 -5
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/encoder/{writer.js → parquet-encoder.js} +70 -158
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/es5/parquetjs/parser/parquet-reader.js +553 -222
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js +3 -1
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +39 -33
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/file-utils.js +2 -3
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/esm/index.js +1 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/convert-schema-deep.ts.disabled +910 -0
- package/dist/esm/lib/parse-parquet.js +6 -12
- package/dist/esm/lib/parse-parquet.js.map +1 -1
- package/dist/esm/parquet-loader.js +3 -2
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -1
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -1
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/compression.js +10 -1
- package/dist/esm/parquetjs/compression.js.map +1 -1
- package/dist/esm/parquetjs/encoder/{writer.js → parquet-encoder.js} +7 -37
- package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/esm/parquetjs/parser/parquet-reader.js +158 -72
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/esm/parquetjs/schema/declare.js +1 -0
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/schema/shred.js +42 -34
- package/dist/esm/parquetjs/schema/shred.js.map +1 -1
- package/dist/esm/parquetjs/schema/types.js.map +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +3 -4
- package/dist/lib/parse-parquet.d.ts +2 -2
- package/dist/lib/parse-parquet.d.ts.map +1 -1
- package/dist/lib/parse-parquet.js +24 -12
- package/dist/lib/wasm/encode-parquet-wasm.d.ts +1 -1
- package/dist/lib/wasm/encode-parquet-wasm.d.ts.map +1 -1
- package/dist/lib/wasm/parse-parquet-wasm.d.ts +1 -1
- package/dist/lib/wasm/parse-parquet-wasm.d.ts.map +1 -1
- package/dist/parquet-loader.d.ts +2 -1
- package/dist/parquet-loader.d.ts.map +1 -1
- package/dist/parquet-loader.js +2 -1
- package/dist/parquet-wasm-loader.d.ts +1 -1
- package/dist/parquet-wasm-loader.d.ts.map +1 -1
- package/dist/parquet-worker.js +15 -24
- package/dist/parquet-worker.js.map +3 -3
- package/dist/parquet-writer.d.ts +1 -1
- package/dist/parquet-writer.d.ts.map +1 -1
- package/dist/parquetjs/compression.d.ts.map +1 -1
- package/dist/parquetjs/compression.js +16 -5
- package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +10 -19
- package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
- package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js} +60 -58
- package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.js +168 -102
- package/dist/parquetjs/schema/declare.d.ts +21 -14
- package/dist/parquetjs/schema/declare.d.ts.map +1 -1
- package/dist/parquetjs/schema/declare.js +2 -0
- package/dist/parquetjs/schema/shred.d.ts +115 -0
- package/dist/parquetjs/schema/shred.d.ts.map +1 -1
- package/dist/parquetjs/schema/shred.js +161 -43
- package/dist/parquetjs/schema/types.d.ts +2 -2
- package/dist/parquetjs/schema/types.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.d.ts +3 -4
- package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.js +2 -5
- package/package.json +7 -5
- package/src/index.ts +2 -2
- package/src/lib/convert-schema-deep.ts.disabled +910 -0
- package/src/lib/parse-parquet.ts +25 -12
- package/src/parquet-loader.ts +3 -1
- package/src/parquetjs/compression.ts +14 -1
- package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +22 -28
- package/src/parquetjs/parser/parquet-reader.ts +239 -122
- package/src/parquetjs/schema/declare.ts +17 -9
- package/src/parquetjs/schema/shred.ts +157 -28
- package/src/parquetjs/schema/types.ts +21 -27
- package/src/parquetjs/utils/file-utils.ts +3 -4
- package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
- package/dist/es5/parquetjs/file.js +0 -94
- package/dist/es5/parquetjs/file.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -183
- package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -327
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/es5/parquetjs/utils/buffer-utils.js +0 -19
- package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
- package/dist/esm/parquetjs/file.js +0 -81
- package/dist/esm/parquetjs/file.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -78
- package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -129
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/esm/parquetjs/utils/buffer-utils.js +0 -13
- package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
- package/dist/parquetjs/file.d.ts +0 -10
- package/dist/parquetjs/file.d.ts.map +0 -1
- package/dist/parquetjs/file.js +0 -99
- package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
- package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-cursor.js +0 -74
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -136
- package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
- package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
- package/dist/parquetjs/utils/buffer-utils.js +0 -22
- package/src/parquetjs/file.ts +0 -90
- package/src/parquetjs/parser/parquet-cursor.ts +0 -94
- package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
- package/src/parquetjs/utils/buffer-utils.ts +0 -18
package/src/parquetjs/schema/declare.ts

@@ -98,6 +98,7 @@ export interface ParquetField {
   fields?: Record<string, ParquetField>;
 }
 
+/** @todo better name, this is an internal type? */
 export interface ParquetOptions {
   type: ParquetType;
   rLevelMax: number;
@@ -108,20 +109,14 @@ export interface ParquetOptions {
   dictionary?: ParquetDictionary;
 }
 
-export interface ParquetData {
-  dlevels: number[];
-  rlevels: number[];
-  values: any[];
-  count: number;
-  pageHeaders: PageHeader[];
-}
-
 export interface ParquetPageData {
   dlevels: number[];
   rlevels: number[];
-  values: any[];
+  /** Actual column chunks */
+  values: any[]; // ArrayLike<any>;
   count: number;
   dictionary?: ParquetDictionary;
+  /** The "raw" page header from the file */
   pageHeader: PageHeader;
 }
 
@@ -129,11 +124,24 @@ export interface ParquetRecord {
   [key: string]: any;
 }
 
+/**
+ * Holds data for one row group (column chunks) */
 export class ParquetBuffer {
+  /** Number of rows in this page */
   rowCount: number;
+
   columnData: Record<string, ParquetData>;
   constructor(rowCount: number = 0, columnData: Record<string, ParquetData> = {}) {
     this.rowCount = rowCount;
     this.columnData = columnData;
   }
 }
+
+/** Holds the data for one column chunk */
+export interface ParquetData {
+  dlevels: number[];
+  rlevels: number[];
+  values: any[];
+  count: number;
+  pageHeaders: PageHeader[];
+}
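For orientation, the reshaped declare.ts types nest as follows: a ParquetBuffer holds one row group, and its columnData maps a column path to the ParquetData for that column chunk. A minimal sketch of a buffer for a single required INT32 column named id (the import path is illustrative; these are internal types, not part of the package's public API):

    import {ParquetBuffer, ParquetData} from './parquetjs/schema/declare';

    // One column chunk: three rows of a required column, so all levels are 0.
    const idChunk: ParquetData = {
      dlevels: [0, 0, 0], // definition levels (always 0 for a required column)
      rlevels: [0, 0, 0], // repetition levels (0 = each value starts a new row)
      values: [1, 2, 3],
      count: 3,
      pageHeaders: []
    };

    // Row group with three rows and one column chunk, keyed by column path.
    const buffer = new ParquetBuffer(3, {id: idChunk});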
package/src/parquetjs/schema/shred.ts

@@ -146,9 +146,14 @@ function shredRecordFields(
  */
 export function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {
   const records: ParquetRecord[] = [];
-  for (let i = 0; i < buffer.rowCount; i++) records.push({});
+  for (let i = 0; i < buffer.rowCount; i++) {
+    records.push({});
+  }
   for (const key in buffer.columnData) {
-    materializeColumn(schema, buffer, key, records);
+    const columnData = buffer.columnData[key];
+    if (columnData.count) {
+      materializeColumn(schema, columnData, key, records);
+    }
   }
   return records;
 }
@@ -156,33 +161,151 @@ export function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer)
 // eslint-disable-next-line max-statements, complexity
 function materializeColumn(
   schema: ParquetSchema,
-  buffer: ParquetBuffer,
+  columnData: ParquetData,
   key: string,
   records: ParquetRecord[]
-) {
-  const data = buffer.columnData[key];
-  if (!data.count) return;
-
+): void {
   const field = schema.findField(key);
   const branch = schema.findFieldBranch(key);
 
   // tslint:disable-next-line:prefer-array-literal
   const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);
   let vIndex = 0;
-  for (let i = 0; i < data.count; i++) {
-    const dLevel = data.dlevels[i];
-    const rLevel = data.rlevels[i];
+  for (let i = 0; i < columnData.count; i++) {
+    const dLevel = columnData.dlevels[i];
+    const rLevel = columnData.rlevels[i];
     rLevels[rLevel]++;
     rLevels.fill(0, rLevel + 1);
 
     let rIndex = 0;
     let record = records[rLevels[rIndex++] - 1];
 
-    // Internal nodes
+    // Internal nodes - Build a nested row object
     for (const step of branch) {
-      if (step === field) break;
-      if (dLevel < step.dLevelMax) break;
-      if (step.repetitionType === 'REPEATED') {
+      if (step === field || dLevel < step.dLevelMax) {
+        break;
+      }
+
+      switch (step.repetitionType) {
+        case 'REPEATED':
+          if (!(step.name in record)) {
+            // eslint-disable max-depth
+            record[step.name] = [];
+          }
+          const ix = rLevels[rIndex++];
+          while (record[step.name].length <= ix) {
+            // eslint-disable max-depth
+            record[step.name].push({});
+          }
+          record = record[step.name][ix];
+          break;
+
+        default:
+          record[step.name] = record[step.name] || {};
+          record = record[step.name];
+      }
+    }
+
+    // Leaf node - Add the value
+    if (dLevel === field.dLevelMax) {
+      const value = Types.fromPrimitive(
+        // @ts-ignore
+        field.originalType || field.primitiveType,
+        columnData.values[vIndex],
+        field
+      );
+      vIndex++;
+
+      switch (field.repetitionType) {
+        case 'REPEATED':
+          if (!(field.name in record)) {
+            // eslint-disable max-depth
+            record[field.name] = [];
+          }
+          const ix = rLevels[rIndex];
+          while (record[field.name].length <= ix) {
+            // eslint-disable max-depth
+            record[field.name].push(null);
+          }
+          record[field.name][ix] = value;
+          break;
+
+        default:
+          record[field.name] = value;
+      }
+    }
+  }
+}
+
+// Columnar export
+
+/**
+ * 'Materialize' a list of <value, repetition_level, definition_level>
+ * tuples back to nested records (objects/arrays) using the Google Dremel
+ * Algorithm..
+ *
+ * The buffer argument must point to an object with the following structure (i.e.
+ * the same structure that is returned by shredRecords):
+ *
+ *   buffer = {
+ *     columnData: [
+ *       'my_col': {
+ *         dlevels: [d1, d2, .. dN],
+ *         rlevels: [r1, r2, .. rN],
+ *         values: [v1, v2, .. vN],
+ *       }, ...
+ *     ],
+ *     rowCount: X,
+ *   }
+ *
+export function extractColumns(schema: ParquetSchema, buffer: ParquetBuffer): Record<string, unknown> {
+  const columns: ParquetRecord = {};
+  for (const key in buffer.columnData) {
+    const columnData = buffer.columnData[key];
+    if (columnData.count) {
+      extractColumn(schema, columnData, key, columns);
+    }
+  }
+  return columns;
+}
+
+// eslint-disable-next-line max-statements, complexity
+function extractColumn(
+  schema: ParquetSchema,
+  columnData: ParquetData,
+  key: string,
+  columns: Record<string, unknown>
+) {
+  if (columnData.count <= 0) {
+    return;
+  }
+
+  const record = columns;
+
+  const field = schema.findField(key);
+  const branch = schema.findFieldBranch(key);
+
+  // tslint:disable-next-line:prefer-array-literal
+  const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);
+  let vIndex = 0;
+
+  let i = 0;
+  const dLevel = columnData.dlevels[i];
+  const rLevel = columnData.rlevels[i];
+  rLevels[rLevel]++;
+  rLevels.fill(0, rLevel + 1);
+
+  let rIndex = 0;
+  let record = records[rLevels[rIndex++] - 1];
+
+  // Internal nodes
+  for (const step of branch) {
+    if (step === field || dLevel < step.dLevelMax) {
+      break;
+    }
+
+    switch (step.repetitionType) {
+      case 'REPEATED':
         if (!(step.name in record)) {
           // eslint-disable max-depth
           record[step.name] = [];
@@ -193,22 +316,26 @@ function materializeColumn(
           record[step.name].push({});
         }
         record = record[step.name][ix];
-      } else {
+        break;
+
+      default:
         record[step.name] = record[step.name] || {};
         record = record[step.name];
-      }
     }
+  }
 
-    // Leaf node
-    if (dLevel === field.dLevelMax) {
-      const value = Types.fromPrimitive(
-        // @ts-ignore
-        field.originalType || field.primitiveType,
-        data.values[vIndex],
-        field
-      );
-      vIndex++;
-      if (field.repetitionType === 'REPEATED') {
+  // Leaf node
+  if (dLevel === field.dLevelMax) {
+    const value = Types.fromPrimitive(
+      // @ts-ignore
+      field.originalType || field.primitiveType,
+      columnData.values[vIndex],
+      field
+    );
+    vIndex++;
+
+    switch (field.repetitionType) {
+      case 'REPEATED':
         if (!(field.name in record)) {
           // eslint-disable max-depth
           record[field.name] = [];
@@ -219,9 +346,11 @@ function materializeColumn(
           record[field.name].push(null);
         }
         record[field.name][ix] = value;
-      } else {
+        break;
+
+      default:
         record[field.name] = value;
-      }
     }
   }
 }
+*/
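The Dremel comment above is easiest to see with concrete levels. For a single repeated UTF8 column, shredding one record produces parallel values/rlevels/dlevels arrays, and materializeRecords reverses the process. A sketch of the round trip, assuming the internal shred module is importable (paths illustrative; shredRecord is the counterpart the writer uses, and the writer itself passes a fresh empty object as the row buffer):

    import {ParquetSchema} from './parquetjs/schema/schema';
    import {ParquetBuffer} from './parquetjs/schema/declare';
    import {shredRecord, materializeRecords} from './parquetjs/schema/shred';

    // One repeated column: rLevelMax = 1, dLevelMax = 1.
    const schema = new ParquetSchema({tags: {type: 'UTF8', repeated: true}});

    // shredRecord initializes an empty buffer on first use.
    const buffer = {} as ParquetBuffer;
    shredRecord(schema, {tags: ['x', 'y']}, buffer);
    // buffer.columnData.tags now holds:
    //   rlevels: [0, 1]  - the second value continues the same row
    //   dlevels: [1, 1]  - both values are defined
    //   values:  the UTF8-encoded buffers for 'x' and 'y'

    const rows = materializeRecords(schema, buffer);
    // rows[0] is {tags: ['x', 'y']} again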
package/src/parquetjs/schema/types.ts

@@ -168,7 +168,7 @@ export const PARQUET_LOGICAL_TYPES: Record<ParquetType, ParquetTypeKit> = {
  * Convert a value from it's native representation to the internal/underlying
  * primitive type
  */
-export function toPrimitive(type: ParquetType, value: any, field?: ParquetField) {
+export function toPrimitive(type: ParquetType, value: unknown, field?: ParquetField): unknown {
   if (!(type in PARQUET_LOGICAL_TYPES)) {
     throw new Error(`invalid type: ${type}`);
   }
@@ -180,7 +180,7 @@ export function toPrimitive(type: ParquetType, value: any, field?: ParquetField)
  * Convert a value from it's internal/underlying primitive representation to
  * the native representation
  */
-export function fromPrimitive(type: ParquetType, value: any, field?: ParquetField) {
+export function fromPrimitive(type: ParquetType, value: unknown, field?: ParquetField) {
   if (!(type in PARQUET_LOGICAL_TYPES)) {
     throw new Error(`invalid type: ${type}`);
   }
@@ -192,29 +192,27 @@ export function fromPrimitive(type: ParquetType, value: any, field?: ParquetFiel
   return value;
 }
 
-function toPrimitive_BOOLEAN(value: any) {
+function toPrimitive_BOOLEAN(value: unknown): boolean {
   return Boolean(value);
 }
 
-function fromPrimitive_BOOLEAN(value: any) {
+function fromPrimitive_BOOLEAN(value: any): boolean {
   return Boolean(value);
 }
 
-function toPrimitive_FLOAT(value: any) {
+function toPrimitive_FLOAT(value: any): number {
   const v = parseFloat(value);
   if (isNaN(v)) {
     throw new Error(`invalid value for FLOAT: ${value}`);
   }
-
   return v;
 }
 
-function toPrimitive_DOUBLE(value: any) {
+function toPrimitive_DOUBLE(value: any): number {
   const v = parseFloat(value);
   if (isNaN(v)) {
     throw new Error(`invalid value for DOUBLE: ${value}`);
   }
-
   return v;
 }
 
@@ -263,31 +261,28 @@ function toPrimitive_INT32(value: any) {
   return v;
 }
 
-function decimalToPrimitive_INT32(value: number, field: ParquetField) {
+function decimalToPrimitive_INT32(value: number, field: ParquetField): number {
   const primitiveValue = value * 10 ** (field.scale || 0);
   const v = Math.round(((primitiveValue * 10 ** -field.presision!) % 1) * 10 ** field.presision!);
   if (v < -0x80000000 || v > 0x7fffffff || isNaN(v)) {
     throw new Error(`invalid value for INT32: ${value}`);
   }
-
   return v;
 }
 
-function toPrimitive_UINT32(value: any) {
+function toPrimitive_UINT32(value: any): number {
   const v = parseInt(value, 10);
   if (v < 0 || v > 0xffffffffffff || isNaN(v)) {
     throw new Error(`invalid value for UINT32: ${value}`);
   }
-
   return v;
 }
 
-function toPrimitive_INT64(value: any) {
+function toPrimitive_INT64(value: any): number {
   const v = parseInt(value, 10);
   if (isNaN(v)) {
     throw new Error(`invalid value for INT64: ${value}`);
   }
-
   return v;
 }
 
@@ -319,32 +314,32 @@ function toPrimitive_INT96(value: any) {
   return v;
 }
 
-function toPrimitive_BYTE_ARRAY(value: any) {
+function toPrimitive_BYTE_ARRAY(value: any): Buffer {
   return Buffer.from(value);
 }
 
-function decimalToPrimitive_BYTE_ARRAY(value: any) {
+function decimalToPrimitive_BYTE_ARRAY(value: any): Buffer {
   // TBD
   return Buffer.from(value);
 }
 
-function toPrimitive_UTF8(value: any) {
+function toPrimitive_UTF8(value: any): Buffer {
   return Buffer.from(value, 'utf8');
 }
 
-function fromPrimitive_UTF8(value: any) {
+function fromPrimitive_UTF8(value: any): string {
   return value.toString();
 }
 
-function toPrimitive_JSON(value: any) {
+function toPrimitive_JSON(value: any): Buffer {
   return Buffer.from(JSON.stringify(value));
 }
 
-function fromPrimitive_JSON(value: any) {
+function fromPrimitive_JSON(value: any): unknown {
   return JSON.parse(value);
 }
 
-function toPrimitive_BSON(value: any) {
+function toPrimitive_BSON(value: any): Buffer {
   return Buffer.from(BSON.serialize(value));
 }
 
@@ -361,18 +356,17 @@ function toPrimitive_TIME_MILLIS(value: any) {
   return v;
 }
 
-function toPrimitive_TIME_MICROS(value: any) {
+function toPrimitive_TIME_MICROS(value: any): number {
   const v = parseInt(value, 10);
   if (v < 0 || isNaN(v)) {
     throw new Error(`invalid value for TIME_MICROS: ${value}`);
   }
-
   return v;
 }
 
 const kMillisPerDay = 86400000;
 
-function toPrimitive_DATE(value: any) {
+function toPrimitive_DATE(value: any): number {
   /* convert from date */
   if (value instanceof Date) {
     return value.getTime() / kMillisPerDay;
@@ -389,11 +383,11 @@ function toPrimitive_DATE(value: any) {
   }
 }
 
-function fromPrimitive_DATE(value: any) {
+function fromPrimitive_DATE(value: any): Date {
   return new Date(value * kMillisPerDay);
 }
 
-function toPrimitive_TIMESTAMP_MILLIS(value: any) {
+function toPrimitive_TIMESTAMP_MILLIS(value: any): number {
   /* convert from date */
   if (value instanceof Date) {
     return value.getTime();
@@ -410,7 +404,7 @@ function toPrimitive_TIMESTAMP_MILLIS(value: any) {
   }
 }
 
-function fromPrimitive_TIMESTAMP_MILLIS(value: any) {
+function fromPrimitive_TIMESTAMP_MILLIS(value: any): Date {
   return new Date(value);
 }
 
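As the DATE converters above show, a DATE is stored as whole days since the Unix epoch (kMillisPerDay = 86400000): a divide on write, a multiply on read. A small worked example through the exported dispatchers (the import path is illustrative):

    import {toPrimitive, fromPrimitive} from './parquetjs/schema/types';

    // 2022-01-01T00:00:00Z lies 18993 days after 1970-01-01 (52 * 365 + 13 leap days).
    const days = toPrimitive('DATE', new Date(Date.UTC(2022, 0, 1))); // 18993
    const date = fromPrimitive('DATE', 18993); // Date for 2022-01-01T00:00:00.000Z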
package/src/parquetjs/utils/file-utils.ts

@@ -1,6 +1,5 @@
 // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
-import fs from 'fs';
-import {Writable} from 'stream';
+import {fs, stream} from '@loaders.gl/loader-utils';
 
 export function load(name: string): any {
   return (module || (global as any)).require(name);
@@ -14,7 +13,7 @@ export interface WriteStreamOptions {
   start?: number;
 }
 
-export function oswrite(os: Writable, buf: Buffer): Promise<void> {
+export function oswrite(os: stream.Writable, buf: Buffer): Promise<void> {
   return new Promise((resolve, reject) => {
     os.write(buf, (err) => {
       if (err) {
@@ -26,7 +25,7 @@ export function oswrite(os: Writable, buf: Buffer): Promise<void> {
   });
 }
 
-export function osclose(os: Writable): Promise<void> {
+export function osclose(os: stream.Writable): Promise<void> {
   return new Promise((resolve, reject) => {
     (os as any).close((err: any) => {
       if (err) {
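Functionally file-utils is unchanged: oswrite and osclose still adapt Node-style stream callbacks to promises; only the fs/stream access now goes through @loaders.gl/loader-utils so the module can be bundled for the browser. A usage sketch for a Node environment (the output path is hypothetical):

    import fs from 'fs';
    import {oswrite, osclose} from './parquetjs/utils/file-utils';

    async function writeMagic(): Promise<void> {
      const os = fs.createWriteStream('/tmp/example.parquet');
      await oswrite(os, Buffer.from('PAR1')); // resolves once the stream accepts the chunk
      await osclose(os); // resolves when the underlying file descriptor is closed
    }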
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"writer.js","names":["PARQUET_MAGIC","PARQUET_VERSION","PARQUET_DEFAULT_PAGE_SIZE","PARQUET_DEFAULT_ROW_GROUP_SIZE","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING","ParquetWriter","schema","envelopeWriter","opts","rowBuffer","rowGroupSize","closed","userMetadata","writeHeader","close","row","Error","Shred","shredRecord","rowCount","callback","writeFooter","key","value","String","cnt","setPageSize","path","osopen","outputStream","openStream","ParquetEnvelopeWriter","writeFn","closeFn","fileOffset","write","offset","rowGroups","pageSize","useDataPageV2","Boolean","buf","length","writeSection","Buffer","from","records","encodeRowGroup","baseOffset","rgroup","push","metadata","body","encodeFooter","oswrite","bind","undefined","osclose","ParquetTransformer","objectMode","writeProxy","t","b","writer","encoding","appendRow","then","Promise","resolve","Transform","encodeValues","type","values","PARQUET_CODECS","encodeDataPage","column","data","rLevelsBuf","alloc","rLevelMax","rlevels","bitWidth","getBitWidth","dLevelsBuf","dLevelMax","dlevels","valuesBuf","primitiveType","typeLength","dataBuf","concat","Compression","deflate","compression","compressedBuf","header","PageHeader","PageType","DATA_PAGE","data_page_header","DataPageHeader","num_values","count","Encoding","definition_level_encoding","repetition_level_encoding","uncompressed_page_size","compressed_page_size","headerBuf","serializeThrift","page","headerSize","encodeDataPageV2","disableEnvelope","DATA_PAGE_V2","data_page_header_v2","DataPageHeaderV2","num_nulls","num_rows","definition_levels_byte_length","repetition_levels_byte_length","is_compressed","encodeColumnChunk","buffer","columnData","join","total_uncompressed_size","total_compressed_size","result","pageBuf","ColumnMetaData","path_in_schema","data_page_offset","encodings","Type","codec","CompressionCodec","metadataOffset","RowGroup","columns","total_byte_size","fieldList","field","isNested","cchunkData","cchunk","ColumnChunk","file_offset","meta_data","Int64","Number","FileMetaData","version","created_by","row_groups","key_value_metadata","kv","KeyValue","schemaRoot","SchemaElement","name","num_children","Object","keys","fields","relt","FieldRepetitionType","repetitionType","schemaElem","repetition_type","fieldCount","originalType","converted_type","ConvertedType","type_length","metadataEncoded","footerEncoded","copy","writeUInt32LE"],"sources":["../../../../src/parquetjs/encoder/writer.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\nimport {Transform, Writable} from 'stream';\nimport {ParquetCodecOptions, PARQUET_CODECS} from '../codecs';\nimport * as Compression from '../compression';\nimport {\n ParquetBuffer,\n ParquetCodec,\n ParquetData,\n ParquetField,\n PrimitiveType\n} from '../schema/declare';\nimport {ParquetSchema} from '../schema/schema';\nimport * as Shred from '../schema/shred';\nimport {\n ColumnChunk,\n ColumnMetaData,\n CompressionCodec,\n ConvertedType,\n DataPageHeader,\n DataPageHeaderV2,\n Encoding,\n FieldRepetitionType,\n FileMetaData,\n KeyValue,\n PageHeader,\n PageType,\n RowGroup,\n SchemaElement,\n Type\n} from '../parquet-thrift';\nimport {osopen, oswrite, osclose} from '../utils/file-utils';\nimport {getBitWidth, serializeThrift} from '../utils/read-utils';\nimport Int64 from 'node-int64';\n\n/**\n * Parquet File Magic String\n */\nconst PARQUET_MAGIC = 'PAR1';\n\n/**\n * Parquet File Format Version\n */\nconst 
PARQUET_VERSION = 1;\n\n/**\n * Default Page and Row Group sizes\n */\nconst PARQUET_DEFAULT_PAGE_SIZE = 8192;\nconst PARQUET_DEFAULT_ROW_GROUP_SIZE = 4096;\n\n/**\n * Repetition and Definition Level Encoding\n */\nconst PARQUET_RDLVL_TYPE = 'INT32';\nconst PARQUET_RDLVL_ENCODING = 'RLE';\n\nexport interface ParquetWriterOptions {\n baseOffset?: number;\n rowGroupSize?: number;\n pageSize?: number;\n useDataPageV2?: boolean;\n\n // Write Stream Options\n flags?: string;\n encoding?: string;\n fd?: number;\n mode?: number;\n autoClose?: boolean;\n start?: number;\n}\n\n/**\n * Write a parquet file to an output stream. The ParquetWriter will perform\n * buffering/batching for performance, so close() must be called after all rows\n * are written.\n */\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nexport class ParquetWriter<T> {\n /**\n * Convenience method to create a new buffered parquet writer that writes to\n * the specified file\n */\n static async openFile<T>(\n schema: ParquetSchema,\n path: string,\n opts?: ParquetWriterOptions\n ): Promise<ParquetWriter<T>> {\n const outputStream = await osopen(path, opts);\n return ParquetWriter.openStream(schema, outputStream, opts);\n }\n\n /**\n * Convenience method to create a new buffered parquet writer that writes to\n * the specified stream\n */\n static async openStream<T>(\n schema: ParquetSchema,\n outputStream: Writable,\n opts?: ParquetWriterOptions\n ): Promise<ParquetWriter<T>> {\n if (!opts) {\n // tslint:disable-next-line:no-parameter-reassignment\n opts = {};\n }\n\n const envelopeWriter = await ParquetEnvelopeWriter.openStream(schema, outputStream, opts);\n\n return new ParquetWriter(schema, envelopeWriter, opts);\n }\n\n public schema: ParquetSchema;\n public envelopeWriter: ParquetEnvelopeWriter;\n public rowBuffer: ParquetBuffer;\n public rowGroupSize: number;\n public closed: boolean;\n public userMetadata: Record<string, string>;\n\n /**\n * Create a new buffered parquet writer for a given envelope writer\n */\n constructor(\n schema: ParquetSchema,\n envelopeWriter: ParquetEnvelopeWriter,\n opts: ParquetWriterOptions\n ) {\n this.schema = schema;\n this.envelopeWriter = envelopeWriter;\n // @ts-ignore Row buffer typings...\n this.rowBuffer = {};\n this.rowGroupSize = opts.rowGroupSize || PARQUET_DEFAULT_ROW_GROUP_SIZE;\n this.closed = false;\n this.userMetadata = {};\n\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.writeHeader();\n }\n\n async writeHeader(): Promise<void> {\n // TODO - better not mess with promises in the constructor\n try {\n await this.envelopeWriter.writeHeader();\n } catch (err) {\n await this.envelopeWriter.close();\n throw err;\n }\n }\n\n /**\n * Append a single row to the parquet file. Rows are buffered in memory until\n * rowGroupSize rows are in the buffer or close() is called\n */\n async appendRow<T>(row: T): Promise<void> {\n if (this.closed) {\n throw new Error('writer was closed');\n }\n Shred.shredRecord(this.schema, row, this.rowBuffer);\n if (this.rowBuffer.rowCount >= this.rowGroupSize) {\n // @ts-ignore\n this.rowBuffer = {};\n }\n }\n\n /**\n * Finish writing the parquet file and commit the footer to disk. This method\n * MUST be called after you are finished adding rows. 
You must not call this\n * method twice on the same object or add any rows after the close() method has\n * been called\n */\n async close(callback?: () => void): Promise<void> {\n if (this.closed) {\n throw new Error('writer was closed');\n }\n\n this.closed = true;\n\n if (this.rowBuffer.rowCount > 0 || this.rowBuffer.rowCount >= this.rowGroupSize) {\n // @ts-ignore\n this.rowBuffer = {};\n }\n\n await this.envelopeWriter.writeFooter(this.userMetadata);\n await this.envelopeWriter.close();\n // this.envelopeWriter = null;\n\n if (callback) {\n callback();\n }\n }\n\n /**\n * Add key<>value metadata to the file\n */\n setMetadata(key: string, value: string): void {\n // TODO: value to be any, obj -> JSON\n this.userMetadata[String(key)] = String(value);\n }\n\n /**\n * Set the parquet row group size. This values controls the maximum number\n * of rows that are buffered in memory at any given time as well as the number\n * of rows that are co-located on disk. A higher value is generally better for\n * read-time I/O performance at the tradeoff of write-time memory usage.\n */\n setRowGroupSize(cnt: number): void {\n this.rowGroupSize = cnt;\n }\n\n /**\n * Set the parquet data page size. The data page size controls the maximum\n * number of column values that are written to disk as a consecutive array\n */\n setPageSize(cnt: number): void {\n this.envelopeWriter.setPageSize(cnt);\n }\n}\n\n/**\n * Create a parquet file from a schema and a number of row groups. This class\n * performs direct, unbuffered writes to the underlying output stream and is\n * intendend for advanced and internal users; the writeXXX methods must be\n * called in the correct order to produce a valid file.\n */\nexport class ParquetEnvelopeWriter {\n /**\n * Create a new parquet envelope writer that writes to the specified stream\n */\n static async openStream(\n schema: ParquetSchema,\n outputStream: Writable,\n opts: ParquetWriterOptions\n ): Promise<ParquetEnvelopeWriter> {\n const writeFn = oswrite.bind(undefined, outputStream);\n const closeFn = osclose.bind(undefined, outputStream);\n return new ParquetEnvelopeWriter(schema, writeFn, closeFn, 0, opts);\n }\n\n public schema: ParquetSchema;\n public write: (buf: Buffer) => Promise<void>;\n public close: () => Promise<void>;\n public offset: number;\n public rowCount: number;\n public rowGroups: RowGroup[];\n public pageSize: number;\n public useDataPageV2: boolean;\n\n constructor(\n schema: ParquetSchema,\n writeFn: (buf: Buffer) => Promise<void>,\n closeFn: () => Promise<void>,\n fileOffset: number,\n opts: ParquetWriterOptions\n ) {\n this.schema = schema;\n this.write = writeFn;\n this.close = closeFn;\n this.offset = fileOffset;\n this.rowCount = 0;\n this.rowGroups = [];\n this.pageSize = opts.pageSize || PARQUET_DEFAULT_PAGE_SIZE;\n this.useDataPageV2 = 'useDataPageV2' in opts ? Boolean(opts.useDataPageV2) : false;\n }\n\n writeSection(buf: Buffer): Promise<void> {\n this.offset += buf.length;\n return this.write(buf);\n }\n\n /**\n * Encode the parquet file header\n */\n writeHeader(): Promise<void> {\n return this.writeSection(Buffer.from(PARQUET_MAGIC));\n }\n\n /**\n * Encode a parquet row group. 
The records object should be created using the\n * shredRecord method\n */\n async writeRowGroup(records: ParquetBuffer): Promise<void> {\n const rgroup = await encodeRowGroup(this.schema, records, {\n baseOffset: this.offset,\n pageSize: this.pageSize,\n useDataPageV2: this.useDataPageV2\n });\n\n this.rowCount += records.rowCount;\n this.rowGroups.push(rgroup.metadata);\n return await this.writeSection(rgroup.body);\n }\n\n /**\n * Write the parquet file footer\n */\n writeFooter(userMetadata: Record<string, string>): Promise<void> {\n if (!userMetadata) {\n // tslint:disable-next-line:no-parameter-reassignment\n userMetadata = {};\n }\n\n return this.writeSection(\n encodeFooter(this.schema, this.rowCount, this.rowGroups, userMetadata)\n );\n }\n\n /**\n * Set the parquet data page size. The data page size controls the maximum\n * number of column values that are written to disk as a consecutive array\n */\n setPageSize(cnt: number): void {\n this.pageSize = cnt;\n }\n}\n\n/**\n * Create a parquet transform stream\n */\nexport class ParquetTransformer<T> extends Transform {\n public writer: ParquetWriter<T>;\n\n constructor(schema: ParquetSchema, opts: ParquetWriterOptions = {}) {\n super({objectMode: true});\n\n const writeProxy = (function (t: ParquetTransformer<any>) {\n return async function (b: any): Promise<void> {\n t.push(b);\n };\n })(this);\n\n this.writer = new ParquetWriter(\n schema,\n new ParquetEnvelopeWriter(schema, writeProxy, async () => {}, 0, opts),\n opts\n );\n }\n\n // tslint:disable-next-line:function-name\n _transform(row: any, encoding: string, callback: (val?: any) => void): Promise<void> {\n if (row) {\n return this.writer.appendRow(row).then(callback);\n }\n callback();\n return Promise.resolve();\n }\n\n // tslint:disable-next-line:function-name\n async _flush(callback: (val?: any) => void) {\n await this.writer.close(callback);\n }\n}\n\n/**\n * Encode a consecutive array of data using one of the parquet encodings\n */\nfunction encodeValues(\n type: PrimitiveType,\n encoding: ParquetCodec,\n values: any[],\n opts: ParquetCodecOptions\n) {\n if (!(encoding in PARQUET_CODECS)) {\n throw new Error(`invalid encoding: ${encoding}`);\n }\n return PARQUET_CODECS[encoding].encodeValues(type, values, opts);\n}\n\n/**\n * Encode a parquet data page\n */\nasync function encodeDataPage(\n column: ParquetField,\n data: ParquetData\n): Promise<{\n header: PageHeader;\n headerSize: number;\n page: Buffer;\n}> {\n /* encode repetition and definition levels */\n let rLevelsBuf = Buffer.alloc(0);\n if (column.rLevelMax > 0) {\n rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {\n bitWidth: getBitWidth(column.rLevelMax)\n // disableEnvelope: false\n });\n }\n\n let dLevelsBuf = Buffer.alloc(0);\n if (column.dLevelMax > 0) {\n dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {\n bitWidth: getBitWidth(column.dLevelMax)\n // disableEnvelope: false\n });\n }\n\n /* encode values */\n const valuesBuf = encodeValues(column.primitiveType!, column.encoding!, data.values, {\n typeLength: column.typeLength,\n bitWidth: column.typeLength\n });\n\n const dataBuf = Buffer.concat([rLevelsBuf, dLevelsBuf, valuesBuf]);\n\n // compression = column.compression === 'UNCOMPRESSED' ? 
(compression || 'UNCOMPRESSED') : column.compression;\n const compressedBuf = await Compression.deflate(column.compression!, dataBuf);\n\n /* build page header */\n const header = new PageHeader({\n type: PageType.DATA_PAGE,\n data_page_header: new DataPageHeader({\n num_values: data.count,\n encoding: Encoding[column.encoding!] as any,\n definition_level_encoding: Encoding[PARQUET_RDLVL_ENCODING], // [PARQUET_RDLVL_ENCODING],\n repetition_level_encoding: Encoding[PARQUET_RDLVL_ENCODING] // [PARQUET_RDLVL_ENCODING]\n }),\n uncompressed_page_size: dataBuf.length,\n compressed_page_size: compressedBuf.length\n });\n\n /* concat page header, repetition and definition levels and values */\n const headerBuf = serializeThrift(header);\n const page = Buffer.concat([headerBuf, compressedBuf]);\n\n return {header, headerSize: headerBuf.length, page};\n}\n\n/**\n * Encode a parquet data page (v2)\n */\nasync function encodeDataPageV2(\n column: ParquetField,\n data: ParquetData,\n rowCount: number\n): Promise<{\n header: PageHeader;\n headerSize: number;\n page: Buffer;\n}> {\n /* encode values */\n const valuesBuf = encodeValues(column.primitiveType!, column.encoding!, data.values, {\n typeLength: column.typeLength,\n bitWidth: column.typeLength\n });\n\n // compression = column.compression === 'UNCOMPRESSED' ? (compression || 'UNCOMPRESSED') : column.compression;\n const compressedBuf = await Compression.deflate(column.compression!, valuesBuf);\n\n /* encode repetition and definition levels */\n let rLevelsBuf = Buffer.alloc(0);\n if (column.rLevelMax > 0) {\n rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {\n bitWidth: getBitWidth(column.rLevelMax),\n disableEnvelope: true\n });\n }\n\n let dLevelsBuf = Buffer.alloc(0);\n if (column.dLevelMax > 0) {\n dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {\n bitWidth: getBitWidth(column.dLevelMax),\n disableEnvelope: true\n });\n }\n\n /* build page header */\n const header = new PageHeader({\n type: PageType.DATA_PAGE_V2,\n data_page_header_v2: new DataPageHeaderV2({\n num_values: data.count,\n num_nulls: data.count - data.values.length,\n num_rows: rowCount,\n encoding: Encoding[column.encoding!] as any,\n definition_levels_byte_length: dLevelsBuf.length,\n repetition_levels_byte_length: rLevelsBuf.length,\n is_compressed: column.compression !== 'UNCOMPRESSED'\n }),\n uncompressed_page_size: rLevelsBuf.length + dLevelsBuf.length + valuesBuf.length,\n compressed_page_size: rLevelsBuf.length + dLevelsBuf.length + compressedBuf.length\n });\n\n /* concat page header, repetition and definition levels and values */\n const headerBuf = serializeThrift(header);\n const page = Buffer.concat([headerBuf, rLevelsBuf, dLevelsBuf, compressedBuf]);\n return {header, headerSize: headerBuf.length, page};\n}\n\n/**\n * Encode an array of values into a parquet column chunk\n */\nasync function encodeColumnChunk(\n column: ParquetField,\n buffer: ParquetBuffer,\n offset: number,\n opts: ParquetWriterOptions\n): Promise<{\n body: Buffer;\n metadata: ColumnMetaData;\n metadataOffset: number;\n}> {\n const data = buffer.columnData[column.path.join()];\n const baseOffset = (opts.baseOffset || 0) + offset;\n /* encode data page(s) */\n // const pages: Buffer[] = [];\n let pageBuf: Buffer;\n // tslint:disable-next-line:variable-name\n let total_uncompressed_size = 0;\n // tslint:disable-next-line:variable-name\n let total_compressed_size = 0;\n {\n const result = opts.useDataPageV2\n ? 
await encodeDataPageV2(column, data, buffer.rowCount)\n : await encodeDataPage(column, data);\n // pages.push(result.page);\n pageBuf = result.page;\n total_uncompressed_size += result.header.uncompressed_page_size + result.headerSize;\n total_compressed_size += result.header.compressed_page_size + result.headerSize;\n }\n\n // const pagesBuf = Buffer.concat(pages);\n // const compression = column.compression === 'UNCOMPRESSED' ? (opts.compression || 'UNCOMPRESSED') : column.compression;\n\n /* prepare metadata header */\n const metadata = new ColumnMetaData({\n path_in_schema: column.path,\n num_values: data.count,\n data_page_offset: baseOffset,\n encodings: [],\n total_uncompressed_size, // : pagesBuf.length,\n total_compressed_size,\n type: Type[column.primitiveType!],\n codec: CompressionCodec[column.compression!]\n });\n\n /* list encodings */\n metadata.encodings.push(Encoding[PARQUET_RDLVL_ENCODING]);\n metadata.encodings.push(Encoding[column.encoding!]);\n\n /* concat metadata header and data pages */\n const metadataOffset = baseOffset + pageBuf.length;\n const body = Buffer.concat([pageBuf, serializeThrift(metadata)]);\n return {body, metadata, metadataOffset};\n}\n\n/**\n * Encode a list of column values into a parquet row group\n */\nasync function encodeRowGroup(\n schema: ParquetSchema,\n data: ParquetBuffer,\n opts: ParquetWriterOptions\n): Promise<{\n body: Buffer;\n metadata: RowGroup;\n}> {\n const metadata = new RowGroup({\n num_rows: data.rowCount,\n columns: [],\n total_byte_size: 0\n });\n\n let body = Buffer.alloc(0);\n for (const field of schema.fieldList) {\n if (field.isNested) {\n continue; // eslint-disable-line no-continue\n }\n\n const cchunkData = await encodeColumnChunk(field, data, body.length, opts);\n\n const cchunk = new ColumnChunk({\n file_offset: cchunkData.metadataOffset,\n meta_data: cchunkData.metadata\n });\n\n metadata.columns.push(cchunk);\n metadata.total_byte_size = new Int64(Number(metadata.total_byte_size) + cchunkData.body.length);\n\n body = Buffer.concat([body, cchunkData.body]);\n }\n\n return {body, metadata};\n}\n\n/**\n * Encode a parquet file metadata footer\n */\nfunction encodeFooter(\n schema: ParquetSchema,\n rowCount: number,\n rowGroups: RowGroup[],\n userMetadata: Record<string, string>\n): Buffer {\n const metadata = new FileMetaData({\n version: PARQUET_VERSION,\n created_by: 'parquets',\n num_rows: rowCount,\n row_groups: rowGroups,\n schema: [],\n key_value_metadata: []\n });\n\n for (const key in userMetadata) {\n const kv = new KeyValue({\n key,\n value: userMetadata[key]\n });\n metadata.key_value_metadata?.push?.(kv);\n }\n\n {\n const schemaRoot = new SchemaElement({\n name: 'root',\n num_children: Object.keys(schema.fields).length\n });\n metadata.schema.push(schemaRoot);\n }\n\n for (const field of schema.fieldList) {\n const relt = FieldRepetitionType[field.repetitionType];\n const schemaElem = new SchemaElement({\n name: field.name,\n repetition_type: relt as any\n });\n\n if (field.isNested) {\n schemaElem.num_children = field.fieldCount;\n } else {\n schemaElem.type = Type[field.primitiveType!] 
as Type;\n }\n\n if (field.originalType) {\n schemaElem.converted_type = ConvertedType[field.originalType] as ConvertedType;\n }\n\n schemaElem.type_length = field.typeLength;\n\n metadata.schema.push(schemaElem);\n }\n\n const metadataEncoded = serializeThrift(metadata);\n const footerEncoded = Buffer.alloc(metadataEncoded.length + 8);\n metadataEncoded.copy(footerEncoded);\n footerEncoded.writeUInt32LE(metadataEncoded.length, metadataEncoded.length);\n footerEncoded.write(PARQUET_MAGIC, metadataEncoded.length + 4);\n return footerEncoded;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAEA;AACA;AACA;AASA;AACA;AAiBA;AACA;AACA;AAA+B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAK/B,IAAMA,aAAa,GAAG,MAAM;;AAK5B,IAAMC,eAAe,GAAG,CAAC;;AAKzB,IAAMC,yBAAyB,GAAG,IAAI;AACtC,IAAMC,8BAA8B,GAAG,IAAI;;AAK3C,IAAMC,kBAAkB,GAAG,OAAO;AAClC,IAAMC,sBAAsB,GAAG,KAAK;AAAC,IAuBxBC,aAAa;EA2CxB,uBACEC,MAAqB,EACrBC,cAAqC,EACrCC,IAA0B,EAC1B;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IACA,IAAI,CAACF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,cAAc,GAAGA,cAAc;IAEpC,IAAI,CAACE,SAAS,GAAG,CAAC,CAAC;IACnB,IAAI,CAACC,YAAY,GAAGF,IAAI,CAACE,YAAY,IAAIR,8BAA8B;IACvE,IAAI,CAACS,MAAM,GAAG,KAAK;IACnB,IAAI,CAACC,YAAY,GAAG,CAAC,CAAC;;IAGtB,IAAI,CAACC,WAAW,EAAE;EACpB;EAAC;IAAA;IAAA;MAAA,6EAED;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA;gBAAA,OAGU,IAAI,CAACN,cAAc,CAACM,WAAW,EAAE;cAAA;gBAAA;gBAAA;cAAA;gBAAA;gBAAA;gBAAA;gBAAA,OAEjC,IAAI,CAACN,cAAc,CAACO,KAAK,EAAE;cAAA;gBAAA;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAGpC;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,2EAMD,kBAAmBC,GAAM;QAAA;UAAA;YAAA;cAAA;gBAAA,KACnB,IAAI,CAACJ,MAAM;kBAAA;kBAAA;gBAAA;gBAAA,MACP,IAAIK,KAAK,CAAC,mBAAmB,CAAC;cAAA;gBAEtCC,KAAK,CAACC,WAAW,CAAC,IAAI,CAACZ,MAAM,EAAES,GAAG,EAAE,IAAI,CAACN,SAAS,CAAC;gBACnD,IAAI,IAAI,CAACA,SAAS,CAACU,QAAQ,IAAI,IAAI,CAACT,YAAY,EAAE;kBAEhD,IAAI,CAACD,SAAS,GAAG,CAAC,CAAC;gBACrB;cAAC;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACF;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,uEAQD,kBAAYW,QAAqB;QAAA;UAAA;YAAA;cAAA;gBAAA,KAC3B,IAAI,CAACT,MAAM;kBAAA;kBAAA;gBAAA;gBAAA,MACP,IAAIK,KAAK,CAAC,mBAAmB,CAAC;cAAA;gBAGtC,IAAI,CAACL,MAAM,GAAG,IAAI;gBAElB,IAAI,IAAI,CAACF,SAAS,CAACU,QAAQ,GAAG,CAAC,IAAI,IAAI,CAACV,SAAS,CAACU,QAAQ,IAAI,IAAI,CAACT,YAAY,EAAE;kBAE/E,IAAI,CAACD,SAAS,GAAG,CAAC,CAAC;gBACrB;gBAAC;gBAAA,OAEK,IAAI,CAACF,cAAc,CAACc,WAAW,CAAC,IAAI,CAACT,YAAY,CAAC;cAAA;gBAAA;gBAAA,OAClD,IAAI,CAACL,cAAc,CAACO,KAAK,EAAE;cAAA;;gBAGjC,IAAIM,QAAQ,EAAE;kBACZA,QAAQ,EAAE;gBACZ;cAAC;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACF;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;IAKD,qBAAYE,GAAW,EAAEC,KAAa,EAAQ;MAE5C,IAAI,CAACX,YAAY,CAACY,MAAM,CAACF,GAAG,CAAC,CAAC,GAAGE,MAAM,CAACD,KAAK,CAAC;IAChD;;EAAC;IAAA;IAAA;IAQD,yBAAgBE,GAAW,EAAQ;MACjC,IAAI,CAACf,YAAY,GAAGe,GAAG;IACzB;;EAAC;IAAA;IAAA;IAMD,qBAAYA,GAAW,EAAQ;MAC7B,IAAI,CAAClB,cAAc,CAACmB,WAAW,CAACD,GAAG,CAAC;IACtC;EAAC;IAAA;IAAA;MAAA,0EAnID,kBACEnB,MAAqB,EACrBqB,IAAY,EACZnB,IAA2B;QAAA;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA,OAEA,IAAAoB,iBAAM,EAACD,IAAI,EAAEnB,IAAI,CAAC;cAAA;gBAAvCqB,YAAY;gBAAA,kCACXxB,aAAa,CAACyB,UAAU,CAACxB,MAAM,EAAEuB,YAAY,EAAErB,IAAI,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAC5D;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,4EAMD,kBACEF,MAAqB,EACrBuB,YAAsB,EACtBrB,IAA2B;QAAA;QAAA;UAAA;YAAA;cAAA;gBAE3B,IAAI,CAACA,IAAI,EAAE;kBAETA,IAAI,GAAG,CAAC,CAAC;gBACX;gBAAC;gBAAA,OAE4BuB,qBAAqB,CAACD,UAAU,CAACxB,MAAM,EAAEuB,YAAY,EAAErB,IAAI,CAAC;cAAA;gBAAnFD,cAAc;gBAAA,kCAEb,IAAIF,aAAa,CAACC,MAAM,EAAEC,cAAc,EAAEC,IAAI,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACvD;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;EAAA;AAAA;AAAA;AAAA,IAkHUuB,qBAAqB;EAuBhC,+BACEzB,MAAqB,EACrB0B,OAAuC,EACvCC,OAA4B,EAC5BC,UAAkB,EAClB1B,IAA0B
,EAC1B;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA;IACA,IAAI,CAACF,MAAM,GAAGA,MAAM;IACpB,IAAI,CAAC6B,KAAK,GAAGH,OAAO;IACpB,IAAI,CAAClB,KAAK,GAAGmB,OAAO;IACpB,IAAI,CAACG,MAAM,GAAGF,UAAU;IACxB,IAAI,CAACf,QAAQ,GAAG,CAAC;IACjB,IAAI,CAACkB,SAAS,GAAG,EAAE;IACnB,IAAI,CAACC,QAAQ,GAAG9B,IAAI,CAAC8B,QAAQ,IAAIrC,yBAAyB;IAC1D,IAAI,CAACsC,aAAa,GAAG,eAAe,IAAI/B,IAAI,GAAGgC,OAAO,CAAChC,IAAI,CAAC+B,aAAa,CAAC,GAAG,KAAK;EACpF;EAAC;IAAA;IAAA,OAED,sBAAaE,GAAW,EAAiB;MACvC,IAAI,CAACL,MAAM,IAAIK,GAAG,CAACC,MAAM;MACzB,OAAO,IAAI,CAACP,KAAK,CAACM,GAAG,CAAC;IACxB;;EAAC;IAAA;IAAA;IAKD,uBAA6B;MAC3B,OAAO,IAAI,CAACE,YAAY,CAACC,MAAM,CAACC,IAAI,CAAC9C,aAAa,CAAC,CAAC;IACtD;;EAAC;IAAA;IAAA;MAAA,+EAMD,kBAAoB+C,OAAsB;QAAA;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA,OACnBC,cAAc,CAAC,IAAI,CAACzC,MAAM,EAAEwC,OAAO,EAAE;kBACxDE,UAAU,EAAE,IAAI,CAACZ,MAAM;kBACvBE,QAAQ,EAAE,IAAI,CAACA,QAAQ;kBACvBC,aAAa,EAAE,IAAI,CAACA;gBACtB,CAAC,CAAC;cAAA;gBAJIU,MAAM;gBAMZ,IAAI,CAAC9B,QAAQ,IAAI2B,OAAO,CAAC3B,QAAQ;gBACjC,IAAI,CAACkB,SAAS,CAACa,IAAI,CAACD,MAAM,CAACE,QAAQ,CAAC;gBAAC;gBAAA,OACxB,IAAI,CAACR,YAAY,CAACM,MAAM,CAACG,IAAI,CAAC;cAAA;gBAAA;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAC5C;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;IAKD,qBAAYxC,YAAoC,EAAiB;MAC/D,IAAI,CAACA,YAAY,EAAE;QAEjBA,YAAY,GAAG,CAAC,CAAC;MACnB;MAEA,OAAO,IAAI,CAAC+B,YAAY,CACtBU,YAAY,CAAC,IAAI,CAAC/C,MAAM,EAAE,IAAI,CAACa,QAAQ,EAAE,IAAI,CAACkB,SAAS,EAAEzB,YAAY,CAAC,CACvE;IACH;;EAAC;IAAA;IAAA;IAMD,qBAAYa,GAAW,EAAQ;MAC7B,IAAI,CAACa,QAAQ,GAAGb,GAAG;IACrB;EAAC;IAAA;IAAA;MAAA,6EApFD,kBACEnB,MAAqB,EACrBuB,YAAsB,EACtBrB,IAA0B;QAAA;QAAA;UAAA;YAAA;cAAA;gBAEpBwB,OAAO,GAAGsB,kBAAO,CAACC,IAAI,CAACC,SAAS,EAAE3B,YAAY,CAAC;gBAC/CI,OAAO,GAAGwB,kBAAO,CAACF,IAAI,CAACC,SAAS,EAAE3B,YAAY,CAAC;gBAAA,kCAC9C,IAAIE,qBAAqB,CAACzB,MAAM,EAAE0B,OAAO,EAAEC,OAAO,EAAE,CAAC,EAAEzB,IAAI,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACpE;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;EAAA;AAAA;AAAA;AAAA,IAkFUkD,kBAAkB;EAAA;EAAA;EAG7B,4BAAYpD,MAAqB,EAAmC;IAAA;IAAA,IAAjCE,IAA0B,uEAAG,CAAC,CAAC;IAAA;IAChE,0BAAM;MAACmD,UAAU,EAAE;IAAI,CAAC;IAAE;IAE1B,IAAMC,UAAU,GAAI,UAAUC,CAA0B,EAAE;MACxD;QAAA,qEAAO,kBAAgBC,CAAM;UAAA;YAAA;cAAA;gBAAA;kBAC3BD,CAAC,CAACX,IAAI,CAACY,CAAC,CAAC;gBAAC;gBAAA;kBAAA;cAAA;YAAA;UAAA;QAAA,CACX;QAAA;UAAA;QAAA;MAAA;IACH,CAAC,6CAAO;IAER,MAAKC,MAAM,GAAG,IAAI1D,aAAa,CAC7BC,MAAM,EACN,IAAIyB,qBAAqB,CAACzB,MAAM,EAAEsD,UAAU,4DAAE;MAAA;QAAA;UAAA;YAAA;YAAA;cAAA;UAAA;QAAA;MAAA;IAAA,CAAc,IAAE,CAAC,EAAEpD,IAAI,CAAC,EACtEA,IAAI,CACL;IAAC;EACJ;;EAAC;IAAA;IAAA;IAGD,oBAAWO,GAAQ,EAAEiD,QAAgB,EAAE5C,QAA6B,EAAiB;MACnF,IAAIL,GAAG,EAAE;QACP,OAAO,IAAI,CAACgD,MAAM,CAACE,SAAS,CAAClD,GAAG,CAAC,CAACmD,IAAI,CAAC9C,QAAQ,CAAC;MAClD;MACAA,QAAQ,EAAE;MACV,OAAO+C,OAAO,CAACC,OAAO,EAAE;IAC1B;;EAAC;IAAA;IAAA;MAAA,wEAGD,mBAAahD,QAA6B;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA,OAClC,IAAI,CAAC2C,MAAM,CAACjD,KAAK,CAACM,QAAQ,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAClC;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;EAAA;AAAA,EA/BwCiD,iBAAS;AAAA;AAqCpD,SAASC,YAAY,CACnBC,IAAmB,EACnBP,QAAsB,EACtBQ,MAAa,EACbhE,IAAyB,EACzB;EACA,IAAI,EAAEwD,QAAQ,IAAIS,sBAAc,CAAC,EAAE;IACjC,MAAM,IAAIzD,KAAK,6BAAsBgD,QAAQ,EAAG;EAClD;EACA,OAAOS,sBAAc,CAACT,QAAQ,CAAC,CAACM,YAAY,CAACC,IAAI,EAAEC,MAAM,EAAEhE,IAAI,CAAC;AAClE;;AAAC,SAKckE,cAAc;EAAA;AAAA;AAAA;EAAA,4EAA7B,mBACEC,MAAoB,EACpBC,IAAiB;IAAA;IAAA;MAAA;QAAA;UAAA;YAObC,UAAU,GAAGjC,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACI,SAAS,GAAG,CAAC,EAAE;cACxBF,UAAU,GAAGP,YAAY,CAACnE,kBAAkB,EAAEC,sBAAsB,EAAEwE,IAAI,CAACI,OAAO,EAAE;gBAClFC,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACI,SAAS;cAExC,CAAC,CAAC;YACJ;YAEII,UAAU,GAAGvC,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACS,SAAS,
GAAG,CAAC,EAAE;cACxBD,UAAU,GAAGb,YAAY,CAACnE,kBAAkB,EAAEC,sBAAsB,EAAEwE,IAAI,CAACS,OAAO,EAAE;gBAClFJ,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACS,SAAS;cAExC,CAAC,CAAC;YACJ;;YAGME,SAAS,GAAGhB,YAAY,CAACK,MAAM,CAACY,aAAa,EAAGZ,MAAM,CAACX,QAAQ,EAAGY,IAAI,CAACJ,MAAM,EAAE;cACnFgB,UAAU,EAAEb,MAAM,CAACa,UAAU;cAC7BP,QAAQ,EAAEN,MAAM,CAACa;YACnB,CAAC,CAAC;YAEIC,OAAO,GAAG7C,MAAM,CAAC8C,MAAM,CAAC,CAACb,UAAU,EAAEM,UAAU,EAAEG,SAAS,CAAC,CAAC;YAAA;YAAA,OAGtCK,WAAW,CAACC,OAAO,CAACjB,MAAM,CAACkB,WAAW,EAAGJ,OAAO,CAAC;UAAA;YAAvEK,aAAa;YAGbC,MAAM,GAAG,IAAIC,yBAAU,CAAC;cAC5BzB,IAAI,EAAE0B,uBAAQ,CAACC,SAAS;cACxBC,gBAAgB,EAAE,IAAIC,6BAAc,CAAC;gBACnCC,UAAU,EAAEzB,IAAI,CAAC0B,KAAK;gBACtBtC,QAAQ,EAAEuC,uBAAQ,CAAC5B,MAAM,CAACX,QAAQ,CAAS;gBAC3CwC,yBAAyB,EAAED,uBAAQ,CAACnG,sBAAsB,CAAC;gBAC3DqG,yBAAyB,EAAEF,uBAAQ,CAACnG,sBAAsB;cAC5D,CAAC,CAAC;;cACFsG,sBAAsB,EAAEjB,OAAO,CAAC/C,MAAM;cACtCiE,oBAAoB,EAAEb,aAAa,CAACpD;YACtC,CAAC,CAAC;YAGIkE,SAAS,GAAG,IAAAC,0BAAe,EAACd,MAAM,CAAC;YACnCe,IAAI,GAAGlE,MAAM,CAAC8C,MAAM,CAAC,CAACkB,SAAS,EAAEd,aAAa,CAAC,CAAC;YAAA,mCAE/C;cAACC,MAAM,EAANA,MAAM;cAAEgB,UAAU,EAAEH,SAAS,CAAClE,MAAM;cAAEoE,IAAI,EAAJA;YAAI,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACpD;EAAA;AAAA;AAAA,SAKcE,gBAAgB;EAAA;AAAA;AAAA;EAAA,6EAA/B,mBACErC,MAAoB,EACpBC,IAAiB,EACjBzD,QAAgB;IAAA;IAAA;MAAA;QAAA;UAAA;YAOVmE,SAAS,GAAGhB,YAAY,CAACK,MAAM,CAACY,aAAa,EAAGZ,MAAM,CAACX,QAAQ,EAAGY,IAAI,CAACJ,MAAM,EAAE;cACnFgB,UAAU,EAAEb,MAAM,CAACa,UAAU;cAC7BP,QAAQ,EAAEN,MAAM,CAACa;YACnB,CAAC,CAAC;YAAA;YAAA,OAG0BG,WAAW,CAACC,OAAO,CAACjB,MAAM,CAACkB,WAAW,EAAGP,SAAS,CAAC;UAAA;YAAzEQ,aAAa;YAGfjB,UAAU,GAAGjC,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACI,SAAS,GAAG,CAAC,EAAE;cACxBF,UAAU,GAAGP,YAAY,CAACnE,kBAAkB,EAAEC,sBAAsB,EAAEwE,IAAI,CAACI,OAAO,EAAE;gBAClFC,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACI,SAAS,CAAC;gBACvCkC,eAAe,EAAE;cACnB,CAAC,CAAC;YACJ;YAEI9B,UAAU,GAAGvC,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;YAChC,IAAIH,MAAM,CAACS,SAAS,GAAG,CAAC,EAAE;cACxBD,UAAU,GAAGb,YAAY,CAACnE,kBAAkB,EAAEC,sBAAsB,EAAEwE,IAAI,CAACS,OAAO,EAAE;gBAClFJ,QAAQ,EAAE,IAAAC,sBAAW,EAACP,MAAM,CAACS,SAAS,CAAC;gBACvC6B,eAAe,EAAE;cACnB,CAAC,CAAC;YACJ;;YAGMlB,MAAM,GAAG,IAAIC,yBAAU,CAAC;cAC5BzB,IAAI,EAAE0B,uBAAQ,CAACiB,YAAY;cAC3BC,mBAAmB,EAAE,IAAIC,+BAAgB,CAAC;gBACxCf,UAAU,EAAEzB,IAAI,CAAC0B,KAAK;gBACtBe,SAAS,EAAEzC,IAAI,CAAC0B,KAAK,GAAG1B,IAAI,CAACJ,MAAM,CAAC9B,MAAM;gBAC1C4E,QAAQ,EAAEnG,QAAQ;gBAClB6C,QAAQ,EAAEuC,uBAAQ,CAAC5B,MAAM,CAACX,QAAQ,CAAS;gBAC3CuD,6BAA6B,EAAEpC,UAAU,CAACzC,MAAM;gBAChD8E,6BAA6B,EAAE3C,UAAU,CAACnC,MAAM;gBAChD+E,aAAa,EAAE9C,MAAM,CAACkB,WAAW,KAAK;cACxC,CAAC,CAAC;cACFa,sBAAsB,EAAE7B,UAAU,CAACnC,MAAM,GAAGyC,UAAU,CAACzC,MAAM,GAAG4C,SAAS,CAAC5C,MAAM;cAChFiE,oBAAoB,EAAE9B,UAAU,CAACnC,MAAM,GAAGyC,UAAU,CAACzC,MAAM,GAAGoD,aAAa,CAACpD;YAC9E,CAAC,CAAC;YAGIkE,SAAS,GAAG,IAAAC,0BAAe,EAACd,MAAM,CAAC;YACnCe,IAAI,GAAGlE,MAAM,CAAC8C,MAAM,CAAC,CAACkB,SAAS,EAAE/B,UAAU,EAAEM,UAAU,EAAEW,aAAa,CAAC,CAAC;YAAA,mCACvE;cAACC,MAAM,EAANA,MAAM;cAAEgB,UAAU,EAAEH,SAAS,CAAClE,MAAM;cAAEoE,IAAI,EAAJA;YAAI,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACpD;EAAA;AAAA;AAAA,SAKcY,iBAAiB;EAAA;AAAA;AAAA;EAAA,+EAAhC,mBACE/C,MAAoB,EACpBgD,MAAqB,EACrBvF,MAAc,EACd5B,IAA0B;IAAA;IAAA;MAAA;QAAA;UAAA;YAMpBoE,IAAI,GAAG+C,MAAM,CAACC,UAAU,CAACjD,MAAM,CAAChD,IAAI,CAACkG,IAAI,EAAE,CAAC;YAC5C7E,UAAU,GAAG,CAACxC,IAAI,CAACwC,UAAU,IAAI,CAAC,IAAIZ,MAAM;YAK9C0F,uBAAuB,GAAG,CAAC;YAE3BC,qBAAqB,GAAG,CAAC;YAAA,KAEZvH,IAAI,CAAC+B,aAAa;cAAA;cAAA;YAAA;YAAA;YAAA,OACvByE,gBAAgB,CAACrC,MAAM,EAAEC,IAAI,EAAE+C,MAAM,CAACxG,QAAQ,CAAC;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA,OAC/CuD,cAAc,CAACC,MAAM,EAAEC,IAAI,CAAC;UAAA;YAAA;UAAA;YAFhCoD,MAAM;YAIZC,OAAO,GAAG
D,MAAM,CAAClB,IAAI;YACrBgB,uBAAuB,IAAIE,MAAM,CAACjC,MAAM,CAACW,sBAAsB,GAAGsB,MAAM,CAACjB,UAAU;YACnFgB,qBAAqB,IAAIC,MAAM,CAACjC,MAAM,CAACY,oBAAoB,GAAGqB,MAAM,CAACjB,UAAU;YAO3E5D,QAAQ,GAAG,IAAI+E,6BAAc,CAAC;cAClCC,cAAc,EAAExD,MAAM,CAAChD,IAAI;cAC3B0E,UAAU,EAAEzB,IAAI,CAAC0B,KAAK;cACtB8B,gBAAgB,EAAEpF,UAAU;cAC5BqF,SAAS,EAAE,EAAE;cACbP,uBAAuB,EAAvBA,uBAAuB;cACvBC,qBAAqB,EAArBA,qBAAqB;cACrBxD,IAAI,EAAE+D,mBAAI,CAAC3D,MAAM,CAACY,aAAa,CAAE;cACjCgD,KAAK,EAAEC,+BAAgB,CAAC7D,MAAM,CAACkB,WAAW;YAC5C,CAAC,CAAC;YAGF1C,QAAQ,CAACkF,SAAS,CAACnF,IAAI,CAACqD,uBAAQ,CAACnG,sBAAsB,CAAC,CAAC;YACzD+C,QAAQ,CAACkF,SAAS,CAACnF,IAAI,CAACqD,uBAAQ,CAAC5B,MAAM,CAACX,QAAQ,CAAE,CAAC;;YAG7CyE,cAAc,GAAGzF,UAAU,GAAGiF,OAAO,CAACvF,MAAM;YAC5CU,IAAI,GAAGR,MAAM,CAAC8C,MAAM,CAAC,CAACuC,OAAO,EAAE,IAAApB,0BAAe,EAAC1D,QAAQ,CAAC,CAAC,CAAC;YAAA,mCACzD;cAACC,IAAI,EAAJA,IAAI;cAAED,QAAQ,EAARA,QAAQ;cAAEsF,cAAc,EAAdA;YAAc,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACxC;EAAA;AAAA;AAAA,SAKc1F,cAAc;EAAA;AAAA;AAAA;EAAA,4EAA7B,mBACEzC,MAAqB,EACrBsE,IAAmB,EACnBpE,IAA0B;IAAA;IAAA;MAAA;QAAA;UAAA;YAKpB2C,QAAQ,GAAG,IAAIuF,uBAAQ,CAAC;cAC5BpB,QAAQ,EAAE1C,IAAI,CAACzD,QAAQ;cACvBwH,OAAO,EAAE,EAAE;cACXC,eAAe,EAAE;YACnB,CAAC,CAAC;YAEExF,IAAI,GAAGR,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;YAAA,wCACNxE,MAAM,CAACuI,SAAS;YAAA;YAAA;UAAA;YAAA;cAAA;cAAA;YAAA;YAAzBC,KAAK;YAAA,KACVA,KAAK,CAACC,QAAQ;cAAA;cAAA;YAAA;YAAA;UAAA;YAAA;YAAA,OAIOrB,iBAAiB,CAACoB,KAAK,EAAElE,IAAI,EAAExB,IAAI,CAACV,MAAM,EAAElC,IAAI,CAAC;UAAA;YAApEwI,UAAU;YAEVC,MAAM,GAAG,IAAIC,0BAAW,CAAC;cAC7BC,WAAW,EAAEH,UAAU,CAACP,cAAc;cACtCW,SAAS,EAAEJ,UAAU,CAAC7F;YACxB,CAAC,CAAC;YAEFA,QAAQ,CAACwF,OAAO,CAACzF,IAAI,CAAC+F,MAAM,CAAC;YAC7B9F,QAAQ,CAACyF,eAAe,GAAG,IAAIS,gBAAK,CAACC,MAAM,CAACnG,QAAQ,CAACyF,eAAe,CAAC,GAAGI,UAAU,CAAC5F,IAAI,CAACV,MAAM,CAAC;YAE/FU,IAAI,GAAGR,MAAM,CAAC8C,MAAM,CAAC,CAACtC,IAAI,EAAE4F,UAAU,CAAC5F,IAAI,CAAC,CAAC;UAAC;YAAA;YAAA;UAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA;YAAA;YAAA;UAAA;YAAA,mCAGzC;cAACA,IAAI,EAAJA,IAAI;cAAED,QAAQ,EAARA;YAAQ,CAAC;UAAA;UAAA;YAAA;QAAA;MAAA;IAAA;EAAA,CACxB;EAAA;AAAA;AAKD,SAASE,YAAY,CACnB/C,MAAqB,EACrBa,QAAgB,EAChBkB,SAAqB,EACrBzB,YAAoC,EAC5B;EACR,IAAMuC,QAAQ,GAAG,IAAIoG,2BAAY,CAAC;IAChCC,OAAO,EAAExJ,eAAe;IACxByJ,UAAU,EAAE,UAAU;IACtBnC,QAAQ,EAAEnG,QAAQ;IAClBuI,UAAU,EAAErH,SAAS;IACrB/B,MAAM,EAAE,EAAE;IACVqJ,kBAAkB,EAAE;EACtB,CAAC,CAAC;EAEF,KAAK,IAAMrI,GAAG,IAAIV,YAAY,EAAE;IAAA;IAC9B,IAAMgJ,EAAE,GAAG,IAAIC,uBAAQ,CAAC;MACtBvI,GAAG,EAAHA,GAAG;MACHC,KAAK,EAAEX,YAAY,CAACU,GAAG;IACzB,CAAC,CAAC;IACF,yBAAA6B,QAAQ,CAACwG,kBAAkB,oFAA3B,iDAA6BzG,IAAI,2DAAjC,oDAAoC0G,EAAE,CAAC;EACzC;EAEA;IACE,IAAME,UAAU,GAAG,IAAIC,4BAAa,CAAC;MACnCC,IAAI,EAAE,MAAM;MACZC,YAAY,EAAEC,MAAM,CAACC,IAAI,CAAC7J,MAAM,CAAC8J,MAAM,CAAC,CAAC1H;IAC3C,CAAC,CAAC;IACFS,QAAQ,CAAC7C,MAAM,CAAC4C,IAAI,CAAC4G,UAAU,CAAC;EAClC;EAAC,2CAEmBxJ,MAAM,CAACuI,SAAS;IAAA;EAAA;IAApC,oDAAsC;MAAA,IAA3BC,KAAK;MACd,IAAMuB,IAAI,GAAGC,kCAAmB,CAACxB,KAAK,CAACyB,cAAc,CAAC;MACtD,IAAMC,UAAU,GAAG,IAAIT,4BAAa,CAAC;QACnCC,IAAI,EAAElB,KAAK,CAACkB,IAAI;QAChBS,eAAe,EAAEJ;MACnB,CAAC,CAAC;MAEF,IAAIvB,KAAK,CAACC,QAAQ,EAAE;QAClByB,UAAU,CAACP,YAAY,GAAGnB,KAAK,CAAC4B,UAAU;MAC5C,CAAC,MAAM;QACLF,UAAU,CAACjG,IAAI,GAAG+D,mBAAI,CAACQ,KAAK,CAACvD,aAAa,CAAU;MACtD;MAEA,IAAIuD,KAAK,CAAC6B,YAAY,EAAE;QACtBH,UAAU,CAACI,cAAc,GAAGC,4BAAa,CAAC/B,KAAK,CAAC6B,YAAY,CAAkB;MAChF;MAEAH,UAAU,CAACM,WAAW,GAAGhC,KAAK,CAACtD,UAAU;MAEzCrC,QAAQ,CAAC7C,MAAM,CAAC4C,IAAI,CAACsH,UAAU,CAAC;IAClC;EAAC;IAAA;EAAA;IAAA;EAAA;EAED,IAAMO,eAAe,GAAG,IAAAlE,0BAAe,EAAC1D,QAAQ,CAAC;EACjD,IAAM6H,aAAa,GAAGpI,MAAM,CAACkC,KAAK,CAACiG,eAAe,CAACrI,MAAM,GAAG,CAAC,CAAC;EAC9DqI,eAAe,CAACE,IAAI,CAACD,aAAa,CAAC;EACnCA,aAAa,CAACE,aAAa,CAACH,eAAe,CAACrI,MAAM,EAAEqI,eAAe,CAACrI,MAAM,CAAC;EAC3EsI,aAAa,CAAC7I,KAAK,CAACpC,aAAa,EAAEgL,eAAe,CAACrI,MAAM,GAAG,CAAC,CAAC;EAC9D,OAAOsI,aAAa;AACtB"}
@@ -1,94 +0,0 @@
-"use strict";
-
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.fclose = fclose;
-exports.fopen = fopen;
-exports.fread = fread;
-exports.fstat = fstat;
-exports.osclose = osclose;
-exports.osopen = osopen;
-exports.oswrite = oswrite;
-var _fs = _interopRequireDefault(require("fs"));
-
-function fopen(filePath) {
-  return new Promise(function (resolve, reject) {
-    _fs.default.open(filePath, 'r', function (err, fd) {
-      if (err) {
-        reject(err);
-      } else {
-        resolve(fd);
-      }
-    });
-  });
-}
-function fstat(filePath) {
-  return new Promise(function (resolve, reject) {
-    _fs.default.stat(filePath, function (err, stat) {
-      if (err) {
-        reject(err);
-      } else {
-        resolve(stat);
-      }
-    });
-  });
-}
-function fread(fd, position, length) {
-  var buffer = Buffer.alloc(length);
-  return new Promise(function (resolve, reject) {
-    _fs.default.read(fd, buffer, 0, length, position, function (err, bytesRead, buf) {
-      if (err || bytesRead !== length) {
-        reject(err || Error('read failed'));
-      } else {
-        resolve(buf);
-      }
-    });
-  });
-}
-function fclose(fd) {
-  return new Promise(function (resolve, reject) {
-    _fs.default.close(fd, function (err) {
-      if (err) {
-        reject(err);
-      } else {
-        resolve(err);
-      }
-    });
-  });
-}
-function oswrite(os, buf) {
-  return new Promise(function (resolve, reject) {
-    os.write(buf, function (err) {
-      if (err) {
-        reject(err);
-      } else {
-        resolve();
-      }
-    });
-  });
-}
-function osclose(os) {
-  return new Promise(function (resolve, reject) {
-    os.close(function (err) {
-      if (err) {
-        reject(err);
-      } else {
-        resolve();
-      }
-    });
-  });
-}
-function osopen(path, opts) {
-  return new Promise(function (resolve, reject) {
-    var outputStream = _fs.default.createWriteStream(path, opts);
-    outputStream.on('open', function (fd) {
-      resolve(outputStream);
-    });
-    outputStream.on('error', function (err) {
-      reject(err);
-    });
-  });
-}
-//# sourceMappingURL=file.js.map
@@ -1 +0,0 @@
-{"version":3,"file":"file.js","names":["fopen","filePath","Promise","resolve","reject","fs","open","err","fd","fstat","stat","fread","position","length","buffer","Buffer","alloc","read","bytesRead","buf","Error","fclose","close","oswrite","os","write","osclose","osopen","path","opts","outputStream","createWriteStream","on"],"sources":["../../../src/parquetjs/file.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport fs from 'fs';\n\nexport function fopen(filePath) {\n return new Promise((resolve, reject) => {\n fs.open(filePath, 'r', (err, fd) => {\n if (err) {\n reject(err);\n } else {\n resolve(fd);\n }\n });\n });\n}\n\nexport function fstat(filePath) {\n return new Promise<fs.Stats>((resolve, reject) => {\n fs.stat(filePath, (err, stat) => {\n if (err) {\n reject(err);\n } else {\n resolve(stat);\n }\n });\n });\n}\n\nexport function fread(fd, position, length) {\n const buffer = Buffer.alloc(length);\n\n return new Promise((resolve, reject) => {\n fs.read(fd, buffer, 0, length, position, (err, bytesRead, buf) => {\n if (err || bytesRead !== length) {\n reject(err || Error('read failed'));\n } else {\n resolve(buf);\n }\n });\n });\n}\n\nexport function fclose(fd) {\n return new Promise((resolve, reject) => {\n fs.close(fd, (err) => {\n if (err) {\n reject(err);\n } else {\n resolve(err);\n }\n });\n });\n}\n\nexport function oswrite(os, buf): Promise<void> {\n return new Promise((resolve, reject) => {\n os.write(buf, (err) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n });\n });\n}\n\nexport function osclose(os): Promise<void> {\n return new Promise((resolve, reject) => {\n os.close((err) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n });\n });\n}\n\nexport function osopen(path, opts) {\n return new Promise((resolve, reject) => {\n const outputStream = fs.createWriteStream(path, opts);\n\n outputStream.on('open', function (fd) {\n resolve(outputStream);\n });\n\n outputStream.on('error', function (err) {\n reject(err);\n });\n });\n}\n"],"mappings":";;;;;;;;;;;;;AACA;;AAEO,SAASA,KAAK,CAACC,QAAQ,EAAE;EAC9B,OAAO,IAAIC,OAAO,CAAC,UAACC,OAAO,EAAEC,MAAM,EAAK;IACtCC,WAAE,CAACC,IAAI,CAACL,QAAQ,EAAE,GAAG,EAAE,UAACM,GAAG,EAAEC,EAAE,EAAK;MAClC,IAAID,GAAG,EAAE;QACPH,MAAM,CAACG,GAAG,CAAC;MACb,CAAC,MAAM;QACLJ,OAAO,CAACK,EAAE,CAAC;MACb;IACF,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAEO,SAASC,KAAK,CAACR,QAAQ,EAAE;EAC9B,OAAO,IAAIC,OAAO,CAAW,UAACC,OAAO,EAAEC,MAAM,EAAK;IAChDC,WAAE,CAACK,IAAI,CAACT,QAAQ,EAAE,UAACM,GAAG,EAAEG,IAAI,EAAK;MAC/B,IAAIH,GAAG,EAAE;QACPH,MAAM,CAACG,GAAG,CAAC;MACb,CAAC,MAAM;QACLJ,OAAO,CAACO,IAAI,CAAC;MACf;IACF,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAEO,SAASC,KAAK,CAACH,EAAE,EAAEI,QAAQ,EAAEC,MAAM,EAAE;EAC1C,IAAMC,MAAM,GAAGC,MAAM,CAACC,KAAK,CAACH,MAAM,CAAC;EAEnC,OAAO,IAAIX,OAAO,CAAC,UAACC,OAAO,EAAEC,MAAM,EAAK;IACtCC,WAAE,CAACY,IAAI,CAACT,EAAE,EAAEM,MAAM,EAAE,CAAC,EAAED,MAAM,EAAED,QAAQ,EAAE,UAACL,GAAG,EAAEW,SAAS,EAAEC,GAAG,EAAK;MAChE,IAAIZ,GAAG,IAAIW,SAAS,KAAKL,MAAM,EAAE;QAC/BT,MAAM,CAACG,GAAG,IAAIa,KAAK,CAAC,aAAa,CAAC,CAAC;MACrC,CAAC,MAAM;QACLjB,OAAO,CAACgB,GAAG,CAAC;MACd;IACF,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAEO,SAASE,MAAM,CAACb,EAAE,EAAE;EACzB,OAAO,IAAIN,OAAO,CAAC,UAACC,OAAO,EAAEC,MAAM,EAAK;IACtCC,WAAE,CAACiB,KAAK,CAACd,EAAE,EAAE,UAACD,GAAG,EAAK;MACpB,IAAIA,GAAG,EAAE;QACPH,MAAM,CAACG,GAAG,CAAC;MACb,CAAC,MAAM;QACLJ,OAAO,CAACI,GAAG,CAAC;MACd;IACF,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAEO,SAASgB,OAAO,CAACC,EAAE,EAAEL,GAAG,EAAiB;EAC9C,OAAO,IAAIjB,OAAO,CAAC,UAACC,OAAO,EAAEC,MAAM,EAAK;IACtCoB,EAAE,CAACC,KAAK,CAACN,GAAG,EAAE,UAACZ,GAAG,EAAK;MACrB,IAAIA,GAAG,EAAE;QACPH,MAAM,CAACG,GAAG,CAAC;MACb,CAAC,MAAM;QACLJ,OAAO,EAAE;MACX;IACF,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAEO,SAASuB,OAAO,CAACF,EAAE,EAAiB;EACzC,OAAO,IAAItB,OAAO,CAAC,UAACC,OAAO,EAAEC,MAAM,EAAK;IACtCoB,EAAE,CAACF,KAAK,CAAC,UAACf,GAAG,EAAK;MAChB,IAAIA,GAAG,EAAE;QACPH,MAAM,CAACG,GAAG,CAAC;MACb,CAAC,MAAM;QACLJ,OAAO,EAAE;MACX;IACF,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAEO,SAASwB,MAAM,CAACC,IAAI,EAAEC,IAAI,EAAE;EACjC,OAAO,IAAI3B,OAAO,CAAC,UAACC,OAAO,EAAEC,MAAM,EAAK;IACtC,IAAM0B,YAAY,GAAGzB,WAAE,CAAC0B,iBAAiB,CAACH,IAAI,EAAEC,IAAI,CAAC;IAErDC,YAAY,CAACE,EAAE,CAAC,MAAM,EAAE,UAAUxB,EAAE,EAAE;MACpCL,OAAO,CAAC2B,YAAY,CAAC;IACvB,CAAC,CAAC;IAEFA,YAAY,CAACE,EAAE,CAAC,OAAO,EAAE,UAAUzB,GAAG,EAAE;MACtCH,MAAM,CAACG,GAAG,CAAC;IACb,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ"}
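
For reference, the file.js removed above hand-promisifies Node's callback-style fs APIs (open, stat, read, close, plus write-stream helpers). Below is a minimal editorial sketch of the same read-side helpers on Node's built-in fs/promises module, assuming Node 12 or later; these names are illustrative only and are not part of this package's public API.

// Editorial sketch, not part of @loaders.gl/parquet: the deleted helpers
// re-expressed with fs/promises instead of hand-rolled Promise wrappers.
import {open, stat} from 'fs/promises';
import type {FileHandle} from 'fs/promises';

// fopen/fstat/fclose map directly onto the built-in promise API.
const fopen = (filePath: string) => open(filePath, 'r');
const fstat = (filePath: string) => stat(filePath);
const fclose = (handle: FileHandle) => handle.close();

// fread reads an exact byte range or rejects, mirroring the
// bytesRead !== length check in the deleted code.
async function fread(handle: FileHandle, position: number, length: number): Promise<Buffer> {
  const buffer = Buffer.alloc(length);
  const {bytesRead} = await handle.read(buffer, 0, length, position);
  if (bytesRead !== length) {
    throw new Error('read failed');
  }
  return buffer;
}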