@loaders.gl/parquet 3.4.6 → 4.0.0-alpha.10
This diff compares the publicly released contents of the two package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/dist.min.js +27 -34
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/index.js +6 -6
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -1
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js +58 -42
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -1
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js +33 -31
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -1
- package/dist/es5/lib/geo/decode-geo-metadata.js +12 -8
- package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -1
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js +11 -7
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -1
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js +51 -29
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -1
- package/dist/es5/lib/wasm/parse-parquet-wasm.js +6 -6
- package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/es5/parquet-loader.js +16 -4
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -1
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -1
- package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
- package/dist/es5/parquetjs/parser/parquet-reader.js +1 -1
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js +4 -4
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/schema.js +7 -7
- package/dist/es5/parquetjs/schema/schema.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +117 -22
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/esm/index.js +5 -5
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/arrow/convert-row-group-to-columns.js.map +1 -1
- package/dist/esm/lib/arrow/convert-schema-from-parquet.js +57 -41
- package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -1
- package/dist/esm/lib/arrow/convert-schema-to-parquet.js +33 -31
- package/dist/esm/lib/arrow/convert-schema-to-parquet.js.map +1 -1
- package/dist/esm/lib/geo/decode-geo-metadata.js +12 -8
- package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -1
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js +12 -8
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -1
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js +14 -3
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -1
- package/dist/esm/lib/wasm/parse-parquet-wasm.js +3 -3
- package/dist/esm/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/esm/parquet-loader.js +14 -2
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -1
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -1
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -1
- package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
- package/dist/esm/parquetjs/parser/parquet-reader.js +2 -2
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/esm/parquetjs/schema/declare.js +1 -1
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/schema/schema.js +6 -6
- package/dist/esm/parquetjs/schema/schema.js.map +1 -1
- package/dist/esm/parquetjs/schema/shred.js +108 -21
- package/dist/esm/parquetjs/schema/shred.js.map +1 -1
- package/dist/index.d.ts +8 -49
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +8 -6
- package/dist/lib/arrow/convert-row-group-to-columns.d.ts +2 -2
- package/dist/lib/arrow/convert-row-group-to-columns.d.ts.map +1 -1
- package/dist/lib/arrow/convert-schema-from-parquet.d.ts +4 -4
- package/dist/lib/arrow/convert-schema-from-parquet.d.ts.map +1 -1
- package/dist/lib/arrow/convert-schema-from-parquet.js +48 -44
- package/dist/lib/arrow/convert-schema-to-parquet.d.ts +1 -1
- package/dist/lib/arrow/convert-schema-to-parquet.d.ts.map +1 -1
- package/dist/lib/arrow/convert-schema-to-parquet.js +30 -31
- package/dist/lib/geo/decode-geo-metadata.js +12 -8
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts +2 -2
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -1
- package/dist/lib/parsers/parse-parquet-to-columns.js +13 -7
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts +3 -2
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -1
- package/dist/lib/parsers/parse-parquet-to-rows.js +16 -19
- package/dist/lib/wasm/parse-parquet-wasm.d.ts +3 -3
- package/dist/lib/wasm/parse-parquet-wasm.d.ts.map +1 -1
- package/dist/lib/wasm/parse-parquet-wasm.js +3 -3
- package/dist/parquet-loader.d.ts +3 -14
- package/dist/parquet-loader.d.ts.map +1 -1
- package/dist/parquet-loader.js +14 -2
- package/dist/parquet-worker.js +31 -38
- package/dist/parquet-worker.js.map +3 -3
- package/dist/parquet-writer.d.ts +2 -1
- package/dist/parquet-writer.d.ts.map +1 -1
- package/dist/parquet-writer.js +1 -0
- package/dist/parquetjs/encoder/parquet-encoder.d.ts +4 -4
- package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -1
- package/dist/parquetjs/parser/decoders.d.ts +2 -2
- package/dist/parquetjs/parser/decoders.d.ts.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.d.ts +6 -6
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.js +1 -1
- package/dist/parquetjs/schema/declare.d.ts +6 -5
- package/dist/parquetjs/schema/declare.d.ts.map +1 -1
- package/dist/parquetjs/schema/declare.js +3 -3
- package/dist/parquetjs/schema/schema.d.ts +4 -4
- package/dist/parquetjs/schema/schema.d.ts.map +1 -1
- package/dist/parquetjs/schema/schema.js +5 -5
- package/dist/parquetjs/schema/shred.d.ts +17 -111
- package/dist/parquetjs/schema/shred.d.ts.map +1 -1
- package/dist/parquetjs/schema/shred.js +127 -119
- package/package.json +8 -8
- package/src/index.ts +32 -9
- package/src/lib/arrow/convert-row-group-to-columns.ts +2 -2
- package/src/lib/arrow/convert-schema-from-parquet.ts +56 -66
- package/src/lib/arrow/convert-schema-to-parquet.ts +32 -44
- package/src/lib/geo/decode-geo-metadata.ts +17 -8
- package/src/lib/parsers/parse-parquet-to-columns.ts +22 -11
- package/src/lib/parsers/parse-parquet-to-rows.ts +28 -23
- package/src/lib/wasm/parse-parquet-wasm.ts +7 -7
- package/src/parquet-loader.ts +25 -2
- package/src/parquet-writer.ts +4 -1
- package/src/parquetjs/encoder/parquet-encoder.ts +11 -10
- package/src/parquetjs/parser/decoders.ts +3 -3
- package/src/parquetjs/parser/parquet-reader.ts +7 -7
- package/src/parquetjs/schema/declare.ts +6 -5
- package/src/parquetjs/schema/schema.ts +8 -8
- package/src/parquetjs/schema/shred.ts +142 -103
package/src/parquetjs/schema/shred.ts (+142 -103)

@@ -1,13 +1,14 @@
 // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
 
-import {ParquetBuffer, ParquetData, ParquetField, ParquetRecord} from './declare';
+import {ArrayType} from '@loaders.gl/schema';
+import {ParquetRowGroup, ParquetColumnChunk, ParquetField, ParquetRow} from './declare';
 import {ParquetSchema} from './schema';
 import * as Types from './types';
 
-export {ParquetBuffer};
+export {ParquetRowGroup};
 
-export function shredBuffer(schema: ParquetSchema): ParquetBuffer {
-  const columnData: Record<string, ParquetData> = {};
+export function shredBuffer(schema: ParquetSchema): ParquetRowGroup {
+  const columnData: Record<string, ParquetColumnChunk> = {};
   for (const field of schema.fieldList) {
     columnData[field.key] = {
       dlevels: [],
@@ -24,14 +25,14 @@ export function shredBuffer(schema: ParquetSchema): ParquetBuffer {
  * 'Shred' a record into a list of <value, repetition_level, definition_level>
  * tuples per column using the Google Dremel Algorithm..
  *
- * The buffer argument must point to an object into which the shredded record
- * will be returned. You may re-use the buffer for repeated calls to this function
- * to append to an existing buffer, as long as the schema is unchanged.
+ * The rowGroup argument must point to an object into which the shredded record
+ * will be returned. You may re-use the rowGroup for repeated calls to this function
+ * to append to an existing rowGroup, as long as the schema is unchanged.
  *
- * The format in which the shredded records will be stored in the buffer is as
+ * The format in which the shredded records will be stored in the rowGroup is as
  * follows:
  *
- * buffer = {
+ * rowGroup = {
  *   columnData: [
  *     'my_col': {
  *       dlevels: [d1, d2, .. dN],
@@ -42,32 +43,36 @@ export function shredBuffer(schema: ParquetSchema): ParquetBuffer {
  *   rowCount: X,
  * }
  */
-export function shredRecord(schema: ParquetSchema, record: ParquetRecord, buffer: ParquetBuffer): void {
+export function shredRecord(
+  schema: ParquetSchema,
+  record: ParquetRow,
+  rowGroup: ParquetRowGroup
+): void {
   /* shred the record, this may raise an exception */
   const data = shredBuffer(schema).columnData;
 
   shredRecordFields(schema.fields, record, data, 0, 0);
 
-  /* if no error during shredding, add the shredded record to the buffer */
-  if (buffer.rowCount === 0) {
-    buffer.rowCount = 1;
-    buffer.columnData = data;
+  /* if no error during shredding, add the shredded record to the rowGroup */
+  if (rowGroup.rowCount === 0) {
+    rowGroup.rowCount = 1;
+    rowGroup.columnData = data;
     return;
   }
-  buffer.rowCount += 1;
+  rowGroup.rowCount += 1;
   for (const field of schema.fieldList) {
-    Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);
-    Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);
-    Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);
-    buffer.columnData[field.key].count += data[field.key].count;
+    Array.prototype.push.apply(rowGroup.columnData[field.key].rlevels, data[field.key].rlevels);
+    Array.prototype.push.apply(rowGroup.columnData[field.key].dlevels, data[field.key].dlevels);
+    Array.prototype.push.apply(rowGroup.columnData[field.key].values, data[field.key].values);
+    rowGroup.columnData[field.key].count += data[field.key].count;
   }
 }
 
 // eslint-disable-next-line max-statements, complexity
 function shredRecordFields(
   fields: Record<string, ParquetField>,
-  record: ParquetRecord,
-  data: Record<string, ParquetData>,
+  record: ParquetRow,
+  data: Record<string, ParquetColumnChunk>,
   rLevel: number,
   dLevel: number
 ) {
@@ -99,7 +104,7 @@ function shredRecordFields(
     // push null
     if (values.length === 0) {
       if (field.isNested) {
-        shredRecordFields(field.fields!, null, data, rLevel, dLevel);
+        shredRecordFields(field.fields!, null!, data, rLevel, dLevel);
       } else {
         data[field.key].count += 1;
         data[field.key].rlevels.push(rLevel);
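
For orientation, here is how the shredding half of this file is driven after the rename. This is a hedged sketch, not code from the package: it assumes sibling-module imports, the parquetjs-style new ParquetSchema(...) constructor from schema.ts, and a single optional UTF8 column (the column definition is illustrative).

import {ParquetSchema} from './schema';
import {shredBuffer, shredRecord} from './shred';

// One optional, non-repeated string column: dLevelMax is 1, rLevelMax is 0
const schema = new ParquetSchema({name: {type: 'UTF8', optional: true}});

// Fresh row group: one empty ParquetColumnChunk per column, rowCount 0
const rowGroup = shredBuffer(schema);

shredRecord(schema, {name: 'alice'}, rowGroup); // value defined: dlevel 1
shredRecord(schema, {}, rowGroup);              // value missing: dlevel 0, no value stored

// Per the docstring format above, rowGroup.columnData['name'] now holds
// dlevels [1, 0], rlevels [0, 0], count 2, and one stored value (in the
// column's primitive encoding); rowGroup.rowCount === 2.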
@@ -130,10 +135,10 @@ function shredRecordFields(
  * tuples back to nested records (objects/arrays) using the Google Dremel
  * Algorithm..
  *
- * The buffer argument must point to an object with the following structure (i.e.
+ * The rowGroup argument must point to an object with the following structure (i.e.
  * the same structure that is returned by shredRecords):
  *
- * buffer = {
+ * rowGroup = {
  *   columnData: [
  *     'my_col': {
  *       dlevels: [d1, d2, .. dN],
@@ -144,26 +149,28 @@ function shredRecordFields(
  *   rowCount: X,
  * }
  */
-export function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {
-  const records: ParquetRecord[] = [];
-  for (let i = 0; i < buffer.rowCount; i++) {
-    records.push({});
+export function materializeRows(schema: ParquetSchema, rowGroup: ParquetRowGroup): ParquetRow[] {
+  const rows: ParquetRow[] = [];
+  // rows = new Array(rowGroup.rowCount).fill({})'
+  for (let i = 0; i < rowGroup.rowCount; i++) {
+    rows.push({});
   }
-  for (const key in buffer.columnData) {
-    const columnData = buffer.columnData[key];
+  for (const key in rowGroup.columnData) {
+    const columnData = rowGroup.columnData[key];
     if (columnData.count) {
-      materializeColumn(schema, columnData, key, records);
+      materializeColumnAsRows(schema, columnData, key, rows);
     }
   }
-  return records;
+  return rows;
 }
 
+/** Populate record fields for one column */
 // eslint-disable-next-line max-statements, complexity
-function materializeColumn(
+function materializeColumnAsRows(
   schema: ParquetSchema,
-  columnData: ParquetData,
+  columnData: ParquetColumnChunk,
   key: string,
-  records: ParquetRecord[]
+  rows: ParquetRow[]
 ): void {
   const field = schema.findField(key);
   const branch = schema.findFieldBranch(key);
@@ -178,7 +185,7 @@ function materializeColumn(
     rLevels.fill(0, rLevel + 1);
 
     let rIndex = 0;
-    let record = records[rLevels[rIndex++] - 1];
+    let record = rows[rLevels[rIndex++] - 1];
 
     // Internal nodes - Build a nested row object
     for (const step of branch) {
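
Reading back is the inverse walk. A sketch continuing the example above, under the same assumptions (and assuming Types.fromPrimitive restores stored UTF8 values to strings):

import {materializeRows} from './shred';

// Walk each column's <value, rlevel, dlevel> stream back into row objects
const rows = materializeRows(schema, rowGroup);
// Expected: [{name: 'alice'}, {}], one object per rowGroup.rowCount;
// a dlevel of 0 simply leaves the optional field unset on that row.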
@@ -244,10 +251,10 @@ function materializeColumn(
  * tuples back to nested records (objects/arrays) using the Google Dremel
  * Algorithm..
  *
- * The buffer argument must point to an object with the following structure (i.e.
+ * The rowGroup argument must point to an object with the following structure (i.e.
  * the same structure that is returned by shredRecords):
  *
- * buffer = {
+ * rowGroup = {
  *   columnData: [
  *     'my_col': {
  *       dlevels: [d1, d2, .. dN],
(In the hunk below, "…" marks removed-line content that was not captured in the extracted diff.)

@@ -257,100 +264,132 @@ function materializeColumn(
  *   ],
  *   rowCount: X,
  * }
- …
-export function …
- …
- …
- …
+ */
+export function materializeColumns(
+  schema: ParquetSchema,
+  rowGroup: ParquetRowGroup
+): Record<string, ArrayType> {
+  const columns: Record<string, ArrayType> = {};
+  for (const key in rowGroup.columnData) {
+    const columnData = rowGroup.columnData[key];
     if (columnData.count) {
- …
+      materializeColumnAsColumnarArray(schema, columnData, rowGroup.rowCount, key, columns);
     }
   }
   return columns;
 }
 
 // eslint-disable-next-line max-statements, complexity
-function …
+function materializeColumnAsColumnarArray(
   schema: ParquetSchema,
-  columnData: ParquetData,
+  columnData: ParquetColumnChunk,
+  rowCount: number,
   key: string,
-  columns: Record<string, …
+  columns: Record<string, ArrayType<any>>
 ) {
   if (columnData.count <= 0) {
     return;
   }
 
-  const record = columns;
-
   const field = schema.findField(key);
   const branch = schema.findFieldBranch(key);
 
+  const columnName = branch[0].name;
+
+  let column: ArrayType | undefined;
+  const {values} = columnData;
+  if (values.length === rowCount && branch[0].primitiveType) {
+    // if (branch[0].repetitionType === `REQUIRED`) {
+    //   switch (branch[0].primitiveType) {
+    //     case 'INT32': return values instanceof Int32Array ? values : new Int32Array(values);
+    //   }
+    // }
+    column = values;
+  }
+
+  if (column) {
+    columns[columnName] = column;
+    return;
+  }
+
+  column = new Array(rowCount);
+  for (let i = 0; i < rowCount; i++) {
+    column[i] = {};
+  }
+  columns[columnName] = column;
+
   // tslint:disable-next-line:prefer-array-literal
   const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);
   let vIndex = 0;
+  for (let i = 0; i < columnData.count; i++) {
+    const dLevel = columnData.dlevels[i];
+    const rLevel = columnData.rlevels[i];
+    rLevels[rLevel]++;
+    rLevels.fill(0, rLevel + 1);
 
-  for (let i = 0; i < columnData.count; i++) {
-    const dLevel = columnData.dlevels[i];
-    const rLevel = columnData.rlevels[i];
-    rLevels[rLevel]++;
-    rLevels.fill(0, rLevel + 1);
+    let rIndex = 0;
+    let record = column[rLevels[rIndex++] - 1] as ParquetRow;
 
- …
- …
+    // Internal nodes - Build a nested row object
+    for (const step of branch) {
+      if (step === field || dLevel < step.dLevelMax) {
+        break;
+      }
 
- …
- …
- …
- …
+      switch (step.repetitionType) {
+        case 'REPEATED':
+          if (!(step.name in record)) {
+            // eslint-disable max-depth
+            record[step.name] = [];
+          }
+          const ix = rLevels[rIndex++];
+          while (record[step.name].length <= ix) {
+            // eslint-disable max-depth
+            record[step.name].push({});
+          }
+          record = record[step.name][ix];
+          break;
+
+        default:
+          record[step.name] = record[step.name] || {};
+          record = record[step.name];
+      }
     }
 
-    switch (step.repetitionType) {
-      case 'REPEATED':
-        if (!(step.name in record)) {
-          // eslint-disable max-depth
-          record[step.name] = [];
-        }
-        const ix = rLevels[rIndex++];
-        while (record[step.name].length <= ix) {
-          // eslint-disable max-depth
-          record[step.name].push({});
-        }
-        record = record[step.name][ix];
-        break;
+    // Leaf node - Add the value
+    if (dLevel === field.dLevelMax) {
+      const value = Types.fromPrimitive(
+        // @ts-ignore
+        field.originalType || field.primitiveType,
+        columnData.values[vIndex],
+        field
+      );
+      vIndex++;
 
- …
- …
- …
+      switch (field.repetitionType) {
+        case 'REPEATED':
+          if (!(field.name in record)) {
+            // eslint-disable max-depth
+            record[field.name] = [];
+          }
+          const ix = rLevels[rIndex];
+          while (record[field.name].length <= ix) {
+            // eslint-disable max-depth
+            record[field.name].push(null);
+          }
+          record[field.name][ix] = value;
+          break;
+
+        default:
+          record[field.name] = value;
+      }
     }
   }
 
-  // Leaf node - Add the value
-  if (dLevel === field.dLevelMax) {
-    const value = Types.fromPrimitive(
-      // @ts-ignore
-      field.originalType || field.primitiveType,
-      columnData.values[vIndex],
-      field
-    );
-    vIndex++;
-
-    switch (field.repetitionType) {
-      case 'REPEATED':
-        if (!(field.name in record)) {
-          // eslint-disable max-depth
-          record[field.name] = [];
-        }
-        const ix = rLevels[rIndex];
-        while (record[field.name].length <= ix) {
-          // eslint-disable max-depth
-          record[field.name].push(null);
-        }
-        record[field.name][ix] = value;
-        break;
-
-      default:
-        record[field.name] = value;
+  // Remove one level of nesting
+  for (let i = 0; i < rowCount; ++i) {
+    if (columnName in (column[i] as object)) {
+      column[i] = (column[i] as object)[columnName];
     }
   }
 }
- */
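
The new columnar path returns one array per top-level column instead of row objects. A sketch under the same assumptions as the earlier examples; note the fast path above that hands back columnData.values directly when the chunk holds exactly one defined value per row:

import {materializeColumns} from './shred';

const columns = materializeColumns(schema, rowGroup);
// Shape: {name: [...]}, keyed by branch[0].name. Here values.length (1)
// differs from rowCount (2), so the slow path runs: per-row objects are
// built for the column, then the trailing "Remove one level of nesting"
// loop unwraps them.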