@loaders.gl/parquet 3.4.6 → 4.0.0-alpha.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (130)
  1. package/dist/dist.min.js +27 -34
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/index.js +6 -6
  4. package/dist/es5/index.js.map +1 -1
  5. package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -1
  6. package/dist/es5/lib/arrow/convert-schema-from-parquet.js +58 -42
  7. package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -1
  8. package/dist/es5/lib/arrow/convert-schema-to-parquet.js +33 -31
  9. package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -1
  10. package/dist/es5/lib/geo/decode-geo-metadata.js +12 -8
  11. package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -1
  12. package/dist/es5/lib/parsers/parse-parquet-to-columns.js +11 -7
  13. package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -1
  14. package/dist/es5/lib/parsers/parse-parquet-to-rows.js +51 -29
  15. package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -1
  16. package/dist/es5/lib/wasm/parse-parquet-wasm.js +6 -6
  17. package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
  18. package/dist/es5/parquet-loader.js +16 -4
  19. package/dist/es5/parquet-loader.js.map +1 -1
  20. package/dist/es5/parquet-wasm-loader.js +1 -1
  21. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  22. package/dist/es5/parquet-wasm-writer.js +1 -1
  23. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  24. package/dist/es5/parquet-writer.js +1 -1
  25. package/dist/es5/parquet-writer.js.map +1 -1
  26. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -1
  27. package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
  28. package/dist/es5/parquetjs/parser/parquet-reader.js +1 -1
  29. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  30. package/dist/es5/parquetjs/schema/declare.js +4 -4
  31. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  32. package/dist/es5/parquetjs/schema/schema.js +7 -7
  33. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  34. package/dist/es5/parquetjs/schema/shred.js +117 -22
  35. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  36. package/dist/esm/index.js +5 -5
  37. package/dist/esm/index.js.map +1 -1
  38. package/dist/esm/lib/arrow/convert-row-group-to-columns.js.map +1 -1
  39. package/dist/esm/lib/arrow/convert-schema-from-parquet.js +57 -41
  40. package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -1
  41. package/dist/esm/lib/arrow/convert-schema-to-parquet.js +33 -31
  42. package/dist/esm/lib/arrow/convert-schema-to-parquet.js.map +1 -1
  43. package/dist/esm/lib/geo/decode-geo-metadata.js +12 -8
  44. package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -1
  45. package/dist/esm/lib/parsers/parse-parquet-to-columns.js +12 -8
  46. package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -1
  47. package/dist/esm/lib/parsers/parse-parquet-to-rows.js +14 -3
  48. package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -1
  49. package/dist/esm/lib/wasm/parse-parquet-wasm.js +3 -3
  50. package/dist/esm/lib/wasm/parse-parquet-wasm.js.map +1 -1
  51. package/dist/esm/parquet-loader.js +14 -2
  52. package/dist/esm/parquet-loader.js.map +1 -1
  53. package/dist/esm/parquet-wasm-loader.js +1 -1
  54. package/dist/esm/parquet-wasm-loader.js.map +1 -1
  55. package/dist/esm/parquet-wasm-writer.js +1 -1
  56. package/dist/esm/parquet-wasm-writer.js.map +1 -1
  57. package/dist/esm/parquet-writer.js +1 -1
  58. package/dist/esm/parquet-writer.js.map +1 -1
  59. package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -1
  60. package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
  61. package/dist/esm/parquetjs/parser/parquet-reader.js +2 -2
  62. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  63. package/dist/esm/parquetjs/schema/declare.js +1 -1
  64. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  65. package/dist/esm/parquetjs/schema/schema.js +6 -6
  66. package/dist/esm/parquetjs/schema/schema.js.map +1 -1
  67. package/dist/esm/parquetjs/schema/shred.js +108 -21
  68. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  69. package/dist/index.d.ts +8 -49
  70. package/dist/index.d.ts.map +1 -1
  71. package/dist/index.js +8 -6
  72. package/dist/lib/arrow/convert-row-group-to-columns.d.ts +2 -2
  73. package/dist/lib/arrow/convert-row-group-to-columns.d.ts.map +1 -1
  74. package/dist/lib/arrow/convert-schema-from-parquet.d.ts +4 -4
  75. package/dist/lib/arrow/convert-schema-from-parquet.d.ts.map +1 -1
  76. package/dist/lib/arrow/convert-schema-from-parquet.js +48 -44
  77. package/dist/lib/arrow/convert-schema-to-parquet.d.ts +1 -1
  78. package/dist/lib/arrow/convert-schema-to-parquet.d.ts.map +1 -1
  79. package/dist/lib/arrow/convert-schema-to-parquet.js +30 -31
  80. package/dist/lib/geo/decode-geo-metadata.js +12 -8
  81. package/dist/lib/parsers/parse-parquet-to-columns.d.ts +2 -2
  82. package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -1
  83. package/dist/lib/parsers/parse-parquet-to-columns.js +13 -7
  84. package/dist/lib/parsers/parse-parquet-to-rows.d.ts +3 -2
  85. package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -1
  86. package/dist/lib/parsers/parse-parquet-to-rows.js +16 -19
  87. package/dist/lib/wasm/parse-parquet-wasm.d.ts +3 -3
  88. package/dist/lib/wasm/parse-parquet-wasm.d.ts.map +1 -1
  89. package/dist/lib/wasm/parse-parquet-wasm.js +3 -3
  90. package/dist/parquet-loader.d.ts +3 -14
  91. package/dist/parquet-loader.d.ts.map +1 -1
  92. package/dist/parquet-loader.js +14 -2
  93. package/dist/parquet-worker.js +31 -38
  94. package/dist/parquet-worker.js.map +3 -3
  95. package/dist/parquet-writer.d.ts +2 -1
  96. package/dist/parquet-writer.d.ts.map +1 -1
  97. package/dist/parquet-writer.js +1 -0
  98. package/dist/parquetjs/encoder/parquet-encoder.d.ts +4 -4
  99. package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -1
  100. package/dist/parquetjs/parser/decoders.d.ts +2 -2
  101. package/dist/parquetjs/parser/decoders.d.ts.map +1 -1
  102. package/dist/parquetjs/parser/parquet-reader.d.ts +6 -6
  103. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
  104. package/dist/parquetjs/parser/parquet-reader.js +1 -1
  105. package/dist/parquetjs/schema/declare.d.ts +6 -5
  106. package/dist/parquetjs/schema/declare.d.ts.map +1 -1
  107. package/dist/parquetjs/schema/declare.js +3 -3
  108. package/dist/parquetjs/schema/schema.d.ts +4 -4
  109. package/dist/parquetjs/schema/schema.d.ts.map +1 -1
  110. package/dist/parquetjs/schema/schema.js +5 -5
  111. package/dist/parquetjs/schema/shred.d.ts +17 -111
  112. package/dist/parquetjs/schema/shred.d.ts.map +1 -1
  113. package/dist/parquetjs/schema/shred.js +127 -119
  114. package/package.json +8 -8
  115. package/src/index.ts +32 -9
  116. package/src/lib/arrow/convert-row-group-to-columns.ts +2 -2
  117. package/src/lib/arrow/convert-schema-from-parquet.ts +56 -66
  118. package/src/lib/arrow/convert-schema-to-parquet.ts +32 -44
  119. package/src/lib/geo/decode-geo-metadata.ts +17 -8
  120. package/src/lib/parsers/parse-parquet-to-columns.ts +22 -11
  121. package/src/lib/parsers/parse-parquet-to-rows.ts +28 -23
  122. package/src/lib/wasm/parse-parquet-wasm.ts +7 -7
  123. package/src/parquet-loader.ts +25 -2
  124. package/src/parquet-writer.ts +4 -1
  125. package/src/parquetjs/encoder/parquet-encoder.ts +11 -10
  126. package/src/parquetjs/parser/decoders.ts +3 -3
  127. package/src/parquetjs/parser/parquet-reader.ts +7 -7
  128. package/src/parquetjs/schema/declare.ts +6 -5
  129. package/src/parquetjs/schema/schema.ts +8 -8
  130. package/src/parquetjs/schema/shred.ts +142 -103
package/src/parquetjs/schema/shred.ts

@@ -1,13 +1,14 @@
 // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
 
-import {ParquetBuffer, ParquetData, ParquetField, ParquetRecord} from './declare';
+import {ArrayType} from '@loaders.gl/schema';
+import {ParquetRowGroup, ParquetColumnChunk, ParquetField, ParquetRow} from './declare';
 import {ParquetSchema} from './schema';
 import * as Types from './types';
 
-export {ParquetBuffer};
+export {ParquetRowGroup};
 
-export function shredBuffer(schema: ParquetSchema): ParquetBuffer {
-  const columnData: Record<string, ParquetData> = {};
+export function shredBuffer(schema: ParquetSchema): ParquetRowGroup {
+  const columnData: Record<string, ParquetColumnChunk> = {};
   for (const field of schema.fieldList) {
     columnData[field.key] = {
       dlevels: [],
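The 3.x ParquetBuffer/ParquetData/ParquetRecord types are renamed to ParquetRowGroup/ParquetColumnChunk/ParquetRow throughout this file. Their definitions live in declare.ts and are not shown in this excerpt; the following is only a rough sketch of their shape, inferred from the doc comments and from how shredBuffer and shredRecord populate them below.

// Rough sketch only, inferred from this diff; the real definitions are in
// package/src/parquetjs/schema/declare.ts, which is not part of this excerpt.
type ParquetRow = Record<string, any>;

interface ParquetColumnChunk {
  dlevels: number[]; // definition level per shredded value
  rlevels: number[]; // repetition level per shredded value
  values: any[];     // the shredded values themselves
  count: number;     // number of <value, rlevel, dlevel> tuples
}

interface ParquetRowGroup {
  rowCount: number;
  columnData: Record<string, ParquetColumnChunk>;
}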
@@ -24,14 +25,14 @@ export function shredBuffer(schema: ParquetSchema): ParquetBuffer {
  * 'Shred' a record into a list of <value, repetition_level, definition_level>
  * tuples per column using the Google Dremel Algorithm..
  *
- * The buffer argument must point to an object into which the shredded record
- * will be returned. You may re-use the buffer for repeated calls to this function
- * to append to an existing buffer, as long as the schema is unchanged.
+ * The rowGroup argument must point to an object into which the shredded record
+ * will be returned. You may re-use the rowGroup for repeated calls to this function
+ * to append to an existing rowGroup, as long as the schema is unchanged.
  *
- * The format in which the shredded records will be stored in the buffer is as
+ * The format in which the shredded records will be stored in the rowGroup is as
  * follows:
  *
- *   buffer = {
+ *   rowGroup = {
  *     columnData: [
  *       'my_col': {
  *         dlevels: [d1, d2, .. dN],
@@ -42,32 +43,36 @@ export function shredBuffer(schema: ParquetSchema): ParquetBuffer {
  *     rowCount: X,
  *   }
  */
-export function shredRecord(schema: ParquetSchema, record: any, buffer: ParquetBuffer): void {
+export function shredRecord(
+  schema: ParquetSchema,
+  record: ParquetRow,
+  rowGroup: ParquetRowGroup
+): void {
   /* shred the record, this may raise an exception */
   const data = shredBuffer(schema).columnData;
 
   shredRecordFields(schema.fields, record, data, 0, 0);
 
-  /* if no error during shredding, add the shredded record to the buffer */
-  if (buffer.rowCount === 0) {
-    buffer.rowCount = 1;
-    buffer.columnData = data;
+  /* if no error during shredding, add the shredded record to the rowGroup */
+  if (rowGroup.rowCount === 0) {
+    rowGroup.rowCount = 1;
+    rowGroup.columnData = data;
     return;
   }
-  buffer.rowCount += 1;
+  rowGroup.rowCount += 1;
   for (const field of schema.fieldList) {
-    Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);
-    Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);
-    Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);
-    buffer.columnData[field.key].count += data[field.key].count;
+    Array.prototype.push.apply(rowGroup.columnData[field.key].rlevels, data[field.key].rlevels);
+    Array.prototype.push.apply(rowGroup.columnData[field.key].dlevels, data[field.key].dlevels);
+    Array.prototype.push.apply(rowGroup.columnData[field.key].values, data[field.key].values);
+    rowGroup.columnData[field.key].count += data[field.key].count;
   }
 }
 
 // eslint-disable-next-line max-statements, complexity
 function shredRecordFields(
   fields: Record<string, ParquetField>,
-  record: any,
-  data: Record<string, ParquetData>,
+  record: ParquetRow,
+  data: Record<string, ParquetColumnChunk>,
   rLevel: number,
   dLevel: number
 ) {
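shredRecord keeps its Dremel shredding behavior but now takes a typed ParquetRow and the renamed ParquetRowGroup. A hedged usage sketch follows; the schema-definition syntax assumes the parquets-style ParquetSchema constructor and the example field names are invented, not part of this diff.

// Hedged usage sketch; './schema' and './shred' paths and the field syntax are assumptions.
import {ParquetSchema} from './schema';
import {shredBuffer, shredRecord} from './shred';

// Hypothetical two-column schema, parquets-style field definitions.
const schema = new ParquetSchema({
  name: {type: 'UTF8'},
  quantity: {type: 'INT32', optional: true}
});

// shredBuffer() allocates an empty row group; shredRecord() appends one row's
// <value, rlevel, dlevel> tuples to each column chunk.
const rowGroup = shredBuffer(schema);
shredRecord(schema, {name: 'apples', quantity: 10}, rowGroup);
shredRecord(schema, {name: 'oranges', quantity: 20}, rowGroup);
// rowGroup.rowCount === 2; rowGroup.columnData holds the shredded columns.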
@@ -99,7 +104,7 @@ function shredRecordFields(
     // push null
     if (values.length === 0) {
       if (field.isNested) {
-        shredRecordFields(field.fields!, null, data, rLevel, dLevel);
+        shredRecordFields(field.fields!, null!, data, rLevel, dLevel);
       } else {
         data[field.key].count += 1;
         data[field.key].rlevels.push(rLevel);
@@ -130,10 +135,10 @@ function shredRecordFields(
  * tuples back to nested records (objects/arrays) using the Google Dremel
  * Algorithm..
  *
- * The buffer argument must point to an object with the following structure (i.e.
+ * The rowGroup argument must point to an object with the following structure (i.e.
  * the same structure that is returned by shredRecords):
  *
- *   buffer = {
+ *   rowGroup = {
  *     columnData: [
  *       'my_col': {
  *         dlevels: [d1, d2, .. dN],
@@ -144,26 +149,28 @@ function shredRecordFields(
  *     rowCount: X,
  *   }
  */
-export function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {
-  const records: ParquetRecord[] = [];
-  for (let i = 0; i < buffer.rowCount; i++) {
-    records.push({});
+export function materializeRows(schema: ParquetSchema, rowGroup: ParquetRowGroup): ParquetRow[] {
+  const rows: ParquetRow[] = [];
+  // rows = new Array(rowGroup.rowCount).fill({})'
+  for (let i = 0; i < rowGroup.rowCount; i++) {
+    rows.push({});
   }
-  for (const key in buffer.columnData) {
-    const columnData = buffer.columnData[key];
+  for (const key in rowGroup.columnData) {
+    const columnData = rowGroup.columnData[key];
     if (columnData.count) {
-      materializeColumn(schema, columnData, key, records);
+      materializeColumnAsRows(schema, columnData, key, rows);
     }
   }
-  return records;
+  return rows;
 }
 
+/** Populate record fields for one column */
 // eslint-disable-next-line max-statements, complexity
-function materializeColumn(
+function materializeColumnAsRows(
   schema: ParquetSchema,
-  columnData: ParquetData,
+  columnData: ParquetColumnChunk,
   key: string,
-  records: ParquetRecord[]
+  rows: ParquetRow[]
 ): void {
   const field = schema.findField(key);
   const branch = schema.findFieldBranch(key);
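materializeRecords becomes materializeRows, keeping the same row-oriented Dremel reassembly: one plain object per row, rebuilt from the per-column level/value triples. A hedged round-trip sketch, under the same assumed schema syntax as above:

// Hedged round-trip sketch; schema syntax and import paths are assumptions.
import {ParquetSchema} from './schema';
import {shredBuffer, shredRecord, materializeRows} from './shred';

const schema = new ParquetSchema({
  name: {type: 'UTF8'},
  quantity: {type: 'INT32', optional: true}
});

const rowGroup = shredBuffer(schema);
shredRecord(schema, {name: 'apples', quantity: 10}, rowGroup);
shredRecord(schema, {name: 'oranges', quantity: 20}, rowGroup);

// Round trip back to rows:
// rows ~ [{name: 'apples', quantity: 10}, {name: 'oranges', quantity: 20}]
const rows = materializeRows(schema, rowGroup);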
@@ -178,7 +185,7 @@ function materializeColumn(
   rLevels.fill(0, rLevel + 1);
 
   let rIndex = 0;
-  let record = records[rLevels[rIndex++] - 1];
+  let record = rows[rLevels[rIndex++] - 1];
 
   // Internal nodes - Build a nested row object
   for (const step of branch) {
@@ -244,10 +251,10 @@ function materializeColumn(
  * tuples back to nested records (objects/arrays) using the Google Dremel
  * Algorithm..
  *
- * The buffer argument must point to an object with the following structure (i.e.
+ * The rowGroup argument must point to an object with the following structure (i.e.
  * the same structure that is returned by shredRecords):
  *
- *   buffer = {
+ *   rowGroup = {
  *     columnData: [
  *       'my_col': {
  *         dlevels: [d1, d2, .. dN],
@@ -257,100 +264,132 @@ function materializeColumn(
  *     ],
  *     rowCount: X,
  *   }
- *
-export function extractColumns(schema: ParquetSchema, buffer: ParquetBuffer): Record<string, unknown> {
-  const columns: ParquetRecord = {};
-  for (const key in buffer.columnData) {
-    const columnData = buffer.columnData[key];
+ */
+export function materializeColumns(
+  schema: ParquetSchema,
+  rowGroup: ParquetRowGroup
+): Record<string, ArrayType> {
+  const columns: Record<string, ArrayType> = {};
+  for (const key in rowGroup.columnData) {
+    const columnData = rowGroup.columnData[key];
     if (columnData.count) {
-      extractColumn(schema, columnData, key, columns);
+      materializeColumnAsColumnarArray(schema, columnData, rowGroup.rowCount, key, columns);
     }
   }
   return columns;
 }
 
 // eslint-disable-next-line max-statements, complexity
-function extractColumn(
+function materializeColumnAsColumnarArray(
   schema: ParquetSchema,
-  columnData: ParquetData,
+  columnData: ParquetColumnChunk,
+  rowCount: number,
   key: string,
-  columns: Record<string, unknown>
+  columns: Record<string, ArrayType<any>>
 ) {
   if (columnData.count <= 0) {
     return;
   }
 
-  const record = columns;
-
   const field = schema.findField(key);
   const branch = schema.findFieldBranch(key);
 
+  const columnName = branch[0].name;
+
+  let column: ArrayType | undefined;
+  const {values} = columnData;
+  if (values.length === rowCount && branch[0].primitiveType) {
+    // if (branch[0].repetitionType === `REQUIRED`) {
+    //   switch (branch[0].primitiveType) {
+    //     case 'INT32': return values instanceof Int32Array ? values : new Int32Array(values);
+    //   }
+    // }
+    column = values;
+  }
+
+  if (column) {
+    columns[columnName] = column;
+    return;
+  }
+
+  column = new Array(rowCount);
+  for (let i = 0; i < rowCount; i++) {
+    column[i] = {};
+  }
+  columns[columnName] = column;
+
   // tslint:disable-next-line:prefer-array-literal
   const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);
   let vIndex = 0;
+  for (let i = 0; i < columnData.count; i++) {
+    const dLevel = columnData.dlevels[i];
+    const rLevel = columnData.rlevels[i];
+    rLevels[rLevel]++;
+    rLevels.fill(0, rLevel + 1);
 
-  let i = 0;
-  const dLevel = columnData.dlevels[i];
-  const rLevel = columnData.rlevels[i];
-  rLevels[rLevel]++;
-  rLevels.fill(0, rLevel + 1);
+    let rIndex = 0;
+    let record = column[rLevels[rIndex++] - 1] as ParquetRow;
 
-  let rIndex = 0;
-  let record = records[rLevels[rIndex++] - 1];
+    // Internal nodes - Build a nested row object
+    for (const step of branch) {
+      if (step === field || dLevel < step.dLevelMax) {
+        break;
+      }
 
-  // Internal nodes
-  for (const step of branch) {
-    if (step === field || dLevel < step.dLevelMax) {
-      break;
+      switch (step.repetitionType) {
+        case 'REPEATED':
+          if (!(step.name in record)) {
+            // eslint-disable max-depth
+            record[step.name] = [];
+          }
+          const ix = rLevels[rIndex++];
+          while (record[step.name].length <= ix) {
+            // eslint-disable max-depth
+            record[step.name].push({});
+          }
+          record = record[step.name][ix];
+          break;
+
+        default:
+          record[step.name] = record[step.name] || {};
+          record = record[step.name];
+      }
     }
 
-    switch (step.repetitionType) {
-      case 'REPEATED':
-        if (!(step.name in record)) {
-          // eslint-disable max-depth
-          record[step.name] = [];
-        }
-        const ix = rLevels[rIndex++];
-        while (record[step.name].length <= ix) {
-          // eslint-disable max-depth
-          record[step.name].push({});
-        }
-        record = record[step.name][ix];
-        break;
+    // Leaf node - Add the value
+    if (dLevel === field.dLevelMax) {
+      const value = Types.fromPrimitive(
+        // @ts-ignore
+        field.originalType || field.primitiveType,
+        columnData.values[vIndex],
+        field
+      );
+      vIndex++;
 
-      default:
-        record[step.name] = record[step.name] || {};
-        record = record[step.name];
+      switch (field.repetitionType) {
+        case 'REPEATED':
+          if (!(field.name in record)) {
+            // eslint-disable max-depth
+            record[field.name] = [];
+          }
+          const ix = rLevels[rIndex];
+          while (record[field.name].length <= ix) {
+            // eslint-disable max-depth
+            record[field.name].push(null);
+          }
+          record[field.name][ix] = value;
+          break;
+
+        default:
+          record[field.name] = value;
+      }
     }
   }
 
-  // Leaf node
-  if (dLevel === field.dLevelMax) {
-    const value = Types.fromPrimitive(
-      // @ts-ignore
-      field.originalType || field.primitiveType,
-      columnData.values[vIndex],
-      field
-    );
-    vIndex++;
-
-    switch (field.repetitionType) {
-      case 'REPEATED':
-        if (!(field.name in record)) {
-          // eslint-disable max-depth
-          record[field.name] = [];
-        }
-        const ix = rLevels[rIndex];
-        while (record[field.name].length <= ix) {
-          record[field.name].push(null);
-        }
-        record[field.name][ix] = value;
-        break;
-
-
-      default:
-        record[field.name] = value;
+  // Remove one level of nesting
+  for (let i = 0; i < rowCount; ++i) {
+    if (columnName in (column[i] as object)) {
+      column[i] = (column[i] as object)[columnName];
    }
  }
 }
- */
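The formerly commented-out extractColumns path is revived as materializeColumns/materializeColumnAsColumnarArray: instead of one object per row it returns one array per top-level column, reusing the raw values array when it already has exactly one entry per row, and otherwise rebuilding rows and removing one level of nesting. A hedged sketch of the expected output shape, inferred from the code above rather than from package docs:

// Hedged columnar sketch; schema syntax, import paths, and the exact output
// shape shown in the comment are assumptions based on this diff.
import {ParquetSchema} from './schema';
import {shredBuffer, shredRecord, materializeColumns} from './shred';

const schema = new ParquetSchema({
  name: {type: 'UTF8'},
  quantity: {type: 'INT32', optional: true}
});

const rowGroup = shredBuffer(schema);
shredRecord(schema, {name: 'apples', quantity: 10}, rowGroup);
shredRecord(schema, {name: 'oranges', quantity: 20}, rowGroup);

// Columnar output: columns ~ {name: ['apples', 'oranges'], quantity: [10, 20]}
// Flat columns whose values array already has rowCount entries are passed through
// as-is; nested or sparse columns are rebuilt row by row and then un-nested.
const columns = materializeColumns(schema, rowGroup);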