@loaders.gl/parquet 4.0.0-alpha.22 → 4.0.0-alpha.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. package/dist/dist.min.js +17 -17
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/index.js +0 -3
  4. package/dist/es5/index.js.map +1 -1
  5. package/dist/es5/parquet-loader.js +1 -1
  6. package/dist/es5/parquet-wasm-loader.js +1 -1
  7. package/dist/es5/parquet-wasm-writer.js +1 -1
  8. package/dist/es5/parquet-writer.js +2 -3
  9. package/dist/es5/parquet-writer.js.map +1 -1
  10. package/dist/esm/index.js +0 -1
  11. package/dist/esm/index.js.map +1 -1
  12. package/dist/esm/parquet-loader.js +1 -1
  13. package/dist/esm/parquet-wasm-loader.js +1 -1
  14. package/dist/esm/parquet-wasm-writer.js +1 -1
  15. package/dist/esm/parquet-writer.js +2 -3
  16. package/dist/esm/parquet-writer.js.map +1 -1
  17. package/dist/index.d.ts +0 -1
  18. package/dist/index.d.ts.map +1 -1
  19. package/dist/parquet-worker.js +17 -17
  20. package/dist/parquet-worker.js.map +3 -3
  21. package/dist/parquet-writer.d.ts.map +1 -1
  22. package/package.json +11 -9
  23. package/src/index.ts +0 -3
  24. package/src/parquet-writer.ts +1 -3
  25. package/dist/bundle.js +0 -5
  26. package/dist/constants.js +0 -18
  27. package/dist/index.js +0 -58
  28. package/dist/lib/arrow/convert-columns-to-row-group.js +0 -1
  29. package/dist/lib/arrow/convert-row-group-to-columns.js +0 -12
  30. package/dist/lib/arrow/convert-schema-from-parquet.js +0 -86
  31. package/dist/lib/arrow/convert-schema-to-parquet.js +0 -71
  32. package/dist/lib/geo/decode-geo-metadata.js +0 -77
  33. package/dist/lib/geo/geoparquet-schema.js +0 -69
  34. package/dist/lib/parsers/parse-parquet-to-columns.js +0 -46
  35. package/dist/lib/parsers/parse-parquet-to-rows.js +0 -37
  36. package/dist/lib/wasm/encode-parquet-wasm.js +0 -30
  37. package/dist/lib/wasm/load-wasm/index.js +0 -5
  38. package/dist/lib/wasm/load-wasm/load-wasm-browser.js +0 -38
  39. package/dist/lib/wasm/load-wasm/load-wasm-node.js +0 -31
  40. package/dist/lib/wasm/parse-parquet-wasm.js +0 -27
  41. package/dist/parquet-loader.js +0 -41
  42. package/dist/parquet-wasm-loader.js +0 -26
  43. package/dist/parquet-wasm-writer.js +0 -24
  44. package/dist/parquet-writer.js +0 -22
  45. package/dist/parquetjs/codecs/declare.js +0 -2
  46. package/dist/parquetjs/codecs/dictionary.js +0 -14
  47. package/dist/parquetjs/codecs/index.js +0 -55
  48. package/dist/parquetjs/codecs/plain.js +0 -211
  49. package/dist/parquetjs/codecs/rle.js +0 -145
  50. package/dist/parquetjs/compression.js +0 -183
  51. package/dist/parquetjs/encoder/parquet-encoder.js +0 -484
  52. package/dist/parquetjs/parquet-thrift/BoundaryOrder.js +0 -15
  53. package/dist/parquetjs/parquet-thrift/BsonType.js +0 -62
  54. package/dist/parquetjs/parquet-thrift/ColumnChunk.js +0 -211
  55. package/dist/parquetjs/parquet-thrift/ColumnIndex.js +0 -217
  56. package/dist/parquetjs/parquet-thrift/ColumnMetaData.js +0 -402
  57. package/dist/parquetjs/parquet-thrift/ColumnOrder.js +0 -108
  58. package/dist/parquetjs/parquet-thrift/CompressionCodec.js +0 -20
  59. package/dist/parquetjs/parquet-thrift/ConvertedType.js +0 -34
  60. package/dist/parquetjs/parquet-thrift/DataPageHeader.js +0 -170
  61. package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js +0 -230
  62. package/dist/parquetjs/parquet-thrift/DateType.js +0 -62
  63. package/dist/parquetjs/parquet-thrift/DecimalType.js +0 -109
  64. package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js +0 -126
  65. package/dist/parquetjs/parquet-thrift/Encoding.js +0 -20
  66. package/dist/parquetjs/parquet-thrift/EnumType.js +0 -62
  67. package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js +0 -15
  68. package/dist/parquetjs/parquet-thrift/FileMetaData.js +0 -260
  69. package/dist/parquetjs/parquet-thrift/IndexPageHeader.js +0 -62
  70. package/dist/parquetjs/parquet-thrift/IntType.js +0 -109
  71. package/dist/parquetjs/parquet-thrift/JsonType.js +0 -62
  72. package/dist/parquetjs/parquet-thrift/KeyValue.js +0 -106
  73. package/dist/parquetjs/parquet-thrift/ListType.js +0 -62
  74. package/dist/parquetjs/parquet-thrift/LogicalType.js +0 -384
  75. package/dist/parquetjs/parquet-thrift/MapType.js +0 -62
  76. package/dist/parquetjs/parquet-thrift/MicroSeconds.js +0 -62
  77. package/dist/parquetjs/parquet-thrift/MilliSeconds.js +0 -62
  78. package/dist/parquetjs/parquet-thrift/NullType.js +0 -62
  79. package/dist/parquetjs/parquet-thrift/OffsetIndex.js +0 -101
  80. package/dist/parquetjs/parquet-thrift/PageEncodingStats.js +0 -131
  81. package/dist/parquetjs/parquet-thrift/PageHeader.js +0 -220
  82. package/dist/parquetjs/parquet-thrift/PageLocation.js +0 -145
  83. package/dist/parquetjs/parquet-thrift/PageType.js +0 -16
  84. package/dist/parquetjs/parquet-thrift/RowGroup.js +0 -186
  85. package/dist/parquetjs/parquet-thrift/SchemaElement.js +0 -243
  86. package/dist/parquetjs/parquet-thrift/SortingColumn.js +0 -131
  87. package/dist/parquetjs/parquet-thrift/Statistics.js +0 -180
  88. package/dist/parquetjs/parquet-thrift/StringType.js +0 -62
  89. package/dist/parquetjs/parquet-thrift/TimeType.js +0 -110
  90. package/dist/parquetjs/parquet-thrift/TimeUnit.js +0 -131
  91. package/dist/parquetjs/parquet-thrift/TimestampType.js +0 -110
  92. package/dist/parquetjs/parquet-thrift/Type.js +0 -20
  93. package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -62
  94. package/dist/parquetjs/parquet-thrift/UUIDType.js +0 -62
  95. package/dist/parquetjs/parquet-thrift/index.js +0 -65
  96. package/dist/parquetjs/parser/decoders.js +0 -318
  97. package/dist/parquetjs/parser/parquet-reader.js +0 -200
  98. package/dist/parquetjs/schema/declare.js +0 -12
  99. package/dist/parquetjs/schema/schema.js +0 -162
  100. package/dist/parquetjs/schema/shred.js +0 -355
  101. package/dist/parquetjs/schema/types.js +0 -416
  102. package/dist/parquetjs/utils/file-utils.js +0 -43
  103. package/dist/parquetjs/utils/read-utils.js +0 -109
  104. package/dist/workers/parquet-worker.js +0 -5
package/dist/parquetjs/parser/parquet-reader.js
@@ -1,200 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ParquetReader = void 0;
- const schema_1 = require("../schema/schema");
- const decoders_1 = require("./decoders");
- const shred_1 = require("../schema/shred");
- const constants_1 = require("../../constants");
- const parquet_thrift_1 = require("../parquet-thrift");
- const read_utils_1 = require("../utils/read-utils");
- const decoders_2 = require("./decoders");
- const DEFAULT_PROPS = {
-     defaultDictionarySize: 1e6
- };
- /**
-  * The parquet envelope reader allows direct, unbuffered access to the individual
-  * sections of the parquet file, namely the header, footer and the row groups.
-  * The low-level methods are intended for advanced/internal users; to simply
-  * retrieve rows from a parquet file, use the high-level row iterators below.
-  */
- class ParquetReader {
-     constructor(file, props) {
-         this.metadata = null;
-         this.file = file;
-         this.props = { ...DEFAULT_PROPS, ...props };
-     }
-     close() {
-         // eslint-disable-next-line @typescript-eslint/no-floating-promises
-         this.file.close();
-     }
-     // HIGH LEVEL METHODS
-     /** Yield one row at a time */
-     async *rowIterator(props) {
-         for await (const rows of this.rowBatchIterator(props)) {
-             // yield *rows
-             for (const row of rows) {
-                 yield row;
-             }
-         }
-     }
-     /** Yield one batch of rows at a time */
-     async *rowBatchIterator(props) {
-         const schema = await this.getSchema();
-         for await (const rowGroup of this.rowGroupIterator(props)) {
-             yield (0, shred_1.materializeRows)(schema, rowGroup);
-         }
-     }
-     /** Iterate over the raw row groups */
-     async *rowGroupIterator(props) {
-         // Ensure strings are nested in arrays
-         const columnList = (props?.columnList || []).map((x) => Array.isArray(x) ? x : [x]);
-         const metadata = await this.getFileMetadata();
-         const schema = await this.getSchema();
-         const rowGroupCount = metadata?.row_groups.length || 0;
-         for (let rowGroupIndex = 0; rowGroupIndex < rowGroupCount; rowGroupIndex++) {
-             const rowGroup = await this.readRowGroup(schema, metadata.row_groups[rowGroupIndex], columnList);
-             yield rowGroup;
-         }
-     }
-     async getRowCount() {
-         const metadata = await this.getFileMetadata();
-         return Number(metadata.num_rows);
-     }
-     async getSchema() {
-         const metadata = await this.getFileMetadata();
-         const root = metadata.schema[0];
-         const { schema: schemaDefinition } = (0, decoders_1.decodeSchema)(metadata.schema, 1, root.num_children);
-         const schema = new schema_1.ParquetSchema(schemaDefinition);
-         return schema;
-     }
-     /**
-      * Returns the user (key/value) metadata for this file
-      * In parquet this is not stored on the schema like it is in arrow
-      */
-     async getSchemaMetadata() {
-         const metadata = await this.getFileMetadata();
-         const md = {};
-         for (const kv of metadata.key_value_metadata) {
-             md[kv.key] = kv.value;
-         }
-         return md;
-     }
-     async getFileMetadata() {
-         if (!this.metadata) {
-             await this.readHeader();
-             this.metadata = await this.readFooter();
-         }
-         return this.metadata;
-     }
-     // LOW LEVEL METHODS
-     /** Validates the magic bytes at the start of the file */
-     async readHeader() {
-         const buffer = await this.file.read(0, constants_1.PARQUET_MAGIC.length);
-         const magic = buffer.toString();
-         switch (magic) {
-             case constants_1.PARQUET_MAGIC:
-                 break;
-             case constants_1.PARQUET_MAGIC_ENCRYPTED:
-                 throw new Error('Encrypted parquet file not supported');
-             default:
-                 throw new Error(`Invalid parquet file (magic=${magic})`);
-         }
-     }
-     /** Metadata is stored in the footer */
-     async readFooter() {
-         const trailerLen = constants_1.PARQUET_MAGIC.length + 4;
-         const trailerBuf = await this.file.read(this.file.size - trailerLen, trailerLen);
-         const magic = trailerBuf.slice(4).toString();
-         if (magic !== constants_1.PARQUET_MAGIC) {
-             throw new Error(`Not a valid parquet file (magic="${magic}")`);
-         }
-         const metadataSize = trailerBuf.readUInt32LE(0);
-         const metadataOffset = this.file.size - metadataSize - trailerLen;
-         if (metadataOffset < constants_1.PARQUET_MAGIC.length) {
-             throw new Error(`Invalid metadata size ${metadataOffset}`);
-         }
-         const metadataBuf = await this.file.read(metadataOffset, metadataSize);
-         // let metadata = new parquet_thrift.FileMetaData();
-         // parquet_util.decodeThrift(metadata, metadataBuf);
-         const { metadata } = (0, read_utils_1.decodeFileMetadata)(metadataBuf);
-         return metadata;
-     }
-     /** Data is stored in row groups (similar to Apache Arrow record batches) */
-     async readRowGroup(schema, rowGroup, columnList) {
-         const buffer = {
-             rowCount: Number(rowGroup.num_rows),
-             columnData: {}
-         };
-         for (const colChunk of rowGroup.columns) {
-             const colMetadata = colChunk.meta_data;
-             const colKey = colMetadata?.path_in_schema;
-             if (columnList.length > 0 && (0, read_utils_1.fieldIndexOf)(columnList, colKey) < 0) {
-                 continue; // eslint-disable-line no-continue
-             }
-             buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
-         }
-         return buffer;
-     }
-     /**
-      * Each row group contains column chunks for all the columns.
-      */
-     async readColumnChunk(schema, colChunk) {
-         if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
-             throw new Error('external references are not supported');
-         }
-         const field = schema.findField(colChunk.meta_data?.path_in_schema);
-         const type = (0, read_utils_1.getThriftEnum)(parquet_thrift_1.Type, colChunk.meta_data?.type);
-         if (type !== field.primitiveType) {
-             throw new Error(`chunk type not matching schema: ${type}`);
-         }
-         const compression = (0, read_utils_1.getThriftEnum)(parquet_thrift_1.CompressionCodec, colChunk.meta_data?.codec);
-         const pagesOffset = Number(colChunk.meta_data?.data_page_offset);
-         let pagesSize = Number(colChunk.meta_data?.total_compressed_size);
-         if (!colChunk.file_path) {
-             pagesSize = Math.min(this.file.size - pagesOffset, Number(colChunk.meta_data?.total_compressed_size));
-         }
-         const options = {
-             type,
-             rLevelMax: field.rLevelMax,
-             dLevelMax: field.dLevelMax,
-             compression,
-             column: field,
-             numValues: colChunk.meta_data?.num_values,
-             dictionary: []
-         };
-         let dictionary;
-         const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;
-         if (dictionaryPageOffset) {
-             const dictionaryOffset = Number(dictionaryPageOffset);
-             // Get the dictionary from the column chunk so that data page index values can be resolved.
-             dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
-         }
-         dictionary = options.dictionary?.length ? options.dictionary : dictionary;
-         const pagesBuf = await this.file.read(pagesOffset, pagesSize);
-         return await (0, decoders_2.decodeDataPages)(pagesBuf, { ...options, dictionary });
-     }
-     /**
-      * Reads the dictionary page, which allows values stored as data page indices to be flattened to real values.
-      * @param dictionaryPageOffset
-      * @param options
-      * @param pagesOffset
-      * @returns
-      */
-     async getDictionary(dictionaryPageOffset, options, pagesOffset) {
-         if (dictionaryPageOffset === 0) {
-             // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);
-             // pagesBuf = await this.read(pagesOffset, dictionarySize);
-             // In this case we are dealing with the parquet-mr file format; the problem is described here:
-             // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding
-             // We need to get the dictionary page from the column chunk if it exists; with the
-             // commented-out code above we would get a DATA_PAGE instead of the DICTIONARY_PAGE.
-             return [];
-         }
-         const dictionarySize = Math.min(this.file.size - dictionaryPageOffset, this.props.defaultDictionarySize);
-         const pagesBuf = await this.file.read(dictionaryPageOffset, dictionarySize);
-         const cursor = { buffer: pagesBuf, offset: 0, size: pagesBuf.length };
-         const decodedPage = await (0, decoders_2.decodePage)(cursor, options);
-         return decodedPage.dictionary;
-     }
- }
- exports.ParquetReader = ParquetReader;
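For context on the API being removed, here is a minimal usage sketch of the ParquetReader deleted above. It assumes only what the class body shows: the constructor takes a file-like object (anything exposing size, read(offset, length) and close()) plus optional props. The require path and the id/name columns are illustrative, not a documented loaders.gl entry point.

// Usage sketch for the deleted ParquetReader (hypothetical import path).
// `file` is assumed to expose size, read(offset, length) and close(),
// as implied by the calls in the class body above.
const {ParquetReader} = require('./parquetjs/parser/parquet-reader');

async function printRows(file) {
  const reader = new ParquetReader(file, {defaultDictionarySize: 1e6});
  try {
    console.log('row count:', await reader.getRowCount());
    // rowIterator() drives rowBatchIterator(), which materializes one row
    // group at a time via materializeRows(schema, rowGroup).
    for await (const row of reader.rowIterator({columnList: ['id', 'name']})) {
      console.log(row);
    }
  } finally {
    reader.close();
  }
}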
package/dist/parquetjs/schema/declare.js
@@ -1,12 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ParquetRowGroup = void 0;
- /**
-  * Holds data for one row group (column chunks) */
- class ParquetRowGroup {
-     constructor(rowCount = 0, columnData = {}) {
-         this.rowCount = rowCount;
-         this.columnData = columnData;
-     }
- }
- exports.ParquetRowGroup = ParquetRowGroup;
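A note on the shape: readRowGroup() in the reader above fills a plain object of exactly this {rowCount, columnData} form rather than constructing the class, keying columnData by the comma-joined path_in_schema of each column chunk. A sketch with an illustrative column path:

// Shape sketch for ParquetRowGroup (illustrative path and value).
const rowGroup = new ParquetRowGroup();
rowGroup.rowCount = 2;
// Keys are comma-joined schema paths (colKey.join() in readRowGroup above,
// cpath.join() in the schema builder below); the value holds the decoded
// pages returned by decodeDataPages() for that column chunk.
rowGroup.columnData['location,lat'] = {};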
package/dist/parquetjs/schema/schema.js
@@ -1,162 +0,0 @@
- "use strict";
- // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.ParquetSchema = void 0;
- const codecs_1 = require("../codecs");
- const compression_1 = require("../compression");
- const shred_1 = require("./shred");
- const types_1 = require("./types");
- /**
-  * A parquet file schema
-  */
- class ParquetSchema {
-     /**
-      * Create a new schema from a JSON schema definition
-      */
-     constructor(schema) {
-         this.schema = schema;
-         this.fields = buildFields(schema, 0, 0, []);
-         this.fieldList = listFields(this.fields);
-     }
-     /**
-      * Retrieve a field definition
-      */
-     findField(path) {
-         if (typeof path === 'string') {
-             // tslint:disable-next-line:no-parameter-reassignment
-             path = path.split(',');
-         }
-         else {
-             // tslint:disable-next-line:no-parameter-reassignment
-             path = path.slice(0); // clone array
-         }
-         let n = this.fields;
-         for (; path.length > 1; path.shift()) {
-             n = n[path[0]].fields;
-         }
-         return n[path[0]];
-     }
-     /**
-      * Retrieve a field definition and all the field's ancestors
-      */
-     findFieldBranch(path) {
-         if (typeof path === 'string') {
-             // tslint:disable-next-line:no-parameter-reassignment
-             path = path.split(',');
-         }
-         const branch = [];
-         let n = this.fields;
-         for (; path.length > 0; path.shift()) {
-             branch.push(n[path[0]]);
-             if (path.length > 1) {
-                 n = n[path[0]].fields;
-             }
-         }
-         return branch;
-     }
-     shredRecord(row, rowGroup) {
-         (0, shred_1.shredRecord)(this, row, rowGroup);
-     }
-     materializeRows(rowGroup) {
-         return (0, shred_1.materializeRows)(this, rowGroup);
-     }
-     compress(type) {
-         setCompress(this.schema, type);
-         setCompress(this.fields, type);
-         return this;
-     }
-     rowGroup() {
-         return (0, shred_1.shredBuffer)(this);
-     }
- }
- exports.ParquetSchema = ParquetSchema;
- function setCompress(schema, type) {
-     for (const name in schema) {
-         const node = schema[name];
-         if (node.fields) {
-             setCompress(node.fields, type);
-         }
-         else {
-             node.compression = type;
-         }
-     }
- }
- // eslint-disable-next-line max-statements, complexity
- function buildFields(schema, rLevelParentMax, dLevelParentMax, path) {
-     const fieldList = {};
-     for (const name in schema) {
-         const opts = schema[name];
-         /* field repetition type */
-         const required = !opts.optional;
-         const repeated = Boolean(opts.repeated);
-         let rLevelMax = rLevelParentMax;
-         let dLevelMax = dLevelParentMax;
-         let repetitionType = 'REQUIRED';
-         if (!required) {
-             repetitionType = 'OPTIONAL';
-             dLevelMax++;
-         }
-         if (repeated) {
-             repetitionType = 'REPEATED';
-             rLevelMax++;
-             if (required)
-                 dLevelMax++;
-         }
-         /* nested field */
-         if (opts.fields) {
-             const cpath = path.concat([name]);
-             fieldList[name] = {
-                 name,
-                 path: cpath,
-                 key: cpath.join(),
-                 repetitionType,
-                 rLevelMax,
-                 dLevelMax,
-                 isNested: true,
-                 fieldCount: Object.keys(opts.fields).length,
-                 fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)
-             };
-             continue; // eslint-disable-line no-continue
-         }
-         const typeDef = types_1.PARQUET_LOGICAL_TYPES[opts.type];
-         if (!typeDef) {
-             throw new Error(`invalid parquet type: ${opts.type}`);
-         }
-         opts.encoding = opts.encoding || 'PLAIN';
-         if (!(opts.encoding in codecs_1.PARQUET_CODECS)) {
-             throw new Error(`unsupported parquet encoding: ${opts.encoding}`);
-         }
-         opts.compression = opts.compression || 'UNCOMPRESSED';
-         if (!(opts.compression in compression_1.PARQUET_COMPRESSION_METHODS)) {
-             throw new Error(`unsupported compression method: ${opts.compression}`);
-         }
-         /* add to schema */
-         const cpath = path.concat([name]);
-         fieldList[name] = {
-             name,
-             primitiveType: typeDef.primitiveType,
-             originalType: typeDef.originalType,
-             path: cpath,
-             key: cpath.join(),
-             repetitionType,
-             encoding: opts.encoding,
-             compression: opts.compression,
-             typeLength: opts.typeLength || typeDef.typeLength,
-             presision: opts.presision,
-             scale: opts.scale,
-             rLevelMax,
-             dLevelMax
-         };
-     }
-     return fieldList;
- }
- function listFields(fields) {
-     let list = [];
-     for (const k in fields) {
-         list.push(fields[k]);
-         if (fields[k].isNested) {
-             list = list.concat(listFields(fields[k].fields));
-         }
-     }
-     return list;
- }
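Finally, a schema-definition sketch showing how buildFields() above assigns repetition and definition levels; the field names are illustrative, and it assumes UTF8/INT64 and GZIP are present in the type and compression tables imported at the top (as they are in the upstream parquets fork). optional raises dLevelMax by one, repeated raises both rLevelMax and dLevelMax by one, and encoding/compression default to 'PLAIN'/'UNCOMPRESSED' when omitted.

// Schema sketch for the deleted ParquetSchema (illustrative field names).
const schema = new ParquetSchema({
  name: {type: 'UTF8'},                      // REQUIRED: rLevelMax 0, dLevelMax 0
  quantity: {type: 'INT64', optional: true}, // OPTIONAL: dLevelMax 1
  colors: {type: 'UTF8', repeated: true},    // REPEATED: rLevelMax 1, dLevelMax 1
  stock: {
    repeated: true, // nested group, handled by the opts.fields branch above
    fields: {
      warehouse: {type: 'UTF8', compression: 'GZIP'}
    }
  }
});
// Field keys are comma-joined paths, so lookups use the same separator:
console.log(schema.findField('stock,warehouse').key); // 'stock,warehouse'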