@loaders.gl/parquet 3.3.0-alpha.8 → 3.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +19 -19
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/index.js +3 -3
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/convert-schema-deep.ts.disabled +910 -0
- package/dist/es5/lib/parse-parquet.js +49 -25
- package/dist/es5/lib/parse-parquet.js.map +1 -1
- package/dist/es5/parquet-loader.js +3 -2
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -1
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +15 -5
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/encoder/{writer.js → parquet-encoder.js} +70 -158
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/es5/parquetjs/parser/parquet-reader.js +553 -222
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js +3 -1
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +39 -33
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/file-utils.js +2 -3
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/esm/index.js +1 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/convert-schema-deep.ts.disabled +910 -0
- package/dist/esm/lib/parse-parquet.js +6 -12
- package/dist/esm/lib/parse-parquet.js.map +1 -1
- package/dist/esm/parquet-loader.js +3 -2
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -1
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -1
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/compression.js +10 -1
- package/dist/esm/parquetjs/compression.js.map +1 -1
- package/dist/esm/parquetjs/encoder/{writer.js → parquet-encoder.js} +7 -37
- package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/esm/parquetjs/parser/parquet-reader.js +158 -72
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/esm/parquetjs/schema/declare.js +1 -0
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/schema/shred.js +42 -34
- package/dist/esm/parquetjs/schema/shred.js.map +1 -1
- package/dist/esm/parquetjs/schema/types.js.map +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +3 -4
- package/dist/lib/parse-parquet.d.ts +2 -2
- package/dist/lib/parse-parquet.d.ts.map +1 -1
- package/dist/lib/parse-parquet.js +24 -12
- package/dist/parquet-loader.d.ts +1 -0
- package/dist/parquet-loader.d.ts.map +1 -1
- package/dist/parquet-loader.js +2 -1
- package/dist/parquet-worker.js +17 -17
- package/dist/parquet-worker.js.map +3 -3
- package/dist/parquetjs/compression.d.ts.map +1 -1
- package/dist/parquetjs/compression.js +16 -5
- package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +10 -19
- package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
- package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js} +39 -37
- package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.js +168 -102
- package/dist/parquetjs/schema/declare.d.ts +14 -7
- package/dist/parquetjs/schema/declare.d.ts.map +1 -1
- package/dist/parquetjs/schema/declare.js +2 -0
- package/dist/parquetjs/schema/shred.d.ts +115 -0
- package/dist/parquetjs/schema/shred.d.ts.map +1 -1
- package/dist/parquetjs/schema/shred.js +161 -43
- package/dist/parquetjs/schema/types.d.ts +2 -2
- package/dist/parquetjs/schema/types.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.d.ts +3 -4
- package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.js +2 -5
- package/package.json +7 -5
- package/src/index.ts +2 -2
- package/src/lib/convert-schema-deep.ts.disabled +910 -0
- package/src/lib/parse-parquet.ts +25 -12
- package/src/parquet-loader.ts +3 -1
- package/src/parquetjs/compression.ts +14 -1
- package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +22 -28
- package/src/parquetjs/parser/parquet-reader.ts +239 -122
- package/src/parquetjs/schema/declare.ts +17 -9
- package/src/parquetjs/schema/shred.ts +157 -28
- package/src/parquetjs/schema/types.ts +21 -27
- package/src/parquetjs/utils/file-utils.ts +3 -4
- package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
- package/dist/es5/parquetjs/file.js +0 -94
- package/dist/es5/parquetjs/file.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -183
- package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -327
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/es5/parquetjs/utils/buffer-utils.js +0 -19
- package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
- package/dist/esm/parquetjs/file.js +0 -81
- package/dist/esm/parquetjs/file.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -78
- package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -129
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/esm/parquetjs/utils/buffer-utils.js +0 -13
- package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
- package/dist/parquetjs/file.d.ts +0 -10
- package/dist/parquetjs/file.d.ts.map +0 -1
- package/dist/parquetjs/file.js +0 -99
- package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
- package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-cursor.js +0 -74
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -136
- package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
- package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
- package/dist/parquetjs/utils/buffer-utils.js +0 -22
- package/src/parquetjs/file.ts +0 -90
- package/src/parquetjs/parser/parquet-cursor.ts +0 -94
- package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
- package/src/parquetjs/utils/buffer-utils.ts +0 -18

package/dist/parquetjs/schema/shred.js
CHANGED

@@ -155,75 +155,193 @@ function shredRecordFields(fields, record, data, rLevel, dLevel) {
  */
 function materializeRecords(schema, buffer) {
     const records = [];
-    for (let i = 0; i < buffer.rowCount; i++)
+    for (let i = 0; i < buffer.rowCount; i++) {
         records.push({});
+    }
     for (const key in buffer.columnData) {
-        materializeColumn(schema, buffer, key, records);
+        const columnData = buffer.columnData[key];
+        if (columnData.count) {
+            materializeColumn(schema, columnData, key, records);
+        }
     }
     return records;
 }
 exports.materializeRecords = materializeRecords;
 // eslint-disable-next-line max-statements, complexity
-function materializeColumn(schema, buffer, key, records) {
-    const data = buffer.columnData[key];
-    if (!data.count)
-        return;
+function materializeColumn(schema, columnData, key, records) {
     const field = schema.findField(key);
     const branch = schema.findFieldBranch(key);
     // tslint:disable-next-line:prefer-array-literal
     const rLevels = new Array(field.rLevelMax + 1).fill(0);
     let vIndex = 0;
-    for (let i = 0; i < data.count; i++) {
-        const dLevel = data.dlevels[i];
-        const rLevel = data.rlevels[i];
+    for (let i = 0; i < columnData.count; i++) {
+        const dLevel = columnData.dlevels[i];
+        const rLevel = columnData.rlevels[i];
         rLevels[rLevel]++;
         rLevels.fill(0, rLevel + 1);
         let rIndex = 0;
        let record = records[rLevels[rIndex++] - 1];
-        // Internal nodes
+        // Internal nodes - Build a nested row object
         for (const step of branch) {
-            if (step === field)
+            if (step === field || dLevel < step.dLevelMax) {
                 break;
-            if (dLevel < step.dLevelMax)
-                break;
-            if (step.repetitionType === 'REPEATED') {
-                if (!(step.name in record)) {
-                    // eslint-disable max-depth
-                    record[step.name] = [];
-                }
-                const ix = rLevels[rIndex++];
-                while (record[step.name].length <= ix) {
-                    // eslint-disable max-depth
-                    record[step.name].push({});
-                }
-                record = record[step.name][ix];
             }
-            else {
-                record[step.name] = record[step.name] || {};
-                record = record[step.name];
+            switch (step.repetitionType) {
+                case 'REPEATED':
+                    if (!(step.name in record)) {
+                        // eslint-disable max-depth
+                        record[step.name] = [];
+                    }
+                    const ix = rLevels[rIndex++];
+                    while (record[step.name].length <= ix) {
+                        // eslint-disable max-depth
+                        record[step.name].push({});
+                    }
+                    record = record[step.name][ix];
+                    break;
+                default:
+                    record[step.name] = record[step.name] || {};
+                    record = record[step.name];
             }
         }
-        // Leaf node
+        // Leaf node - Add the value
        if (dLevel === field.dLevelMax) {
            const value = Types.fromPrimitive(
            // @ts-ignore
-            field.originalType || field.primitiveType, data.values[vIndex], field);
+            field.originalType || field.primitiveType, columnData.values[vIndex], field);
            vIndex++;
-            if (field.repetitionType === 'REPEATED') {
-                if (!(field.name in record)) {
-                    // eslint-disable max-depth
-                    record[field.name] = [];
-                }
-                const ix = rLevels[rIndex];
-                while (record[field.name].length <= ix) {
-                    // eslint-disable max-depth
-                    record[field.name].push(null);
-                }
-                record[field.name][ix] = value;
-            }
-            else {
-                record[field.name] = value;
+            switch (field.repetitionType) {
+                case 'REPEATED':
+                    if (!(field.name in record)) {
+                        // eslint-disable max-depth
+                        record[field.name] = [];
+                    }
+                    const ix = rLevels[rIndex];
+                    while (record[field.name].length <= ix) {
+                        // eslint-disable max-depth
+                        record[field.name].push(null);
+                    }
+                    record[field.name][ix] = value;
+                    break;
+                default:
+                    record[field.name] = value;
             }
         }
     }
 }
+// Columnar export
+/**
+ * 'Materialize' a list of <value, repetition_level, definition_level>
+ * tuples back to nested records (objects/arrays) using the Google Dremel
+ * Algorithm..
+ *
+ * The buffer argument must point to an object with the following structure (i.e.
+ * the same structure that is returned by shredRecords):
+ *
+ * buffer = {
+ *   columnData: [
+ *     'my_col': {
+ *       dlevels: [d1, d2, .. dN],
+ *       rlevels: [r1, r2, .. rN],
+ *       values: [v1, v2, .. vN],
+ *     }, ...
+ *   ],
+ *   rowCount: X,
+ * }
+ *
+export function extractColumns(schema: ParquetSchema, buffer: ParquetBuffer): Record<string, unknown> {
+  const columns: ParquetRecord = {};
+  for (const key in buffer.columnData) {
+    const columnData = buffer.columnData[key];
+    if (columnData.count) {
+      extractColumn(schema, columnData, key, columns);
+    }
+  }
+  return columns;
+}
+
+// eslint-disable-next-line max-statements, complexity
+function extractColumn(
+  schema: ParquetSchema,
+  columnData: ParquetData,
+  key: string,
+  columns: Record<string, unknown>
+) {
+  if (columnData.count <= 0) {
+    return;
+  }
+
+  const record = columns;
+
+  const field = schema.findField(key);
+  const branch = schema.findFieldBranch(key);
+
+  // tslint:disable-next-line:prefer-array-literal
+  const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);
+  let vIndex = 0;
+
+  let i = 0;
+  const dLevel = columnData.dlevels[i];
+  const rLevel = columnData.rlevels[i];
+  rLevels[rLevel]++;
+  rLevels.fill(0, rLevel + 1);
+
+  let rIndex = 0;
+  let record = records[rLevels[rIndex++] - 1];
+
+  // Internal nodes
+  for (const step of branch) {
+    if (step === field || dLevel < step.dLevelMax) {
+      break;
+    }
+
+    switch (step.repetitionType) {
+      case 'REPEATED':
+        if (!(step.name in record)) {
+          // eslint-disable max-depth
+          record[step.name] = [];
+        }
+        const ix = rLevels[rIndex++];
+        while (record[step.name].length <= ix) {
+          // eslint-disable max-depth
+          record[step.name].push({});
+        }
+        record = record[step.name][ix];
+        break;
+
+      default:
+        record[step.name] = record[step.name] || {};
+        record = record[step.name];
+    }
+  }
+
+  // Leaf node
+  if (dLevel === field.dLevelMax) {
+    const value = Types.fromPrimitive(
+      // @ts-ignore
+      field.originalType || field.primitiveType,
+      columnData.values[vIndex],
+      field
+    );
+    vIndex++;
+
+    switch (field.repetitionType) {
+      case 'REPEATED':
+        if (!(field.name in record)) {
+          // eslint-disable max-depth
+          record[field.name] = [];
+        }
+        const ix = rLevels[rIndex];
+        while (record[field.name].length <= ix) {
+          // eslint-disable max-depth
+          record[field.name].push(null);
+        }
+        record[field.name][ix] = value;
+        break;
+
+      default:
+        record[field.name] = value;
    }
  }
+}
+*/
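
For orientation, here is a minimal sketch of what the rewritten materializeRecords()/materializeColumn() pair consumes, hand-built in the shape the disabled comment block above documents. This is a hedged example, not an official API: the deep import path is an assumption (shred is an internal module, not re-exported from the package index), and the Buffer-encoded UTF8 values follow the parquetjs convention this code was forked from.

import {ParquetSchema} from '@loaders.gl/parquet';
// Assumed internal module path:
import {materializeRecords} from '@loaders.gl/parquet/dist/esm/parquetjs/schema/shred';

const schema = new ParquetSchema({name: {type: 'UTF8'}});

// Same structure that shredRecords returns: parallel dlevel/rlevel/value
// arrays per column, plus a per-column count and an overall row count.
const buffer = {
  rowCount: 2,
  columnData: {
    name: {
      dlevels: [0, 0],
      rlevels: [0, 0],
      values: [Buffer.from('alice'), Buffer.from('bob')],
      count: 2
    }
  }
};

// For a required top-level column dLevelMax is 0, so every entry
// materializes straight onto its row object.
console.log(materializeRecords(schema, buffer));
// -> [{name: 'alice'}, {name: 'bob'}]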

package/dist/parquetjs/schema/types.d.ts
CHANGED

@@ -11,10 +11,10 @@ export declare const PARQUET_LOGICAL_TYPES: Record<ParquetType, ParquetTypeKit>;
  * Convert a value from it's native representation to the internal/underlying
  * primitive type
  */
-export declare function toPrimitive(type: ParquetType, value:
+export declare function toPrimitive(type: ParquetType, value: unknown, field?: ParquetField): unknown;
 /**
  * Convert a value from it's internal/underlying primitive representation to
  * the native representation
  */
-export declare function fromPrimitive(type: ParquetType, value:
+export declare function fromPrimitive(type: ParquetType, value: unknown, field?: ParquetField): any;
 //# sourceMappingURL=types.d.ts.map
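
The widened declarations match how the schema layer round-trips values between their native and stored forms. A small sketch, assuming the internal module path and the parquetjs convention that UTF8 is stored as a byte buffer:

import {toPrimitive, fromPrimitive} from '@loaders.gl/parquet/dist/esm/parquetjs/schema/types';

// Native -> underlying primitive representation (string to stored bytes)
const stored = toPrimitive('UTF8', 'hello');
// Underlying primitive -> native representation
const roundTripped = fromPrimitive('UTF8', stored); // 'hello'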

package/dist/parquetjs/schema/types.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/types.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,YAAY,EAAE,YAAY,EAAE,WAAW,EAAE,aAAa,EAAC,MAAM,WAAW,CAAC;AAEjF,MAAM,WAAW,cAAc;IAC7B,aAAa,EAAE,aAAa,CAAC;IAC7B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE,QAAQ,CAAC;IACtB,aAAa,CAAC,EAAE,QAAQ,CAAC;CAC1B;AAED,eAAO,MAAM,qBAAqB,EAAE,MAAM,CAAC,WAAW,EAAE,cAAc,CAuJrE,CAAC;AAEF;;;GAGG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE,
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/schema/types.ts"],"names":[],"mappings":"AAGA,OAAO,EAAC,YAAY,EAAE,YAAY,EAAE,WAAW,EAAE,aAAa,EAAC,MAAM,WAAW,CAAC;AAEjF,MAAM,WAAW,cAAc;IAC7B,aAAa,EAAE,aAAa,CAAC;IAC7B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE,QAAQ,CAAC;IACtB,aAAa,CAAC,EAAE,QAAQ,CAAC;CAC1B;AAED,eAAO,MAAM,qBAAqB,EAAE,MAAM,CAAC,WAAW,EAAE,cAAc,CAuJrE,CAAC;AAEF;;;GAGG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,YAAY,GAAG,OAAO,CAM5F;AAED;;;GAGG;AACH,wBAAgB,aAAa,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,EAAE,YAAY,OAUpF"}

package/dist/parquetjs/utils/file-utils.d.ts
CHANGED

@@ -1,8 +1,7 @@
 /// <reference types="node" />
 /// <reference types="node" />
 /// <reference types="node" />
-import fs from 'fs';
-import { Writable } from 'stream';
+import { fs, stream } from '@loaders.gl/loader-utils';
 export declare function load(name: string): any;
 export interface WriteStreamOptions {
     flags?: string;
@@ -12,7 +11,7 @@ export interface WriteStreamOptions {
     autoClose?: boolean;
     start?: number;
 }
-export declare function oswrite(os: Writable, buf: Buffer): Promise<void>;
-export declare function osclose(os: Writable): Promise<void>;
+export declare function oswrite(os: stream.Writable, buf: Buffer): Promise<void>;
+export declare function osclose(os: stream.Writable): Promise<void>;
 export declare function osopen(path: string, opts?: WriteStreamOptions): Promise<fs.WriteStream>;
 //# sourceMappingURL=file-utils.d.ts.map

package/dist/parquetjs/utils/file-utils.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"file-utils.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/utils/file-utils.ts"],"names":[],"mappings":";;;AACA,OAAO,EAAE,MAAM,
+{"version":3,"file":"file-utils.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/utils/file-utils.ts"],"names":[],"mappings":";;;AACA,OAAO,EAAC,EAAE,EAAE,MAAM,EAAC,MAAM,0BAA0B,CAAC;AAEpD,wBAAgB,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,GAAG,CAEtC;AACD,MAAM,WAAW,kBAAkB;IACjC,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,wBAAgB,OAAO,CAAC,EAAE,EAAE,MAAM,CAAC,QAAQ,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAUvE;AAED,wBAAgB,OAAO,CAAC,EAAE,EAAE,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAU1D;AAED,wBAAgB,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,kBAAkB,GAAG,OAAO,CAAC,EAAE,CAAC,WAAW,CAAC,CAMvF"}

package/dist/parquetjs/utils/file-utils.js
CHANGED

@@ -1,11 +1,8 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.osopen = exports.osclose = exports.oswrite = exports.load = void 0;
 // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
-const fs_1 = __importDefault(require("fs"));
+const loader_utils_1 = require("@loaders.gl/loader-utils");
 function load(name) {
     return (module || global).require(name);
 }
@@ -38,7 +35,7 @@ function osclose(os) {
 exports.osclose = osclose;
 function osopen(path, opts) {
     return new Promise((resolve, reject) => {
-        const outputStream = fs_1.default.createWriteStream(path, opts);
+        const outputStream = loader_utils_1.fs.createWriteStream(path, opts);
         outputStream.once('open', (fd) => resolve(outputStream));
         outputStream.once('error', (err) => reject(err));
     });
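
The thrust of this file's change: Node built-ins are no longer required directly, they arrive via @loaders.gl/loader-utils, which the new "stream": false / "fs": false browser-field entries in package.json (below) can stub out for web bundles. A sketch of the same open-then-resolve idiom the compiled osopen() above uses; the helper name here is hypothetical:

import {fs} from '@loaders.gl/loader-utils';

// Resolve once the file descriptor is open, reject on error - mirrors osopen().
function openWriteStream(path: string): Promise<fs.WriteStream> {
  return new Promise((resolve, reject) => {
    const outputStream = fs.createWriteStream(path);
    outputStream.once('open', () => resolve(outputStream));
    outputStream.once('error', (err: Error) => reject(err));
  });
}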
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@loaders.gl/parquet",
-  "version": "3.3.0-alpha.8",
+  "version": "3.3.1",
   "description": "Framework-independent loader for Apache Parquet files",
   "license": "MIT",
   "publishConfig": {
@@ -37,12 +37,14 @@
     "net": false,
     "tls": false,
     "lzo": false,
+    "stream": false,
+    "fs": false,
     "./src/lib/wasm/load-wasm/load-wasm-node.ts": "./src/lib/wasm/load-wasm/load-wasm-browser.ts"
   },
   "dependencies": {
-    "@loaders.gl/compression": "3.3.
-    "@loaders.gl/loader-utils": "3.3.
-    "@loaders.gl/schema": "3.3.
+    "@loaders.gl/compression": "3.3.1",
+    "@loaders.gl/loader-utils": "3.3.1",
+    "@loaders.gl/schema": "3.3.1",
     "async-mutex": "^0.2.2",
     "brotli": "^1.3.2",
     "bson": "^1.0.4",
@@ -68,5 +70,5 @@
     "@types/varint": "^5.0.0",
     "apache-arrow": "^4.0.0"
   },
-  "gitHead": "
+  "gitHead": "51632b5948e496a4b75e970030ad7579650c129d"
 }
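
The two added browser-field entries complete the file-utils refactor above: under the package.json "browser" field convention, mapping a module name to false tells bundlers to substitute an empty module, so fs and stream never enter web builds. Code paths shared between Node and the browser should therefore feature-detect rather than assume the API exists; a hedged sketch:

import {fs} from '@loaders.gl/loader-utils';

// In a browser bundle fs resolves to an empty stub, so guard before use.
const canWriteFiles = typeof fs?.createWriteStream === 'function';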
package/src/index.ts
CHANGED

@@ -32,8 +32,8 @@ export {preloadCompressions} from './parquetjs/compression';
 
 export {ParquetSchema} from './parquetjs/schema/schema';
 export {ParquetReader} from './parquetjs/parser/parquet-reader';
-export {
-
+export {ParquetEncoder} from './parquetjs/encoder/parquet-encoder';
+
 export {convertParquetToArrowSchema} from './lib/convert-schema';
 
 // TESTS