@loaders.gl/parquet 3.3.0-alpha.8 → 3.3.1
This diff shows the published contents of two versions of this package as they appear in their public registry. It is provided for informational purposes only.
- package/dist/dist.min.js +19 -19
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/index.js +3 -3
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/convert-schema-deep.ts.disabled +910 -0
- package/dist/es5/lib/parse-parquet.js +49 -25
- package/dist/es5/lib/parse-parquet.js.map +1 -1
- package/dist/es5/parquet-loader.js +3 -2
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -1
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +15 -5
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/encoder/{writer.js → parquet-encoder.js} +70 -158
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/es5/parquetjs/parser/parquet-reader.js +553 -222
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js +3 -1
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +39 -33
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/file-utils.js +2 -3
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/esm/index.js +1 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/convert-schema-deep.ts.disabled +910 -0
- package/dist/esm/lib/parse-parquet.js +6 -12
- package/dist/esm/lib/parse-parquet.js.map +1 -1
- package/dist/esm/parquet-loader.js +3 -2
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -1
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -1
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/compression.js +10 -1
- package/dist/esm/parquetjs/compression.js.map +1 -1
- package/dist/esm/parquetjs/encoder/{writer.js → parquet-encoder.js} +7 -37
- package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/esm/parquetjs/parser/parquet-reader.js +158 -72
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/esm/parquetjs/schema/declare.js +1 -0
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/schema/shred.js +42 -34
- package/dist/esm/parquetjs/schema/shred.js.map +1 -1
- package/dist/esm/parquetjs/schema/types.js.map +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +3 -4
- package/dist/lib/parse-parquet.d.ts +2 -2
- package/dist/lib/parse-parquet.d.ts.map +1 -1
- package/dist/lib/parse-parquet.js +24 -12
- package/dist/parquet-loader.d.ts +1 -0
- package/dist/parquet-loader.d.ts.map +1 -1
- package/dist/parquet-loader.js +2 -1
- package/dist/parquet-worker.js +17 -17
- package/dist/parquet-worker.js.map +3 -3
- package/dist/parquetjs/compression.d.ts.map +1 -1
- package/dist/parquetjs/compression.js +16 -5
- package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +10 -19
- package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
- package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js} +39 -37
- package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.js +168 -102
- package/dist/parquetjs/schema/declare.d.ts +14 -7
- package/dist/parquetjs/schema/declare.d.ts.map +1 -1
- package/dist/parquetjs/schema/declare.js +2 -0
- package/dist/parquetjs/schema/shred.d.ts +115 -0
- package/dist/parquetjs/schema/shred.d.ts.map +1 -1
- package/dist/parquetjs/schema/shred.js +161 -43
- package/dist/parquetjs/schema/types.d.ts +2 -2
- package/dist/parquetjs/schema/types.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.d.ts +3 -4
- package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.js +2 -5
- package/package.json +7 -5
- package/src/index.ts +2 -2
- package/src/lib/convert-schema-deep.ts.disabled +910 -0
- package/src/lib/parse-parquet.ts +25 -12
- package/src/parquet-loader.ts +3 -1
- package/src/parquetjs/compression.ts +14 -1
- package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +22 -28
- package/src/parquetjs/parser/parquet-reader.ts +239 -122
- package/src/parquetjs/schema/declare.ts +17 -9
- package/src/parquetjs/schema/shred.ts +157 -28
- package/src/parquetjs/schema/types.ts +21 -27
- package/src/parquetjs/utils/file-utils.ts +3 -4
- package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
- package/dist/es5/parquetjs/file.js +0 -94
- package/dist/es5/parquetjs/file.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -183
- package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -327
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/es5/parquetjs/utils/buffer-utils.js +0 -19
- package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
- package/dist/esm/parquetjs/file.js +0 -81
- package/dist/esm/parquetjs/file.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -78
- package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -129
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/esm/parquetjs/utils/buffer-utils.js +0 -13
- package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
- package/dist/parquetjs/file.d.ts +0 -10
- package/dist/parquetjs/file.d.ts.map +0 -1
- package/dist/parquetjs/file.js +0 -99
- package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
- package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-cursor.js +0 -74
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -136
- package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
- package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
- package/dist/parquetjs/utils/buffer-utils.js +0 -22
- package/src/parquetjs/file.ts +0 -90
- package/src/parquetjs/parser/parquet-cursor.ts +0 -94
- package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
- package/src/parquetjs/utils/buffer-utils.ts +0 -18
package/dist/es5/parquetjs/parser/parquet-cursor.js
@@ -1,183 +0,0 @@
-"use strict";
-
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.ParquetCursor = void 0;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
-var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
-var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-var _shred = require("../schema/shred");
-var _Symbol$asyncIterator;
-_Symbol$asyncIterator = Symbol.asyncIterator;
-var ParquetCursor = function () {
-  function ParquetCursor(metadata, envelopeReader, schema, columnList) {
-    (0, _classCallCheck2.default)(this, ParquetCursor);
-    (0, _defineProperty2.default)(this, "metadata", void 0);
-    (0, _defineProperty2.default)(this, "envelopeReader", void 0);
-    (0, _defineProperty2.default)(this, "schema", void 0);
-    (0, _defineProperty2.default)(this, "columnList", void 0);
-    (0, _defineProperty2.default)(this, "rowGroup", []);
-    (0, _defineProperty2.default)(this, "rowGroupIndex", void 0);
-    this.metadata = metadata;
-    this.envelopeReader = envelopeReader;
-    this.schema = schema;
-    this.columnList = columnList;
-    this.rowGroupIndex = 0;
-  }
-
-  (0, _createClass2.default)(ParquetCursor, [{
-    key: "next",
-    value: function () {
-      var _next = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee() {
-        var rowBuffer;
-        return _regenerator.default.wrap(function _callee$(_context) {
-          while (1) {
-            switch (_context.prev = _context.next) {
-              case 0:
-                if (!(this.rowGroup.length === 0)) {
-                  _context.next = 8;
-                  break;
-                }
-                if (!(this.rowGroupIndex >= this.metadata.row_groups.length)) {
-                  _context.next = 3;
-                  break;
-                }
-                return _context.abrupt("return", null);
-              case 3:
-                _context.next = 5;
-                return this.envelopeReader.readRowGroup(this.schema, this.metadata.row_groups[this.rowGroupIndex], this.columnList);
-              case 5:
-                rowBuffer = _context.sent;
-                this.rowGroup = (0, _shred.materializeRecords)(this.schema, rowBuffer);
-                this.rowGroupIndex++;
-              case 8:
-                return _context.abrupt("return", this.rowGroup.shift());
-              case 9:
-              case "end":
-                return _context.stop();
-            }
-          }
-        }, _callee, this);
-      }));
-      function next() {
-        return _next.apply(this, arguments);
-      }
-      return next;
-    }()
-  }, {
-    key: "rewind",
-    value:
-    function rewind() {
-      this.rowGroup = [];
-      this.rowGroupIndex = 0;
-    }
-
-  }, {
-    key: _Symbol$asyncIterator,
-    value:
-    function value() {
-      var _this = this;
-      var done = false;
-      return {
-        next: function () {
-          var _next2 = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee2() {
-            var value;
-            return _regenerator.default.wrap(function _callee2$(_context2) {
-              while (1) {
-                switch (_context2.prev = _context2.next) {
-                  case 0:
-                    if (!done) {
-                      _context2.next = 2;
-                      break;
-                    }
-                    return _context2.abrupt("return", {
-                      done: done,
-                      value: null
-                    });
-                  case 2:
-                    _context2.next = 4;
-                    return _this.next();
-                  case 4:
-                    value = _context2.sent;
-                    if (!(value === null)) {
-                      _context2.next = 7;
-                      break;
-                    }
-                    return _context2.abrupt("return", {
-                      done: true,
-                      value: value
-                    });
-                  case 7:
-                    return _context2.abrupt("return", {
-                      done: false,
-                      value: value
-                    });
-                  case 8:
-                  case "end":
-                    return _context2.stop();
-                }
-              }
-            }, _callee2);
-          }));
-          function next() {
-            return _next2.apply(this, arguments);
-          }
-          return next;
-        }(),
-        return: function () {
-          var _return2 = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee3() {
-            return _regenerator.default.wrap(function _callee3$(_context3) {
-              while (1) {
-                switch (_context3.prev = _context3.next) {
-                  case 0:
-                    done = true;
-                    return _context3.abrupt("return", {
-                      done: done,
-                      value: null
-                    });
-                  case 2:
-                  case "end":
-                    return _context3.stop();
-                }
-              }
-            }, _callee3);
-          }));
-          function _return() {
-            return _return2.apply(this, arguments);
-          }
-          return _return;
-        }(),
-        throw: function () {
-          var _throw2 = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee4() {
-            return _regenerator.default.wrap(function _callee4$(_context4) {
-              while (1) {
-                switch (_context4.prev = _context4.next) {
-                  case 0:
-                    done = true;
-                    return _context4.abrupt("return", {
-                      done: true,
-                      value: null
-                    });
-                  case 2:
-                  case "end":
-                    return _context4.stop();
-                }
-              }
-            }, _callee4);
-          }));
-          function _throw() {
-            return _throw2.apply(this, arguments);
-          }
-          return _throw;
-        }()
-      };
-    }
-  }]);
-  return ParquetCursor;
-}();
-exports.ParquetCursor = ParquetCursor;
-//# sourceMappingURL=parquet-cursor.js.map
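The file removed above is Babel-transpiled output; the sourcemap hunk that follows carries the original TypeScript in its `sourcesContent`. For readers skimming the diff, here is a minimal sketch of the async-iteration contract the removed `ParquetCursor` implemented — buffer one row group at a time and hand out rows until the groups run out. Names like `RowCursor` and `readNextGroup` are illustrative, not part of the package's API:

```ts
// Sketch of the removed cursor's contract in modern syntax (illustrative names).
class RowCursor<T> implements AsyncIterable<T> {
  private rows: T[] = [];

  // `readNextGroup` stands in for ParquetEnvelopeReader.readRowGroup():
  // it resolves to the next row group's rows, or null at end of file.
  constructor(private readNextGroup: () => Promise<T[] | null>) {}

  async next(): Promise<T | null> {
    if (this.rows.length === 0) {
      const group = await this.readNextGroup();
      if (!group) return null; // end of file
      this.rows = group;
    }
    return this.rows.shift()!;
  }

  async *[Symbol.asyncIterator]() {
    let row = await this.next();
    while (row !== null) {
      yield row;
      row = await this.next();
    }
  }
}
```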
package/dist/es5/parquetjs/parser/parquet-cursor.js.map
@@ -1 +0,0 @@
-{"version":3,"file":"parquet-cursor.js","names":["Symbol","asyncIterator","ParquetCursor","metadata","envelopeReader","schema","columnList","rowGroupIndex","rowGroup","length","row_groups","readRowGroup","rowBuffer","materializeRecords","shift","done","next","value","return","throw"],"sources":["../../../../src/parquetjs/parser/parquet-cursor.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {FileMetaData} from '../parquet-thrift';\nimport {ParquetEnvelopeReader} from './parquet-envelope-reader';\nimport {ParquetSchema} from '../schema/schema';\nimport {ParquetRecord} from '../schema/declare';\nimport {materializeRecords} from '../schema/shred';\n\n/**\n * A parquet cursor is used to retrieve rows from a parquet file in order\n */\nexport class ParquetCursor<T> implements AsyncIterable<T> {\n public metadata: FileMetaData;\n public envelopeReader: ParquetEnvelopeReader;\n public schema: ParquetSchema;\n public columnList: string[][];\n public rowGroup: ParquetRecord[] = [];\n public rowGroupIndex: number;\n\n /**\n * Create a new parquet reader from the file metadata and an envelope reader.\n * It is usually not recommended to call this constructor directly except for\n * advanced and internal use cases. Consider using getCursor() on the\n * ParquetReader instead\n */\n constructor(\n metadata: FileMetaData,\n envelopeReader: ParquetEnvelopeReader,\n schema: ParquetSchema,\n columnList: string[][]\n ) {\n this.metadata = metadata;\n this.envelopeReader = envelopeReader;\n this.schema = schema;\n this.columnList = columnList;\n this.rowGroupIndex = 0;\n }\n\n /**\n * Retrieve the next row from the cursor. Returns a row or NULL if the end\n * of the file was reached\n */\n async next<T = any>(): Promise<T> {\n if (this.rowGroup.length === 0) {\n if (this.rowGroupIndex >= this.metadata.row_groups.length) {\n // @ts-ignore\n return null;\n }\n const rowBuffer = await this.envelopeReader.readRowGroup(\n this.schema,\n this.metadata.row_groups[this.rowGroupIndex],\n this.columnList\n );\n this.rowGroup = materializeRecords(this.schema, rowBuffer);\n this.rowGroupIndex++;\n }\n return this.rowGroup.shift() as any;\n }\n\n /**\n * Rewind the cursor the the beginning of the file\n */\n rewind(): void {\n this.rowGroup = [];\n this.rowGroupIndex = 0;\n }\n\n /**\n * Implement AsyncIterable\n */\n // tslint:disable-next-line:function-name\n [Symbol.asyncIterator](): AsyncIterator<T> {\n let done = false;\n return {\n next: async () => {\n if (done) {\n return {done, value: null};\n }\n const value = await this.next();\n if (value === null) {\n return {done: true, value};\n }\n return {done: false, value};\n },\n return: async () => {\n done = true;\n return {done, value: null};\n },\n throw: async () => {\n done = true;\n return {done: true, value: null};\n }\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;AAKA;AAAmD;AAAA,wBAiEhDA,MAAM,CAACC,aAAa;AAAA,IA5DVC,aAAa;EAcxB,uBACEC,QAAsB,EACtBC,cAAqC,EACrCC,MAAqB,EACrBC,UAAsB,EACtB;IAAA;IAAA;IAAA;IAAA;IAAA;IAAA,gDAdiC,EAAE;IAAA;IAenC,IAAI,CAACH,QAAQ,GAAGA,QAAQ;IACxB,IAAI,CAACC,cAAc,GAAGA,cAAc;IACpC,IAAI,CAACC,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,UAAU,GAAGA,UAAU;IAC5B,IAAI,CAACC,aAAa,GAAG,CAAC;EACxB;;EAAC;IAAA;IAAA;MAAA,sEAMD;QAAA;QAAA;UAAA;YAAA;cAAA;gBAAA,MACM,IAAI,CAACC,QAAQ,CAACC,MAAM,KAAK,CAAC;kBAAA;kBAAA;gBAAA;gBAAA,MACxB,IAAI,CAACF,aAAa,IAAI,IAAI,CAACJ,QAAQ,CAACO,UAAU,CAACD,MAAM;kBAAA;kBAAA;gBAAA;gBAAA,iCAEhD,IAAI;cAAA;gBAAA;gBAAA,OAEW,IAAI,CAACL,cAAc,CAACO,YAAY,CACtD,IAAI,CAACN,MAAM,EACX,IAAI,CAACF,QAAQ,CAACO,UAAU,CAAC,IAAI,CAACH,aAAa,CAAC,EAC5C,IAAI,CAACD,UAAU,CAChB;cAAA;gBAJKM,SAAS;gBAKf,IAAI,CAACJ,QAAQ,GAAG,IAAAK,yBAAkB,EAAC,IAAI,CAACR,MAAM,EAAEO,SAAS,CAAC;gBAC1D,IAAI,CAACL,aAAa,EAAE;cAAC;gBAAA,iCAEhB,IAAI,CAACC,QAAQ,CAACM,KAAK,EAAE;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAC7B;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;IAKD,kBAAe;MACb,IAAI,CAACN,QAAQ,GAAG,EAAE;MAClB,IAAI,CAACD,aAAa,GAAG,CAAC;IACxB;;EAAC;IAAA;IAAA;IAMD,iBAA2C;MAAA;MACzC,IAAIQ,IAAI,GAAG,KAAK;MAChB,OAAO;QACLC,IAAI;UAAA,uEAAE;YAAA;YAAA;cAAA;gBAAA;kBAAA;oBAAA,KACAD,IAAI;sBAAA;sBAAA;oBAAA;oBAAA,kCACC;sBAACA,IAAI,EAAJA,IAAI;sBAAEE,KAAK,EAAE;oBAAI,CAAC;kBAAA;oBAAA;oBAAA,OAER,KAAI,CAACD,IAAI,EAAE;kBAAA;oBAAzBC,KAAK;oBAAA,MACPA,KAAK,KAAK,IAAI;sBAAA;sBAAA;oBAAA;oBAAA,kCACT;sBAACF,IAAI,EAAE,IAAI;sBAAEE,KAAK,EAALA;oBAAK,CAAC;kBAAA;oBAAA,kCAErB;sBAACF,IAAI,EAAE,KAAK;sBAAEE,KAAK,EAALA;oBAAK,CAAC;kBAAA;kBAAA;oBAAA;gBAAA;cAAA;YAAA;UAAA,CAC5B;UAAA;YAAA;UAAA;UAAA;QAAA;QACDC,MAAM;UAAA,yEAAE;YAAA;cAAA;gBAAA;kBAAA;oBACNH,IAAI,GAAG,IAAI;oBAAC,kCACL;sBAACA,IAAI,EAAJA,IAAI;sBAAEE,KAAK,EAAE;oBAAI,CAAC;kBAAA;kBAAA;oBAAA;gBAAA;cAAA;YAAA;UAAA,CAC3B;UAAA;YAAA;UAAA;UAAA;QAAA;QACDE,KAAK;UAAA,wEAAE;YAAA;cAAA;gBAAA;kBAAA;oBACLJ,IAAI,GAAG,IAAI;oBAAC,kCACL;sBAACA,IAAI,EAAE,IAAI;sBAAEE,KAAK,EAAE;oBAAI,CAAC;kBAAA;kBAAA;oBAAA;gBAAA;cAAA;YAAA;UAAA,CACjC;UAAA;YAAA;UAAA;UAAA;QAAA;MACH,CAAC;IACH;EAAC;EAAA;AAAA;AAAA"}
package/dist/es5/parquetjs/parser/parquet-envelope-reader.js
@@ -1,327 +0,0 @@
-"use strict";
-
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.ParquetEnvelopeReader = void 0;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
-var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
-var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-var _constants = require("../../constants");
-var _parquetThrift = require("../parquet-thrift");
-var _readUtils = require("../utils/read-utils");
-var _decoders = require("./decoders");
-function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
-function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
-function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
-function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
-var DEFAULT_DICTIONARY_SIZE = 1e6;
-
-var ParquetEnvelopeReader = function () {
-  function ParquetEnvelopeReader(read, close, fileSize, options) {
-    (0, _classCallCheck2.default)(this, ParquetEnvelopeReader);
-    (0, _defineProperty2.default)(this, "read", void 0);
-    (0, _defineProperty2.default)(this, "close", void 0);
-    (0, _defineProperty2.default)(this, "fileSize", void 0);
-    (0, _defineProperty2.default)(this, "defaultDictionarySize", void 0);
-    this.read = read;
-    this.close = close;
-    this.fileSize = fileSize;
-    this.defaultDictionarySize = (options === null || options === void 0 ? void 0 : options.defaultDictionarySize) || DEFAULT_DICTIONARY_SIZE;
-  }
-  (0, _createClass2.default)(ParquetEnvelopeReader, [{
-    key: "readHeader",
-    value: function () {
-      var _readHeader = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee() {
-        var buffer, magic;
-        return _regenerator.default.wrap(function _callee$(_context) {
-          while (1) {
-            switch (_context.prev = _context.next) {
-              case 0:
-                _context.next = 2;
-                return this.read(0, _constants.PARQUET_MAGIC.length);
-              case 2:
-                buffer = _context.sent;
-                magic = buffer.toString();
-                _context.t0 = magic;
-                _context.next = _context.t0 === _constants.PARQUET_MAGIC ? 7 : _context.t0 === _constants.PARQUET_MAGIC_ENCRYPTED ? 8 : 9;
-                break;
-              case 7:
-                return _context.abrupt("break", 10);
-              case 8:
-                throw new Error('Encrypted parquet file not supported');
-              case 9:
-                throw new Error("Invalid parquet file (magic=".concat(magic, ")"));
-              case 10:
-              case "end":
-                return _context.stop();
-            }
-          }
-        }, _callee, this);
-      }));
-      function readHeader() {
-        return _readHeader.apply(this, arguments);
-      }
-      return readHeader;
-    }()
-  }, {
-    key: "readRowGroup",
-    value: function () {
-      var _readRowGroup = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee2(schema, rowGroup, columnList) {
-        var buffer, _iterator, _step, colChunk, colMetadata, colKey;
-        return _regenerator.default.wrap(function _callee2$(_context2) {
-          while (1) {
-            switch (_context2.prev = _context2.next) {
-              case 0:
-                buffer = {
-                  rowCount: Number(rowGroup.num_rows),
-                  columnData: {}
-                };
-                _iterator = _createForOfIteratorHelper(rowGroup.columns);
-                _context2.prev = 2;
-                _iterator.s();
-              case 4:
-                if ((_step = _iterator.n()).done) {
-                  _context2.next = 15;
-                  break;
-                }
-                colChunk = _step.value;
-                colMetadata = colChunk.meta_data;
-                colKey = colMetadata === null || colMetadata === void 0 ? void 0 : colMetadata.path_in_schema;
-                if (!(columnList.length > 0 && (0, _readUtils.fieldIndexOf)(columnList, colKey) < 0)) {
-                  _context2.next = 10;
-                  break;
-                }
-                return _context2.abrupt("continue", 13);
-              case 10:
-                _context2.next = 12;
-                return this.readColumnChunk(schema, colChunk);
-              case 12:
-                buffer.columnData[colKey.join()] = _context2.sent;
-              case 13:
-                _context2.next = 4;
-                break;
-              case 15:
-                _context2.next = 20;
-                break;
-              case 17:
-                _context2.prev = 17;
-                _context2.t0 = _context2["catch"](2);
-                _iterator.e(_context2.t0);
-              case 20:
-                _context2.prev = 20;
-                _iterator.f();
-                return _context2.finish(20);
-              case 23:
-                return _context2.abrupt("return", buffer);
-              case 24:
-              case "end":
-                return _context2.stop();
-            }
-          }
-        }, _callee2, this, [[2, 17, 20, 23]]);
-      }));
-      function readRowGroup(_x, _x2, _x3) {
-        return _readRowGroup.apply(this, arguments);
-      }
-      return readRowGroup;
-    }()
-  }, {
-    key: "readColumnChunk",
-    value: function () {
-      var _readColumnChunk = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee3(schema, colChunk) {
-        var _colChunk$meta_data, _colChunk$meta_data2, _colChunk$meta_data3, _colChunk$meta_data4, _colChunk$meta_data5, _colChunk$meta_data7, _colChunk$meta_data8, _options$dictionary;
-        var field, type, compression, pagesOffset, pagesSize, _colChunk$meta_data6, options, dictionary, dictionaryPageOffset, dictionaryOffset, pagesBuf;
-        return _regenerator.default.wrap(function _callee3$(_context3) {
-          while (1) {
-            switch (_context3.prev = _context3.next) {
-              case 0:
-                if (!(colChunk.file_path !== undefined && colChunk.file_path !== null)) {
-                  _context3.next = 2;
-                  break;
-                }
-                throw new Error('external references are not supported');
-              case 2:
-                field = schema.findField((_colChunk$meta_data = colChunk.meta_data) === null || _colChunk$meta_data === void 0 ? void 0 : _colChunk$meta_data.path_in_schema);
-                type = (0, _readUtils.getThriftEnum)(_parquetThrift.Type, (_colChunk$meta_data2 = colChunk.meta_data) === null || _colChunk$meta_data2 === void 0 ? void 0 : _colChunk$meta_data2.type);
-                if (!(type !== field.primitiveType)) {
-                  _context3.next = 6;
-                  break;
-                }
-                throw new Error("chunk type not matching schema: ".concat(type));
-              case 6:
-                compression = (0, _readUtils.getThriftEnum)(_parquetThrift.CompressionCodec, (_colChunk$meta_data3 = colChunk.meta_data) === null || _colChunk$meta_data3 === void 0 ? void 0 : _colChunk$meta_data3.codec);
-                pagesOffset = Number((_colChunk$meta_data4 = colChunk.meta_data) === null || _colChunk$meta_data4 === void 0 ? void 0 : _colChunk$meta_data4.data_page_offset);
-                pagesSize = Number((_colChunk$meta_data5 = colChunk.meta_data) === null || _colChunk$meta_data5 === void 0 ? void 0 : _colChunk$meta_data5.total_compressed_size);
-                if (!colChunk.file_path) {
-                  pagesSize = Math.min(this.fileSize - pagesOffset, Number((_colChunk$meta_data6 = colChunk.meta_data) === null || _colChunk$meta_data6 === void 0 ? void 0 : _colChunk$meta_data6.total_compressed_size));
-                }
-                options = {
-                  type: type,
-                  rLevelMax: field.rLevelMax,
-                  dLevelMax: field.dLevelMax,
-                  compression: compression,
-                  column: field,
-                  numValues: (_colChunk$meta_data7 = colChunk.meta_data) === null || _colChunk$meta_data7 === void 0 ? void 0 : _colChunk$meta_data7.num_values,
-                  dictionary: []
-                };
-                dictionaryPageOffset = colChunk === null || colChunk === void 0 ? void 0 : (_colChunk$meta_data8 = colChunk.meta_data) === null || _colChunk$meta_data8 === void 0 ? void 0 : _colChunk$meta_data8.dictionary_page_offset;
-                if (!dictionaryPageOffset) {
-                  _context3.next = 17;
-                  break;
-                }
-                dictionaryOffset = Number(dictionaryPageOffset);
-                _context3.next = 16;
-                return this.getDictionary(dictionaryOffset, options, pagesOffset);
-              case 16:
-                dictionary = _context3.sent;
-              case 17:
-                dictionary = (_options$dictionary = options.dictionary) !== null && _options$dictionary !== void 0 && _options$dictionary.length ? options.dictionary : dictionary;
-                _context3.next = 20;
-                return this.read(pagesOffset, pagesSize);
-              case 20:
-                pagesBuf = _context3.sent;
-                _context3.next = 23;
-                return (0, _decoders.decodeDataPages)(pagesBuf, _objectSpread(_objectSpread({}, options), {}, {
-                  dictionary: dictionary
-                }));
-              case 23:
-                return _context3.abrupt("return", _context3.sent);
-              case 24:
-              case "end":
-                return _context3.stop();
-            }
-          }
-        }, _callee3, this);
-      }));
-      function readColumnChunk(_x4, _x5) {
-        return _readColumnChunk.apply(this, arguments);
-      }
-      return readColumnChunk;
-    }()
-  }, {
-    key: "getDictionary",
-    value: function () {
-      var _getDictionary = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee4(dictionaryPageOffset, options, pagesOffset) {
-        var dictionarySize, pagesBuf, cursor, decodedPage;
-        return _regenerator.default.wrap(function _callee4$(_context4) {
-          while (1) {
-            switch (_context4.prev = _context4.next) {
-              case 0:
-                if (!(dictionaryPageOffset === 0)) {
-                  _context4.next = 2;
-                  break;
-                }
-                return _context4.abrupt("return", []);
-              case 2:
-                dictionarySize = Math.min(this.fileSize - dictionaryPageOffset, this.defaultDictionarySize);
-                _context4.next = 5;
-                return this.read(dictionaryPageOffset, dictionarySize);
-              case 5:
-                pagesBuf = _context4.sent;
-                cursor = {
-                  buffer: pagesBuf,
-                  offset: 0,
-                  size: pagesBuf.length
-                };
-                _context4.next = 9;
-                return (0, _decoders.decodePage)(cursor, options);
-              case 9:
-                decodedPage = _context4.sent;
-                return _context4.abrupt("return", decodedPage.dictionary);
-              case 11:
-              case "end":
-                return _context4.stop();
-            }
-          }
-        }, _callee4, this);
-      }));
-      function getDictionary(_x6, _x7, _x8) {
-        return _getDictionary.apply(this, arguments);
-      }
-      return getDictionary;
-    }()
-  }, {
-    key: "readFooter",
-    value: function () {
-      var _readFooter = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee5() {
-        var trailerLen, trailerBuf, magic, metadataSize, metadataOffset, metadataBuf, _decodeFileMetadata, metadata;
-        return _regenerator.default.wrap(function _callee5$(_context5) {
-          while (1) {
-            switch (_context5.prev = _context5.next) {
-              case 0:
-                trailerLen = _constants.PARQUET_MAGIC.length + 4;
-                _context5.next = 3;
-                return this.read(this.fileSize - trailerLen, trailerLen);
-              case 3:
-                trailerBuf = _context5.sent;
-                magic = trailerBuf.slice(4).toString();
-                if (!(magic !== _constants.PARQUET_MAGIC)) {
-                  _context5.next = 7;
-                  break;
-                }
-                throw new Error("Not a valid parquet file (magic=\"".concat(magic, ")"));
-              case 7:
-                metadataSize = trailerBuf.readUInt32LE(0);
-                metadataOffset = this.fileSize - metadataSize - trailerLen;
-                if (!(metadataOffset < _constants.PARQUET_MAGIC.length)) {
-                  _context5.next = 11;
-                  break;
-                }
-                throw new Error("Invalid metadata size ".concat(metadataOffset));
-              case 11:
-                _context5.next = 13;
-                return this.read(metadataOffset, metadataSize);
-              case 13:
-                metadataBuf = _context5.sent;
-                _decodeFileMetadata = (0, _readUtils.decodeFileMetadata)(metadataBuf), metadata = _decodeFileMetadata.metadata;
-                return _context5.abrupt("return", metadata);
-              case 16:
-              case "end":
-                return _context5.stop();
-            }
-          }
-        }, _callee5, this);
-      }));
-      function readFooter() {
-        return _readFooter.apply(this, arguments);
-      }
-      return readFooter;
-    }()
-  }], [{
-    key: "openBuffer",
-    value: function () {
-      var _openBuffer = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee6(buffer) {
-        var readFn, closeFn;
-        return _regenerator.default.wrap(function _callee6$(_context6) {
-          while (1) {
-            switch (_context6.prev = _context6.next) {
-              case 0:
-                readFn = function readFn(position, length) {
-                  return Promise.resolve(buffer.slice(position, position + length));
-                };
-                closeFn = function closeFn() {
-                  return Promise.resolve();
-                };
-                return _context6.abrupt("return", new ParquetEnvelopeReader(readFn, closeFn, buffer.length));
-              case 3:
-              case "end":
-                return _context6.stop();
-            }
-          }
-        }, _callee6);
-      }));
-      function openBuffer(_x9) {
-        return _openBuffer.apply(this, arguments);
-      }
-      return openBuffer;
-    }()
-  }]);
-  return ParquetEnvelopeReader;
-}();
-exports.ParquetEnvelopeReader = ParquetEnvelopeReader;
-//# sourceMappingURL=parquet-envelope-reader.js.map
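The removed `ParquetEnvelopeReader` above encodes the Parquet footer layout in its `readFooter()` state machine: the file ends with a 4-byte little-endian metadata length followed by the `PAR1` magic. A minimal standalone sketch of that layout, assuming a complete Parquet file held in an `ArrayBuffer` (decoding the Thrift `FileMetaData` block itself is out of scope here, and `footerMetadataRange` is an illustrative name):

```ts
const PARQUET_MAGIC = 'PAR1';

/** Locate the Thrift-encoded FileMetaData block from the file trailer. */
function footerMetadataRange(file: ArrayBuffer): {offset: number; size: number} {
  const trailerLen = PARQUET_MAGIC.length + 4; // [u32 LE metadata size]["PAR1"]
  const trailer = new Uint8Array(file, file.byteLength - trailerLen, trailerLen);
  const magic = String.fromCharCode(...trailer.subarray(4));
  if (magic !== PARQUET_MAGIC) {
    throw new Error(`Not a valid parquet file (magic=${magic})`);
  }
  // Metadata length sits just before the trailing magic, little-endian
  const size = new DataView(file).getUint32(file.byteLength - trailerLen, true);
  const offset = file.byteLength - size - trailerLen;
  if (offset < PARQUET_MAGIC.length) {
    throw new Error(`Invalid metadata size ${offset}`);
  }
  return {offset, size};
}
```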
package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map
@@ -1 +0,0 @@
-{"version":3,"file":"parquet-envelope-reader.js","names":["DEFAULT_DICTIONARY_SIZE","ParquetEnvelopeReader","read","close","fileSize","options","defaultDictionarySize","PARQUET_MAGIC","length","buffer","magic","toString","PARQUET_MAGIC_ENCRYPTED","Error","schema","rowGroup","columnList","rowCount","Number","num_rows","columnData","columns","colChunk","colMetadata","meta_data","colKey","path_in_schema","fieldIndexOf","readColumnChunk","join","file_path","undefined","field","findField","type","getThriftEnum","Type","primitiveType","compression","CompressionCodec","codec","pagesOffset","data_page_offset","pagesSize","total_compressed_size","Math","min","rLevelMax","dLevelMax","column","numValues","num_values","dictionary","dictionaryPageOffset","dictionary_page_offset","dictionaryOffset","getDictionary","pagesBuf","decodeDataPages","dictionarySize","cursor","offset","size","decodePage","decodedPage","trailerLen","trailerBuf","slice","metadataSize","readUInt32LE","metadataOffset","metadataBuf","decodeFileMetadata","metadata","readFn","position","Promise","resolve","closeFn"],"sources":["../../../../src/parquetjs/parser/parquet-envelope-reader.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetSchema} from '../schema/schema';\nimport {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';\nimport {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';\nimport {\n ParquetBuffer,\n ParquetCompression,\n ParquetData,\n PrimitiveType,\n ParquetOptions\n} from '../schema/declare';\nimport {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';\nimport {decodeDataPages, decodePage} from './decoders';\n\nconst DEFAULT_DICTIONARY_SIZE = 1e6;\n\n/**\n * The parquet envelope reader allows direct, unbuffered access to the individual\n * sections of the parquet file, namely the header, footer and the row groups.\n * This class is intended for advanced/internal users; if you just want to retrieve\n * rows from a parquet file use the ParquetReader instead\n */\nexport class ParquetEnvelopeReader {\n public read: (position: number, length: number) => Promise<Buffer>;\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n public close: () => Promise<void>;\n public fileSize: number;\n public defaultDictionarySize: number;\n\n static async openBuffer(buffer: Buffer): Promise<ParquetEnvelopeReader> {\n const readFn = (position: number, length: number) =>\n Promise.resolve(buffer.slice(position, position + length));\n const closeFn = () => Promise.resolve();\n return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);\n }\n\n constructor(\n read: (position: number, length: number) => Promise<Buffer>,\n close: () => Promise<void>,\n fileSize: number,\n options?: any\n ) {\n this.read = read;\n this.close = close;\n this.fileSize = fileSize;\n this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;\n }\n\n async readHeader(): Promise<void> {\n const buffer = await this.read(0, PARQUET_MAGIC.length);\n\n const magic = buffer.toString();\n switch (magic) {\n case PARQUET_MAGIC:\n break;\n case PARQUET_MAGIC_ENCRYPTED:\n throw new Error('Encrypted parquet file not supported');\n default:\n throw new Error(`Invalid parquet file (magic=${magic})`);\n }\n }\n\n async readRowGroup(\n schema: ParquetSchema,\n rowGroup: RowGroup,\n columnList: string[][]\n ): Promise<ParquetBuffer> {\n const buffer: ParquetBuffer = {\n rowCount: Number(rowGroup.num_rows),\n columnData: {}\n };\n for (const colChunk of rowGroup.columns) {\n const colMetadata = colChunk.meta_data;\n const colKey = colMetadata?.path_in_schema;\n if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {\n continue; // eslint-disable-line no-continue\n }\n buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);\n }\n return buffer;\n }\n\n /**\n * Do reading of parquet file's column chunk\n * @param schema\n * @param colChunk\n */\n async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {\n if (colChunk.file_path !== undefined && colChunk.file_path !== null) {\n throw new Error('external references are not supported');\n }\n\n const field = schema.findField(colChunk.meta_data?.path_in_schema!);\n const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) as any;\n\n if (type !== field.primitiveType) {\n throw new Error(`chunk type not matching schema: ${type}`);\n }\n\n const compression: ParquetCompression = getThriftEnum(\n CompressionCodec,\n colChunk.meta_data?.codec!\n ) as any;\n\n const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);\n let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);\n\n if (!colChunk.file_path) {\n pagesSize = Math.min(\n this.fileSize - pagesOffset,\n Number(colChunk.meta_data?.total_compressed_size)\n );\n }\n\n const options: ParquetOptions = {\n type,\n rLevelMax: field.rLevelMax,\n dLevelMax: field.dLevelMax,\n compression,\n column: field,\n numValues: colChunk.meta_data?.num_values,\n dictionary: []\n };\n\n let dictionary;\n\n const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;\n\n if (dictionaryPageOffset) {\n const dictionaryOffset = Number(dictionaryPageOffset);\n // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.\n dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);\n }\n\n dictionary = options.dictionary?.length ? options.dictionary : dictionary;\n const pagesBuf = await this.read(pagesOffset, pagesSize);\n return await decodeDataPages(pagesBuf, {...options, dictionary});\n }\n\n /**\n * Getting dictionary for allows to flatten values by indices.\n * @param dictionaryPageOffset\n * @param options\n * @param pagesOffset\n * @returns\n */\n async getDictionary(\n dictionaryPageOffset: number,\n options: ParquetOptions,\n pagesOffset: number\n ): Promise<string[]> {\n if (dictionaryPageOffset === 0) {\n // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);\n // pagesBuf = await this.read(pagesOffset, dictionarySize);\n\n // In this case we are working with parquet-mr files format. Problem is described below:\n // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding\n // We need to get dictionary page from column chunk if it exists.\n // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.\n return [];\n }\n\n const dictionarySize = Math.min(\n this.fileSize - dictionaryPageOffset,\n this.defaultDictionarySize\n );\n const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);\n\n const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};\n const decodedPage = await decodePage(cursor, options);\n\n return decodedPage.dictionary!;\n }\n\n async readFooter(): Promise<FileMetaData> {\n const trailerLen = PARQUET_MAGIC.length + 4;\n const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);\n\n const magic = trailerBuf.slice(4).toString();\n if (magic !== PARQUET_MAGIC) {\n throw new Error(`Not a valid parquet file (magic=\"${magic})`);\n }\n\n const metadataSize = trailerBuf.readUInt32LE(0);\n const metadataOffset = this.fileSize - metadataSize - trailerLen;\n if (metadataOffset < PARQUET_MAGIC.length) {\n throw new Error(`Invalid metadata size ${metadataOffset}`);\n }\n\n const metadataBuf = await this.read(metadataOffset, metadataSize);\n // let metadata = new parquet_thrift.FileMetaData();\n // parquet_util.decodeThrift(metadata, metadataBuf);\n const {metadata} = decodeFileMetadata(metadataBuf);\n return metadata;\n }\n}\n"],"mappings":";;;;;;;;;;;;AAEA;AACA;AAQA;AACA;AAAuD;AAAA;AAAA;AAAA;AAAA;AAEvD,IAAMA,uBAAuB,GAAG,GAAG;;AAAC,IAQvBC,qBAAqB;EAiBhC,+BACEC,IAA2D,EAC3DC,KAA0B,EAC1BC,QAAgB,EAChBC,OAAa,EACb;IAAA;IAAA;IAAA;IAAA;IAAA;IACA,IAAI,CAACH,IAAI,GAAGA,IAAI;IAChB,IAAI,CAACC,KAAK,GAAGA,KAAK;IAClB,IAAI,CAACC,QAAQ,GAAGA,QAAQ;IACxB,IAAI,CAACE,qBAAqB,GAAG,CAAAD,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEC,qBAAqB,KAAIN,uBAAuB;EACxF;EAAC;IAAA;IAAA;MAAA,4EAED;QAAA;QAAA;UAAA;YAAA;cAAA;gBAAA;gBAAA,OACuB,IAAI,CAACE,IAAI,CAAC,CAAC,EAAEK,wBAAa,CAACC,MAAM,CAAC;cAAA;gBAAjDC,MAAM;gBAENC,KAAK,GAAGD,MAAM,CAACE,QAAQ,EAAE;gBAAA,cACvBD,KAAK;gBAAA,gCACNH,wBAAa,uBAEbK,kCAAuB;gBAAA;cAAA;gBAAA;cAAA;gBAAA,MACpB,IAAIC,KAAK,CAAC,sCAAsC,CAAC;cAAA;gBAAA,MAEjD,IAAIA,KAAK,uCAAgCH,KAAK,OAAI;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAE7D;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,8EAED,kBACEI,MAAqB,EACrBC,QAAkB,EAClBC,UAAsB;QAAA;QAAA;UAAA;YAAA;cAAA;gBAEhBP,MAAqB,GAAG;kBAC5BQ,QAAQ,EAAEC,MAAM,CAACH,QAAQ,CAACI,QAAQ,CAAC;kBACnCC,UAAU,EAAE,CAAC;gBACf,CAAC;gBAAA,uCACsBL,QAAQ,CAACM,OAAO;gBAAA;gBAAA;cAAA;gBAAA;kBAAA;kBAAA;gBAAA;gBAA5BC,QAAQ;gBACXC,WAAW,GAAGD,QAAQ,CAACE,SAAS;gBAChCC,MAAM,GAAGF,WAAW,aAAXA,WAAW,uBAAXA,WAAW,CAAEG,cAAc;gBAAA,MACtCV,UAAU,CAACR,MAAM,GAAG,CAAC,IAAI,IAAAmB,uBAAY,EAACX,UAAU,EAAES,MAAM,CAAE,GAAG,CAAC;kBAAA;kBAAA;gBAAA;gBAAA;cAAA;gBAAA;gBAAA,OAGxB,IAAI,CAACG,eAAe,CAACd,MAAM,EAAEQ,QAAQ,CAAC;cAAA;gBAAhFb,MAAM,CAACW,UAAU,CAACK,MAAM,CAAEI,IAAI,EAAE,CAAC;cAAA;gBAAA;gBAAA;cAAA;gBAAA;gBAAA;cAAA;gBAAA;gBAAA;gBAAA;cAAA;gBAAA;gBAAA;gBAAA;cAAA;gBAAA,kCAE5BpB,MAAM;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACd;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,iFAOD,kBAAsBK,MAAqB,EAAEQ,QAAqB;QAAA;QAAA;QAAA;UAAA;YAAA;cAAA;gBAAA,MAC5DA,QAAQ,CAACQ,SAAS,KAAKC,SAAS,IAAIT,QAAQ,CAACQ,SAAS,KAAK,IAAI;kBAAA;kBAAA;gBAAA;gBAAA,MAC3D,IAAIjB,KAAK,CAAC,uCAAuC,CAAC;cAAA;gBAGpDmB,KAAK,GAAGlB,MAAM,CAACmB,SAAS,wBAACX,QAAQ,CAACE,SAAS,wDAAlB,oBAAoBE,cAAc,CAAE;gBAC7DQ,IAAmB,GAAG,IAAAC,wBAAa,EAACC,mBAAI,0BAAEd,QAAQ,CAACE,SAAS,yDAAlB,qBAAoBU,IAAI,CAAE;gBAAA,MAEtEA,IAAI,KAAKF,KAAK,CAACK,aAAa;kBAAA;kBAAA;gBAAA;gBAAA,MACxB,IAAIxB,KAAK,2CAAoCqB,IAAI,EAAG;cAAA;gBAGtDI,WAA+B,GAAG,IAAAH,wBAAa,EACnDI,+BAAgB,0BAChBjB,QAAQ,CAACE,SAAS,yDAAlB,qBAAoBgB,KAAK,CAC1B;gBAEKC,WAAW,GAAGvB,MAAM,yBAACI,QAAQ,CAACE,SAAS,yDAAlB,qBAAoBkB,gBAAgB,CAAE;gBAC7DC,SAAS,GAAGzB,MAAM,yBAACI,QAAQ,CAACE,SAAS,yDAAlB,qBAAoBoB,qBAAqB,CAAE;gBAElE,IAAI,CAACtB,QAAQ,CAACQ,SAAS,EAAE;kBACvBa,SAAS,GAAGE,IAAI,CAACC,GAAG,CAClB,IAAI,CAAC1C,QAAQ,GAAGqC,WAAW,EAC3BvB,MAAM,yBAACI,QAAQ,CAACE,SAAS,yDAAlB,qBAAoBoB,qBAAqB,CAAC,CAClD;gBACH;gBAEMvC,OAAuB,GAAG;kBAC9B6B,IAAI,EAAJA,IAAI;kBACJa,SAAS,EAAEf,KAAK,CAACe,SAAS;kBAC1BC,SAAS,EAAEhB,KAAK,CAACgB,SAAS;kBAC1BV,WAAW,EAAXA,WAAW;kBACXW,MAAM,EAAEjB,KAAK;kBACbkB,SAAS,0BAAE5B,QAAQ,CAACE,SAAS,yDAAlB,qBAAoB2B,UAAU;kBACzCC,UAAU,EAAE;gBACd,CAAC;gBAIKC,oBAAoB,GAAG/B,QAAQ,aAARA,QAAQ,+CAARA,QAAQ,CAAEE,SAAS,yDAAnB,qBAAqB8B,sBAAsB;gBAAA,KAEpED,oBAAoB;kBAAA;kBAAA;gBAAA;gBAChBE,gBAAgB,GAAGrC,MAAM,CAACmC,oBAAoB,CAAC;gBAAA;gBAAA,OAElC,IAAI,CAACG,aAAa,CAACD,gBAAgB,EAAElD,OAAO,EAAEoC,WAAW,CAAC;cAAA;gBAA7EW,UAAU;cAAA;gBAGZA,UAAU,GAAG,uBAAA/C,OAAO,CAAC+C,UAAU,gDAAlB,oBAAoB5C,MAAM,GAAGH,OAAO,CAAC+C,UAAU,GAAGA,UAAU;gBAAC;gBAAA,OACnD,IAAI,CAAClD,IAAI,CAACuC,WAAW,EAAEE,SAAS,CAAC;cAAA;gBAAlDc,QAAQ;gBAAA;gBAAA,OACD,IAAAC,yBAAe,EAACD,QAAQ,kCAAMpD,OAAO;kBAAE+C,UAAU,EAAVA;gBAAU,GAAE;cAAA;gBAAA;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACjE;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,+EASD,kBACEC,oBAA4B,EAC5BhD,OAAuB,EACvBoC,WAAmB;QAAA;QAAA;UAAA;YAAA;cAAA;gBAAA,MAEfY,oBAAoB,KAAK,CAAC;kBAAA;kBAAA;gBAAA;gBAAA,kCAQrB,EAAE;cAAA;gBAGLM,cAAc,GAAGd,IAAI,CAACC,GAAG,CAC7B,IAAI,CAAC1C,QAAQ,GAAGiD,oBAAoB,EACpC,IAAI,CAAC/C,qBAAqB,CAC3B;gBAAA;gBAAA,OACsB,IAAI,CAACJ,IAAI,CAACmD,oBAAoB,EAAEM,cAAc,CAAC;cAAA;gBAAhEF,QAAQ;gBAERG,MAAM,GAAG;kBAACnD,MAAM,EAAEgD,QAAQ;kBAAEI,MAAM,EAAE,CAAC;kBAAEC,IAAI,EAAEL,QAAQ,CAACjD;gBAAM,CAAC;gBAAA;gBAAA,OACzC,IAAAuD,oBAAU,EAACH,MAAM,EAAEvD,OAAO,CAAC;cAAA;gBAA/C2D,WAAW;gBAAA,kCAEVA,WAAW,CAACZ,UAAU;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAC9B;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,4EAED;QAAA;QAAA;UAAA;YAAA;cAAA;gBACQa,UAAU,GAAG1D,wBAAa,CAACC,MAAM,GAAG,CAAC;gBAAA;gBAAA,OAClB,IAAI,CAACN,IAAI,CAAC,IAAI,CAACE,QAAQ,GAAG6D,UAAU,EAAEA,UAAU,CAAC;cAAA;gBAApEC,UAAU;gBAEVxD,KAAK,GAAGwD,UAAU,CAACC,KAAK,CAAC,CAAC,CAAC,CAACxD,QAAQ,EAAE;gBAAA,MACxCD,KAAK,KAAKH,wBAAa;kBAAA;kBAAA;gBAAA;gBAAA,MACnB,IAAIM,KAAK,6CAAqCH,KAAK,OAAI;cAAA;gBAGzD0D,YAAY,GAAGF,UAAU,CAACG,YAAY,CAAC,CAAC,CAAC;gBACzCC,cAAc,GAAG,IAAI,CAAClE,QAAQ,GAAGgE,YAAY,GAAGH,UAAU;gBAAA,MAC5DK,cAAc,GAAG/D,wBAAa,CAACC,MAAM;kBAAA;kBAAA;gBAAA;gBAAA,MACjC,IAAIK,KAAK,iCAA0ByD,cAAc,EAAG;cAAA;gBAAA;gBAAA,OAGlC,IAAI,CAACpE,IAAI,CAACoE,cAAc,EAAEF,YAAY,CAAC;cAAA;gBAA3DG,WAAW;gBAAA,sBAGE,IAAAC,6BAAkB,EAACD,WAAW,CAAC,EAA3CE,QAAQ,uBAARA,QAAQ;gBAAA,kCACRA,QAAQ;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CAChB;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;IAAA;IAAA;MAAA,4EArKD,kBAAwBhE,MAAc;QAAA;QAAA;UAAA;YAAA;cAAA;gBAC9BiE,MAAM,GAAG,SAATA,MAAM,CAAIC,QAAgB,EAAEnE,MAAc;kBAAA,OAC9CoE,OAAO,CAACC,OAAO,CAACpE,MAAM,CAAC0D,KAAK,CAACQ,QAAQ,EAAEA,QAAQ,GAAGnE,MAAM,CAAC,CAAC;gBAAA;gBACtDsE,OAAO,GAAG,SAAVA,OAAO;kBAAA,OAASF,OAAO,CAACC,OAAO,EAAE;gBAAA;gBAAA,kCAChC,IAAI5E,qBAAqB,CAACyE,MAAM,EAAEI,OAAO,EAAErE,MAAM,CAACD,MAAM,CAAC;cAAA;cAAA;gBAAA;YAAA;UAAA;QAAA;MAAA,CACjE;MAAA;QAAA;MAAA;MAAA;IAAA;EAAA;EAAA;AAAA;AAAA"}
package/dist/es5/parquetjs/utils/buffer-utils.js
@@ -1,19 +0,0 @@
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.toArrayBuffer = toArrayBuffer;
-exports.toBuffer = toBuffer;
-function toArrayBuffer(buffer) {
-  if (Buffer.isBuffer(buffer)) {
-    var typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
-    return typedArray.slice().buffer;
-  }
-  return buffer;
-}
-
-function toBuffer(arrayBuffer) {
-  return Buffer.from(arrayBuffer);
-}
-//# sourceMappingURL=buffer-utils.js.map
package/dist/es5/parquetjs/utils/buffer-utils.js.map
@@ -1 +0,0 @@
-{"version":3,"file":"buffer-utils.js","names":["toArrayBuffer","buffer","Buffer","isBuffer","typedArray","Uint8Array","byteOffset","length","slice","toBuffer","arrayBuffer","from"],"sources":["../../../../src/parquetjs/utils/buffer-utils.ts"],"sourcesContent":["/**\n * Convert Buffer to ArrayBuffer\n */\nexport function toArrayBuffer(buffer: Buffer): ArrayBuffer {\n  // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n  if (Buffer.isBuffer(buffer)) {\n    const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);\n    return typedArray.slice().buffer;\n  }\n  return buffer;\n}\n\n/**\n * Convert (copy) ArrayBuffer to Buffer\n */\nexport function toBuffer(arrayBuffer: ArrayBuffer): Buffer {\n  return Buffer.from(arrayBuffer);\n}\n"],"mappings":";;;;;;;AAGO,SAASA,aAAa,CAACC,MAAc,EAAe;EAEzD,IAAIC,MAAM,CAACC,QAAQ,CAACF,MAAM,CAAC,EAAE;IAC3B,IAAMG,UAAU,GAAG,IAAIC,UAAU,CAACJ,MAAM,CAACA,MAAM,EAAEA,MAAM,CAACK,UAAU,EAAEL,MAAM,CAACM,MAAM,CAAC;IAClF,OAAOH,UAAU,CAACI,KAAK,EAAE,CAACP,MAAM;EAClC;EACA,OAAOA,MAAM;AACf;;AAKO,SAASQ,QAAQ,CAACC,WAAwB,EAAU;EACzD,OAAOR,MAAM,CAACS,IAAI,CAACD,WAAW,CAAC;AACjC"}
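A closing note on the removed buffer-utils helpers: `toArrayBuffer()` copies through a `Uint8Array` view rather than returning `buffer.buffer` directly because a Node `Buffer` is frequently a view into a larger shared pool, so its `byteOffset` and `length` must be honored. A small Node sketch of that pitfall (illustrative, not package code):

```ts
const buf = Buffer.from('PAR1'); // small Buffers are carved out of a shared pool

const naive = buf.buffer; // the entire underlying pool, not just these 4 bytes
const copied = new Uint8Array(buf.buffer, buf.byteOffset, buf.length).slice().buffer;

console.log(naive.byteLength >= copied.byteLength); // true (often 8192 vs 4)
console.log(copied.byteLength); // 4
```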