@loaders.gl/parquet 3.0.12 → 3.1.0-alpha.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +7 -18
- package/dist/dist.min.js.map +1 -1
- package/dist/es5/bundle.js +2 -4
- package/dist/es5/bundle.js.map +1 -1
- package/dist/es5/constants.js +17 -0
- package/dist/es5/constants.js.map +1 -0
- package/dist/es5/index.js +53 -21
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/convert-schema.js +82 -0
- package/dist/es5/lib/convert-schema.js.map +1 -0
- package/dist/es5/lib/parse-parquet.js +173 -0
- package/dist/es5/lib/parse-parquet.js.map +1 -0
- package/dist/es5/lib/read-array-buffer.js +53 -0
- package/dist/es5/lib/read-array-buffer.js.map +1 -0
- package/dist/es5/parquet-loader.js +6 -79
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/codecs/dictionary.js +30 -0
- package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -0
- package/dist/es5/parquetjs/codecs/index.js +10 -0
- package/dist/es5/parquetjs/codecs/index.js.map +1 -1
- package/dist/es5/parquetjs/codecs/rle.js +2 -2
- package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +138 -104
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/{writer.js → encoder/writer.js} +397 -228
- package/dist/es5/parquetjs/encoder/writer.js.map +1 -0
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +1 -0
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
- package/dist/es5/parquetjs/parser/decoders.js +495 -0
- package/dist/es5/parquetjs/parser/decoders.js.map +1 -0
- package/dist/es5/parquetjs/parser/parquet-cursor.js +215 -0
- package/dist/es5/parquetjs/parser/parquet-cursor.js.map +1 -0
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +452 -0
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
- package/dist/es5/parquetjs/parser/parquet-reader.js +413 -0
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -0
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/schema.js +2 -0
- package/dist/es5/parquetjs/schema/schema.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +2 -1
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js +79 -4
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/buffer-utils.js +21 -0
- package/dist/es5/parquetjs/utils/buffer-utils.js.map +1 -0
- package/dist/es5/parquetjs/utils/file-utils.js +108 -0
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -0
- package/dist/es5/parquetjs/{util.js → utils/read-utils.js} +13 -113
- package/dist/es5/parquetjs/utils/read-utils.js.map +1 -0
- package/dist/esm/bundle.js +2 -4
- package/dist/esm/bundle.js.map +1 -1
- package/dist/esm/constants.js +6 -0
- package/dist/esm/constants.js.map +1 -0
- package/dist/esm/index.js +14 -4
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/convert-schema.js +71 -0
- package/dist/esm/lib/convert-schema.js.map +1 -0
- package/dist/esm/lib/parse-parquet.js +28 -0
- package/dist/esm/lib/parse-parquet.js.map +1 -0
- package/dist/esm/lib/read-array-buffer.js +9 -0
- package/dist/esm/lib/read-array-buffer.js.map +1 -0
- package/dist/esm/parquet-loader.js +4 -24
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/codecs/dictionary.js +12 -0
- package/dist/esm/parquetjs/codecs/dictionary.js.map +1 -0
- package/dist/esm/parquetjs/codecs/index.js +9 -0
- package/dist/esm/parquetjs/codecs/index.js.map +1 -1
- package/dist/esm/parquetjs/codecs/rle.js +2 -2
- package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
- package/dist/esm/parquetjs/compression.js +54 -105
- package/dist/esm/parquetjs/compression.js.map +1 -1
- package/dist/esm/parquetjs/{writer.js → encoder/writer.js} +32 -35
- package/dist/esm/parquetjs/encoder/writer.js.map +1 -0
- package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js +1 -0
- package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
- package/dist/esm/parquetjs/parser/decoders.js +300 -0
- package/dist/esm/parquetjs/parser/decoders.js.map +1 -0
- package/dist/esm/parquetjs/parser/parquet-cursor.js +90 -0
- package/dist/esm/parquetjs/parser/parquet-cursor.js.map +1 -0
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +164 -0
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
- package/dist/esm/parquetjs/parser/parquet-reader.js +133 -0
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -0
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/schema/schema.js +2 -0
- package/dist/esm/parquetjs/schema/schema.js.map +1 -1
- package/dist/esm/parquetjs/schema/shred.js +2 -1
- package/dist/esm/parquetjs/schema/shred.js.map +1 -1
- package/dist/esm/parquetjs/schema/types.js +78 -4
- package/dist/esm/parquetjs/schema/types.js.map +1 -1
- package/dist/esm/parquetjs/utils/buffer-utils.js +12 -0
- package/dist/esm/parquetjs/utils/buffer-utils.js.map +1 -0
- package/dist/esm/parquetjs/utils/file-utils.js +79 -0
- package/dist/esm/parquetjs/utils/file-utils.js.map +1 -0
- package/dist/esm/parquetjs/{util.js → utils/read-utils.js} +11 -89
- package/dist/esm/parquetjs/utils/read-utils.js.map +1 -0
- package/dist/parquet-worker.js +7 -18
- package/dist/parquet-worker.js.map +1 -1
- package/package.json +10 -10
- package/src/bundle.ts +2 -3
- package/src/constants.ts +17 -0
- package/src/index.ts +30 -4
- package/src/lib/convert-schema.ts +95 -0
- package/src/lib/parse-parquet.ts +27 -0
- package/{dist/es5/libs → src/lib}/read-array-buffer.ts +0 -0
- package/src/parquet-loader.ts +4 -24
- package/src/parquetjs/codecs/dictionary.ts +11 -0
- package/src/parquetjs/codecs/index.ts +13 -0
- package/src/parquetjs/codecs/rle.ts +4 -2
- package/src/parquetjs/compression.ts +89 -50
- package/src/parquetjs/{writer.ts → encoder/writer.ts} +46 -45
- package/src/parquetjs/parquet-thrift/CompressionCodec.ts +2 -1
- package/src/parquetjs/parser/decoders.ts +448 -0
- package/src/parquetjs/parser/parquet-cursor.ts +94 -0
- package/src/parquetjs/parser/parquet-envelope-reader.ts +210 -0
- package/src/parquetjs/parser/parquet-reader.ts +179 -0
- package/src/parquetjs/schema/declare.ts +48 -2
- package/src/parquetjs/schema/schema.ts +2 -0
- package/src/parquetjs/schema/shred.ts +3 -1
- package/src/parquetjs/schema/types.ts +82 -5
- package/src/parquetjs/utils/buffer-utils.ts +18 -0
- package/src/parquetjs/utils/file-utils.ts +96 -0
- package/src/parquetjs/{util.ts → utils/read-utils.ts} +13 -110
- package/dist/dist.es5.min.js +0 -51
- package/dist/dist.es5.min.js.map +0 -1
- package/dist/es5/parquetjs/compression.ts.disabled +0 -105
- package/dist/es5/parquetjs/reader.js +0 -1078
- package/dist/es5/parquetjs/reader.js.map +0 -1
- package/dist/es5/parquetjs/util.js.map +0 -1
- package/dist/es5/parquetjs/writer.js.map +0 -1
- package/dist/esm/libs/read-array-buffer.ts +0 -31
- package/dist/esm/parquetjs/compression.ts.disabled +0 -105
- package/dist/esm/parquetjs/reader.js +0 -524
- package/dist/esm/parquetjs/reader.js.map +0 -1
- package/dist/esm/parquetjs/util.js.map +0 -1
- package/dist/esm/parquetjs/writer.js.map +0 -1
- package/src/libs/read-array-buffer.ts +0 -31
- package/src/parquetjs/compression.ts.disabled +0 -105
- package/src/parquetjs/reader.ts +0 -707
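The headline change in this version range is a rewrite of the vendored parquetjs reader: the monolithic package/src/parquetjs/reader.ts (707 lines) is deleted and replaced by the new package/src/parquetjs/parser/ modules (decoders.ts, parquet-cursor.ts, parquet-envelope-reader.ts, parquet-reader.ts), the writer moves under encoder/, and shared helpers move into utils/. The sketch below shows how the relocated ParquetReader is typically driven, based on the class source embedded in the source maps further down; the import path assumes the class is re-exported from the package entry point (src/index.ts changes +30 -4 in this diff), and the row handling is hypothetical:

import {ParquetReader} from '@loaders.gl/parquet'; // assumption: re-exported via src/index.ts

async function logRows(data: ArrayBuffer): Promise<void> {
  // openArrayBuffer() checks the PAR1 magic and decodes the thrift footer up front
  const reader = await ParquetReader.openArrayBuffer<Record<string, unknown>>(data);
  try {
    console.log(`rows: ${reader.getRowCount()}`);
    // ParquetReader implements AsyncIterable by delegating to a ParquetCursor
    for await (const row of reader) {
      console.log(row);
    }
  } finally {
    // the class docs require close() once reading is finished
    await reader.close();
  }
}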
package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/parquetjs/parser/parquet-envelope-reader.ts"],"names":["DEFAULT_DICTIONARY_SIZE","ParquetEnvelopeReader","read","close","fileSize","options","defaultDictionarySize","PARQUET_MAGIC","length","buffer","magic","toString","PARQUET_MAGIC_ENCRYPTED","Error","schema","rowGroup","columnList","rowCount","Number","num_rows","columnData","columns","colChunk","colMetadata","meta_data","colKey","path_in_schema","readColumnChunk","join","file_path","undefined","field","findField","type","Type","primitiveType","compression","CompressionCodec","codec","pagesOffset","data_page_offset","pagesSize","total_compressed_size","Math","min","rLevelMax","dLevelMax","column","numValues","num_values","dictionary","dictionaryPageOffset","dictionary_page_offset","dictionaryOffset","getDictionary","pagesBuf","dictionarySize","cursor","offset","size","decodedPage","trailerLen","trailerBuf","slice","metadataSize","readUInt32LE","metadataOffset","metadataBuf","metadata","filePath","fileStat","fileDescriptor","readFn","fread","bind","closeFn","fclose","position","Promise","resolve"],"mappings":";;;;;;;;;;;;;;;;;;;AAEA;;AACA;;AAQA;;AACA;;AACA;;;;;;;;;;;;AAEA,IAAMA,uBAAuB,GAAG,GAAhC;;IAQaC,qB;AA2BX,iCACEC,IADF,EAEEC,KAFF,EAGEC,QAHF,EAIEC,OAJF,EAKE;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,SAAKH,IAAL,GAAYA,IAAZ;AACA,SAAKC,KAAL,GAAaA,KAAb;AACA,SAAKC,QAAL,GAAgBA,QAAhB;AACA,SAAKE,qBAAL,GAA6B,CAAAD,OAAO,SAAP,IAAAA,OAAO,WAAP,YAAAA,OAAO,CAAEC,qBAAT,KAAkCN,uBAA/D;AACD;;;;;kFAED;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBACuB,KAAKE,IAAL,CAAU,CAAV,EAAaK,yBAAcC,MAA3B,CADvB;;AAAA;AACQC,gBAAAA,MADR;AAGQC,gBAAAA,KAHR,GAGgBD,MAAM,CAACE,QAAP,EAHhB;AAAA,8BAIUD,KAJV;AAAA,gDAKSH,wBALT,uBAOSK,kCAPT;AAAA;;AAAA;AAAA;;AAAA;AAAA,sBAQY,IAAIC,KAAJ,CAAU,sCAAV,CARZ;;AAAA;AAAA,sBAUY,IAAIA,KAAJ,uCAAyCH,KAAzC,OAVZ;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;oFAcA,kBACEI,MADF,EAEEC,QAFF,EAGEC,UAHF;AAAA;;AAAA;AAAA;AAAA;AAAA;AAKQP,gBAAAA,MALR,GAKgC;AAC5BQ,kBAAAA,QAAQ,EAAEC,MAAM,CAACH,QAAQ,CAACI,QAAV,CADY;AAE5BC,kBAAAA,UAAU,EAAE;AAFgB,iBALhC;AAAA,uDASyBL,QAAQ,CAACM,OATlC;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;;AASaC,gBAAAA,QATb;AAUUC,gBAAAA,WAVV,GAUwBD,QAAQ,CAACE,SAVjC;AAWUC,gBAAAA,MAXV,GAWmBF,WAXnB,aAWmBA,WAXnB,uBAWmBA,WAAW,CAAEG,cAXhC;;AAAA,sBAYQV,UAAU,CAACR,MAAX,GAAoB,CAApB,IAAyB,6BAAaQ,UAAb,EAAyBS,MAAzB,IAAoC,CAZrE;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;AAAA,uBAe8C,KAAKE,eAAL,CAAqBb,MAArB,EAA6BQ,QAA7B,CAf9C;;AAAA;AAeIb,gBAAAA,MAAM,CAACW,UAAP,CAAkBK,MAAM,CAAEG,IAAR,EAAlB,CAfJ;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;AAAA;AAAA;;AAAA;;AAAA;AAAA;;AAAA;;AAAA;;AAAA;AAAA,kDAiBSnB,MAjBT;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;uFAyBA,kBAAsBK,MAAtB,EAA6CQ,QAA7C;AAAA;;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA,sBACMA,QAAQ,CAACO,SAAT,KAAuBC,SAAvB,IAAoCR,QAAQ,CAACO,SAAT,KAAuB,IADjE;AAAA;AAAA;AAAA;;AAAA,sBAEU,IAAIhB,KAAJ,CAAU,uCAAV,CAFV;;AAAA;AAKQkB,gBAAAA,KALR,GAKgBjB,MAAM,CAACkB,SAAP,wBAAiBV,QAAQ,CAACE,SAA1B,wDAAiB,oBAAoBE,cAArC,CALhB;AAMQO,gBAAAA,IANR,GAM8B,8BAAcC,mBAAd,0BAAoBZ,QAAQ,CAACE,SAA7B,yDAAoB,qBAAoBS,IAAxC,CAN9B;;AAAA,sBAQMA,IAAI,KAAKF,KAAK,CAACI,aARrB;AAAA;AAAA;AAAA;;AAAA,sBASU,IAAItB,KAAJ,2CAA6CoB,IAA7C,EATV;;AAAA;AAYQG,gBAAAA,WAZR,GAY0C,8BACtCC,+BADsC,0BAEtCf,QAAQ,CAACE,SAF6B,yDAEtC,qBAAoBc,KAFkB,CAZ1C;AAiBQC,gBAAAA,WAjBR,GAiBsBrB,MAAM,yBAACI,QAAQ,CAACE,SAAV,yDAAC,qBAAoBgB,gBAArB,CAjB5B;AAkBMC,gBAAAA,SAlBN,GAkBkBvB,MAAM,yBAACI,QAAQ,CAACE,SAAV,yDAAC,qBAAoBkB,qBAArB,CAlBxB;;AAoBE,oBAAI,CAACpB,QAAQ,CAACO,SAAd,EAAyB;AACvBY,kBAAAA,SAAS,GAAGE,IAAI,CAACC,GAAL,CACV,KAAKxC,QAAL,GAAgBmC,WADN,EAEVrB,MAAM,yBAACI,QAAQ,CAACE,SAAV,yDAAC,qBAAoBkB,qBAArB,CAFI,CA
AZ;AAID;;AAEKrC,gBAAAA,OA3BR,GA2BkC;AAC9B4B,kBAAAA,IAAI,EAAJA,IAD8B;AAE9BY,kBAAAA,SAAS,EAAEd,KAAK,CAACc,SAFa;AAG9BC,kBAAAA,SAAS,EAAEf,KAAK,CAACe,SAHa;AAI9BV,kBAAAA,WAAW,EAAXA,WAJ8B;AAK9BW,kBAAAA,MAAM,EAAEhB,KALsB;AAM9BiB,kBAAAA,SAAS,0BAAE1B,QAAQ,CAACE,SAAX,yDAAE,qBAAoByB,UAND;AAO9BC,kBAAAA,UAAU,EAAE;AAPkB,iBA3BlC;AAuCQC,gBAAAA,oBAvCR,GAuC+B7B,QAvC/B,aAuC+BA,QAvC/B,+CAuC+BA,QAAQ,CAAEE,SAvCzC,yDAuC+B,qBAAqB4B,sBAvCpD;;AAAA,qBAyCMD,oBAzCN;AAAA;AAAA;AAAA;;AA0CUE,gBAAAA,gBA1CV,GA0C6BnC,MAAM,CAACiC,oBAAD,CA1CnC;AAAA;AAAA,uBA4CuB,KAAKG,aAAL,CAAmBD,gBAAnB,EAAqChD,OAArC,EAA8CkC,WAA9C,CA5CvB;;AAAA;AA4CIW,gBAAAA,UA5CJ;;AAAA;AA+CEA,gBAAAA,UAAU,GAAG,uBAAA7C,OAAO,CAAC6C,UAAR,oEAAoB1C,MAApB,GAA6BH,OAAO,CAAC6C,UAArC,GAAkDA,UAA/D;AA/CF;AAAA,uBAgDyB,KAAKhD,IAAL,CAAUqC,WAAV,EAAuBE,SAAvB,CAhDzB;;AAAA;AAgDQc,gBAAAA,QAhDR;AAAA;AAAA,uBAiDe,+BAAgBA,QAAhB,kCAA8BlD,OAA9B;AAAuC6C,kBAAAA,UAAU,EAAVA;AAAvC,mBAjDf;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;qFA2DA,kBACEC,oBADF,EAEE9C,OAFF,EAGEkC,WAHF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAKMY,oBAAoB,KAAK,CAL/B;AAAA;AAAA;AAAA;;AAAA,kDAaW,EAbX;;AAAA;AAgBQK,gBAAAA,cAhBR,GAgByBb,IAAI,CAACC,GAAL,CACrB,KAAKxC,QAAL,GAAgB+C,oBADK,EAErB,KAAK7C,qBAFgB,CAhBzB;AAAA;AAAA,uBAoByB,KAAKJ,IAAL,CAAUiD,oBAAV,EAAgCK,cAAhC,CApBzB;;AAAA;AAoBQD,gBAAAA,QApBR;AAsBQE,gBAAAA,MAtBR,GAsBiB;AAAChD,kBAAAA,MAAM,EAAE8C,QAAT;AAAmBG,kBAAAA,MAAM,EAAE,CAA3B;AAA8BC,kBAAAA,IAAI,EAAEJ,QAAQ,CAAC/C;AAA7C,iBAtBjB;AAAA;AAAA,uBAuB4B,0BAAWiD,MAAX,EAAmBpD,OAAnB,CAvB5B;;AAAA;AAuBQuD,gBAAAA,WAvBR;AAAA,kDAyBSA,WAAW,CAACV,UAzBrB;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;kFA4BA;AAAA;;AAAA;AAAA;AAAA;AAAA;AACQW,gBAAAA,UADR,GACqBtD,yBAAcC,MAAd,GAAuB,CAD5C;AAAA;AAAA,uBAE2B,KAAKN,IAAL,CAAU,KAAKE,QAAL,GAAgByD,UAA1B,EAAsCA,UAAtC,CAF3B;;AAAA;AAEQC,gBAAAA,UAFR;AAIQpD,gBAAAA,KAJR,GAIgBoD,UAAU,CAACC,KAAX,CAAiB,CAAjB,EAAoBpD,QAApB,EAJhB;;AAAA,sBAKMD,KAAK,KAAKH,wBALhB;AAAA;AAAA;AAAA;;AAAA,sBAMU,IAAIM,KAAJ,6CAA8CH,KAA9C,OANV;;AAAA;AASQsD,gBAAAA,YATR,GASuBF,UAAU,CAACG,YAAX,CAAwB,CAAxB,CATvB;AAUQC,gBAAAA,cAVR,GAUyB,KAAK9D,QAAL,GAAgB4D,YAAhB,GAA+BH,UAVxD;;AAAA,sBAWMK,cAAc,GAAG3D,yBAAcC,MAXrC;AAAA;AAAA;AAAA;;AAAA,sBAYU,IAAIK,KAAJ,iCAAmCqD,cAAnC,EAZV;;AAAA;AAAA;AAAA,uBAe4B,KAAKhE,IAAL,CAAUgE,cAAV,EAA0BF,YAA1B,CAf5B;;AAAA;AAeQG,gBAAAA,WAfR;AAAA,sCAkBqB,mCAAmBA,WAAnB,CAlBrB,EAkBSC,QAlBT,uBAkBSA,QAlBT;AAAA,kDAmBSA,QAnBT;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;gFA3JA,kBAAsBC,QAAtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBACyB,sBAAMA,QAAN,CADzB;;AAAA;AACQC,gBAAAA,QADR;AAAA;AAAA,uBAE+B,sBAAMD,QAAN,CAF/B;;AAAA;AAEQE,gBAAAA,cAFR;AAIQC,gBAAAA,MAJR,GAIiBC,iBAAMC,IAAN,CAAW5C,SAAX,EAAsByC,cAAtB,CAJjB;AAKQI,gBAAAA,OALR,GAKkBC,kBAAOF,IAAP,CAAY5C,SAAZ,EAAuByC,cAAvB,CALlB;AAAA,kDAOS,IAAItE,qBAAJ,CAA0BuE,MAA1B,EAAkCG,OAAlC,EAA2CL,QAAQ,CAACX,IAApD,CAPT;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;kFAUA,kBAAwBlD,MAAxB;AAAA;AAAA;AAAA;AAAA;AAAA;AACQ+D,gBAAAA,MADR,GACiB,SAATA,MAAS,CAACK,QAAD,EAAmBrE,MAAnB;AAAA,yBACbsE,OAAO,CAACC,OAAR,CAAgBtE,MAAM,CAACsD,KAAP,CAAac,QAAb,EAAuBA,QAAQ,GAAGrE,MAAlC,CAAhB,CADa;AAAA,iBADjB;;AAGQmE,gBAAAA,OAHR,GAGkB,SAAVA,OAAU;AAAA,yBAAMG,OAAO,CAACC,OAAR,EAAN;AAAA,iBAHlB;;AAAA,kDAIS,IAAI9E,qBAAJ,CAA0BuE,MAA1B,EAAkCG,OAAlC,EAA2ClE,MAAM,CAACD,MAAlD,CAJT;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetSchema} from '../schema/schema';\nimport {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';\nimport {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from 
'../parquet-thrift';\nimport {\n ParquetBuffer,\n ParquetCompression,\n ParquetData,\n PrimitiveType,\n ParquetOptions\n} from '../schema/declare';\nimport {fstat, fopen, fread, fclose} from '../utils/file-utils';\nimport {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';\nimport {decodeDataPages, decodePage} from './decoders';\n\nconst DEFAULT_DICTIONARY_SIZE = 1e6;\n\n/**\n * The parquet envelope reader allows direct, unbuffered access to the individual\n * sections of the parquet file, namely the header, footer and the row groups.\n * This class is intended for advanced/internal users; if you just want to retrieve\n * rows from a parquet file use the ParquetReader instead\n */\nexport class ParquetEnvelopeReader {\n public read: (position: number, length: number) => Promise<Buffer>;\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n public close: () => Promise<void>;\n public fileSize: number;\n public defaultDictionarySize: number;\n\n static async openFile(filePath: string): Promise<ParquetEnvelopeReader> {\n const fileStat = await fstat(filePath);\n const fileDescriptor = await fopen(filePath);\n\n const readFn = fread.bind(undefined, fileDescriptor);\n const closeFn = fclose.bind(undefined, fileDescriptor);\n\n return new ParquetEnvelopeReader(readFn, closeFn, fileStat.size);\n }\n\n static async openBuffer(buffer: Buffer): Promise<ParquetEnvelopeReader> {\n const readFn = (position: number, length: number) =>\n Promise.resolve(buffer.slice(position, position + length));\n const closeFn = () => Promise.resolve();\n return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);\n }\n\n constructor(\n read: (position: number, length: number) => Promise<Buffer>,\n close: () => Promise<void>,\n fileSize: number,\n options?: any\n ) {\n this.read = read;\n this.close = close;\n this.fileSize = fileSize;\n this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;\n }\n\n async readHeader(): Promise<void> {\n const buffer = await this.read(0, PARQUET_MAGIC.length);\n\n const magic = buffer.toString();\n switch (magic) {\n case PARQUET_MAGIC:\n break;\n case PARQUET_MAGIC_ENCRYPTED:\n throw new Error('Encrypted parquet file not supported');\n default:\n throw new Error(`Invalid parquet file (magic=${magic})`);\n }\n }\n\n async readRowGroup(\n schema: ParquetSchema,\n rowGroup: RowGroup,\n columnList: string[][]\n ): Promise<ParquetBuffer> {\n const buffer: ParquetBuffer = {\n rowCount: Number(rowGroup.num_rows),\n columnData: {}\n };\n for (const colChunk of rowGroup.columns) {\n const colMetadata = colChunk.meta_data;\n const colKey = colMetadata?.path_in_schema;\n if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {\n continue; // eslint-disable-line no-continue\n }\n buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);\n }\n return buffer;\n }\n\n /**\n * Do reading of parquet file's column chunk\n * @param schema\n * @param colChunk\n */\n async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {\n if (colChunk.file_path !== undefined && colChunk.file_path !== null) {\n throw new Error('external references are not supported');\n }\n\n const field = schema.findField(colChunk.meta_data?.path_in_schema!);\n const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) 
as any;\n\n if (type !== field.primitiveType) {\n throw new Error(`chunk type not matching schema: ${type}`);\n }\n\n const compression: ParquetCompression = getThriftEnum(\n CompressionCodec,\n colChunk.meta_data?.codec!\n ) as any;\n\n const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);\n let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);\n\n if (!colChunk.file_path) {\n pagesSize = Math.min(\n this.fileSize - pagesOffset,\n Number(colChunk.meta_data?.total_compressed_size)\n );\n }\n\n const options: ParquetOptions = {\n type,\n rLevelMax: field.rLevelMax,\n dLevelMax: field.dLevelMax,\n compression,\n column: field,\n numValues: colChunk.meta_data?.num_values,\n dictionary: []\n };\n\n let dictionary;\n\n const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;\n\n if (dictionaryPageOffset) {\n const dictionaryOffset = Number(dictionaryPageOffset);\n // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.\n dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);\n }\n\n dictionary = options.dictionary?.length ? options.dictionary : dictionary;\n const pagesBuf = await this.read(pagesOffset, pagesSize);\n return await decodeDataPages(pagesBuf, {...options, dictionary});\n }\n\n /**\n * Getting dictionary for allows to flatten values by indices.\n * @param dictionaryPageOffset\n * @param options\n * @param pagesOffset\n * @returns\n */\n async getDictionary(\n dictionaryPageOffset: number,\n options: ParquetOptions,\n pagesOffset: number\n ): Promise<string[]> {\n if (dictionaryPageOffset === 0) {\n // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);\n // pagesBuf = await this.read(pagesOffset, dictionarySize);\n\n // In this case we are working with parquet-mr files format. Problem is described below:\n // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding\n // We need to get dictionary page from column chunk if it exists.\n // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.\n return [];\n }\n\n const dictionarySize = Math.min(\n this.fileSize - dictionaryPageOffset,\n this.defaultDictionarySize\n );\n const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);\n\n const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};\n const decodedPage = await decodePage(cursor, options);\n\n return decodedPage.dictionary!;\n }\n\n async readFooter(): Promise<FileMetaData> {\n const trailerLen = PARQUET_MAGIC.length + 4;\n const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);\n\n const magic = trailerBuf.slice(4).toString();\n if (magic !== PARQUET_MAGIC) {\n throw new Error(`Not a valid parquet file (magic=\"${magic})`);\n }\n\n const metadataSize = trailerBuf.readUInt32LE(0);\n const metadataOffset = this.fileSize - metadataSize - trailerLen;\n if (metadataOffset < PARQUET_MAGIC.length) {\n throw new Error(`Invalid metadata size ${metadataOffset}`);\n }\n\n const metadataBuf = await this.read(metadataOffset, metadataSize);\n // let metadata = new parquet_thrift.FileMetaData();\n // parquet_util.decodeThrift(metadata, metadataBuf);\n const {metadata} = decodeFileMetadata(metadataBuf);\n return metadata;\n }\n}\n"],"file":"parquet-envelope-reader.js"}
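The source embedded in the map above shows that ParquetEnvelopeReader is now constructed from a plain (position, length) => Promise<Buffer> read function, a close function, and the file size, so custom transports are straightforward. A minimal sketch of a ranged-HTTP transport built on that constructor; the deep import path, the URL, and the server's support for Range requests are all assumptions:

import {ParquetEnvelopeReader} from '@loaders.gl/parquet/dist/esm/parquetjs/parser/parquet-envelope-reader'; // hypothetical deep import

// Open a remote parquet file by issuing one HTTP Range request per read
async function openRemote(url: string, fileSize: number): Promise<ParquetEnvelopeReader> {
  const read = async (position: number, length: number): Promise<Buffer> => {
    const response = await fetch(url, {
      headers: {Range: `bytes=${position}-${position + length - 1}`}
    });
    return Buffer.from(await response.arrayBuffer());
  };
  const close = async (): Promise<void> => {};
  const envelopeReader = new ParquetEnvelopeReader(read, close, fileSize);
  await envelopeReader.readHeader(); // throws for encrypted files or a bad magic number
  return envelopeReader;
}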
package/dist/es5/parquetjs/parser/parquet-reader.js
@@ -0,0 +1,413 @@
+"use strict";
+
+var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.ParquetReader = void 0;
+
+var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
+
+var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
+
+var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
+
+var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
+
+var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
+
+var _parquetEnvelopeReader = require("./parquet-envelope-reader");
+
+var _schema = require("../schema/schema");
+
+var _parquetCursor = require("./parquet-cursor");
+
+var _constants = require("../../constants");
+
+var _decoders = require("./decoders");
+
+var _Symbol$asyncIterator;
+
+function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
+
+function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
+
+function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
+
+_Symbol$asyncIterator = Symbol.asyncIterator;
+
+var ParquetReader = function () {
+  function ParquetReader(metadata, envelopeReader) {
+    (0, _classCallCheck2.default)(this, ParquetReader);
+    (0, _defineProperty2.default)(this, "metadata", void 0);
+    (0, _defineProperty2.default)(this, "envelopeReader", void 0);
+    (0, _defineProperty2.default)(this, "schema", void 0);
+
+    if (metadata.version !== _constants.PARQUET_VERSION) {
+      throw new Error('invalid parquet version');
+    }
+
+    this.metadata = metadata;
+    this.envelopeReader = envelopeReader;
+    var root = this.metadata.schema[0];
+
+    var _decodeSchema = (0, _decoders.decodeSchema)(this.metadata.schema, 1, root.num_children),
+        schema = _decodeSchema.schema;
+
+    this.schema = new _schema.ParquetSchema(schema);
+  }
+
+  (0, _createClass2.default)(ParquetReader, [{
+    key: "close",
+    value: function () {
+      var _close = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee() {
+        return _regenerator.default.wrap(function _callee$(_context) {
+          while (1) {
+            switch (_context.prev = _context.next) {
+              case 0:
+                _context.next = 2;
+                return this.envelopeReader.close();
+
+              case 2:
+              case "end":
+                return _context.stop();
+            }
+          }
+        }, _callee, this);
+      }));
+
+      function close() {
+        return _close.apply(this, arguments);
+      }
+
+      return close;
+    }()
+  }, {
+    key: "getCursor",
+    value: function getCursor(columnList) {
+      if (!columnList) {
+        columnList = [];
+      }
+
+      columnList = columnList.map(function (x) {
+        return Array.isArray(x) ? x : [x];
+      });
+      return new _parquetCursor.ParquetCursor(this.metadata, this.envelopeReader, this.schema, columnList);
+    }
+  }, {
+    key: "getRowCount",
+    value: function getRowCount() {
+      return Number(this.metadata.num_rows);
+    }
+  }, {
+    key: "getSchema",
+    value: function getSchema() {
+      return this.schema;
+    }
+  }, {
+    key: "getMetadata",
+    value: function getMetadata() {
+      var md = {};
+
+      var _iterator = _createForOfIteratorHelper(this.metadata.key_value_metadata),
+          _step;
+
+      try {
+        for (_iterator.s(); !(_step = _iterator.n()).done;) {
+          var kv = _step.value;
+          md[kv.key] = kv.value;
+        }
+      } catch (err) {
+        _iterator.e(err);
+      } finally {
+        _iterator.f();
+      }
+
+      return md;
+    }
+  }, {
+    key: _Symbol$asyncIterator,
+    value: function value() {
+      return this.getCursor()[Symbol.asyncIterator]();
+    }
+  }], [{
+    key: "openBlob",
+    value: function () {
+      var _openBlob = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee4(blob) {
+        var readFn, closeFn, size, envelopeReader, metadata;
+        return _regenerator.default.wrap(function _callee4$(_context4) {
+          while (1) {
+            switch (_context4.prev = _context4.next) {
+              case 0:
+                readFn = function () {
+                  var _ref = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee2(start, length) {
+                    var arrayBuffer;
+                    return _regenerator.default.wrap(function _callee2$(_context2) {
+                      while (1) {
+                        switch (_context2.prev = _context2.next) {
+                          case 0:
+                            _context2.next = 2;
+                            return blob.slice(start, start + length).arrayBuffer();
+
+                          case 2:
+                            arrayBuffer = _context2.sent;
+                            return _context2.abrupt("return", Buffer.from(arrayBuffer));
+
+                          case 4:
+                          case "end":
+                            return _context2.stop();
+                        }
+                      }
+                    }, _callee2);
+                  }));
+
+                  return function readFn(_x2, _x3) {
+                    return _ref.apply(this, arguments);
+                  };
+                }();
+
+                closeFn = function () {
+                  var _ref2 = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee3() {
+                    return _regenerator.default.wrap(function _callee3$(_context3) {
+                      while (1) {
+                        switch (_context3.prev = _context3.next) {
+                          case 0:
+                          case "end":
+                            return _context3.stop();
+                        }
+                      }
+                    }, _callee3);
+                  }));
+
+                  return function closeFn() {
+                    return _ref2.apply(this, arguments);
+                  };
+                }();
+
+                size = blob.size;
+                envelopeReader = new _parquetEnvelopeReader.ParquetEnvelopeReader(readFn, closeFn, size);
+                _context4.prev = 4;
+                _context4.next = 7;
+                return envelopeReader.readHeader();
+
+              case 7:
+                _context4.next = 9;
+                return envelopeReader.readFooter();
+
+              case 9:
+                metadata = _context4.sent;
+                return _context4.abrupt("return", new ParquetReader(metadata, envelopeReader));
+
+              case 13:
+                _context4.prev = 13;
+                _context4.t0 = _context4["catch"](4);
+                _context4.next = 17;
+                return envelopeReader.close();
+
+              case 17:
+                throw _context4.t0;
+
+              case 18:
+              case "end":
+                return _context4.stop();
+            }
+          }
+        }, _callee4, null, [[4, 13]]);
+      }));
+
+      function openBlob(_x) {
+        return _openBlob.apply(this, arguments);
+      }
+
+      return openBlob;
+    }()
+  }, {
+    key: "openArrayBuffer",
+    value: function () {
+      var _openArrayBuffer = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee7(arrayBuffer) {
+        var readFn, closeFn, size, envelopeReader, metadata;
+        return _regenerator.default.wrap(function _callee7$(_context7) {
+          while (1) {
+            switch (_context7.prev = _context7.next) {
+              case 0:
+                readFn = function () {
+                  var _ref3 = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee5(start, length) {
+                    return _regenerator.default.wrap(function _callee5$(_context5) {
+                      while (1) {
+                        switch (_context5.prev = _context5.next) {
+                          case 0:
+                            return _context5.abrupt("return", Buffer.from(arrayBuffer, start, length));
+
+                          case 1:
+                          case "end":
+                            return _context5.stop();
+                        }
+                      }
+                    }, _callee5);
+                  }));
+
+                  return function readFn(_x5, _x6) {
+                    return _ref3.apply(this, arguments);
+                  };
+                }();
+
+                closeFn = function () {
+                  var _ref4 = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee6() {
+                    return _regenerator.default.wrap(function _callee6$(_context6) {
+                      while (1) {
+                        switch (_context6.prev = _context6.next) {
+                          case 0:
+                          case "end":
+                            return _context6.stop();
+                        }
+                      }
+                    }, _callee6);
+                  }));
+
+                  return function closeFn() {
+                    return _ref4.apply(this, arguments);
+                  };
+                }();
+
+                size = arrayBuffer.byteLength;
+                envelopeReader = new _parquetEnvelopeReader.ParquetEnvelopeReader(readFn, closeFn, size);
+                _context7.prev = 4;
+                _context7.next = 7;
+                return envelopeReader.readHeader();
+
+              case 7:
+                _context7.next = 9;
+                return envelopeReader.readFooter();
+
+              case 9:
+                metadata = _context7.sent;
+                return _context7.abrupt("return", new ParquetReader(metadata, envelopeReader));
+
+              case 13:
+                _context7.prev = 13;
+                _context7.t0 = _context7["catch"](4);
+                _context7.next = 17;
+                return envelopeReader.close();
+
+              case 17:
+                throw _context7.t0;
+
+              case 18:
+              case "end":
+                return _context7.stop();
+            }
+          }
+        }, _callee7, null, [[4, 13]]);
+      }));
+
+      function openArrayBuffer(_x4) {
+        return _openArrayBuffer.apply(this, arguments);
+      }
+
+      return openArrayBuffer;
+    }()
+  }, {
+    key: "openFile",
+    value: function () {
+      var _openFile = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee8(filePath) {
+        var envelopeReader, metadata;
+        return _regenerator.default.wrap(function _callee8$(_context8) {
+          while (1) {
+            switch (_context8.prev = _context8.next) {
+              case 0:
+                _context8.next = 2;
+                return _parquetEnvelopeReader.ParquetEnvelopeReader.openFile(filePath);
+
+              case 2:
+                envelopeReader = _context8.sent;
+                _context8.prev = 3;
+                _context8.next = 6;
+                return envelopeReader.readHeader();
+
+              case 6:
+                _context8.next = 8;
+                return envelopeReader.readFooter();
+
+              case 8:
+                metadata = _context8.sent;
+                return _context8.abrupt("return", new ParquetReader(metadata, envelopeReader));
+
+              case 12:
+                _context8.prev = 12;
+                _context8.t0 = _context8["catch"](3);
+                _context8.next = 16;
+                return envelopeReader.close();
+
+              case 16:
+                throw _context8.t0;
+
+              case 17:
+              case "end":
+                return _context8.stop();
+            }
+          }
+        }, _callee8, null, [[3, 12]]);
+      }));
+
+      function openFile(_x7) {
+        return _openFile.apply(this, arguments);
+      }
+
+      return openFile;
+    }()
+  }, {
+    key: "openBuffer",
+    value: function () {
+      var _openBuffer = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee9(buffer) {
+        var envelopeReader, metadata;
+        return _regenerator.default.wrap(function _callee9$(_context9) {
+          while (1) {
+            switch (_context9.prev = _context9.next) {
+              case 0:
+                _context9.next = 2;
+                return _parquetEnvelopeReader.ParquetEnvelopeReader.openBuffer(buffer);
+
+              case 2:
+                envelopeReader = _context9.sent;
+                _context9.prev = 3;
+                _context9.next = 6;
+                return envelopeReader.readHeader();
+
+              case 6:
+                _context9.next = 8;
+                return envelopeReader.readFooter();
+
+              case 8:
+                metadata = _context9.sent;
+                return _context9.abrupt("return", new ParquetReader(metadata, envelopeReader));
+
+              case 12:
+                _context9.prev = 12;
+                _context9.t0 = _context9["catch"](3);
+                _context9.next = 16;
+                return envelopeReader.close();
+
+              case 16:
+                throw _context9.t0;
+
+              case 17:
+              case "end":
+                return _context9.stop();
+            }
+          }
+        }, _callee9, null, [[3, 12]]);
+      }));
+
+      function openBuffer(_x8) {
+        return _openBuffer.apply(this, arguments);
+      }
+
+      return openBuffer;
+    }()
+  }]);
+  return ParquetReader;
+}();
+
+exports.ParquetReader = ParquetReader;
+//# sourceMappingURL=parquet-reader.js.map
package/dist/es5/parquetjs/parser/parquet-reader.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/parquetjs/parser/parquet-reader.ts"],"names":["Symbol","asyncIterator","ParquetReader","metadata","envelopeReader","version","PARQUET_VERSION","Error","root","schema","num_children","ParquetSchema","close","columnList","map","x","Array","isArray","ParquetCursor","Number","num_rows","md","key_value_metadata","kv","key","value","getCursor","blob","readFn","start","length","slice","arrayBuffer","Buffer","from","closeFn","size","ParquetEnvelopeReader","readHeader","readFooter","byteLength","filePath","openFile","buffer","openBuffer"],"mappings":";;;;;;;;;;;;;;;;;;;AACA;;AAEA;;AACA;;AACA;;AACA;;;;;;;;;;wBAyKGA,MAAM,CAACC,a;;IAhKGC,a;AA8EX,yBAAYC,QAAZ,EAAoCC,cAApC,EAA2E;AAAA;AAAA;AAAA;AAAA;;AACzE,QAAID,QAAQ,CAACE,OAAT,KAAqBC,0BAAzB,EAA0C;AACxC,YAAM,IAAIC,KAAJ,CAAU,yBAAV,CAAN;AACD;;AAED,SAAKJ,QAAL,GAAgBA,QAAhB;AACA,SAAKC,cAAL,GAAsBA,cAAtB;AACA,QAAMI,IAAI,GAAG,KAAKL,QAAL,CAAcM,MAAd,CAAqB,CAArB,CAAb;;AACA,wBAAiB,4BAAa,KAAKN,QAAL,CAAcM,MAA3B,EAAmC,CAAnC,EAAsCD,IAAI,CAACE,YAA3C,CAAjB;AAAA,QAAOD,MAAP,iBAAOA,MAAP;;AACA,SAAKA,MAAL,GAAc,IAAIE,qBAAJ,CAAkBF,MAAlB,CAAd;AACD;;;;;6EAMD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBACQ,KAAKL,cAAL,CAAoBQ,KAApB,EADR;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;WAmBA,mBAAUC,UAAV,EAAyE;AACvE,UAAI,CAACA,UAAL,EAAiB;AAEfA,QAAAA,UAAU,GAAG,EAAb;AACD;;AAGDA,MAAAA,UAAU,GAAGA,UAAU,CAACC,GAAX,CAAe,UAACC,CAAD;AAAA,eAAQC,KAAK,CAACC,OAAN,CAAcF,CAAd,IAAmBA,CAAnB,GAAuB,CAACA,CAAD,CAA/B;AAAA,OAAf,CAAb;AAEA,aAAO,IAAIG,4BAAJ,CACL,KAAKf,QADA,EAEL,KAAKC,cAFA,EAGL,KAAKK,MAHA,EAILI,UAJK,CAAP;AAMD;;;WAMD,uBAAsB;AACpB,aAAOM,MAAM,CAAC,KAAKhB,QAAL,CAAciB,QAAf,CAAb;AACD;;;WAKD,qBAA2B;AACzB,aAAO,KAAKX,MAAZ;AACD;;;WAKD,uBAAsC;AACpC,UAAMY,EAA0B,GAAG,EAAnC;;AADoC,iDAEnB,KAAKlB,QAAL,CAAcmB,kBAFK;AAAA;;AAAA;AAEpC,4DAAoD;AAAA,cAAzCC,EAAyC;AAClDF,UAAAA,EAAE,CAACE,EAAE,CAACC,GAAJ,CAAF,GAAaD,EAAE,CAACE,KAAhB;AACD;AAJmC;AAAA;AAAA;AAAA;AAAA;;AAKpC,aAAOJ,EAAP;AACD;;;WAMD,iBAA2C;AACzC,aAAO,KAAKK,SAAL,GAAiB1B,MAAM,CAACC,aAAxB,GAAP;AACD;;;;gFA9JD,kBAAyB0B,IAAzB;AAAA;AAAA;AAAA;AAAA;AAAA;AACQC,gBAAAA,MADR;AAAA,uFACiB,kBAAOC,KAAP,EAAsBC,MAAtB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mCACaH,IAAI,CAACI,KAAL,CAAWF,KAAX,EAAkBA,KAAK,GAAGC,MAA1B,EAAkCE,WAAlC,EADb;;AAAA;AACPA,4BAAAA,WADO;AAAA,8DAENC,MAAM,CAACC,IAAP,CAAYF,WAAZ,CAFM;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBADjB;;AAAA,kCACQJ,MADR;AAAA;AAAA;AAAA;;AAKQO,gBAAAA,OALR;AAAA,wFAKkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBALlB;;AAAA,kCAKQA,OALR;AAAA;AAAA;AAAA;;AAMQC,gBAAAA,IANR,GAMeT,IAAI,CAACS,IANpB;AAOQhC,gBAAAA,cAPR,GAOyB,IAAIiC,4CAAJ,CAA0BT,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAPzB;AAAA;AAAA;AAAA,uBASUhC,cAAc,CAACkC,UAAf,EATV;;AAAA;AAAA;AAAA,uBAU2BlC,cAAc,CAACmC,UAAf,EAV3B;;AAAA;AAUUpC,gBAAAA,QAVV;AAAA,kDAWW,IAAID,aAAJ,CAAkBC,QAAlB,EAA4BC,cAA5B,CAXX;;AAAA;AAAA;AAAA;AAAA;AAAA,uBAaUA,cAAc,CAACQ,KAAf,EAbV;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;uFAqBA,kBAAgCoB,WAAhC;AAAA;AAAA;AAAA;AAAA;AAAA;AACQJ,gBAAAA,MADR;AAAA,wFACiB,kBAAOC,KAAP,EAAsBC,MAAtB;AAAA;AAAA;AAAA;AAAA;AAAA,8DAAyCG,MAAM,CAACC,IAAP,CAAYF,WAAZ,EAAyBH,KAAzB,EAAgCC,MAAhC,CAAzC;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBADjB;;AAAA,kCACQF,MADR;AAAA;AAAA;AAAA;;AAEQO,gBAAAA,OAFR;AAAA,wFAEkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAFlB;;AAAA,kCAEQA,OAFR;AAAA;AAAA;AAAA;;AAGQC,gBAAAA,IAHR,GAGeJ,WAAW,CAACQ,UAH3B;AAIQpC,gBAAAA,cAJR,GAIyB,IAAIiC,4CAAJ,CAA0BT,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAJzB;AAAA;AAAA;AAAA,uBAMUhC,cAAc,CAACkC,UAAf,EANV;;AAAA;AAAA;AAAA,uBAO2BlC,cAAc,CAACmC,UAAf,EAP3B;;AAAA;AAOUpC,gBAAAA,QAPV;AAAA,kDAQW,IAAID,aAAJ,CAAkBC,QAAlB,EAA4BC,cAA5B
,CARX;;AAAA;AAAA;AAAA;AAAA;AAAA,uBAUUA,cAAc,CAACQ,KAAf,EAVV;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;gFAmBA,kBAAyB6B,QAAzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAC+BJ,6CAAsBK,QAAtB,CAA+BD,QAA/B,CAD/B;;AAAA;AACQrC,gBAAAA,cADR;AAAA;AAAA;AAAA,uBAGUA,cAAc,CAACkC,UAAf,EAHV;;AAAA;AAAA;AAAA,uBAI2BlC,cAAc,CAACmC,UAAf,EAJ3B;;AAAA;AAIUpC,gBAAAA,QAJV;AAAA,kDAKW,IAAID,aAAJ,CAAqBC,QAArB,EAA+BC,cAA/B,CALX;;AAAA;AAAA;AAAA;AAAA;AAAA,uBAOUA,cAAc,CAACQ,KAAf,EAPV;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O;;;;;;;;;;;kFAYA,kBAA2B+B,MAA3B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAC+BN,6CAAsBO,UAAtB,CAAiCD,MAAjC,CAD/B;;AAAA;AACQvC,gBAAAA,cADR;AAAA;AAAA;AAAA,uBAGUA,cAAc,CAACkC,UAAf,EAHV;;AAAA;AAAA;AAAA,uBAI2BlC,cAAc,CAACmC,UAAf,EAJ3B;;AAAA;AAIUpC,gBAAAA,QAJV;AAAA,kDAKW,IAAID,aAAJ,CAAqBC,QAArB,EAA+BC,cAA/B,CALX;;AAAA;AAAA;AAAA;AAAA;AAAA,uBAOUA,cAAc,CAACQ,KAAf,EAPV;;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,O","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetEnvelopeReader} from './parquet-envelope-reader';\nimport {FileMetaData} from '../parquet-thrift';\nimport {ParquetSchema} from '../schema/schema';\nimport {ParquetCursor} from './parquet-cursor';\nimport {PARQUET_VERSION} from '../../constants';\nimport {decodeSchema} from './decoders';\n\n/**\n * A parquet reader allows retrieving the rows from a parquet file in order.\n * The basic usage is to create a reader and then retrieve a cursor/iterator\n * which allows you to consume row after row until all rows have been read. It is\n * important that you call close() after you are finished reading the file to\n * avoid leaking file descriptors.\n */\nexport class ParquetReader<T> implements AsyncIterable<T> {\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openBlob<T>(blob: Blob): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => {\n const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();\n return Buffer.from(arrayBuffer);\n };\n const closeFn = async () => {};\n const size = blob.size;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => Buffer.from(arrayBuffer, start, length);\n const closeFn = async () => {};\n const size = arrayBuffer.byteLength;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * Open the parquet file pointed to by the specified path and return a new\n * parquet reader\n */\n static async openFile<T>(filePath: string): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openFile(filePath);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await 
envelopeReader.close();\n throw err;\n }\n }\n\n static async openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n public metadata: FileMetaData;\n public envelopeReader: ParquetEnvelopeReader;\n public schema: ParquetSchema;\n\n /**\n * Create a new parquet reader from the file metadata and an envelope reader.\n * It is not recommended to call this constructor directly except for advanced\n * and internal use cases. Consider using one of the open{File,Buffer} methods\n * instead\n */\n constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader) {\n if (metadata.version !== PARQUET_VERSION) {\n throw new Error('invalid parquet version');\n }\n\n this.metadata = metadata;\n this.envelopeReader = envelopeReader;\n const root = this.metadata.schema[0];\n const {schema} = decodeSchema(this.metadata.schema, 1, root.num_children!);\n this.schema = new ParquetSchema(schema);\n }\n\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n async close(): Promise<void> {\n await this.envelopeReader.close();\n // this.envelopeReader = null;\n // this.metadata = null;\n }\n\n /**\n * Return a cursor to the file. You may open more than one cursor and use\n * them concurrently. All cursors become invalid once close() is called on\n * the reader object.\n *\n * The required_columns parameter controls which columns are actually read\n * from disk. An empty array or no value implies all columns. A list of column\n * names means that only those columns should be loaded from disk.\n */\n getCursor(): ParquetCursor<T>;\n // @ts-ignore\n getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;\n getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;\n getCursor(columnList?: (string | string[])[]): ParquetCursor<Partial<T>> {\n if (!columnList) {\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = [];\n }\n\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));\n\n return new ParquetCursor<T>(\n this.metadata,\n this.envelopeReader,\n this.schema,\n columnList as string[][]\n );\n }\n\n /**\n * Return the number of rows in this file. Note that the number of rows is\n * not neccessarily equal to the number of rows in each column.\n */\n getRowCount(): number {\n return Number(this.metadata.num_rows);\n }\n\n /**\n * Returns the ParquetSchema for this file\n */\n getSchema(): ParquetSchema {\n return this.schema;\n }\n\n /**\n * Returns the user (key/value) metadata for this file\n */\n getMetadata(): Record<string, string> {\n const md: Record<string, string> = {};\n for (const kv of this.metadata.key_value_metadata!) {\n md[kv.key] = kv.value!;\n }\n return md;\n }\n\n /**\n * Implement AsyncIterable\n */\n // tslint:disable-next-line:function-name\n [Symbol.asyncIterator](): AsyncIterator<T> {\n return this.getCursor()[Symbol.asyncIterator]();\n }\n}\n"],"file":"parquet-reader.js"}
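The reader source embedded above documents that getCursor() accepts an optional column list, where an empty list (or no argument) loads every column and named columns restrict what is read from disk. A short sketch of the typed projection overload; the row shape and data source are hypothetical, and the import again assumes a package-index re-export:

import {ParquetReader} from '@loaders.gl/parquet'; // assumption: re-exported via src/index.ts

type Row = {id: number; name: string; geometry: string}; // hypothetical row shape

async function readTwoColumns(buffer: Buffer): Promise<void> {
  const reader = await ParquetReader.openBuffer<Row>(buffer);
  // typed overload: yields ParquetCursor<Pick<Row, 'id' | 'name'>>, so only those columns are decoded
  const cursor = reader.getCursor(['id', 'name']);
  for await (const row of cursor) {
    console.log(row.id, row.name);
  }
  await reader.close();
}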
package/dist/es5/parquetjs/schema/declare.js.map
@@ -1 +1 @@
-{"version":3,"sources":["../../../../src/parquetjs/schema/declare.ts"],"names":["ParquetBuffer","rowCount","columnData"],"mappings":";;;;;;;;;;;;;
{"version":3,"sources":["../../../../src/parquetjs/schema/declare.ts"],"names":["ParquetBuffer","rowCount","columnData"],"mappings":";;;;;;;;;;;;;IAmIaA,a,GAGX,yBAAgF;AAAA,MAApEC,QAAoE,uEAAjD,CAAiD;AAAA,MAA9CC,UAA8C,uEAAJ,EAAI;AAAA;AAAA;AAAA;AAC9E,OAAKD,QAAL,GAAgBA,QAAhB;AACA,OAAKC,UAAL,GAAkBA,UAAlB;AACD,C","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport Int64 from 'node-int64';\nimport type {PageHeader} from '../parquet-thrift';\n\nexport type ParquetCodec = 'PLAIN' | 'RLE' | 'PLAIN_DICTIONARY';\nexport type ParquetCompression =\n | 'UNCOMPRESSED'\n | 'GZIP'\n | 'SNAPPY'\n | 'LZO'\n | 'BROTLI'\n | 'LZ4'\n | 'LZ4_RAW'\n | 'ZSTD';\nexport type RepetitionType = 'REQUIRED' | 'OPTIONAL' | 'REPEATED';\nexport type ParquetType = PrimitiveType | OriginalType;\n\n/**\n * Physical type\n */\nexport type PrimitiveType =\n // Base Types\n | 'BOOLEAN' // 0\n | 'INT32' // 1\n | 'INT64' // 2\n | 'INT96' // 3\n | 'FLOAT' // 4\n | 'DOUBLE' // 5\n | 'BYTE_ARRAY' // 6,\n | 'FIXED_LEN_BYTE_ARRAY'; // 7\n\n/**\n * Logical type\n */\nexport type OriginalType =\n // Converted Types\n | 'UTF8' // 0\n // | 'MAP' // 1\n // | 'MAP_KEY_VALUE' // 2\n // | 'LIST' // 3\n // | 'ENUM' // 4\n // | 'DECIMAL' // 5\n | 'DECIMAL_INT32' // 5\n | 'DECIMAL_INT64' // 5\n | 'DECIMAL_BYTE_ARRAY' // 5\n | 'DECIMAL_FIXED_LEN_BYTE_ARRAY' // 5\n | 'DATE' // 6\n | 'TIME_MILLIS' // 7\n | 'TIME_MICROS' // 8\n | 'TIMESTAMP_MILLIS' // 9\n | 'TIMESTAMP_MICROS' // 10\n | 'UINT_8' // 11\n | 'UINT_16' // 12\n | 'UINT_32' // 13\n | 'UINT_64' // 14\n | 'INT_8' // 15\n | 'INT_16' // 16\n | 'INT_32' // 17\n | 'INT_64' // 18\n | 'JSON' // 19\n | 'BSON' // 20\n | 'INTERVAL'; // 21\n\nexport type ParquetDictionary = string[];\n\nexport interface SchemaDefinition {\n [string: string]: FieldDefinition;\n}\n\nexport interface FieldDefinition {\n type?: ParquetType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n optional?: boolean;\n repeated?: boolean;\n fields?: SchemaDefinition;\n}\n\nexport interface ParquetField {\n name: string;\n path: string[];\n key: string;\n primitiveType?: PrimitiveType;\n originalType?: OriginalType;\n repetitionType: RepetitionType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n rLevelMax: number;\n dLevelMax: number;\n isNested?: boolean;\n fieldCount?: number;\n fields?: Record<string, ParquetField>;\n}\n\nexport interface ParquetOptions {\n type: ParquetType;\n rLevelMax: number;\n dLevelMax: number;\n compression: ParquetCompression;\n column: ParquetField;\n numValues?: Int64;\n dictionary?: ParquetDictionary;\n}\n\nexport interface ParquetData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n pageHeaders: PageHeader[];\n}\n\nexport interface ParquetPageData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n dictionary?: ParquetDictionary;\n pageHeader: PageHeader;\n}\n\nexport interface ParquetRecord {\n [key: string]: any;\n}\n\nexport class ParquetBuffer {\n rowCount: number;\n columnData: Record<string, ParquetData>;\n constructor(rowCount: number = 0, columnData: Record<string, ParquetData> = {}) {\n this.rowCount = rowCount;\n this.columnData = columnData;\n }\n}\n"],"file":"declare.js"}
package/dist/es5/parquetjs/schema/schema.js
@@ -177,6 +177,8 @@ function buildFields(schema, rLevelParentMax, dLevelParentMax, path) {
     encoding: opts.encoding,
     compression: opts.compression,
     typeLength: opts.typeLength || typeDef.typeLength,
+    presision: opts.presision,
+    scale: opts.scale,
     rLevelMax: rLevelMax,
     dLevelMax: dLevelMax
   };
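The two added lines thread decimal metadata through buildFields() onto each leaf field: "presision" (the spelling used throughout the library) and "scale" come from the FieldDefinition options declared in schema/declare.ts. A hedged example of a schema definition that exercises the new passthrough; that the DECIMAL_* logical types registered in schema/types.ts accept exactly these options is an assumption based on this diff:

import {ParquetSchema} from '@loaders.gl/parquet'; // assumption: exported from the package entry point

// Note: 'presision' (sic) is the identifier the library itself uses
const schema = new ParquetSchema({
  id: {type: 'INT64'},
  price: {type: 'DECIMAL_INT64', presision: 18, scale: 2, optional: true}
});

// After this change, buildFields() copies both options onto the built field:
// schema.fields.price.presision === 18 && schema.fields.price.scale === 2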
package/dist/es5/parquetjs/schema/schema.js.map
@@ -1 +1 @@
{"version":3,"sources":["../../../../src/parquetjs/schema/schema.ts"],"names":["ParquetSchema","schema","fields","buildFields","fieldList","listFields","path","split","slice","n","length","shift","branch","push","record","buffer","type","setCompress","name","node","compression","rLevelParentMax","dLevelParentMax","opts","required","optional","repeated","Boolean","rLevelMax","dLevelMax","repetitionType","cpath","concat","key","join","isNested","fieldCount","Object","keys","typeDef","PARQUET_LOGICAL_TYPES","Error","encoding","PARQUET_CODECS","PARQUET_COMPRESSION_METHODS","primitiveType","originalType","typeLength","list","k"],"mappings":";;;;;;;;;;;;;;;AAEA;;AACA;;AAUA;;AACA;;IAKaA,a;AAQX,yBAAYC,MAAZ,EAAsC;AAAA;AAAA;AAAA;AAAA;AACpC,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKC,MAAL,GAAcC,WAAW,CAACF,MAAD,EAAS,CAAT,EAAY,CAAZ,EAAe,EAAf,CAAzB;AACA,SAAKG,SAAL,GAAiBC,UAAU,CAAC,KAAKH,MAAN,CAA3B;AACD;;;;WAKD,mBAAUI,IAAV,EAAiD;AAC/C,UAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,QAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD,OAHD,MAGO;AAELD,QAAAA,IAAI,GAAGA,IAAI,CAACE,KAAL,CAAW,CAAX,CAAP;AACD;;AAED,UAAIC,CAAC,GAAG,KAAKP,MAAb;;AACA,aAAOI,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCF,QAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWJ,MAAf;AACD;;AAED,aAAOO,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAR;AACD;;;WAKD,yBAAgBA,IAAhB,EAAyD;AACvD,UAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,QAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD;;AACD,UAAMK,MAAsB,GAAG,EAA/B;AACA,UAAIH,CAAC,GAAG,KAAKP,MAAb;;AACA,aAAOI,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCC,QAAAA,MAAM,CAACC,IAAP,CAAYJ,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAb;;AACA,YAAIA,IAAI,CAACI,MAAL,GAAc,CAAlB,EAAqB;AACnBD,UAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWJ,MAAf;AACD;AACF;;AACD,aAAOU,MAAP;AACD;;;WAED,qBAAYE,MAAZ,EAAmCC,MAAnC,EAAgE;AAC9D,8BAAY,IAAZ,EAAkBD,MAAlB,EAA0BC,MAA1B;AACD;;;WAED,4BAAmBA,MAAnB,EAA2D;AACzD,aAAO,+BAAmB,IAAnB,EAAyBA,MAAzB,CAAP;AACD;;;WAED,kBAASC,IAAT,EAAyC;AACvCC,MAAAA,WAAW,CAAC,KAAKhB,MAAN,EAAce,IAAd,CAAX;AACAC,MAAAA,WAAW,CAAC,KAAKf,MAAN,EAAcc,IAAd,CAAX;AACA,aAAO,IAAP;AACD;;;WAED,kBAAwB;AACtB,aAAO,wBAAY,IAAZ,CAAP;AACD;;;;;;;AAGH,SAASC,WAAT,CAAqBhB,MAArB,EAAkCe,IAAlC,EAA4D;AAC1D,OAAK,IAAME,IAAX,IAAmBjB,MAAnB,EAA2B;AACzB,QAAMkB,IAAI,GAAGlB,MAAM,CAACiB,IAAD,CAAnB;;AACA,QAAIC,IAAI,CAACjB,MAAT,EAAiB;AACfe,MAAAA,WAAW,CAACE,IAAI,CAACjB,MAAN,EAAcc,IAAd,CAAX;AACD,KAFD,MAEO;AACLG,MAAAA,IAAI,CAACC,WAAL,GAAmBJ,IAAnB;AACD;AACF;AACF;;AAGD,SAASb,WAAT,CACEF,MADF,EAEEoB,eAFF,EAGEC,eAHF,EAIEhB,IAJF,EAKgC;AAC9B,MAAMF,SAAuC,GAAG,EAAhD;;AAEA,OAAK,IAAMc,IAAX,IAAmBjB,MAAnB,EAA2B;AACzB,QAAMsB,IAAI,GAAGtB,MAAM,CAACiB,IAAD,CAAnB;AAGA,QAAMM,QAAQ,GAAG,CAACD,IAAI,CAACE,QAAvB;AACA,QAAMC,QAAQ,GAAGC,OAAO,CAACJ,IAAI,CAACG,QAAN,CAAxB;AACA,QAAIE,SAAS,GAAGP,eAAhB;AACA,QAAIQ,SAAS,GAAGP,eAAhB;AAEA,QAAIQ,cAA8B,GAAG,UAArC;;AACA,QAAI,CAACN,QAAL,EAAe;AACbM,MAAAA,cAAc,GAAG,UAAjB;AACAD,MAAAA,SAAS;AACV;;AACD,QAAIH,QAAJ,EAAc;AACZI,MAAAA,cAAc,GAAG,UAAjB;AACAF,MAAAA,SAAS;AACT,UAAIJ,QAAJ,EAAcK,SAAS;AACxB;;AAGD,QAAIN,IAAI,CAACrB,MAAT,EAAiB;AACf,UAAM6B,MAAK,GAAGzB,IAAI,CAAC0B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;;AACAd,MAAAA,SAAS,CAACc,IAAD,CAAT,GAAkB;AAChBA,QAAAA,IAAI,EAAJA,IADgB;AAEhBZ,QAAAA,IAAI,EAAEyB,MAFU;AAGhBE,QAAAA,GAAG,EAAEF,MAAK,CAACG,IAAN,EAHW;AAIhBJ,QAAAA,cAAc,EAAdA,cAJgB;AAKhBF,QAAAA,SAAS,EAATA,SALgB;AAMhBC,QAAAA,SAAS,EAATA,SANgB;AAOhBM,QAAAA,QAAQ,EAAE,IAPM;AAQhBC,QAAAA,UAAU,EAAEC,MAAM,CAACC,IAAP,CAAYf,IAAI,CAACrB,MAAjB,EAAyBQ,MARrB;AAShBR,QAAAA,MAAM,EAAEC,WAAW,CAACoB,IAAI,CAACrB,MAAN,EAAc0B,SAAd,EAAyBC,SAAzB,EAAoCE,MAApC;AATH,OAAlB;AA
WA;AACD;;AAED,QAAMQ,OAAY,GAAGC,6BAAsBjB,IAAI,CAACP,IAA3B,CAArB;;AACA,QAAI,CAACuB,OAAL,EAAc;AACZ,YAAM,IAAIE,KAAJ,iCAAmClB,IAAI,CAACP,IAAxC,EAAN;AACD;;AAEDO,IAAAA,IAAI,CAACmB,QAAL,GAAgBnB,IAAI,CAACmB,QAAL,IAAiB,OAAjC;;AACA,QAAI,EAAEnB,IAAI,CAACmB,QAAL,IAAiBC,sBAAnB,CAAJ,EAAwC;AACtC,YAAM,IAAIF,KAAJ,yCAA2ClB,IAAI,CAACmB,QAAhD,EAAN;AACD;;AAEDnB,IAAAA,IAAI,CAACH,WAAL,GAAmBG,IAAI,CAACH,WAAL,IAAoB,cAAvC;;AACA,QAAI,EAAEG,IAAI,CAACH,WAAL,IAAoBwB,wCAAtB,CAAJ,EAAwD;AACtD,YAAM,IAAIH,KAAJ,2CAA6ClB,IAAI,CAACH,WAAlD,EAAN;AACD;;AAGD,QAAMW,KAAK,GAAGzB,IAAI,CAAC0B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAd,IAAAA,SAAS,CAACc,IAAD,CAAT,GAAkB;AAChBA,MAAAA,IAAI,EAAJA,IADgB;AAEhB2B,MAAAA,aAAa,EAAEN,OAAO,CAACM,aAFP;AAGhBC,MAAAA,YAAY,EAAEP,OAAO,CAACO,YAHN;AAIhBxC,MAAAA,IAAI,EAAEyB,KAJU;AAKhBE,MAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EALW;AAMhBJ,MAAAA,cAAc,EAAdA,cANgB;AAOhBY,MAAAA,QAAQ,EAAEnB,IAAI,CAACmB,QAPC;AAQhBtB,MAAAA,WAAW,EAAEG,IAAI,CAACH,WARF;AAShB2B,MAAAA,UAAU,EAAExB,IAAI,CAACwB,UAAL,IAAmBR,OAAO,CAACQ,UATvB;AAUhBnB,MAAAA,SAAS,EAATA,SAVgB;AAWhBC,MAAAA,SAAS,EAATA;AAXgB,KAAlB;AAaD;;AACD,SAAOzB,SAAP;AACD;;AAED,SAASC,UAAT,CAAoBH,MAApB,EAA0E;AACxE,MAAI8C,IAAoB,GAAG,EAA3B;;AACA,OAAK,IAAMC,CAAX,IAAgB/C,MAAhB,EAAwB;AACtB8C,IAAAA,IAAI,CAACnC,IAAL,CAAUX,MAAM,CAAC+C,CAAD,CAAhB;;AACA,QAAI/C,MAAM,CAAC+C,CAAD,CAAN,CAAUd,QAAd,EAAwB;AACtBa,MAAAA,IAAI,GAAGA,IAAI,CAAChB,MAAL,CAAY3B,UAAU,CAACH,MAAM,CAAC+C,CAAD,CAAN,CAAU/C,MAAX,CAAtB,CAAP;AACD;AACF;;AACD,SAAO8C,IAAP;AACD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {PARQUET_CODECS} from '../codecs';\nimport {PARQUET_COMPRESSION_METHODS} from '../compression';\nimport {\n FieldDefinition,\n ParquetBuffer,\n ParquetCompression,\n ParquetField,\n ParquetRecord,\n RepetitionType,\n SchemaDefinition\n} from './declare';\nimport {materializeRecords, shredBuffer, shredRecord} from './shred';\nimport {PARQUET_LOGICAL_TYPES} from './types';\n\n/**\n * A parquet file schema\n */\nexport class ParquetSchema {\n public schema: Record<string, FieldDefinition>;\n public fields: Record<string, ParquetField>;\n public fieldList: ParquetField[];\n\n /**\n * Create a new schema from a JSON schema definition\n */\n constructor(schema: SchemaDefinition) {\n this.schema = schema;\n this.fields = buildFields(schema, 0, 0, []);\n this.fieldList = listFields(this.fields);\n }\n\n /**\n * Retrieve a field definition\n */\n findField(path: string | string[]): ParquetField {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n } else {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.slice(0); // clone array\n }\n\n let n = this.fields;\n for (; path.length > 1; path.shift()) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n\n return n[path[0]];\n }\n\n /**\n * Retrieve a field definition and all the field's ancestors\n */\n findFieldBranch(path: string | string[]): ParquetField[] {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n }\n const branch: ParquetField[] = [];\n let n = this.fields;\n for (; path.length > 0; path.shift()) {\n branch.push(n[path[0]]);\n if (path.length > 1) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n }\n return branch;\n }\n\n shredRecord(record: ParquetRecord, buffer: ParquetBuffer): void {\n shredRecord(this, record, buffer);\n }\n\n materializeRecords(buffer: ParquetBuffer): ParquetRecord[] {\n return 
materializeRecords(this, buffer);\n }\n\n compress(type: ParquetCompression): this {\n setCompress(this.schema, type);\n setCompress(this.fields, type);\n return this;\n }\n\n buffer(): ParquetBuffer {\n return shredBuffer(this);\n }\n}\n\nfunction setCompress(schema: any, type: ParquetCompression) {\n for (const name in schema) {\n const node = schema[name];\n if (node.fields) {\n setCompress(node.fields, type);\n } else {\n node.compression = type;\n }\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction buildFields(\n schema: SchemaDefinition,\n rLevelParentMax: number,\n dLevelParentMax: number,\n path: string[]\n): Record<string, ParquetField> {\n const fieldList: Record<string, ParquetField> = {};\n\n for (const name in schema) {\n const opts = schema[name];\n\n /* field repetition type */\n const required = !opts.optional;\n const repeated = Boolean(opts.repeated);\n let rLevelMax = rLevelParentMax;\n let dLevelMax = dLevelParentMax;\n\n let repetitionType: RepetitionType = 'REQUIRED';\n if (!required) {\n repetitionType = 'OPTIONAL';\n dLevelMax++;\n }\n if (repeated) {\n repetitionType = 'REPEATED';\n rLevelMax++;\n if (required) dLevelMax++;\n }\n\n /* nested field */\n if (opts.fields) {\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n rLevelMax,\n dLevelMax,\n isNested: true,\n fieldCount: Object.keys(opts.fields).length,\n fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)\n };\n continue; // eslint-disable-line no-continue\n }\n\n const typeDef: any = PARQUET_LOGICAL_TYPES[opts.type!];\n if (!typeDef) {\n throw new Error(`invalid parquet type: ${opts.type}`);\n }\n\n opts.encoding = opts.encoding || 'PLAIN';\n if (!(opts.encoding in PARQUET_CODECS)) {\n throw new Error(`unsupported parquet encoding: ${opts.encoding}`);\n }\n\n opts.compression = opts.compression || 'UNCOMPRESSED';\n if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`unsupported compression method: ${opts.compression}`);\n }\n\n /* add to schema */\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n primitiveType: typeDef.primitiveType,\n originalType: typeDef.originalType,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n encoding: opts.encoding,\n compression: opts.compression,\n typeLength: opts.typeLength || typeDef.typeLength,\n rLevelMax,\n dLevelMax\n };\n }\n return fieldList;\n}\n\nfunction listFields(fields: Record<string, ParquetField>): ParquetField[] {\n let list: ParquetField[] = [];\n for (const k in fields) {\n list.push(fields[k]);\n if (fields[k].isNested) {\n list = list.concat(listFields(fields[k].fields!));\n }\n }\n return list;\n}\n"],"file":"schema.js"}
|
|
1
|
+
{"version":3,"sources":["../../../../src/parquetjs/schema/schema.ts"],"names":["ParquetSchema","schema","fields","buildFields","fieldList","listFields","path","split","slice","n","length","shift","branch","push","record","buffer","type","setCompress","name","node","compression","rLevelParentMax","dLevelParentMax","opts","required","optional","repeated","Boolean","rLevelMax","dLevelMax","repetitionType","cpath","concat","key","join","isNested","fieldCount","Object","keys","typeDef","PARQUET_LOGICAL_TYPES","Error","encoding","PARQUET_CODECS","PARQUET_COMPRESSION_METHODS","primitiveType","originalType","typeLength","presision","scale","list","k"],"mappings":";;;;;;;;;;;;;;;AAEA;;AACA;;AAUA;;AACA;;IAKaA,a;AAQX,yBAAYC,MAAZ,EAAsC;AAAA;AAAA;AAAA;AAAA;AACpC,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKC,MAAL,GAAcC,WAAW,CAACF,MAAD,EAAS,CAAT,EAAY,CAAZ,EAAe,EAAf,CAAzB;AACA,SAAKG,SAAL,GAAiBC,UAAU,CAAC,KAAKH,MAAN,CAA3B;AACD;;;;WAKD,mBAAUI,IAAV,EAAiD;AAC/C,UAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,QAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD,OAHD,MAGO;AAELD,QAAAA,IAAI,GAAGA,IAAI,CAACE,KAAL,CAAW,CAAX,CAAP;AACD;;AAED,UAAIC,CAAC,GAAG,KAAKP,MAAb;;AACA,aAAOI,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCF,QAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWJ,MAAf;AACD;;AAED,aAAOO,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAR;AACD;;;WAKD,yBAAgBA,IAAhB,EAAyD;AACvD,UAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,QAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD;;AACD,UAAMK,MAAsB,GAAG,EAA/B;AACA,UAAIH,CAAC,GAAG,KAAKP,MAAb;;AACA,aAAOI,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCC,QAAAA,MAAM,CAACC,IAAP,CAAYJ,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAb;;AACA,YAAIA,IAAI,CAACI,MAAL,GAAc,CAAlB,EAAqB;AACnBD,UAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWJ,MAAf;AACD;AACF;;AACD,aAAOU,MAAP;AACD;;;WAED,qBAAYE,MAAZ,EAAmCC,MAAnC,EAAgE;AAC9D,8BAAY,IAAZ,EAAkBD,MAAlB,EAA0BC,MAA1B;AACD;;;WAED,4BAAmBA,MAAnB,EAA2D;AACzD,aAAO,+BAAmB,IAAnB,EAAyBA,MAAzB,CAAP;AACD;;;WAED,kBAASC,IAAT,EAAyC;AACvCC,MAAAA,WAAW,CAAC,KAAKhB,MAAN,EAAce,IAAd,CAAX;AACAC,MAAAA,WAAW,CAAC,KAAKf,MAAN,EAAcc,IAAd,CAAX;AACA,aAAO,IAAP;AACD;;;WAED,kBAAwB;AACtB,aAAO,wBAAY,IAAZ,CAAP;AACD;;;;;;;AAGH,SAASC,WAAT,CAAqBhB,MAArB,EAAkCe,IAAlC,EAA4D;AAC1D,OAAK,IAAME,IAAX,IAAmBjB,MAAnB,EAA2B;AACzB,QAAMkB,IAAI,GAAGlB,MAAM,CAACiB,IAAD,CAAnB;;AACA,QAAIC,IAAI,CAACjB,MAAT,EAAiB;AACfe,MAAAA,WAAW,CAACE,IAAI,CAACjB,MAAN,EAAcc,IAAd,CAAX;AACD,KAFD,MAEO;AACLG,MAAAA,IAAI,CAACC,WAAL,GAAmBJ,IAAnB;AACD;AACF;AACF;;AAGD,SAASb,WAAT,CACEF,MADF,EAEEoB,eAFF,EAGEC,eAHF,EAIEhB,IAJF,EAKgC;AAC9B,MAAMF,SAAuC,GAAG,EAAhD;;AAEA,OAAK,IAAMc,IAAX,IAAmBjB,MAAnB,EAA2B;AACzB,QAAMsB,IAAI,GAAGtB,MAAM,CAACiB,IAAD,CAAnB;AAGA,QAAMM,QAAQ,GAAG,CAACD,IAAI,CAACE,QAAvB;AACA,QAAMC,QAAQ,GAAGC,OAAO,CAACJ,IAAI,CAACG,QAAN,CAAxB;AACA,QAAIE,SAAS,GAAGP,eAAhB;AACA,QAAIQ,SAAS,GAAGP,eAAhB;AAEA,QAAIQ,cAA8B,GAAG,UAArC;;AACA,QAAI,CAACN,QAAL,EAAe;AACbM,MAAAA,cAAc,GAAG,UAAjB;AACAD,MAAAA,SAAS;AACV;;AACD,QAAIH,QAAJ,EAAc;AACZI,MAAAA,cAAc,GAAG,UAAjB;AACAF,MAAAA,SAAS;AACT,UAAIJ,QAAJ,EAAcK,SAAS;AACxB;;AAGD,QAAIN,IAAI,CAACrB,MAAT,EAAiB;AACf,UAAM6B,MAAK,GAAGzB,IAAI,CAAC0B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;;AACAd,MAAAA,SAAS,CAACc,IAAD,CAAT,GAAkB;AAChBA,QAAAA,IAAI,EAAJA,IADgB;AAEhBZ,QAAAA,IAAI,EAAEyB,MAFU;AAGhBE,QAAAA,GAAG,EAAEF,MAAK,CAACG,IAAN,EAHW;AAIhBJ,QAAAA,cAAc,EAAdA,cAJgB;AAKhBF,QAAAA,SAAS,EAATA,SALgB;AAMhBC,QAAAA,SAAS,EAATA,SANgB;AAOhBM,QAAAA,QAAQ,EAAE,IAPM;AAQhBC,QAAAA,UAAU,EAAEC,MAAM,CAACC,IAAP,CAAYf,IAAI,CAACrB,MAAjB,EAAyBQ,MARrB;AAShBR,QAAAA,MAAM,EAAEC,WAAW,CAACoB,IAAI,CAACrB,MAAN,EAAc0B,SAAd,EAAyBC,SAAzB,EAAoCE
,MAApC;AATH,OAAlB;AAWA;AACD;;AAED,QAAMQ,OAAY,GAAGC,6BAAsBjB,IAAI,CAACP,IAA3B,CAArB;;AACA,QAAI,CAACuB,OAAL,EAAc;AACZ,YAAM,IAAIE,KAAJ,iCAAmClB,IAAI,CAACP,IAAxC,EAAN;AACD;;AAEDO,IAAAA,IAAI,CAACmB,QAAL,GAAgBnB,IAAI,CAACmB,QAAL,IAAiB,OAAjC;;AACA,QAAI,EAAEnB,IAAI,CAACmB,QAAL,IAAiBC,sBAAnB,CAAJ,EAAwC;AACtC,YAAM,IAAIF,KAAJ,yCAA2ClB,IAAI,CAACmB,QAAhD,EAAN;AACD;;AAEDnB,IAAAA,IAAI,CAACH,WAAL,GAAmBG,IAAI,CAACH,WAAL,IAAoB,cAAvC;;AACA,QAAI,EAAEG,IAAI,CAACH,WAAL,IAAoBwB,wCAAtB,CAAJ,EAAwD;AACtD,YAAM,IAAIH,KAAJ,2CAA6ClB,IAAI,CAACH,WAAlD,EAAN;AACD;;AAGD,QAAMW,KAAK,GAAGzB,IAAI,CAAC0B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAd,IAAAA,SAAS,CAACc,IAAD,CAAT,GAAkB;AAChBA,MAAAA,IAAI,EAAJA,IADgB;AAEhB2B,MAAAA,aAAa,EAAEN,OAAO,CAACM,aAFP;AAGhBC,MAAAA,YAAY,EAAEP,OAAO,CAACO,YAHN;AAIhBxC,MAAAA,IAAI,EAAEyB,KAJU;AAKhBE,MAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EALW;AAMhBJ,MAAAA,cAAc,EAAdA,cANgB;AAOhBY,MAAAA,QAAQ,EAAEnB,IAAI,CAACmB,QAPC;AAQhBtB,MAAAA,WAAW,EAAEG,IAAI,CAACH,WARF;AAShB2B,MAAAA,UAAU,EAAExB,IAAI,CAACwB,UAAL,IAAmBR,OAAO,CAACQ,UATvB;AAUhBC,MAAAA,SAAS,EAAEzB,IAAI,CAACyB,SAVA;AAWhBC,MAAAA,KAAK,EAAE1B,IAAI,CAAC0B,KAXI;AAYhBrB,MAAAA,SAAS,EAATA,SAZgB;AAahBC,MAAAA,SAAS,EAATA;AAbgB,KAAlB;AAeD;;AACD,SAAOzB,SAAP;AACD;;AAED,SAASC,UAAT,CAAoBH,MAApB,EAA0E;AACxE,MAAIgD,IAAoB,GAAG,EAA3B;;AACA,OAAK,IAAMC,CAAX,IAAgBjD,MAAhB,EAAwB;AACtBgD,IAAAA,IAAI,CAACrC,IAAL,CAAUX,MAAM,CAACiD,CAAD,CAAhB;;AACA,QAAIjD,MAAM,CAACiD,CAAD,CAAN,CAAUhB,QAAd,EAAwB;AACtBe,MAAAA,IAAI,GAAGA,IAAI,CAAClB,MAAL,CAAY3B,UAAU,CAACH,MAAM,CAACiD,CAAD,CAAN,CAAUjD,MAAX,CAAtB,CAAP;AACD;AACF;;AACD,SAAOgD,IAAP;AACD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {PARQUET_CODECS} from '../codecs';\nimport {PARQUET_COMPRESSION_METHODS} from '../compression';\nimport {\n FieldDefinition,\n ParquetBuffer,\n ParquetCompression,\n ParquetField,\n ParquetRecord,\n RepetitionType,\n SchemaDefinition\n} from './declare';\nimport {materializeRecords, shredBuffer, shredRecord} from './shred';\nimport {PARQUET_LOGICAL_TYPES} from './types';\n\n/**\n * A parquet file schema\n */\nexport class ParquetSchema {\n public schema: Record<string, FieldDefinition>;\n public fields: Record<string, ParquetField>;\n public fieldList: ParquetField[];\n\n /**\n * Create a new schema from a JSON schema definition\n */\n constructor(schema: SchemaDefinition) {\n this.schema = schema;\n this.fields = buildFields(schema, 0, 0, []);\n this.fieldList = listFields(this.fields);\n }\n\n /**\n * Retrieve a field definition\n */\n findField(path: string | string[]): ParquetField {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n } else {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.slice(0); // clone array\n }\n\n let n = this.fields;\n for (; path.length > 1; path.shift()) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n\n return n[path[0]];\n }\n\n /**\n * Retrieve a field definition and all the field's ancestors\n */\n findFieldBranch(path: string | string[]): ParquetField[] {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n }\n const branch: ParquetField[] = [];\n let n = this.fields;\n for (; path.length > 0; path.shift()) {\n branch.push(n[path[0]]);\n if (path.length > 1) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n }\n return branch;\n }\n\n shredRecord(record: ParquetRecord, buffer: ParquetBuffer): void {\n shredRecord(this, 
record, buffer);\n }\n\n materializeRecords(buffer: ParquetBuffer): ParquetRecord[] {\n return materializeRecords(this, buffer);\n }\n\n compress(type: ParquetCompression): this {\n setCompress(this.schema, type);\n setCompress(this.fields, type);\n return this;\n }\n\n buffer(): ParquetBuffer {\n return shredBuffer(this);\n }\n}\n\nfunction setCompress(schema: any, type: ParquetCompression) {\n for (const name in schema) {\n const node = schema[name];\n if (node.fields) {\n setCompress(node.fields, type);\n } else {\n node.compression = type;\n }\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction buildFields(\n schema: SchemaDefinition,\n rLevelParentMax: number,\n dLevelParentMax: number,\n path: string[]\n): Record<string, ParquetField> {\n const fieldList: Record<string, ParquetField> = {};\n\n for (const name in schema) {\n const opts = schema[name];\n\n /* field repetition type */\n const required = !opts.optional;\n const repeated = Boolean(opts.repeated);\n let rLevelMax = rLevelParentMax;\n let dLevelMax = dLevelParentMax;\n\n let repetitionType: RepetitionType = 'REQUIRED';\n if (!required) {\n repetitionType = 'OPTIONAL';\n dLevelMax++;\n }\n if (repeated) {\n repetitionType = 'REPEATED';\n rLevelMax++;\n if (required) dLevelMax++;\n }\n\n /* nested field */\n if (opts.fields) {\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n rLevelMax,\n dLevelMax,\n isNested: true,\n fieldCount: Object.keys(opts.fields).length,\n fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)\n };\n continue; // eslint-disable-line no-continue\n }\n\n const typeDef: any = PARQUET_LOGICAL_TYPES[opts.type!];\n if (!typeDef) {\n throw new Error(`invalid parquet type: ${opts.type}`);\n }\n\n opts.encoding = opts.encoding || 'PLAIN';\n if (!(opts.encoding in PARQUET_CODECS)) {\n throw new Error(`unsupported parquet encoding: ${opts.encoding}`);\n }\n\n opts.compression = opts.compression || 'UNCOMPRESSED';\n if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`unsupported compression method: ${opts.compression}`);\n }\n\n /* add to schema */\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n primitiveType: typeDef.primitiveType,\n originalType: typeDef.originalType,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n encoding: opts.encoding,\n compression: opts.compression,\n typeLength: opts.typeLength || typeDef.typeLength,\n presision: opts.presision,\n scale: opts.scale,\n rLevelMax,\n dLevelMax\n };\n }\n return fieldList;\n}\n\nfunction listFields(fields: Record<string, ParquetField>): ParquetField[] {\n let list: ParquetField[] = [];\n for (const k in fields) {\n list.push(fields[k]);\n if (fields[k].isNested) {\n list = list.concat(listFields(fields[k].fields!));\n }\n }\n return list;\n}\n"],"file":"schema.js"}
|
|
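For orientation, here is a minimal sketch of how these options travel through the schema builder. The import path and the exact FieldDefinition typings are assumptions, not confirmed by this diff:

// Sketch only: the import path is illustrative; in the published package the
// class ships bundled under @loaders.gl/parquet rather than by this path.
import {ParquetSchema} from './parquetjs/schema/schema';

// 'INT64' and 'FLOAT' are existing PARQUET_LOGICAL_TYPES keys; presision and
// scale are the options this release starts copying onto the built field.
const schema = new ParquetSchema({
  id: {type: 'INT64'},
  price: {type: 'FLOAT', optional: true, presision: 9, scale: 2}
});

// `optional` flips the repetition type to OPTIONAL and bumps dLevelMax.
console.log(schema.fields.price.repetitionType); // 'OPTIONAL'
console.log(schema.fields.price.dLevelMax); // 1
// buildFields() copies the new options verbatim (cast because the updated
// ParquetField typing is not shown in this diff):
console.log((schema.fields.price as any).presision); // 9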
@@ -42,6 +42,7 @@ function shredBuffer(schema) {
       dlevels: [],
       rlevels: [],
       values: [],
+      pageHeaders: [],
       count: 0
     };
 }
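In other words, every per-column buffer that shredBuffer() allocates now tracks page headers next to its levels and values. A hedged sketch of the resulting record shape; the interface name and element types below are illustrative, not the package's declarations:

// Hypothetical typing for the per-column buffer initialized in the hunk above.
interface ColumnDataSketch {
  dlevels: number[]; // definition level per shredded value slot
  rlevels: number[]; // repetition level per shredded value slot
  values: unknown[]; // primitive values (nulls omitted)
  pageHeaders: unknown[]; // new in this release: collected per-page headers
  count: number; // number of shredded entries
}

function emptyColumnData(): ColumnDataSketch {
  // Mirrors the object literal in the hunk above.
  return {dlevels: [], rlevels: [], values: [], pageHeaders: [], count: 0};
}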
@@ -198,7 +199,7 @@ function materializeColumn(schema, buffer, key, records) {
     }

     if (dLevel === field.dLevelMax) {
-      var value = Types.fromPrimitive(field.originalType || field.primitiveType, data.values[vIndex]);
+      var value = Types.fromPrimitive(field.originalType || field.primitiveType, data.values[vIndex], field);
       vIndex++;

       if (field.repetitionType === 'REPEATED') {