@loaders.gl/parquet 3.0.12 → 3.1.0-alpha.4

This diff compares the published contents of two package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (143)
  1. package/dist/dist.min.js +7 -18
  2. package/dist/dist.min.js.map +1 -1
  3. package/dist/es5/bundle.js +2 -4
  4. package/dist/es5/bundle.js.map +1 -1
  5. package/dist/es5/constants.js +17 -0
  6. package/dist/es5/constants.js.map +1 -0
  7. package/dist/es5/index.js +53 -21
  8. package/dist/es5/index.js.map +1 -1
  9. package/dist/es5/lib/convert-schema.js +82 -0
  10. package/dist/es5/lib/convert-schema.js.map +1 -0
  11. package/dist/es5/lib/parse-parquet.js +173 -0
  12. package/dist/es5/lib/parse-parquet.js.map +1 -0
  13. package/dist/es5/lib/read-array-buffer.js +53 -0
  14. package/dist/es5/lib/read-array-buffer.js.map +1 -0
  15. package/dist/es5/parquet-loader.js +6 -79
  16. package/dist/es5/parquet-loader.js.map +1 -1
  17. package/dist/es5/parquet-writer.js +1 -1
  18. package/dist/es5/parquet-writer.js.map +1 -1
  19. package/dist/es5/parquetjs/codecs/dictionary.js +30 -0
  20. package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -0
  21. package/dist/es5/parquetjs/codecs/index.js +10 -0
  22. package/dist/es5/parquetjs/codecs/index.js.map +1 -1
  23. package/dist/es5/parquetjs/codecs/rle.js +2 -2
  24. package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
  25. package/dist/es5/parquetjs/compression.js +138 -104
  26. package/dist/es5/parquetjs/compression.js.map +1 -1
  27. package/dist/es5/parquetjs/{writer.js → encoder/writer.js} +397 -228
  28. package/dist/es5/parquetjs/encoder/writer.js.map +1 -0
  29. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +1 -0
  30. package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
  31. package/dist/es5/parquetjs/parser/decoders.js +495 -0
  32. package/dist/es5/parquetjs/parser/decoders.js.map +1 -0
  33. package/dist/es5/parquetjs/parser/parquet-cursor.js +215 -0
  34. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +1 -0
  35. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +452 -0
  36. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
  37. package/dist/es5/parquetjs/parser/parquet-reader.js +413 -0
  38. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -0
  39. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  40. package/dist/es5/parquetjs/schema/schema.js +2 -0
  41. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  42. package/dist/es5/parquetjs/schema/shred.js +2 -1
  43. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  44. package/dist/es5/parquetjs/schema/types.js +79 -4
  45. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  46. package/dist/es5/parquetjs/utils/buffer-utils.js +21 -0
  47. package/dist/es5/parquetjs/utils/buffer-utils.js.map +1 -0
  48. package/dist/es5/parquetjs/utils/file-utils.js +108 -0
  49. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -0
  50. package/dist/es5/parquetjs/{util.js → utils/read-utils.js} +13 -113
  51. package/dist/es5/parquetjs/utils/read-utils.js.map +1 -0
  52. package/dist/esm/bundle.js +2 -4
  53. package/dist/esm/bundle.js.map +1 -1
  54. package/dist/esm/constants.js +6 -0
  55. package/dist/esm/constants.js.map +1 -0
  56. package/dist/esm/index.js +14 -4
  57. package/dist/esm/index.js.map +1 -1
  58. package/dist/esm/lib/convert-schema.js +71 -0
  59. package/dist/esm/lib/convert-schema.js.map +1 -0
  60. package/dist/esm/lib/parse-parquet.js +28 -0
  61. package/dist/esm/lib/parse-parquet.js.map +1 -0
  62. package/dist/esm/lib/read-array-buffer.js +9 -0
  63. package/dist/esm/lib/read-array-buffer.js.map +1 -0
  64. package/dist/esm/parquet-loader.js +4 -24
  65. package/dist/esm/parquet-loader.js.map +1 -1
  66. package/dist/esm/parquet-writer.js +1 -1
  67. package/dist/esm/parquet-writer.js.map +1 -1
  68. package/dist/esm/parquetjs/codecs/dictionary.js +12 -0
  69. package/dist/esm/parquetjs/codecs/dictionary.js.map +1 -0
  70. package/dist/esm/parquetjs/codecs/index.js +9 -0
  71. package/dist/esm/parquetjs/codecs/index.js.map +1 -1
  72. package/dist/esm/parquetjs/codecs/rle.js +2 -2
  73. package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
  74. package/dist/esm/parquetjs/compression.js +54 -105
  75. package/dist/esm/parquetjs/compression.js.map +1 -1
  76. package/dist/esm/parquetjs/{writer.js → encoder/writer.js} +32 -35
  77. package/dist/esm/parquetjs/encoder/writer.js.map +1 -0
  78. package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js +1 -0
  79. package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
  80. package/dist/esm/parquetjs/parser/decoders.js +300 -0
  81. package/dist/esm/parquetjs/parser/decoders.js.map +1 -0
  82. package/dist/esm/parquetjs/parser/parquet-cursor.js +90 -0
  83. package/dist/esm/parquetjs/parser/parquet-cursor.js.map +1 -0
  84. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +164 -0
  85. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
  86. package/dist/esm/parquetjs/parser/parquet-reader.js +133 -0
  87. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -0
  88. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  89. package/dist/esm/parquetjs/schema/schema.js +2 -0
  90. package/dist/esm/parquetjs/schema/schema.js.map +1 -1
  91. package/dist/esm/parquetjs/schema/shred.js +2 -1
  92. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  93. package/dist/esm/parquetjs/schema/types.js +78 -4
  94. package/dist/esm/parquetjs/schema/types.js.map +1 -1
  95. package/dist/esm/parquetjs/utils/buffer-utils.js +12 -0
  96. package/dist/esm/parquetjs/utils/buffer-utils.js.map +1 -0
  97. package/dist/esm/parquetjs/utils/file-utils.js +79 -0
  98. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -0
  99. package/dist/esm/parquetjs/{util.js → utils/read-utils.js} +11 -89
  100. package/dist/esm/parquetjs/utils/read-utils.js.map +1 -0
  101. package/dist/parquet-worker.js +7 -18
  102. package/dist/parquet-worker.js.map +1 -1
  103. package/package.json +10 -10
  104. package/src/bundle.ts +2 -3
  105. package/src/constants.ts +17 -0
  106. package/src/index.ts +30 -4
  107. package/src/lib/convert-schema.ts +95 -0
  108. package/src/lib/parse-parquet.ts +27 -0
  109. package/{dist/es5/libs → src/lib}/read-array-buffer.ts +0 -0
  110. package/src/parquet-loader.ts +4 -24
  111. package/src/parquetjs/codecs/dictionary.ts +11 -0
  112. package/src/parquetjs/codecs/index.ts +13 -0
  113. package/src/parquetjs/codecs/rle.ts +4 -2
  114. package/src/parquetjs/compression.ts +89 -50
  115. package/src/parquetjs/{writer.ts → encoder/writer.ts} +46 -45
  116. package/src/parquetjs/parquet-thrift/CompressionCodec.ts +2 -1
  117. package/src/parquetjs/parser/decoders.ts +448 -0
  118. package/src/parquetjs/parser/parquet-cursor.ts +94 -0
  119. package/src/parquetjs/parser/parquet-envelope-reader.ts +210 -0
  120. package/src/parquetjs/parser/parquet-reader.ts +179 -0
  121. package/src/parquetjs/schema/declare.ts +48 -2
  122. package/src/parquetjs/schema/schema.ts +2 -0
  123. package/src/parquetjs/schema/shred.ts +3 -1
  124. package/src/parquetjs/schema/types.ts +82 -5
  125. package/src/parquetjs/utils/buffer-utils.ts +18 -0
  126. package/src/parquetjs/utils/file-utils.ts +96 -0
  127. package/src/parquetjs/{util.ts → utils/read-utils.ts} +13 -110
  128. package/dist/dist.es5.min.js +0 -51
  129. package/dist/dist.es5.min.js.map +0 -1
  130. package/dist/es5/parquetjs/compression.ts.disabled +0 -105
  131. package/dist/es5/parquetjs/reader.js +0 -1078
  132. package/dist/es5/parquetjs/reader.js.map +0 -1
  133. package/dist/es5/parquetjs/util.js.map +0 -1
  134. package/dist/es5/parquetjs/writer.js.map +0 -1
  135. package/dist/esm/libs/read-array-buffer.ts +0 -31
  136. package/dist/esm/parquetjs/compression.ts.disabled +0 -105
  137. package/dist/esm/parquetjs/reader.js +0 -524
  138. package/dist/esm/parquetjs/reader.js.map +0 -1
  139. package/dist/esm/parquetjs/util.js.map +0 -1
  140. package/dist/esm/parquetjs/writer.js.map +0 -1
  141. package/src/libs/read-array-buffer.ts +0 -31
  142. package/src/parquetjs/compression.ts.disabled +0 -105
  143. package/src/parquetjs/reader.ts +0 -707
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../src/parquetjs/parser/decoders.ts"],"names":["PARQUET_CODECS","ConvertedType","Encoding","FieldRepetitionType","PageType","Type","decompress","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING","decodePageHeader","getThriftEnum","getBitWidth","decodeDataPages","buffer","options","cursor","offset","size","length","data","rlevels","dlevels","values","pageHeaders","count","dictionary","numValues","Number","page","decodePage","map","value","index","push","undefined","pageHeader","pageType","type","decodeDataPage","decodeDataPageV2","decodeDictionaryPage","Error","decodeSchema","schemaElements","len","schema","next","i","schemaElement","repetitionType","repetition_type","optional","repeated","num_children","res","name","fields","logicalType","converted_type","typeLength","type_length","presision","precision","scale","decodeValues","encoding","opts","header","cursorEnd","compressed_page_size","valueCount","data_page_header","num_values","dataCursor","compression","valuesBuf","slice","uncompressed_page_size","rLevelEncoding","repetition_level_encoding","rLevels","Array","column","rLevelMax","bitWidth","disableEnvelope","fill","dLevelEncoding","definition_level_encoding","dLevels","dLevelMax","valueCountNonNull","dlvl","valueEncoding","decodeOptions","primitiveType","data_page_header_v2","num_nulls","valuesBufCursor","is_compressed","dictCursor","dictionary_page_header","d","toString"],"mappings":"AAUA,SAA2CA,cAA3C,QAAgE,WAAhE;AACA,SACEC,aADF,EAEEC,QAFF,EAGEC,mBAHF,EAKEC,QALF,EAOEC,IAPF,QAQO,mBARP;AASA,SAAQC,UAAR,QAAyB,gBAAzB;AACA,SAAQC,kBAAR,EAA4BC,sBAA5B,QAAyD,iBAAzD;AACA,SAAQC,gBAAR,EAA0BC,aAA1B,EAAyCC,WAAzC,QAA2D,qBAA3D;AASA,OAAO,eAAeC,eAAf,CACLC,MADK,EAELC,OAFK,EAGiB;AACtB,QAAMC,MAAoB,GAAG;AAC3BF,IAAAA,MAD2B;AAE3BG,IAAAA,MAAM,EAAE,CAFmB;AAG3BC,IAAAA,IAAI,EAAEJ,MAAM,CAACK;AAHc,GAA7B;AAMA,QAAMC,IAAiB,GAAG;AACxBC,IAAAA,OAAO,EAAE,EADe;AAExBC,IAAAA,OAAO,EAAE,EAFe;AAGxBC,IAAAA,MAAM,EAAE,EAHgB;AAIxBC,IAAAA,WAAW,EAAE,EAJW;AAKxBC,IAAAA,KAAK,EAAE;AALiB,GAA1B;AAQA,MAAIC,UAAU,GAAGX,OAAO,CAACW,UAAR,IAAsB,EAAvC;;AAEA,SAEEV,MAAM,CAACC,MAAP,GAAgBD,MAAM,CAACE,IAAvB,KACC,CAACH,OAAO,CAACY,SAAT,IAAsBP,IAAI,CAACE,OAAL,CAAaH,MAAb,GAAsBS,MAAM,CAACb,OAAO,CAACY,SAAT,CADnD,CAFF,EAIE;AAEA,UAAME,IAAI,GAAG,MAAMC,UAAU,CAACd,MAAD,EAASD,OAAT,CAA7B;;AAEA,QAAIc,IAAI,CAACH,UAAT,EAAqB;AACnBA,MAAAA,UAAU,GAAGG,IAAI,CAACH,UAAlB;AAEA;AACD;;AAED,QAAIA,UAAU,CAACP,MAAf,EAAuB;AAErBU,MAAAA,IAAI,CAACN,MAAL,GAAcM,IAAI,CAACN,MAAL,CAAYQ,GAAZ,CAAiBC,KAAD,IAAWN,UAAU,CAACM,KAAD,CAArC,CAAd;AACD;;AAED,SAAK,IAAIC,KAAK,GAAG,CAAjB,EAAoBA,KAAK,GAAGJ,IAAI,CAACR,OAAL,CAAaF,MAAzC,EAAiDc,KAAK,EAAtD,EAA0D;AACxDb,MAAAA,IAAI,CAACC,OAAL,CAAaa,IAAb,CAAkBL,IAAI,CAACR,OAAL,CAAaY,KAAb,CAAlB;AACAb,MAAAA,IAAI,CAACE,OAAL,CAAaY,IAAb,CAAkBL,IAAI,CAACP,OAAL,CAAaW,KAAb,CAAlB;AACA,YAAMD,KAAK,GAAGH,IAAI,CAACN,MAAL,CAAYU,KAAZ,CAAd;;AAEA,UAAID,KAAK,KAAKG,SAAd,EAAyB;AACvBf,QAAAA,IAAI,CAACG,MAAL,CAAYW,IAAZ,CAAiBF,KAAjB;AACD;AACF;;AAEDZ,IAAAA,IAAI,CAACK,KAAL,IAAcI,IAAI,CAACJ,KAAnB;AACAL,IAAAA,IAAI,CAACI,WAAL,CAAiBU,IAAjB,CAAsBL,IAAI,CAACO,UAA3B;AACD;;AAED,SAAOhB,IAAP;AACD;AAOD,OAAO,eAAeU,UAAf,CACLd,MADK,EAELD,OAFK,EAGqB;AAC1B,MAAIc,IAAJ;AACA,QAAM;AAACO,IAAAA,UAAD;AAAajB,IAAAA;AAAb,MAAuB,MAAMT,gBAAgB,CAACM,MAAM,CAACF,MAAR,EAAgBE,MAAM,CAACC,MAAvB,CAAnD;AACAD,EAAAA,MAAM,CAACC,MAAP,IAAiBE,MAAjB;AAEA,QAAMkB,QAAQ,GAAG1B,aAAa,CAACN,QAAD,EAAW+B,UAAU,CAACE,IAAtB,CAA9B;;AAEA,UAAQD,QAAR;AACE,SAAK,WAAL;AACER,MAAAA,IAAI,GAAG,MAAMU,cAAc,CAACvB,MAAD,EAASoB,UAAT,EAAqBrB,OAArB,CAA3B;AACA;;AACF,SAAK,cAAL;AACEc,MAAAA,IAAI,GAAG,MAAMW,gBAAgB,CAACxB,MAAD,EAASoB,UAAT
,EAAqBrB,OAArB,CAA7B;AACA;;AACF,SAAK,iBAAL;AACEc,MAAAA,IAAI,GAAG;AACLH,QAAAA,UAAU,EAAE,MAAMe,oBAAoB,CAACzB,MAAD,EAASoB,UAAT,EAAqBrB,OAArB,CADjC;AAELqB,QAAAA;AAFK,OAAP;AAIA;;AACF;AACE,YAAM,IAAIM,KAAJ,CAAW,sBAAqBL,QAAS,EAAzC,CAAN;AAdJ;;AAiBA,SAAOR,IAAP;AACD;AAYD,OAAO,SAASc,YAAT,CACLC,cADK,EAEL3B,MAFK,EAGL4B,GAHK,EAQL;AACA,QAAMC,MAAwB,GAAG,EAAjC;AACA,MAAIC,IAAI,GAAG9B,MAAX;;AACA,OAAK,IAAI+B,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGH,GAApB,EAAyBG,CAAC,EAA1B,EAA8B;AAC5B,UAAMC,aAAa,GAAGL,cAAc,CAACG,IAAD,CAApC;AAEA,UAAMG,cAAc,GAClBH,IAAI,GAAG,CAAP,GAAWpC,aAAa,CAACP,mBAAD,EAAsB6C,aAAa,CAACE,eAApC,CAAxB,GAAgF,MADlF;AAGA,QAAIC,QAAQ,GAAG,KAAf;AACA,QAAIC,QAAQ,GAAG,KAAf;;AACA,YAAQH,cAAR;AACE,WAAK,UAAL;AACE;;AACF,WAAK,UAAL;AACEE,QAAAA,QAAQ,GAAG,IAAX;AACA;;AACF,WAAK,UAAL;AACEC,QAAAA,QAAQ,GAAG,IAAX;AACA;;AACF;AACE,cAAM,IAAIX,KAAJ,CAAU,kCAAV,CAAN;AAVJ;;AAaA,QAAIO,aAAa,CAACK,YAAd,GAA8B,CAAlC,EAAqC;AACnC,YAAMC,GAAG,GAAGZ,YAAY,CAACC,cAAD,EAAiBG,IAAI,GAAG,CAAxB,EAA2BE,aAAa,CAACK,YAAzC,CAAxB;AACAP,MAAAA,IAAI,GAAGQ,GAAG,CAACR,IAAX;AACAD,MAAAA,MAAM,CAACG,aAAa,CAACO,IAAf,CAAN,GAA6B;AAE3BJ,QAAAA,QAF2B;AAG3BC,QAAAA,QAH2B;AAI3BI,QAAAA,MAAM,EAAEF,GAAG,CAACT;AAJe,OAA7B;AAMD,KATD,MASO;AACL,YAAMR,IAAI,GAAG3B,aAAa,CAACL,IAAD,EAAO2C,aAAa,CAACX,IAArB,CAA1B;AACA,UAAIoB,WAAW,GAAGpB,IAAlB;;AAEA,UAAIW,aAAa,CAACU,cAAlB,EAAkC;AAChCD,QAAAA,WAAW,GAAG/C,aAAa,CAACT,aAAD,EAAgB+C,aAAa,CAACU,cAA9B,CAA3B;AACD;;AAED,cAAQD,WAAR;AACE,aAAK,SAAL;AACEA,UAAAA,WAAW,GAAI,GAAEA,WAAY,IAAGpB,IAAK,EAArC;AACA;;AACF;AAJF;;AAOAQ,MAAAA,MAAM,CAACG,aAAa,CAACO,IAAf,CAAN,GAA6B;AAC3BlB,QAAAA,IAAI,EAAEoB,WADqB;AAE3BE,QAAAA,UAAU,EAAEX,aAAa,CAACY,WAFC;AAG3BC,QAAAA,SAAS,EAAEb,aAAa,CAACc,SAHE;AAI3BC,QAAAA,KAAK,EAAEf,aAAa,CAACe,KAJM;AAK3BZ,QAAAA,QAL2B;AAM3BC,QAAAA;AAN2B,OAA7B;AAQAN,MAAAA,IAAI;AACL;AACF;;AACD,SAAO;AAACD,IAAAA,MAAD;AAAS7B,IAAAA,MAAT;AAAiB8B,IAAAA;AAAjB,GAAP;AACD;;AAKD,SAASkB,YAAT,CACE3B,IADF,EAEE4B,QAFF,EAGElD,MAHF,EAIES,KAJF,EAKE0C,IALF,EAMS;AACP,MAAI,EAAED,QAAQ,IAAIjE,cAAd,CAAJ,EAAmC;AACjC,UAAM,IAAIyC,KAAJ,CAAW,qBAAoBwB,QAAS,EAAxC,CAAN;AACD;;AACD,SAAOjE,cAAc,CAACiE,QAAD,CAAd,CAAyBD,YAAzB,CAAsC3B,IAAtC,EAA4CtB,MAA5C,EAAoDS,KAApD,EAA2D0C,IAA3D,CAAP;AACD;;AAQD,eAAe5B,cAAf,CACEvB,MADF,EAEEoD,MAFF,EAGErD,OAHF,EAI4B;AAAA;;AAC1B,QAAMsD,SAAS,GAAGrD,MAAM,CAACC,MAAP,GAAgBmD,MAAM,CAACE,oBAAzC;AACA,QAAMC,UAAU,4BAAGH,MAAM,CAACI,gBAAV,0DAAG,sBAAyBC,UAA5C;AAGA,MAAIC,UAAU,GAAG1D,MAAjB;;AAEA,MAAID,OAAO,CAAC4D,WAAR,KAAwB,cAA5B,EAA4C;AAC1C,UAAMC,SAAS,GAAG,MAAMrE,UAAU,CAChCQ,OAAO,CAAC4D,WADwB,EAEhC3D,MAAM,CAACF,MAAP,CAAc+D,KAAd,CAAoB7D,MAAM,CAACC,MAA3B,EAAmCoD,SAAnC,CAFgC,EAGhCD,MAAM,CAACU,sBAHyB,CAAlC;AAKAJ,IAAAA,UAAU,GAAG;AACX5D,MAAAA,MAAM,EAAE8D,SADG;AAEX3D,MAAAA,MAAM,EAAE,CAFG;AAGXC,MAAAA,IAAI,EAAE0D,SAAS,CAACzD;AAHL,KAAb;AAKAH,IAAAA,MAAM,CAACC,MAAP,GAAgBoD,SAAhB;AACD;;AAGD,QAAMU,cAAc,GAAGpE,aAAa,CAClCR,QADkC,4BAElCiE,MAAM,CAACI,gBAF2B,2DAElC,uBAAyBQ,yBAFS,CAApC;AAKA,MAAIC,OAAO,GAAG,IAAIC,KAAJ,CAAUX,UAAV,CAAd;;AAEA,MAAIxD,OAAO,CAACoE,MAAR,CAAeC,SAAf,GAA2B,CAA/B,EAAkC;AAChCH,IAAAA,OAAO,GAAGhB,YAAY,CAACzD,kBAAD,EAAqBuE,cAArB,EAAqCL,UAArC,EAAiDH,UAAjD,EAA8D;AAClFc,MAAAA,QAAQ,EAAEzE,WAAW,CAACG,OAAO,CAACoE,MAAR,CAAeC,SAAhB,CAD6D;AAElFE,MAAAA,eAAe,EAAE;AAFiE,KAA9D,CAAtB;AAKD,GAND,MAMO;AACLL,IAAAA,OAAO,CAACM,IAAR,CAAa,CAAb;AACD;;AAGD,QAAMC,cAAc,GAAG7E,aAAa,CAClCR,QADkC,4BAElCiE,MAAM,CAACI,gBAF2B,2DAElC,uBAAyBiB,yBAFS,CAApC;AAKA,MAAIC,OAAO,GAAG,IAAIR,KAAJ,CAAUX,UAAV,CAAd;;AACA,MAAIxD,OAAO,CAACoE,MAAR,CAAeQ,SAAf,GAA2B,CAA/B,EAAkC;AAChCD,IAAAA,OAAO,GAAGzB,YAAY,CAACzD,kBAAD,EAAqBgF,cAArB,EAAqCd,UAArC,EAAiDH,UAAjD,EAA8D;AAClFc,MAAAA,QAAQ,EAAEzE,WAAW,CAACG,OAAO,CAACoE,MAAR,CAAeQ,SAAhB,CA
D6D;AAElFL,MAAAA,eAAe,EAAE;AAFiE,KAA9D,CAAtB;AAKD,GAND,MAMO;AACLI,IAAAA,OAAO,CAACH,IAAR,CAAa,CAAb;AACD;;AACD,MAAIK,iBAAiB,GAAG,CAAxB;;AACA,OAAK,MAAMC,IAAX,IAAmBH,OAAnB,EAA4B;AAC1B,QAAIG,IAAI,KAAK9E,OAAO,CAACoE,MAAR,CAAeQ,SAA5B,EAAuC;AACrCC,MAAAA,iBAAiB;AAClB;AACF;;AAGD,QAAME,aAAa,GAAGnF,aAAa,CAACR,QAAD,4BAAWiE,MAAM,CAACI,gBAAlB,2DAAW,uBAAyBN,QAApC,CAAnC;AACA,QAAM6B,aAAa,GAAG;AACpBnC,IAAAA,UAAU,EAAE7C,OAAO,CAACoE,MAAR,CAAevB,UADP;AAEpByB,IAAAA,QAAQ,EAAEtE,OAAO,CAACoE,MAAR,CAAevB;AAFL,GAAtB;AAKA,QAAMrC,MAAM,GAAG0C,YAAY,CACzBlD,OAAO,CAACoE,MAAR,CAAea,aADU,EAEzBF,aAFyB,EAGzBpB,UAHyB,EAIzBkB,iBAJyB,EAKzBG,aALyB,CAA3B;AAQA,SAAO;AACLzE,IAAAA,OAAO,EAAEoE,OADJ;AAELrE,IAAAA,OAAO,EAAE4D,OAFJ;AAGL1D,IAAAA,MAHK;AAILE,IAAAA,KAAK,EAAE8C,UAJF;AAKLnC,IAAAA,UAAU,EAAEgC;AALP,GAAP;AAOD;;AASD,eAAe5B,gBAAf,CACExB,MADF,EAEEoD,MAFF,EAGED,IAHF,EAI4B;AAAA;;AAC1B,QAAME,SAAS,GAAGrD,MAAM,CAACC,MAAP,GAAgBmD,MAAM,CAACE,oBAAzC;AAEA,QAAMC,UAAU,6BAAGH,MAAM,CAAC6B,mBAAV,2DAAG,uBAA4BxB,UAA/C;AAEA,QAAMmB,iBAAiB,GAAGrB,UAAU,8BAAGH,MAAM,CAAC6B,mBAAV,2DAAG,uBAA4BC,SAA/B,CAApC;AACA,QAAMJ,aAAa,GAAGnF,aAAa,CACjCR,QADiC,4BAEjCiE,MAAM,CAAC6B,mBAF0B,2DAEjC,uBAA4B/B,QAFK,CAAnC;AAOA,MAAIe,OAAO,GAAG,IAAIC,KAAJ,CAAUX,UAAV,CAAd;;AACA,MAAIJ,IAAI,CAACgB,MAAL,CAAYC,SAAZ,GAAwB,CAA5B,EAA+B;AAC7BH,IAAAA,OAAO,GAAGhB,YAAY,CAACzD,kBAAD,EAAqBC,sBAArB,EAA6CO,MAA7C,EAAqDuD,UAArD,EAAkE;AACtFc,MAAAA,QAAQ,EAAEzE,WAAW,CAACuD,IAAI,CAACgB,MAAL,CAAYC,SAAb,CADiE;AAEtFE,MAAAA,eAAe,EAAE;AAFqE,KAAlE,CAAtB;AAID,GALD,MAKO;AACLL,IAAAA,OAAO,CAACM,IAAR,CAAa,CAAb;AACD;;AAID,MAAIG,OAAO,GAAG,IAAIR,KAAJ,CAAUX,UAAV,CAAd;;AACA,MAAIJ,IAAI,CAACgB,MAAL,CAAYQ,SAAZ,GAAwB,CAA5B,EAA+B;AAC7BD,IAAAA,OAAO,GAAGzB,YAAY,CAACzD,kBAAD,EAAqBC,sBAArB,EAA6CO,MAA7C,EAAqDuD,UAArD,EAAkE;AACtFc,MAAAA,QAAQ,EAAEzE,WAAW,CAACuD,IAAI,CAACgB,MAAL,CAAYQ,SAAb,CADiE;AAEtFL,MAAAA,eAAe,EAAE;AAFqE,KAAlE,CAAtB;AAID,GALD,MAKO;AACLI,IAAAA,OAAO,CAACH,IAAR,CAAa,CAAb;AACD;;AAGD,MAAIY,eAAe,GAAGnF,MAAtB;;AAEA,gCAAIoD,MAAM,CAAC6B,mBAAX,mDAAI,uBAA4BG,aAAhC,EAA+C;AAC7C,UAAMxB,SAAS,GAAG,MAAMrE,UAAU,CAChC4D,IAAI,CAACQ,WAD2B,EAEhC3D,MAAM,CAACF,MAAP,CAAc+D,KAAd,CAAoB7D,MAAM,CAACC,MAA3B,EAAmCoD,SAAnC,CAFgC,EAGhCD,MAAM,CAACU,sBAHyB,CAAlC;AAMAqB,IAAAA,eAAe,GAAG;AAChBrF,MAAAA,MAAM,EAAE8D,SADQ;AAEhB3D,MAAAA,MAAM,EAAE,CAFQ;AAGhBC,MAAAA,IAAI,EAAE0D,SAAS,CAACzD;AAHA,KAAlB;AAMAH,IAAAA,MAAM,CAACC,MAAP,GAAgBoD,SAAhB;AACD;;AAED,QAAM0B,aAAa,GAAG;AACpBnC,IAAAA,UAAU,EAAEO,IAAI,CAACgB,MAAL,CAAYvB,UADJ;AAEpByB,IAAAA,QAAQ,EAAElB,IAAI,CAACgB,MAAL,CAAYvB;AAFF,GAAtB;AAKA,QAAMrC,MAAM,GAAG0C,YAAY,CACzBE,IAAI,CAACgB,MAAL,CAAYa,aADa,EAEzBF,aAFyB,EAGzBK,eAHyB,EAIzBP,iBAJyB,EAKzBG,aALyB,CAA3B;AAQA,SAAO;AACLzE,IAAAA,OAAO,EAAEoE,OADJ;AAELrE,IAAAA,OAAO,EAAE4D,OAFJ;AAGL1D,IAAAA,MAHK;AAILE,IAAAA,KAAK,EAAE8C,UAJF;AAKLnC,IAAAA,UAAU,EAAEgC;AALP,GAAP;AAOD;;AAQD,eAAe3B,oBAAf,CACEzB,MADF,EAEEoB,UAFF,EAGErB,OAHF,EAIqB;AAAA;;AACnB,QAAMsD,SAAS,GAAGrD,MAAM,CAACC,MAAP,GAAgBmB,UAAU,CAACkC,oBAA7C;AAEA,MAAI+B,UAAU,GAAG;AACfpF,IAAAA,MAAM,EAAE,CADO;AAEfH,IAAAA,MAAM,EAAEE,MAAM,CAACF,MAAP,CAAc+D,KAAd,CAAoB7D,MAAM,CAACC,MAA3B,EAAmCoD,SAAnC,CAFO;AAGfnD,IAAAA,IAAI,EAAEmD,SAAS,GAAGrD,MAAM,CAACC;AAHV,GAAjB;AAMAD,EAAAA,MAAM,CAACC,MAAP,GAAgBoD,SAAhB;;AAEA,MAAItD,OAAO,CAAC4D,WAAR,KAAwB,cAA5B,EAA4C;AAC1C,UAAMC,SAAS,GAAG,MAAMrE,UAAU,CAChCQ,OAAO,CAAC4D,WADwB,EAEhC0B,UAAU,CAACvF,MAAX,CAAkB+D,KAAlB,CAAwBwB,UAAU,CAACpF,MAAnC,EAA2CoD,SAA3C,CAFgC,EAGhCjC,UAAU,CAAC0C,sBAHqB,CAAlC;AAMAuB,IAAAA,UAAU,GAAG;AACXvF,MAAAA,MAAM,EAAE8D,SADG;AAEX3D,MAAAA,MAAM,EAAE,CAFG;AAGXC,MAAAA,IAAI,EAAE0D,SAAS,CAACzD;AAHL,KAAb;AAMAH,IAAAA,MAAM,CAACC,MAAP,GAAgBoD,SAAhB;AACD;;AAED,QAAM1C,SAAS,GAAG,CAAAS,UAAU,SAA
V,IAAAA,UAAU,WAAV,qCAAAA,UAAU,CAAEkE,sBAAZ,gFAAoC7B,UAApC,KAAkD,CAApE;AAEA,SAAOR,YAAY,CACjBlD,OAAO,CAACoE,MAAR,CAAea,aADE,EAEjBjF,OAAO,CAACoE,MAAR,CAAejB,QAFE,EAGjBmC,UAHiB,EAIjB1E,SAJiB,EAKjBZ,OALiB,CAAZ,CAMLgB,GANK,CAMAwE,CAAD,IAAOA,CAAC,CAACC,QAAF,EANN,CAAP;AAOD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {\n ParquetCodec,\n ParquetData,\n ParquetOptions,\n ParquetPageData,\n ParquetType,\n PrimitiveType,\n SchemaDefinition\n} from '../schema/declare';\nimport {CursorBuffer, ParquetCodecOptions, PARQUET_CODECS} from '../codecs';\nimport {\n ConvertedType,\n Encoding,\n FieldRepetitionType,\n PageHeader,\n PageType,\n SchemaElement,\n Type\n} from '../parquet-thrift';\nimport {decompress} from '../compression';\nimport {PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING} from '../../constants';\nimport {decodePageHeader, getThriftEnum, getBitWidth} from '../utils/read-utils';\n\n/**\n * Decode data pages\n * @param buffer - input data\n * @param column - parquet column\n * @param compression - compression type\n * @returns parquet data page data\n */\nexport async function decodeDataPages(\n buffer: Buffer,\n options: ParquetOptions\n): Promise<ParquetData> {\n const cursor: CursorBuffer = {\n buffer,\n offset: 0,\n size: buffer.length\n };\n\n const data: ParquetData = {\n rlevels: [],\n dlevels: [],\n values: [],\n pageHeaders: [],\n count: 0\n };\n\n let dictionary = options.dictionary || [];\n\n while (\n // @ts-ignore size can be undefined\n cursor.offset < cursor.size &&\n (!options.numValues || data.dlevels.length < Number(options.numValues))\n ) {\n // Looks like we have to decode these in sequence due to cursor updates?\n const page = await decodePage(cursor, options);\n\n if (page.dictionary) {\n dictionary = page.dictionary;\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (dictionary.length) {\n // eslint-disable-next-line no-loop-func\n page.values = page.values.map((value) => dictionary[value]);\n }\n\n for (let index = 0; index < page.rlevels.length; index++) {\n data.rlevels.push(page.rlevels[index]);\n data.dlevels.push(page.dlevels[index]);\n const value = page.values[index];\n\n if (value !== undefined) {\n data.values.push(value);\n }\n }\n\n data.count += page.count;\n data.pageHeaders.push(page.pageHeader);\n }\n\n return data;\n}\n\n/**\n * Decode parquet page based on page type\n * @param cursor\n * @param options\n */\nexport async function decodePage(\n cursor: CursorBuffer,\n options: ParquetOptions\n): Promise<ParquetPageData> {\n let page;\n const {pageHeader, length} = await decodePageHeader(cursor.buffer, cursor.offset);\n cursor.offset += length;\n\n const pageType = getThriftEnum(PageType, pageHeader.type);\n\n switch (pageType) {\n case 'DATA_PAGE':\n page = await decodeDataPage(cursor, pageHeader, options);\n break;\n case 'DATA_PAGE_V2':\n page = await decodeDataPageV2(cursor, pageHeader, options);\n break;\n case 'DICTIONARY_PAGE':\n page = {\n dictionary: await decodeDictionaryPage(cursor, pageHeader, options),\n pageHeader\n };\n break;\n default:\n throw new Error(`invalid page type: ${pageType}`);\n }\n\n return page;\n}\n\n/**\n * Decode parquet schema\n * @param schemaElements input schema elements data\n * @param offset offset to read from\n * @param len length of data\n * @returns result.offset\n * result.next - offset at the end of function\n * result.schema - schema read from the input data\n * @todo output offset is the same as input - possibly 
excess output field\n */\nexport function decodeSchema(\n schemaElements: SchemaElement[],\n offset: number,\n len: number\n): {\n offset: number;\n next: number;\n schema: SchemaDefinition;\n} {\n const schema: SchemaDefinition = {};\n let next = offset;\n for (let i = 0; i < len; i++) {\n const schemaElement = schemaElements[next];\n\n const repetitionType =\n next > 0 ? getThriftEnum(FieldRepetitionType, schemaElement.repetition_type!) : 'ROOT';\n\n let optional = false;\n let repeated = false;\n switch (repetitionType) {\n case 'REQUIRED':\n break;\n case 'OPTIONAL':\n optional = true;\n break;\n case 'REPEATED':\n repeated = true;\n break;\n default:\n throw new Error('parquet: unknown repetition type');\n }\n\n if (schemaElement.num_children! > 0) {\n const res = decodeSchema(schemaElements, next + 1, schemaElement.num_children!);\n next = res.next;\n schema[schemaElement.name] = {\n // type: undefined,\n optional,\n repeated,\n fields: res.schema\n };\n } else {\n const type = getThriftEnum(Type, schemaElement.type!);\n let logicalType = type;\n\n if (schemaElement.converted_type) {\n logicalType = getThriftEnum(ConvertedType, schemaElement.converted_type);\n }\n\n switch (logicalType) {\n case 'DECIMAL':\n logicalType = `${logicalType}_${type}` as ParquetType;\n break;\n default:\n }\n\n schema[schemaElement.name] = {\n type: logicalType as ParquetType,\n typeLength: schemaElement.type_length,\n presision: schemaElement.precision,\n scale: schemaElement.scale,\n optional,\n repeated\n };\n next++;\n }\n }\n return {schema, offset, next};\n}\n\n/**\n * Decode a consecutive array of data using one of the parquet encodings\n */\nfunction decodeValues(\n type: PrimitiveType,\n encoding: ParquetCodec,\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): any[] {\n if (!(encoding in PARQUET_CODECS)) {\n throw new Error(`invalid encoding: ${encoding}`);\n }\n return PARQUET_CODECS[encoding].decodeValues(type, cursor, count, opts);\n}\n\n/**\n * Do decoding of parquet dataPage from column chunk\n * @param cursor\n * @param header\n * @param options\n */\nasync function decodeDataPage(\n cursor: CursorBuffer,\n header: PageHeader,\n options: ParquetOptions\n): Promise<ParquetPageData> {\n const cursorEnd = cursor.offset + header.compressed_page_size;\n const valueCount = header.data_page_header?.num_values;\n\n /* uncompress page */\n let dataCursor = cursor;\n\n if (options.compression !== 'UNCOMPRESSED') {\n const valuesBuf = await decompress(\n options.compression,\n cursor.buffer.slice(cursor.offset, cursorEnd),\n header.uncompressed_page_size\n );\n dataCursor = {\n buffer: valuesBuf,\n offset: 0,\n size: valuesBuf.length\n };\n cursor.offset = cursorEnd;\n }\n\n /* read repetition levels */\n const rLevelEncoding = getThriftEnum(\n Encoding,\n header.data_page_header?.repetition_level_encoding!\n ) as ParquetCodec;\n // tslint:disable-next-line:prefer-array-literal\n let rLevels = new Array(valueCount);\n\n if (options.column.rLevelMax > 0) {\n rLevels = decodeValues(PARQUET_RDLVL_TYPE, rLevelEncoding, dataCursor, valueCount!, {\n bitWidth: getBitWidth(options.column.rLevelMax),\n disableEnvelope: false\n // column: opts.column\n });\n } else {\n rLevels.fill(0);\n }\n\n /* read definition levels */\n const dLevelEncoding = getThriftEnum(\n Encoding,\n header.data_page_header?.definition_level_encoding!\n ) as ParquetCodec;\n // tslint:disable-next-line:prefer-array-literal\n let dLevels = new Array(valueCount);\n if (options.column.dLevelMax > 0) {\n dLevels = 
decodeValues(PARQUET_RDLVL_TYPE, dLevelEncoding, dataCursor, valueCount!, {\n bitWidth: getBitWidth(options.column.dLevelMax),\n disableEnvelope: false\n // column: opts.column\n });\n } else {\n dLevels.fill(0);\n }\n let valueCountNonNull = 0;\n for (const dlvl of dLevels) {\n if (dlvl === options.column.dLevelMax) {\n valueCountNonNull++;\n }\n }\n\n /* read values */\n const valueEncoding = getThriftEnum(Encoding, header.data_page_header?.encoding!) as ParquetCodec;\n const decodeOptions = {\n typeLength: options.column.typeLength,\n bitWidth: options.column.typeLength\n };\n\n const values = decodeValues(\n options.column.primitiveType!,\n valueEncoding,\n dataCursor,\n valueCountNonNull,\n decodeOptions\n );\n\n return {\n dlevels: dLevels,\n rlevels: rLevels,\n values,\n count: valueCount!,\n pageHeader: header\n };\n}\n\n/**\n * Do decoding of parquet dataPage in version 2 from column chunk\n * @param cursor\n * @param header\n * @param opts\n * @returns\n */\nasync function decodeDataPageV2(\n cursor: CursorBuffer,\n header: PageHeader,\n opts: any\n): Promise<ParquetPageData> {\n const cursorEnd = cursor.offset + header.compressed_page_size;\n\n const valueCount = header.data_page_header_v2?.num_values;\n // @ts-ignore\n const valueCountNonNull = valueCount - header.data_page_header_v2?.num_nulls;\n const valueEncoding = getThriftEnum(\n Encoding,\n header.data_page_header_v2?.encoding!\n ) as ParquetCodec;\n\n /* read repetition levels */\n // tslint:disable-next-line:prefer-array-literal\n let rLevels = new Array(valueCount);\n if (opts.column.rLevelMax > 0) {\n rLevels = decodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, cursor, valueCount!, {\n bitWidth: getBitWidth(opts.column.rLevelMax),\n disableEnvelope: true\n });\n } else {\n rLevels.fill(0);\n }\n\n /* read definition levels */\n // tslint:disable-next-line:prefer-array-literal\n let dLevels = new Array(valueCount);\n if (opts.column.dLevelMax > 0) {\n dLevels = decodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, cursor, valueCount!, {\n bitWidth: getBitWidth(opts.column.dLevelMax),\n disableEnvelope: true\n });\n } else {\n dLevels.fill(0);\n }\n\n /* read values */\n let valuesBufCursor = cursor;\n\n if (header.data_page_header_v2?.is_compressed) {\n const valuesBuf = await decompress(\n opts.compression,\n cursor.buffer.slice(cursor.offset, cursorEnd),\n header.uncompressed_page_size\n );\n\n valuesBufCursor = {\n buffer: valuesBuf,\n offset: 0,\n size: valuesBuf.length\n };\n\n cursor.offset = cursorEnd;\n }\n\n const decodeOptions = {\n typeLength: opts.column.typeLength,\n bitWidth: opts.column.typeLength\n };\n\n const values = decodeValues(\n opts.column.primitiveType!,\n valueEncoding,\n valuesBufCursor,\n valueCountNonNull,\n decodeOptions\n );\n\n return {\n dlevels: dLevels,\n rlevels: rLevels,\n values,\n count: valueCount!,\n pageHeader: header\n };\n}\n\n/**\n * Do decoding of dictionary page which helps to iterate over all indexes and get dataPage values.\n * @param cursor\n * @param pageHeader\n * @param options\n */\nasync function decodeDictionaryPage(\n cursor: CursorBuffer,\n pageHeader: PageHeader,\n options: ParquetOptions\n): Promise<string[]> {\n const cursorEnd = cursor.offset + pageHeader.compressed_page_size;\n\n let dictCursor = {\n offset: 0,\n buffer: cursor.buffer.slice(cursor.offset, cursorEnd),\n size: cursorEnd - cursor.offset\n };\n\n cursor.offset = cursorEnd;\n\n if (options.compression !== 'UNCOMPRESSED') {\n const valuesBuf = await decompress(\n options.compression,\n 
dictCursor.buffer.slice(dictCursor.offset, cursorEnd),\n pageHeader.uncompressed_page_size\n );\n\n dictCursor = {\n buffer: valuesBuf,\n offset: 0,\n size: valuesBuf.length\n };\n\n cursor.offset = cursorEnd;\n }\n\n const numValues = pageHeader?.dictionary_page_header?.num_values || 0;\n\n return decodeValues(\n options.column.primitiveType!,\n options.column.encoding!,\n dictCursor,\n numValues,\n options as ParquetCodecOptions\n ).map((d) => d.toString());\n}\n"],"file":"decoders.js"}
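The sourcesContent embedded above carries the full decoders.ts source: the new parser walks a column chunk's pages in sequence, substituting dictionary values for indices when a DICTIONARY_PAGE precedes the data pages. A minimal sketch of how a caller might wire up the exported decodeDataPages, assuming pagesBuf and options are produced by ParquetEnvelopeReader.readColumnChunk (shown later in this diff); the import path is illustrative, not the package's public API:

  // Illustrative path; in the package source this lives in
  // src/parquetjs/parser/decoders.ts.
  import {decodeDataPages} from './parquetjs/parser/decoders';

  // `pagesBuf` holds the column chunk's pages; `options` carries the column
  // descriptor, compression codec, numValues and any dictionary, all of
  // which readColumnChunk assembles from the chunk metadata.
  async function decodeChunk(pagesBuf: Buffer, options: any): Promise<any[]> {
    // decodeDataPages advances a {buffer, offset, size} cursor page by page
    // and returns {rlevels, dlevels, values, pageHeaders, count}.
    const data = await decodeDataPages(pagesBuf, options);
    return data.values;
  }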
@@ -0,0 +1,90 @@
+ import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+
+ let _Symbol$asyncIterator;
+
+ import { materializeRecords } from '../schema/shred';
+ _Symbol$asyncIterator = Symbol.asyncIterator;
+ export class ParquetCursor {
+   constructor(metadata, envelopeReader, schema, columnList) {
+     _defineProperty(this, "metadata", void 0);
+
+     _defineProperty(this, "envelopeReader", void 0);
+
+     _defineProperty(this, "schema", void 0);
+
+     _defineProperty(this, "columnList", void 0);
+
+     _defineProperty(this, "rowGroup", []);
+
+     _defineProperty(this, "rowGroupIndex", void 0);
+
+     this.metadata = metadata;
+     this.envelopeReader = envelopeReader;
+     this.schema = schema;
+     this.columnList = columnList;
+     this.rowGroupIndex = 0;
+   }
+
+   async next() {
+     if (this.rowGroup.length === 0) {
+       if (this.rowGroupIndex >= this.metadata.row_groups.length) {
+         return null;
+       }
+
+       const rowBuffer = await this.envelopeReader.readRowGroup(this.schema, this.metadata.row_groups[this.rowGroupIndex], this.columnList);
+       this.rowGroup = materializeRecords(this.schema, rowBuffer);
+       this.rowGroupIndex++;
+     }
+
+     return this.rowGroup.shift();
+   }
+
+   rewind() {
+     this.rowGroup = [];
+     this.rowGroupIndex = 0;
+   }
+
+   [_Symbol$asyncIterator]() {
+     let done = false;
+     return {
+       next: async () => {
+         if (done) {
+           return {
+             done,
+             value: null
+           };
+         }
+
+         const value = await this.next();
+
+         if (value === null) {
+           return {
+             done: true,
+             value
+           };
+         }
+
+         return {
+           done: false,
+           value
+         };
+       },
+       return: async () => {
+         done = true;
+         return {
+           done,
+           value: null
+         };
+       },
+       throw: async () => {
+         done = true;
+         return {
+           done: true,
+           value: null
+         };
+       }
+     };
+   }
+
+ }
+ //# sourceMappingURL=parquet-cursor.js.map
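For reference, a hedged usage sketch of the cursor above: per the comment embedded in the source map below, a ParquetCursor is normally obtained via getCursor() on the ParquetReader (added later in this diff), implements AsyncIterable, and its next() resolves to null once the last row group is drained. The import path is illustrative:

  import {ParquetReader} from './parquetjs/parser/parquet-reader';

  // `buffer` is assumed to hold a complete .parquet file.
  async function printRows(buffer: Buffer): Promise<void> {
    const reader = await ParquetReader.openBuffer(buffer);
    try {
      // for await drives the [Symbol.asyncIterator]() defined above;
      // iteration ends when the cursor's next() returns null.
      for await (const record of reader.getCursor()) {
        console.log(record);
      }
    } finally {
      await reader.close();
    }
  }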
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../src/parquetjs/parser/parquet-cursor.ts"],"names":["materializeRecords","Symbol","asyncIterator","ParquetCursor","constructor","metadata","envelopeReader","schema","columnList","rowGroupIndex","next","rowGroup","length","row_groups","rowBuffer","readRowGroup","shift","rewind","done","value","return","throw"],"mappings":";;;;AAKA,SAAQA,kBAAR,QAAiC,iBAAjC;wBAiEGC,MAAM,CAACC,a;AA5DV,OAAO,MAAMC,aAAN,CAAmD;AAcxDC,EAAAA,WAAW,CACTC,QADS,EAETC,cAFS,EAGTC,MAHS,EAITC,UAJS,EAKT;AAAA;;AAAA;;AAAA;;AAAA;;AAAA,sCAdiC,EAcjC;;AAAA;;AACA,SAAKH,QAAL,GAAgBA,QAAhB;AACA,SAAKC,cAAL,GAAsBA,cAAtB;AACA,SAAKC,MAAL,GAAcA,MAAd;AACA,SAAKC,UAAL,GAAkBA,UAAlB;AACA,SAAKC,aAAL,GAAqB,CAArB;AACD;;AAMS,QAAJC,IAAI,GAAwB;AAChC,QAAI,KAAKC,QAAL,CAAcC,MAAd,KAAyB,CAA7B,EAAgC;AAC9B,UAAI,KAAKH,aAAL,IAAsB,KAAKJ,QAAL,CAAcQ,UAAd,CAAyBD,MAAnD,EAA2D;AAEzD,eAAO,IAAP;AACD;;AACD,YAAME,SAAS,GAAG,MAAM,KAAKR,cAAL,CAAoBS,YAApB,CACtB,KAAKR,MADiB,EAEtB,KAAKF,QAAL,CAAcQ,UAAd,CAAyB,KAAKJ,aAA9B,CAFsB,EAGtB,KAAKD,UAHiB,CAAxB;AAKA,WAAKG,QAAL,GAAgBX,kBAAkB,CAAC,KAAKO,MAAN,EAAcO,SAAd,CAAlC;AACA,WAAKL,aAAL;AACD;;AACD,WAAO,KAAKE,QAAL,CAAcK,KAAd,EAAP;AACD;;AAKDC,EAAAA,MAAM,GAAS;AACb,SAAKN,QAAL,GAAgB,EAAhB;AACA,SAAKF,aAAL,GAAqB,CAArB;AACD;;AAMD,4BAA2C;AACzC,QAAIS,IAAI,GAAG,KAAX;AACA,WAAO;AACLR,MAAAA,IAAI,EAAE,YAAY;AAChB,YAAIQ,IAAJ,EAAU;AACR,iBAAO;AAACA,YAAAA,IAAD;AAAOC,YAAAA,KAAK,EAAE;AAAd,WAAP;AACD;;AACD,cAAMA,KAAK,GAAG,MAAM,KAAKT,IAAL,EAApB;;AACA,YAAIS,KAAK,KAAK,IAAd,EAAoB;AAClB,iBAAO;AAACD,YAAAA,IAAI,EAAE,IAAP;AAAaC,YAAAA;AAAb,WAAP;AACD;;AACD,eAAO;AAACD,UAAAA,IAAI,EAAE,KAAP;AAAcC,UAAAA;AAAd,SAAP;AACD,OAVI;AAWLC,MAAAA,MAAM,EAAE,YAAY;AAClBF,QAAAA,IAAI,GAAG,IAAP;AACA,eAAO;AAACA,UAAAA,IAAD;AAAOC,UAAAA,KAAK,EAAE;AAAd,SAAP;AACD,OAdI;AAeLE,MAAAA,KAAK,EAAE,YAAY;AACjBH,QAAAA,IAAI,GAAG,IAAP;AACA,eAAO;AAACA,UAAAA,IAAI,EAAE,IAAP;AAAaC,UAAAA,KAAK,EAAE;AAApB,SAAP;AACD;AAlBI,KAAP;AAoBD;;AAlFuD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {FileMetaData} from '../parquet-thrift';\nimport {ParquetEnvelopeReader} from './parquet-envelope-reader';\nimport {ParquetSchema} from '../schema/schema';\nimport {ParquetRecord} from '../schema/declare';\nimport {materializeRecords} from '../schema/shred';\n\n/**\n * A parquet cursor is used to retrieve rows from a parquet file in order\n */\nexport class ParquetCursor<T> implements AsyncIterable<T> {\n public metadata: FileMetaData;\n public envelopeReader: ParquetEnvelopeReader;\n public schema: ParquetSchema;\n public columnList: string[][];\n public rowGroup: ParquetRecord[] = [];\n public rowGroupIndex: number;\n\n /**\n * Create a new parquet reader from the file metadata and an envelope reader.\n * It is usually not recommended to call this constructor directly except for\n * advanced and internal use cases. Consider using getCursor() on the\n * ParquetReader instead\n */\n constructor(\n metadata: FileMetaData,\n envelopeReader: ParquetEnvelopeReader,\n schema: ParquetSchema,\n columnList: string[][]\n ) {\n this.metadata = metadata;\n this.envelopeReader = envelopeReader;\n this.schema = schema;\n this.columnList = columnList;\n this.rowGroupIndex = 0;\n }\n\n /**\n * Retrieve the next row from the cursor. 
Returns a row or NULL if the end\n * of the file was reached\n */\n async next<T = any>(): Promise<T> {\n if (this.rowGroup.length === 0) {\n if (this.rowGroupIndex >= this.metadata.row_groups.length) {\n // @ts-ignore\n return null;\n }\n const rowBuffer = await this.envelopeReader.readRowGroup(\n this.schema,\n this.metadata.row_groups[this.rowGroupIndex],\n this.columnList\n );\n this.rowGroup = materializeRecords(this.schema, rowBuffer);\n this.rowGroupIndex++;\n }\n return this.rowGroup.shift() as any;\n }\n\n /**\n * Rewind the cursor the the beginning of the file\n */\n rewind(): void {\n this.rowGroup = [];\n this.rowGroupIndex = 0;\n }\n\n /**\n * Implement AsyncIterable\n */\n // tslint:disable-next-line:function-name\n [Symbol.asyncIterator](): AsyncIterator<T> {\n let done = false;\n return {\n next: async () => {\n if (done) {\n return {done, value: null};\n }\n const value = await this.next();\n if (value === null) {\n return {done: true, value};\n }\n return {done: false, value};\n },\n return: async () => {\n done = true;\n return {done, value: null};\n },\n throw: async () => {\n done = true;\n return {done: true, value: null};\n }\n };\n }\n}\n"],"file":"parquet-cursor.js"}
@@ -0,0 +1,164 @@
+ import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+ import { PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED } from '../../constants';
+ import { CompressionCodec, Type } from '../parquet-thrift';
+ import { fstat, fopen, fread, fclose } from '../utils/file-utils';
+ import { decodeFileMetadata, getThriftEnum, fieldIndexOf } from '../utils/read-utils';
+ import { decodeDataPages, decodePage } from './decoders';
+ const DEFAULT_DICTIONARY_SIZE = 1e6;
+ export class ParquetEnvelopeReader {
+   static async openFile(filePath) {
+     const fileStat = await fstat(filePath);
+     const fileDescriptor = await fopen(filePath);
+     const readFn = fread.bind(undefined, fileDescriptor);
+     const closeFn = fclose.bind(undefined, fileDescriptor);
+     return new ParquetEnvelopeReader(readFn, closeFn, fileStat.size);
+   }
+
+   static async openBuffer(buffer) {
+     const readFn = (position, length) => Promise.resolve(buffer.slice(position, position + length));
+
+     const closeFn = () => Promise.resolve();
+
+     return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);
+   }
+
+   constructor(read, close, fileSize, options) {
+     _defineProperty(this, "read", void 0);
+
+     _defineProperty(this, "close", void 0);
+
+     _defineProperty(this, "fileSize", void 0);
+
+     _defineProperty(this, "defaultDictionarySize", void 0);
+
+     this.read = read;
+     this.close = close;
+     this.fileSize = fileSize;
+     this.defaultDictionarySize = (options === null || options === void 0 ? void 0 : options.defaultDictionarySize) || DEFAULT_DICTIONARY_SIZE;
+   }
+
+   async readHeader() {
+     const buffer = await this.read(0, PARQUET_MAGIC.length);
+     const magic = buffer.toString();
+
+     switch (magic) {
+       case PARQUET_MAGIC:
+         break;
+
+       case PARQUET_MAGIC_ENCRYPTED:
+         throw new Error('Encrypted parquet file not supported');
+
+       default:
+         throw new Error(`Invalid parquet file (magic=${magic})`);
+     }
+   }
+
+   async readRowGroup(schema, rowGroup, columnList) {
+     const buffer = {
+       rowCount: Number(rowGroup.num_rows),
+       columnData: {}
+     };
+
+     for (const colChunk of rowGroup.columns) {
+       const colMetadata = colChunk.meta_data;
+       const colKey = colMetadata === null || colMetadata === void 0 ? void 0 : colMetadata.path_in_schema;
+
+       if (columnList.length > 0 && fieldIndexOf(columnList, colKey) < 0) {
+         continue;
+       }
+
+       buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
+     }
+
+     return buffer;
+   }
+
+   async readColumnChunk(schema, colChunk) {
+     var _colChunk$meta_data, _colChunk$meta_data2, _colChunk$meta_data3, _colChunk$meta_data4, _colChunk$meta_data5, _colChunk$meta_data7, _colChunk$meta_data8, _options$dictionary;
+
+     if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
+       throw new Error('external references are not supported');
+     }
+
+     const field = schema.findField((_colChunk$meta_data = colChunk.meta_data) === null || _colChunk$meta_data === void 0 ? void 0 : _colChunk$meta_data.path_in_schema);
+     const type = getThriftEnum(Type, (_colChunk$meta_data2 = colChunk.meta_data) === null || _colChunk$meta_data2 === void 0 ? void 0 : _colChunk$meta_data2.type);
+
+     if (type !== field.primitiveType) {
+       throw new Error(`chunk type not matching schema: ${type}`);
+     }
+
+     const compression = getThriftEnum(CompressionCodec, (_colChunk$meta_data3 = colChunk.meta_data) === null || _colChunk$meta_data3 === void 0 ? void 0 : _colChunk$meta_data3.codec);
+     const pagesOffset = Number((_colChunk$meta_data4 = colChunk.meta_data) === null || _colChunk$meta_data4 === void 0 ? void 0 : _colChunk$meta_data4.data_page_offset);
+     let pagesSize = Number((_colChunk$meta_data5 = colChunk.meta_data) === null || _colChunk$meta_data5 === void 0 ? void 0 : _colChunk$meta_data5.total_compressed_size);
+
+     if (!colChunk.file_path) {
+       var _colChunk$meta_data6;
+
+       pagesSize = Math.min(this.fileSize - pagesOffset, Number((_colChunk$meta_data6 = colChunk.meta_data) === null || _colChunk$meta_data6 === void 0 ? void 0 : _colChunk$meta_data6.total_compressed_size));
+     }
+
+     const options = {
+       type,
+       rLevelMax: field.rLevelMax,
+       dLevelMax: field.dLevelMax,
+       compression,
+       column: field,
+       numValues: (_colChunk$meta_data7 = colChunk.meta_data) === null || _colChunk$meta_data7 === void 0 ? void 0 : _colChunk$meta_data7.num_values,
+       dictionary: []
+     };
+     let dictionary;
+     const dictionaryPageOffset = colChunk === null || colChunk === void 0 ? void 0 : (_colChunk$meta_data8 = colChunk.meta_data) === null || _colChunk$meta_data8 === void 0 ? void 0 : _colChunk$meta_data8.dictionary_page_offset;
+
+     if (dictionaryPageOffset) {
+       const dictionaryOffset = Number(dictionaryPageOffset);
+       dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
+     }
+
+     dictionary = (_options$dictionary = options.dictionary) !== null && _options$dictionary !== void 0 && _options$dictionary.length ? options.dictionary : dictionary;
+     const pagesBuf = await this.read(pagesOffset, pagesSize);
+     return await decodeDataPages(pagesBuf, { ...options,
+       dictionary
+     });
+   }
+
+   async getDictionary(dictionaryPageOffset, options, pagesOffset) {
+     if (dictionaryPageOffset === 0) {
+       return [];
+     }
+
+     const dictionarySize = Math.min(this.fileSize - dictionaryPageOffset, this.defaultDictionarySize);
+     const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);
+     const cursor = {
+       buffer: pagesBuf,
+       offset: 0,
+       size: pagesBuf.length
+     };
+     const decodedPage = await decodePage(cursor, options);
+     return decodedPage.dictionary;
+   }
+
+   async readFooter() {
+     const trailerLen = PARQUET_MAGIC.length + 4;
+     const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);
+     const magic = trailerBuf.slice(4).toString();
+
+     if (magic !== PARQUET_MAGIC) {
+       throw new Error(`Not a valid parquet file (magic="${magic})`);
+     }
+
+     const metadataSize = trailerBuf.readUInt32LE(0);
+     const metadataOffset = this.fileSize - metadataSize - trailerLen;
+
+     if (metadataOffset < PARQUET_MAGIC.length) {
+       throw new Error(`Invalid metadata size ${metadataOffset}`);
+     }
+
+     const metadataBuf = await this.read(metadataOffset, metadataSize);
+     const {
+       metadata
+     } = decodeFileMetadata(metadataBuf);
+     return metadata;
+   }
+
+ }
+ //# sourceMappingURL=parquet-envelope-reader.js.map
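readFooter above relies on the fixed Parquet trailer layout: the file ends with a 4-byte little-endian metadata length followed by the 4-byte magic, so the Thrift-encoded FileMetaData starts at fileSize - metadataSize - 8. A self-contained sketch of the same arithmetic, assuming PARQUET_MAGIC has the conventional value 'PAR1':

  // Locate the Thrift-encoded FileMetaData inside a whole .parquet buffer.
  function locateFooter(file: Buffer): {offset: number; size: number} {
    const MAGIC = 'PAR1'; // assumed value of PARQUET_MAGIC
    const trailerLen = MAGIC.length + 4; // 4-byte length word + 4-byte magic
    const trailer = file.slice(file.length - trailerLen);
    if (trailer.slice(4).toString() !== MAGIC) {
      throw new Error('not a parquet file');
    }
    const size = trailer.readUInt32LE(0); // length of the metadata block
    return {offset: file.length - size - trailerLen, size};
  }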
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../src/parquetjs/parser/parquet-envelope-reader.ts"],"names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","CompressionCodec","Type","fstat","fopen","fread","fclose","decodeFileMetadata","getThriftEnum","fieldIndexOf","decodeDataPages","decodePage","DEFAULT_DICTIONARY_SIZE","ParquetEnvelopeReader","openFile","filePath","fileStat","fileDescriptor","readFn","bind","undefined","closeFn","size","openBuffer","buffer","position","length","Promise","resolve","slice","constructor","read","close","fileSize","options","defaultDictionarySize","readHeader","magic","toString","Error","readRowGroup","schema","rowGroup","columnList","rowCount","Number","num_rows","columnData","colChunk","columns","colMetadata","meta_data","colKey","path_in_schema","join","readColumnChunk","file_path","field","findField","type","primitiveType","compression","codec","pagesOffset","data_page_offset","pagesSize","total_compressed_size","Math","min","rLevelMax","dLevelMax","column","numValues","num_values","dictionary","dictionaryPageOffset","dictionary_page_offset","dictionaryOffset","getDictionary","pagesBuf","dictionarySize","cursor","offset","decodedPage","readFooter","trailerLen","trailerBuf","metadataSize","readUInt32LE","metadataOffset","metadataBuf","metadata"],"mappings":";AAEA,SAAQA,aAAR,EAAuBC,uBAAvB,QAAqD,iBAArD;AACA,SAAqBC,gBAArB,EAA+DC,IAA/D,QAA0E,mBAA1E;AAQA,SAAQC,KAAR,EAAeC,KAAf,EAAsBC,KAAtB,EAA6BC,MAA7B,QAA0C,qBAA1C;AACA,SAAQC,kBAAR,EAA4BC,aAA5B,EAA2CC,YAA3C,QAA8D,qBAA9D;AACA,SAAQC,eAAR,EAAyBC,UAAzB,QAA0C,YAA1C;AAEA,MAAMC,uBAAuB,GAAG,GAAhC;AAQA,OAAO,MAAMC,qBAAN,CAA4B;AAUZ,eAARC,QAAQ,CAACC,QAAD,EAAmD;AACtE,UAAMC,QAAQ,GAAG,MAAMb,KAAK,CAACY,QAAD,CAA5B;AACA,UAAME,cAAc,GAAG,MAAMb,KAAK,CAACW,QAAD,CAAlC;AAEA,UAAMG,MAAM,GAAGb,KAAK,CAACc,IAAN,CAAWC,SAAX,EAAsBH,cAAtB,CAAf;AACA,UAAMI,OAAO,GAAGf,MAAM,CAACa,IAAP,CAAYC,SAAZ,EAAuBH,cAAvB,CAAhB;AAEA,WAAO,IAAIJ,qBAAJ,CAA0BK,MAA1B,EAAkCG,OAAlC,EAA2CL,QAAQ,CAACM,IAApD,CAAP;AACD;;AAEsB,eAAVC,UAAU,CAACC,MAAD,EAAiD;AACtE,UAAMN,MAAM,GAAG,CAACO,QAAD,EAAmBC,MAAnB,KACbC,OAAO,CAACC,OAAR,CAAgBJ,MAAM,CAACK,KAAP,CAAaJ,QAAb,EAAuBA,QAAQ,GAAGC,MAAlC,CAAhB,CADF;;AAEA,UAAML,OAAO,GAAG,MAAMM,OAAO,CAACC,OAAR,EAAtB;;AACA,WAAO,IAAIf,qBAAJ,CAA0BK,MAA1B,EAAkCG,OAAlC,EAA2CG,MAAM,CAACE,MAAlD,CAAP;AACD;;AAEDI,EAAAA,WAAW,CACTC,IADS,EAETC,KAFS,EAGTC,QAHS,EAITC,OAJS,EAKT;AAAA;;AAAA;;AAAA;;AAAA;;AACA,SAAKH,IAAL,GAAYA,IAAZ;AACA,SAAKC,KAAL,GAAaA,KAAb;AACA,SAAKC,QAAL,GAAgBA,QAAhB;AACA,SAAKE,qBAAL,GAA6B,CAAAD,OAAO,SAAP,IAAAA,OAAO,WAAP,YAAAA,OAAO,CAAEC,qBAAT,KAAkCvB,uBAA/D;AACD;;AAEe,QAAVwB,UAAU,GAAkB;AAChC,UAAMZ,MAAM,GAAG,MAAM,KAAKO,IAAL,CAAU,CAAV,EAAahC,aAAa,CAAC2B,MAA3B,CAArB;AAEA,UAAMW,KAAK,GAAGb,MAAM,CAACc,QAAP,EAAd;;AACA,YAAQD,KAAR;AACE,WAAKtC,aAAL;AACE;;AACF,WAAKC,uBAAL;AACE,cAAM,IAAIuC,KAAJ,CAAU,sCAAV,CAAN;;AACF;AACE,cAAM,IAAIA,KAAJ,CAAW,+BAA8BF,KAAM,GAA/C,CAAN;AANJ;AAQD;;AAEiB,QAAZG,YAAY,CAChBC,MADgB,EAEhBC,QAFgB,EAGhBC,UAHgB,EAIQ;AACxB,UAAMnB,MAAqB,GAAG;AAC5BoB,MAAAA,QAAQ,EAAEC,MAAM,CAACH,QAAQ,CAACI,QAAV,CADY;AAE5BC,MAAAA,UAAU,EAAE;AAFgB,KAA9B;;AAIA,SAAK,MAAMC,QAAX,IAAuBN,QAAQ,CAACO,OAAhC,EAAyC;AACvC,YAAMC,WAAW,GAAGF,QAAQ,CAACG,SAA7B;AACA,YAAMC,MAAM,GAAGF,WAAH,aAAGA,WAAH,uBAAGA,WAAW,CAAEG,cAA5B;;AACA,UAAIV,UAAU,CAACjB,MAAX,GAAoB,CAApB,IAAyBjB,YAAY,CAACkC,UAAD,EAAaS,MAAb,CAAZ,GAAoC,CAAjE,EAAoE;AAClE;AACD;;AACD5B,MAAAA,MAAM,CAACuB,UAAP,CAAkBK,MAAM,CAAEE,IAAR,EAAlB,IAAoC,MAAM,KAAKC,eAAL,CAAqBd,MAArB,EAA6BO,QAA7B,CAA1C;AACD;;AACD,WAAOxB,MAAP;AACD;;AAOoB,QAAf+B,eAAe,CAACd,MAAD,EAAwBO,QAAxB,EAAqE;AAAA;;AACxF,QAAIA,QAAQ,CAACQ,SAAT,KAAuBpC,SAAvB,IAAoC4B,QAAQ,CAACQ,SAAT,KAAuB,IAA
/D,EAAqE;AACnE,YAAM,IAAIjB,KAAJ,CAAU,uCAAV,CAAN;AACD;;AAED,UAAMkB,KAAK,GAAGhB,MAAM,CAACiB,SAAP,wBAAiBV,QAAQ,CAACG,SAA1B,wDAAiB,oBAAoBE,cAArC,CAAd;AACA,UAAMM,IAAmB,GAAGnD,aAAa,CAACN,IAAD,0BAAO8C,QAAQ,CAACG,SAAhB,yDAAO,qBAAoBQ,IAA3B,CAAzC;;AAEA,QAAIA,IAAI,KAAKF,KAAK,CAACG,aAAnB,EAAkC;AAChC,YAAM,IAAIrB,KAAJ,CAAW,mCAAkCoB,IAAK,EAAlD,CAAN;AACD;;AAED,UAAME,WAA+B,GAAGrD,aAAa,CACnDP,gBADmD,0BAEnD+C,QAAQ,CAACG,SAF0C,yDAEnD,qBAAoBW,KAF+B,CAArD;AAKA,UAAMC,WAAW,GAAGlB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBa,gBAArB,CAA1B;AACA,QAAIC,SAAS,GAAGpB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBe,qBAArB,CAAtB;;AAEA,QAAI,CAAClB,QAAQ,CAACQ,SAAd,EAAyB;AAAA;;AACvBS,MAAAA,SAAS,GAAGE,IAAI,CAACC,GAAL,CACV,KAAKnC,QAAL,GAAgB8B,WADN,EAEVlB,MAAM,yBAACG,QAAQ,CAACG,SAAV,yDAAC,qBAAoBe,qBAArB,CAFI,CAAZ;AAID;;AAED,UAAMhC,OAAuB,GAAG;AAC9ByB,MAAAA,IAD8B;AAE9BU,MAAAA,SAAS,EAAEZ,KAAK,CAACY,SAFa;AAG9BC,MAAAA,SAAS,EAAEb,KAAK,CAACa,SAHa;AAI9BT,MAAAA,WAJ8B;AAK9BU,MAAAA,MAAM,EAAEd,KALsB;AAM9Be,MAAAA,SAAS,0BAAExB,QAAQ,CAACG,SAAX,yDAAE,qBAAoBsB,UAND;AAO9BC,MAAAA,UAAU,EAAE;AAPkB,KAAhC;AAUA,QAAIA,UAAJ;AAEA,UAAMC,oBAAoB,GAAG3B,QAAH,aAAGA,QAAH,+CAAGA,QAAQ,CAAEG,SAAb,yDAAG,qBAAqByB,sBAAlD;;AAEA,QAAID,oBAAJ,EAA0B;AACxB,YAAME,gBAAgB,GAAGhC,MAAM,CAAC8B,oBAAD,CAA/B;AAEAD,MAAAA,UAAU,GAAG,MAAM,KAAKI,aAAL,CAAmBD,gBAAnB,EAAqC3C,OAArC,EAA8C6B,WAA9C,CAAnB;AACD;;AAEDW,IAAAA,UAAU,GAAG,uBAAAxC,OAAO,CAACwC,UAAR,oEAAoBhD,MAApB,GAA6BQ,OAAO,CAACwC,UAArC,GAAkDA,UAA/D;AACA,UAAMK,QAAQ,GAAG,MAAM,KAAKhD,IAAL,CAAUgC,WAAV,EAAuBE,SAAvB,CAAvB;AACA,WAAO,MAAMvD,eAAe,CAACqE,QAAD,EAAW,EAAC,GAAG7C,OAAJ;AAAawC,MAAAA;AAAb,KAAX,CAA5B;AACD;;AASkB,QAAbI,aAAa,CACjBH,oBADiB,EAEjBzC,OAFiB,EAGjB6B,WAHiB,EAIE;AACnB,QAAIY,oBAAoB,KAAK,CAA7B,EAAgC;AAQ9B,aAAO,EAAP;AACD;;AAED,UAAMK,cAAc,GAAGb,IAAI,CAACC,GAAL,CACrB,KAAKnC,QAAL,GAAgB0C,oBADK,EAErB,KAAKxC,qBAFgB,CAAvB;AAIA,UAAM4C,QAAQ,GAAG,MAAM,KAAKhD,IAAL,CAAU4C,oBAAV,EAAgCK,cAAhC,CAAvB;AAEA,UAAMC,MAAM,GAAG;AAACzD,MAAAA,MAAM,EAAEuD,QAAT;AAAmBG,MAAAA,MAAM,EAAE,CAA3B;AAA8B5D,MAAAA,IAAI,EAAEyD,QAAQ,CAACrD;AAA7C,KAAf;AACA,UAAMyD,WAAW,GAAG,MAAMxE,UAAU,CAACsE,MAAD,EAAS/C,OAAT,CAApC;AAEA,WAAOiD,WAAW,CAACT,UAAnB;AACD;;AAEe,QAAVU,UAAU,GAA0B;AACxC,UAAMC,UAAU,GAAGtF,aAAa,CAAC2B,MAAd,GAAuB,CAA1C;AACA,UAAM4D,UAAU,GAAG,MAAM,KAAKvD,IAAL,CAAU,KAAKE,QAAL,GAAgBoD,UAA1B,EAAsCA,UAAtC,CAAzB;AAEA,UAAMhD,KAAK,GAAGiD,UAAU,CAACzD,KAAX,CAAiB,CAAjB,EAAoBS,QAApB,EAAd;;AACA,QAAID,KAAK,KAAKtC,aAAd,EAA6B;AAC3B,YAAM,IAAIwC,KAAJ,CAAW,oCAAmCF,KAAM,GAApD,CAAN;AACD;;AAED,UAAMkD,YAAY,GAAGD,UAAU,CAACE,YAAX,CAAwB,CAAxB,CAArB;AACA,UAAMC,cAAc,GAAG,KAAKxD,QAAL,GAAgBsD,YAAhB,GAA+BF,UAAtD;;AACA,QAAII,cAAc,GAAG1F,aAAa,CAAC2B,MAAnC,EAA2C;AACzC,YAAM,IAAIa,KAAJ,CAAW,yBAAwBkD,cAAe,EAAlD,CAAN;AACD;;AAED,UAAMC,WAAW,GAAG,MAAM,KAAK3D,IAAL,CAAU0D,cAAV,EAA0BF,YAA1B,CAA1B;AAGA,UAAM;AAACI,MAAAA;AAAD,QAAapF,kBAAkB,CAACmF,WAAD,CAArC;AACA,WAAOC,QAAP;AACD;;AAzLgC","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetSchema} from '../schema/schema';\nimport {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';\nimport {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';\nimport {\n ParquetBuffer,\n ParquetCompression,\n ParquetData,\n PrimitiveType,\n ParquetOptions\n} from '../schema/declare';\nimport {fstat, fopen, fread, fclose} from '../utils/file-utils';\nimport {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';\nimport {decodeDataPages, decodePage} from './decoders';\n\nconst DEFAULT_DICTIONARY_SIZE = 1e6;\n\n/**\n * The parquet envelope 
reader allows direct, unbuffered access to the individual\n * sections of the parquet file, namely the header, footer and the row groups.\n * This class is intended for advanced/internal users; if you just want to retrieve\n * rows from a parquet file use the ParquetReader instead\n */\nexport class ParquetEnvelopeReader {\n public read: (position: number, length: number) => Promise<Buffer>;\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n public close: () => Promise<void>;\n public fileSize: number;\n public defaultDictionarySize: number;\n\n static async openFile(filePath: string): Promise<ParquetEnvelopeReader> {\n const fileStat = await fstat(filePath);\n const fileDescriptor = await fopen(filePath);\n\n const readFn = fread.bind(undefined, fileDescriptor);\n const closeFn = fclose.bind(undefined, fileDescriptor);\n\n return new ParquetEnvelopeReader(readFn, closeFn, fileStat.size);\n }\n\n static async openBuffer(buffer: Buffer): Promise<ParquetEnvelopeReader> {\n const readFn = (position: number, length: number) =>\n Promise.resolve(buffer.slice(position, position + length));\n const closeFn = () => Promise.resolve();\n return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);\n }\n\n constructor(\n read: (position: number, length: number) => Promise<Buffer>,\n close: () => Promise<void>,\n fileSize: number,\n options?: any\n ) {\n this.read = read;\n this.close = close;\n this.fileSize = fileSize;\n this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;\n }\n\n async readHeader(): Promise<void> {\n const buffer = await this.read(0, PARQUET_MAGIC.length);\n\n const magic = buffer.toString();\n switch (magic) {\n case PARQUET_MAGIC:\n break;\n case PARQUET_MAGIC_ENCRYPTED:\n throw new Error('Encrypted parquet file not supported');\n default:\n throw new Error(`Invalid parquet file (magic=${magic})`);\n }\n }\n\n async readRowGroup(\n schema: ParquetSchema,\n rowGroup: RowGroup,\n columnList: string[][]\n ): Promise<ParquetBuffer> {\n const buffer: ParquetBuffer = {\n rowCount: Number(rowGroup.num_rows),\n columnData: {}\n };\n for (const colChunk of rowGroup.columns) {\n const colMetadata = colChunk.meta_data;\n const colKey = colMetadata?.path_in_schema;\n if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {\n continue; // eslint-disable-line no-continue\n }\n buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);\n }\n return buffer;\n }\n\n /**\n * Do reading of parquet file's column chunk\n * @param schema\n * @param colChunk\n */\n async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {\n if (colChunk.file_path !== undefined && colChunk.file_path !== null) {\n throw new Error('external references are not supported');\n }\n\n const field = schema.findField(colChunk.meta_data?.path_in_schema!);\n const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) 
as any;\n\n if (type !== field.primitiveType) {\n throw new Error(`chunk type not matching schema: ${type}`);\n }\n\n const compression: ParquetCompression = getThriftEnum(\n CompressionCodec,\n colChunk.meta_data?.codec!\n ) as any;\n\n const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);\n let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);\n\n if (!colChunk.file_path) {\n pagesSize = Math.min(\n this.fileSize - pagesOffset,\n Number(colChunk.meta_data?.total_compressed_size)\n );\n }\n\n const options: ParquetOptions = {\n type,\n rLevelMax: field.rLevelMax,\n dLevelMax: field.dLevelMax,\n compression,\n column: field,\n numValues: colChunk.meta_data?.num_values,\n dictionary: []\n };\n\n let dictionary;\n\n const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;\n\n if (dictionaryPageOffset) {\n const dictionaryOffset = Number(dictionaryPageOffset);\n // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.\n dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);\n }\n\n dictionary = options.dictionary?.length ? options.dictionary : dictionary;\n const pagesBuf = await this.read(pagesOffset, pagesSize);\n return await decodeDataPages(pagesBuf, {...options, dictionary});\n }\n\n /**\n * Getting dictionary for allows to flatten values by indices.\n * @param dictionaryPageOffset\n * @param options\n * @param pagesOffset\n * @returns\n */\n async getDictionary(\n dictionaryPageOffset: number,\n options: ParquetOptions,\n pagesOffset: number\n ): Promise<string[]> {\n if (dictionaryPageOffset === 0) {\n // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);\n // pagesBuf = await this.read(pagesOffset, dictionarySize);\n\n // In this case we are working with parquet-mr files format. Problem is described below:\n // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding\n // We need to get dictionary page from column chunk if it exists.\n // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.\n return [];\n }\n\n const dictionarySize = Math.min(\n this.fileSize - dictionaryPageOffset,\n this.defaultDictionarySize\n );\n const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);\n\n const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};\n const decodedPage = await decodePage(cursor, options);\n\n return decodedPage.dictionary!;\n }\n\n async readFooter(): Promise<FileMetaData> {\n const trailerLen = PARQUET_MAGIC.length + 4;\n const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);\n\n const magic = trailerBuf.slice(4).toString();\n if (magic !== PARQUET_MAGIC) {\n throw new Error(`Not a valid parquet file (magic=\"${magic})`);\n }\n\n const metadataSize = trailerBuf.readUInt32LE(0);\n const metadataOffset = this.fileSize - metadataSize - trailerLen;\n if (metadataOffset < PARQUET_MAGIC.length) {\n throw new Error(`Invalid metadata size ${metadataOffset}`);\n }\n\n const metadataBuf = await this.read(metadataOffset, metadataSize);\n // let metadata = new parquet_thrift.FileMetaData();\n // parquet_util.decodeThrift(metadata, metadataBuf);\n const {metadata} = decodeFileMetadata(metadataBuf);\n return metadata;\n }\n}\n"],"file":"parquet-envelope-reader.js"}
@@ -0,0 +1,133 @@
+ import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
+
+ let _Symbol$asyncIterator;
+
+ import { ParquetEnvelopeReader } from './parquet-envelope-reader';
+ import { ParquetSchema } from '../schema/schema';
+ import { ParquetCursor } from './parquet-cursor';
+ import { PARQUET_VERSION } from '../../constants';
+ import { decodeSchema } from './decoders';
+ _Symbol$asyncIterator = Symbol.asyncIterator;
+ export class ParquetReader {
+ static async openBlob(blob) {
+ const readFn = async (start, length) => {
+ const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();
+ return Buffer.from(arrayBuffer);
+ };
+
+ const closeFn = async () => {};
+
+ const size = blob.size;
+ const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);
+
+ try {
+ await envelopeReader.readHeader();
+ const metadata = await envelopeReader.readFooter();
+ return new ParquetReader(metadata, envelopeReader);
+ } catch (err) {
+ await envelopeReader.close();
+ throw err;
+ }
+ }
+
+ static async openArrayBuffer(arrayBuffer) {
+ const readFn = async (start, length) => Buffer.from(arrayBuffer, start, length);
+
+ const closeFn = async () => {};
+
+ const size = arrayBuffer.byteLength;
+ const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);
+
+ try {
+ await envelopeReader.readHeader();
+ const metadata = await envelopeReader.readFooter();
+ return new ParquetReader(metadata, envelopeReader);
+ } catch (err) {
+ await envelopeReader.close();
+ throw err;
+ }
+ }
+
+ static async openFile(filePath) {
+ const envelopeReader = await ParquetEnvelopeReader.openFile(filePath);
+
+ try {
+ await envelopeReader.readHeader();
+ const metadata = await envelopeReader.readFooter();
+ return new ParquetReader(metadata, envelopeReader);
+ } catch (err) {
+ await envelopeReader.close();
+ throw err;
+ }
+ }
+
+ static async openBuffer(buffer) {
+ const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);
+
+ try {
+ await envelopeReader.readHeader();
+ const metadata = await envelopeReader.readFooter();
+ return new ParquetReader(metadata, envelopeReader);
+ } catch (err) {
+ await envelopeReader.close();
+ throw err;
+ }
+ }
+
+ constructor(metadata, envelopeReader) {
+ _defineProperty(this, "metadata", void 0);
+
+ _defineProperty(this, "envelopeReader", void 0);
+
+ _defineProperty(this, "schema", void 0);
+
+ if (metadata.version !== PARQUET_VERSION) {
+ throw new Error('invalid parquet version');
+ }
+
+ this.metadata = metadata;
+ this.envelopeReader = envelopeReader;
+ const root = this.metadata.schema[0];
+ const {
+ schema
+ } = decodeSchema(this.metadata.schema, 1, root.num_children);
+ this.schema = new ParquetSchema(schema);
+ }
+
+ async close() {
+ await this.envelopeReader.close();
+ }
+
+ getCursor(columnList) {
+ if (!columnList) {
+ columnList = [];
+ }
+
+ columnList = columnList.map(x => Array.isArray(x) ? x : [x]);
+ return new ParquetCursor(this.metadata, this.envelopeReader, this.schema, columnList);
+ }
+
+ getRowCount() {
+ return Number(this.metadata.num_rows);
+ }
+
+ getSchema() {
+ return this.schema;
+ }
+
+ getMetadata() {
+ const md = {};
+
+ for (const kv of this.metadata.key_value_metadata) {
+ md[kv.key] = kv.value;
+ }
+
+ return md;
+ }
+
+ [_Symbol$asyncIterator]() {
+ return this.getCursor()[Symbol.asyncIterator]();
+ }
+
+ }
+ //# sourceMappingURL=parquet-reader.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../../../src/parquetjs/parser/parquet-reader.ts"],"names":["ParquetEnvelopeReader","ParquetSchema","ParquetCursor","PARQUET_VERSION","decodeSchema","Symbol","asyncIterator","ParquetReader","openBlob","blob","readFn","start","length","arrayBuffer","slice","Buffer","from","closeFn","size","envelopeReader","readHeader","metadata","readFooter","err","close","openArrayBuffer","byteLength","openFile","filePath","openBuffer","buffer","constructor","version","Error","root","schema","num_children","getCursor","columnList","map","x","Array","isArray","getRowCount","Number","num_rows","getSchema","getMetadata","md","kv","key_value_metadata","key","value"],"mappings":";;;;AACA,SAAQA,qBAAR,QAAoC,2BAApC;AAEA,SAAQC,aAAR,QAA4B,kBAA5B;AACA,SAAQC,aAAR,QAA4B,kBAA5B;AACA,SAAQC,eAAR,QAA8B,iBAA9B;AACA,SAAQC,YAAR,QAA2B,YAA3B;wBAyKGC,MAAM,CAACC,a;AAhKV,OAAO,MAAMC,aAAN,CAAmD;AAInC,eAARC,QAAQ,CAAIC,IAAJ,EAA2C;AAC9D,UAAMC,MAAM,GAAG,OAAOC,KAAP,EAAsBC,MAAtB,KAAyC;AACtD,YAAMC,WAAW,GAAG,MAAMJ,IAAI,CAACK,KAAL,CAAWH,KAAX,EAAkBA,KAAK,GAAGC,MAA1B,EAAkCC,WAAlC,EAA1B;AACA,aAAOE,MAAM,CAACC,IAAP,CAAYH,WAAZ,CAAP;AACD,KAHD;;AAIA,UAAMI,OAAO,GAAG,YAAY,CAAE,CAA9B;;AACA,UAAMC,IAAI,GAAGT,IAAI,CAACS,IAAlB;AACA,UAAMC,cAAc,GAAG,IAAInB,qBAAJ,CAA0BU,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAAvB;;AACA,QAAI;AACF,YAAMC,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAkBc,QAAlB,EAA4BF,cAA5B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAK2B,eAAfE,eAAe,CAAIZ,WAAJ,EAAyD;AACnF,UAAMH,MAAM,GAAG,OAAOC,KAAP,EAAsBC,MAAtB,KAAyCG,MAAM,CAACC,IAAP,CAAYH,WAAZ,EAAyBF,KAAzB,EAAgCC,MAAhC,CAAxD;;AACA,UAAMK,OAAO,GAAG,YAAY,CAAE,CAA9B;;AACA,UAAMC,IAAI,GAAGL,WAAW,CAACa,UAAzB;AACA,UAAMP,cAAc,GAAG,IAAInB,qBAAJ,CAA0BU,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAAvB;;AACA,QAAI;AACF,YAAMC,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAkBc,QAAlB,EAA4BF,cAA5B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAMoB,eAARI,QAAQ,CAAIC,QAAJ,EAAiD;AACpE,UAAMT,cAAc,GAAG,MAAMnB,qBAAqB,CAAC2B,QAAtB,CAA+BC,QAA/B,CAA7B;;AACA,QAAI;AACF,YAAMT,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAqBc,QAArB,EAA+BF,cAA/B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAEsB,eAAVM,UAAU,CAAIC,MAAJ,EAA+C;AACpE,UAAMX,cAAc,GAAG,MAAMnB,qBAAqB,CAAC6B,UAAtB,CAAiCC,MAAjC,CAA7B;;AACA,QAAI;AACF,YAAMX,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAqBc,QAArB,EAA+BF,cAA/B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAYDQ,EAAAA,WAAW,CAACV,QAAD,EAAyBF,cAAzB,EAAgE;AAAA;;AAAA;;AAAA;;AACzE,QAAIE,QAAQ,CAACW,OAAT,KAAqB7B,eAAzB,EAA0C;AACxC,YAAM,IAAI8B,KAAJ,CAAU,yBAAV,CAAN;AACD;;AAED,SAAKZ,QAAL,GAAgBA,QAAhB;AACA,SAAKF,cAAL,GAAsBA,cAAtB;AACA,UAAMe,IAAI,GAAG,KAAKb,QAAL,CAAcc,MAAd,CAAqB,CAArB,CAAb;AACA,UAAM;AAACA,MAAAA;AAAD,QAAW/B,YAAY,CAAC,KAAKiB,QAAL,CAAcc,MAAf,EAAuB,CAAvB,EAA0BD,IAAI,CAACE,YAA/B,CAA7B;AACA,SAAKD,MAAL,GAAc,IAAIlC,aAAJ,CAAkBkC,MAAlB,CAAd;AACD;;AAMU,QAALX,KAAK,GAAkB;AAC3B,UAAM,KAAKL,cAAL,CAAoBK,KAApB,EAAN;AAGD;;AAeDa,EAAAA,SAAS,CAACC,UAAD,EAAgE;AACvE,QAAI,CAACA,UAAL,EAAiB;AAEfA,MAAAA,UAAU,GAAG,EAAb;AACD;;AAGDA,IAAAA,UAAU,GAAGA,UAAU,CAACC,GAAX,CAAgBC,CAAD,IAAQC,KAAK,CAACC,OAAN,CAAcF,CAAd,IAAmBA,CAAnB,GAAuB,CAACA,CAAD,CAA9C,CAAb;AAEA,WAAO,IAAItC,aAAJ,CACL,KAAKmB,QADA,EAEL,KAAKF,cAFA,EAGL,KAAKgB,MAHA,EAILG,UAJK,CAAP;AAMD;;
AAMDK,EAAAA,WAAW,GAAW;AACpB,WAAOC,MAAM,CAAC,KAAKvB,QAAL,CAAcwB,QAAf,CAAb;AACD;;AAKDC,EAAAA,SAAS,GAAkB;AACzB,WAAO,KAAKX,MAAZ;AACD;;AAKDY,EAAAA,WAAW,GAA2B;AACpC,UAAMC,EAA0B,GAAG,EAAnC;;AACA,SAAK,MAAMC,EAAX,IAAiB,KAAK5B,QAAL,CAAc6B,kBAA/B,EAAoD;AAClDF,MAAAA,EAAE,CAACC,EAAE,CAACE,GAAJ,CAAF,GAAaF,EAAE,CAACG,KAAhB;AACD;;AACD,WAAOJ,EAAP;AACD;;AAMD,4BAA2C;AACzC,WAAO,KAAKX,SAAL,GAAiBhC,MAAM,CAACC,aAAxB,GAAP;AACD;;AAlKuD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetEnvelopeReader} from './parquet-envelope-reader';\nimport {FileMetaData} from '../parquet-thrift';\nimport {ParquetSchema} from '../schema/schema';\nimport {ParquetCursor} from './parquet-cursor';\nimport {PARQUET_VERSION} from '../../constants';\nimport {decodeSchema} from './decoders';\n\n/**\n * A parquet reader allows retrieving the rows from a parquet file in order.\n * The basic usage is to create a reader and then retrieve a cursor/iterator\n * which allows you to consume row after row until all rows have been read. It is\n * important that you call close() after you are finished reading the file to\n * avoid leaking file descriptors.\n */\nexport class ParquetReader<T> implements AsyncIterable<T> {\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openBlob<T>(blob: Blob): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => {\n const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();\n return Buffer.from(arrayBuffer);\n };\n const closeFn = async () => {};\n const size = blob.size;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => Buffer.from(arrayBuffer, start, length);\n const closeFn = async () => {};\n const size = arrayBuffer.byteLength;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * Open the parquet file pointed to by the specified path and return a new\n * parquet reader\n */\n static async openFile<T>(filePath: string): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openFile(filePath);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n static async openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n public metadata: FileMetaData;\n public envelopeReader: ParquetEnvelopeReader;\n public schema: 
ParquetSchema;\n\n /**\n * Create a new parquet reader from the file metadata and an envelope reader.\n * It is not recommended to call this constructor directly except for advanced\n * and internal use cases. Consider using one of the open{File,Buffer} methods\n * instead\n */\n constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader) {\n if (metadata.version !== PARQUET_VERSION) {\n throw new Error('invalid parquet version');\n }\n\n this.metadata = metadata;\n this.envelopeReader = envelopeReader;\n const root = this.metadata.schema[0];\n const {schema} = decodeSchema(this.metadata.schema, 1, root.num_children!);\n this.schema = new ParquetSchema(schema);\n }\n\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n async close(): Promise<void> {\n await this.envelopeReader.close();\n // this.envelopeReader = null;\n // this.metadata = null;\n }\n\n /**\n * Return a cursor to the file. You may open more than one cursor and use\n * them concurrently. All cursors become invalid once close() is called on\n * the reader object.\n *\n * The required_columns parameter controls which columns are actually read\n * from disk. An empty array or no value implies all columns. A list of column\n * names means that only those columns should be loaded from disk.\n */\n getCursor(): ParquetCursor<T>;\n // @ts-ignore\n getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;\n getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;\n getCursor(columnList?: (string | string[])[]): ParquetCursor<Partial<T>> {\n if (!columnList) {\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = [];\n }\n\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));\n\n return new ParquetCursor<T>(\n this.metadata,\n this.envelopeReader,\n this.schema,\n columnList as string[][]\n );\n }\n\n /**\n * Return the number of rows in this file. Note that the number of rows is\n * not neccessarily equal to the number of rows in each column.\n */\n getRowCount(): number {\n return Number(this.metadata.num_rows);\n }\n\n /**\n * Returns the ParquetSchema for this file\n */\n getSchema(): ParquetSchema {\n return this.schema;\n }\n\n /**\n * Returns the user (key/value) metadata for this file\n */\n getMetadata(): Record<string, string> {\n const md: Record<string, string> = {};\n for (const kv of this.metadata.key_value_metadata!) {\n md[kv.key] = kv.value!;\n }\n return md;\n }\n\n /**\n * Implement AsyncIterable\n */\n // tslint:disable-next-line:function-name\n [Symbol.asyncIterator](): AsyncIterator<T> {\n return this.getCursor()[Symbol.asyncIterator]();\n }\n}\n"],"file":"parquet-reader.js"}
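The compiled output above corresponds to the ParquetReader documented in the embedded TypeScript source: open via one of the static constructors, iterate, and always close. A usage sketch (the deep import path is an assumption):

import {ParquetReader} from './parquetjs/parser/parquet-reader';

async function logRows(arrayBuffer: ArrayBuffer): Promise<void> {
  // The row type parameter is caller-supplied and not validated at runtime.
  const reader = await ParquetReader.openArrayBuffer<{id: number; name: string}>(arrayBuffer);
  try {
    // ParquetReader implements AsyncIterable by delegating to getCursor()
    for await (const row of reader) {
      console.log(row.id, row.name);
    }
  } finally {
    await reader.close(); // per the docs, close() MUST be called when finished
  }
}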
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/parquetjs/schema/declare.ts"],"names":["ParquetBuffer","constructor","rowCount","columnData"],"mappings":";AAqFA,OAAO,MAAMA,aAAN,CAAoB;AAGzBC,EAAAA,WAAW,CAACC,QAAgB,GAAG,CAApB,EAAuBC,UAAuC,GAAG,EAAjE,EAAqE;AAAA;;AAAA;;AAC9E,SAAKD,QAAL,GAAgBA,QAAhB;AACA,SAAKC,UAAL,GAAkBA,UAAlB;AACD;;AANwB","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nexport type ParquetCodec = 'PLAIN' | 'RLE';\nexport type ParquetCompression = 'UNCOMPRESSED' | 'GZIP' | 'SNAPPY' | 'LZO' | 'BROTLI' | 'LZ4';\nexport type RepetitionType = 'REQUIRED' | 'OPTIONAL' | 'REPEATED';\nexport type ParquetType = PrimitiveType | OriginalType;\n\nexport type PrimitiveType =\n // Base Types\n | 'BOOLEAN' // 0\n | 'INT32' // 1\n | 'INT64' // 2\n | 'INT96' // 3\n | 'FLOAT' // 4\n | 'DOUBLE' // 5\n | 'BYTE_ARRAY' // 6,\n | 'FIXED_LEN_BYTE_ARRAY'; // 7\n\nexport type OriginalType =\n // Converted Types\n | 'UTF8' // 0\n // | 'MAP' // 1\n // | 'MAP_KEY_VALUE' // 2\n // | 'LIST' // 3\n // | 'ENUM' // 4\n // | 'DECIMAL' // 5\n | 'DATE' // 6\n | 'TIME_MILLIS' // 7\n | 'TIME_MICROS' // 8\n | 'TIMESTAMP_MILLIS' // 9\n | 'TIMESTAMP_MICROS' // 10\n | 'UINT_8' // 11\n | 'UINT_16' // 12\n | 'UINT_32' // 13\n | 'UINT_64' // 14\n | 'INT_8' // 15\n | 'INT_16' // 16\n | 'INT_32' // 17\n | 'INT_64' // 18\n | 'JSON' // 19\n | 'BSON' // 20\n | 'INTERVAL'; // 21\n\nexport interface SchemaDefinition {\n [string: string]: FieldDefinition;\n}\n\nexport interface FieldDefinition {\n type?: ParquetType;\n typeLength?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n optional?: boolean;\n repeated?: boolean;\n fields?: SchemaDefinition;\n}\n\nexport interface ParquetField {\n name: string;\n path: string[];\n key: string;\n primitiveType?: PrimitiveType;\n originalType?: OriginalType;\n repetitionType: RepetitionType;\n typeLength?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n rLevelMax: number;\n dLevelMax: number;\n isNested?: boolean;\n fieldCount?: number;\n fields?: Record<string, ParquetField>;\n}\n\nexport interface ParquetData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n}\n\nexport interface ParquetRecord {\n [key: string]: any;\n}\n\nexport class ParquetBuffer {\n rowCount: number;\n columnData: Record<string, ParquetData>;\n constructor(rowCount: number = 0, columnData: Record<string, ParquetData> = {}) {\n this.rowCount = rowCount;\n this.columnData = columnData;\n }\n}\n"],"file":"declare.js"}
+ {"version":3,"sources":["../../../../src/parquetjs/schema/declare.ts"],"names":["ParquetBuffer","constructor","rowCount","columnData"],"mappings":";AAmIA,OAAO,MAAMA,aAAN,CAAoB;AAGzBC,EAAAA,WAAW,CAACC,QAAgB,GAAG,CAApB,EAAuBC,UAAuC,GAAG,EAAjE,EAAqE;AAAA;;AAAA;;AAC9E,SAAKD,QAAL,GAAgBA,QAAhB;AACA,SAAKC,UAAL,GAAkBA,UAAlB;AACD;;AANwB","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport Int64 from 'node-int64';\nimport type {PageHeader} from '../parquet-thrift';\n\nexport type ParquetCodec = 'PLAIN' | 'RLE' | 'PLAIN_DICTIONARY';\nexport type ParquetCompression =\n | 'UNCOMPRESSED'\n | 'GZIP'\n | 'SNAPPY'\n | 'LZO'\n | 'BROTLI'\n | 'LZ4'\n | 'LZ4_RAW'\n | 'ZSTD';\nexport type RepetitionType = 'REQUIRED' | 'OPTIONAL' | 'REPEATED';\nexport type ParquetType = PrimitiveType | OriginalType;\n\n/**\n * Physical type\n */\nexport type PrimitiveType =\n // Base Types\n | 'BOOLEAN' // 0\n | 'INT32' // 1\n | 'INT64' // 2\n | 'INT96' // 3\n | 'FLOAT' // 4\n | 'DOUBLE' // 5\n | 'BYTE_ARRAY' // 6,\n | 'FIXED_LEN_BYTE_ARRAY'; // 7\n\n/**\n * Logical type\n */\nexport type OriginalType =\n // Converted Types\n | 'UTF8' // 0\n // | 'MAP' // 1\n // | 'MAP_KEY_VALUE' // 2\n // | 'LIST' // 3\n // | 'ENUM' // 4\n // | 'DECIMAL' // 5\n | 'DECIMAL_INT32' // 5\n | 'DECIMAL_INT64' // 5\n | 'DECIMAL_BYTE_ARRAY' // 5\n | 'DECIMAL_FIXED_LEN_BYTE_ARRAY' // 5\n | 'DATE' // 6\n | 'TIME_MILLIS' // 7\n | 'TIME_MICROS' // 8\n | 'TIMESTAMP_MILLIS' // 9\n | 'TIMESTAMP_MICROS' // 10\n | 'UINT_8' // 11\n | 'UINT_16' // 12\n | 'UINT_32' // 13\n | 'UINT_64' // 14\n | 'INT_8' // 15\n | 'INT_16' // 16\n | 'INT_32' // 17\n | 'INT_64' // 18\n | 'JSON' // 19\n | 'BSON' // 20\n | 'INTERVAL'; // 21\n\nexport type ParquetDictionary = string[];\n\nexport interface SchemaDefinition {\n [string: string]: FieldDefinition;\n}\n\nexport interface FieldDefinition {\n type?: ParquetType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n optional?: boolean;\n repeated?: boolean;\n fields?: SchemaDefinition;\n}\n\nexport interface ParquetField {\n name: string;\n path: string[];\n key: string;\n primitiveType?: PrimitiveType;\n originalType?: OriginalType;\n repetitionType: RepetitionType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n rLevelMax: number;\n dLevelMax: number;\n isNested?: boolean;\n fieldCount?: number;\n fields?: Record<string, ParquetField>;\n}\n\nexport interface ParquetOptions {\n type: ParquetType;\n rLevelMax: number;\n dLevelMax: number;\n compression: ParquetCompression;\n column: ParquetField;\n numValues?: Int64;\n dictionary?: ParquetDictionary;\n}\n\nexport interface ParquetData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n pageHeaders: PageHeader[];\n}\n\nexport interface ParquetPageData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n dictionary?: ParquetDictionary;\n pageHeader: PageHeader;\n}\n\nexport interface ParquetRecord {\n [key: string]: any;\n}\n\nexport class ParquetBuffer {\n rowCount: number;\n columnData: Record<string, ParquetData>;\n constructor(rowCount: number = 0, columnData: Record<string, ParquetData> = {}) {\n this.rowCount = rowCount;\n this.columnData = columnData;\n }\n}\n"],"file":"declare.js"}
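The rewritten declare.ts above widens ParquetCodec with 'PLAIN_DICTIONARY', adds 'LZ4_RAW' and 'ZSTD' compressions, introduces the DECIMAL_* original types, and gives FieldDefinition presision/scale fields (the misspelling of "precision" is in the source itself). A field definition exercising the new surface might look like this sketch (import path assumed):

import type {FieldDefinition} from './parquetjs/schema/declare';

// Hypothetical field: a decimal stored as INT64, using the new options.
const price: FieldDefinition = {
  type: 'DECIMAL_INT64',        // one of the new DECIMAL_* original types
  presision: 18,                // [sic] spelled as in declare.ts
  scale: 2,
  encoding: 'PLAIN_DICTIONARY', // codec added in this version
  compression: 'ZSTD',          // compression added in this version
  optional: true
};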
@@ -150,6 +150,8 @@ function buildFields(schema, rLevelParentMax, dLevelParentMax, path) {
   encoding: opts.encoding,
   compression: opts.compression,
   typeLength: opts.typeLength || typeDef.typeLength,
+ presision: opts.presision,
+ scale: opts.scale,
   rLevelMax,
   dLevelMax
   };
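With the two added lines, buildFields copies presision and scale from the field definition onto the built ParquetField, so decoders can read them later. A sketch of the observable effect, assuming the DECIMAL_* entries are registered in PARQUET_LOGICAL_TYPES (import path assumed):

import {ParquetSchema} from './parquetjs/schema/schema';

const schema = new ParquetSchema({
  amount: {type: 'DECIMAL_INT32', presision: 9, scale: 2, optional: true}
});

// The decimal parameters now survive schema construction:
const field = schema.findField('amount');
console.log(field.presision, field.scale); // 9 2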
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/parquetjs/schema/schema.ts"],"names":["PARQUET_CODECS","PARQUET_COMPRESSION_METHODS","materializeRecords","shredBuffer","shredRecord","PARQUET_LOGICAL_TYPES","ParquetSchema","constructor","schema","fields","buildFields","fieldList","listFields","findField","path","split","slice","n","length","shift","findFieldBranch","branch","push","record","buffer","compress","type","setCompress","name","node","compression","rLevelParentMax","dLevelParentMax","opts","required","optional","repeated","Boolean","rLevelMax","dLevelMax","repetitionType","cpath","concat","key","join","isNested","fieldCount","Object","keys","typeDef","Error","encoding","primitiveType","originalType","typeLength","list","k"],"mappings":";AAEA,SAAQA,cAAR,QAA6B,WAA7B;AACA,SAAQC,2BAAR,QAA0C,gBAA1C;AAUA,SAAQC,kBAAR,EAA4BC,WAA5B,EAAyCC,WAAzC,QAA2D,SAA3D;AACA,SAAQC,qBAAR,QAAoC,SAApC;AAKA,OAAO,MAAMC,aAAN,CAAoB;AAQzBC,EAAAA,WAAW,CAACC,MAAD,EAA2B;AAAA;;AAAA;;AAAA;;AACpC,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKC,MAAL,GAAcC,WAAW,CAACF,MAAD,EAAS,CAAT,EAAY,CAAZ,EAAe,EAAf,CAAzB;AACA,SAAKG,SAAL,GAAiBC,UAAU,CAAC,KAAKH,MAAN,CAA3B;AACD;;AAKDI,EAAAA,SAAS,CAACC,IAAD,EAAwC;AAC/C,QAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,MAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD,KAHD,MAGO;AAELD,MAAAA,IAAI,GAAGA,IAAI,CAACE,KAAL,CAAW,CAAX,CAAP;AACD;;AAED,QAAIC,CAAC,GAAG,KAAKR,MAAb;;AACA,WAAOK,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCF,MAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWL,MAAf;AACD;;AAED,WAAOQ,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAR;AACD;;AAKDM,EAAAA,eAAe,CAACN,IAAD,EAA0C;AACvD,QAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,MAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD;;AACD,UAAMM,MAAsB,GAAG,EAA/B;AACA,QAAIJ,CAAC,GAAG,KAAKR,MAAb;;AACA,WAAOK,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCE,MAAAA,MAAM,CAACC,IAAP,CAAYL,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAb;;AACA,UAAIA,IAAI,CAACI,MAAL,GAAc,CAAlB,EAAqB;AACnBD,QAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWL,MAAf;AACD;AACF;;AACD,WAAOY,MAAP;AACD;;AAEDjB,EAAAA,WAAW,CAACmB,MAAD,EAAwBC,MAAxB,EAAqD;AAC9DpB,IAAAA,WAAW,CAAC,IAAD,EAAOmB,MAAP,EAAeC,MAAf,CAAX;AACD;;AAEDtB,EAAAA,kBAAkB,CAACsB,MAAD,EAAyC;AACzD,WAAOtB,kBAAkB,CAAC,IAAD,EAAOsB,MAAP,CAAzB;AACD;;AAEDC,EAAAA,QAAQ,CAACC,IAAD,EAAiC;AACvCC,IAAAA,WAAW,CAAC,KAAKnB,MAAN,EAAckB,IAAd,CAAX;AACAC,IAAAA,WAAW,CAAC,KAAKlB,MAAN,EAAciB,IAAd,CAAX;AACA,WAAO,IAAP;AACD;;AAEDF,EAAAA,MAAM,GAAkB;AACtB,WAAOrB,WAAW,CAAC,IAAD,CAAlB;AACD;;AArEwB;;AAwE3B,SAASwB,WAAT,CAAqBnB,MAArB,EAAkCkB,IAAlC,EAA4D;AAC1D,OAAK,MAAME,IAAX,IAAmBpB,MAAnB,EAA2B;AACzB,UAAMqB,IAAI,GAAGrB,MAAM,CAACoB,IAAD,CAAnB;;AACA,QAAIC,IAAI,CAACpB,MAAT,EAAiB;AACfkB,MAAAA,WAAW,CAACE,IAAI,CAACpB,MAAN,EAAciB,IAAd,CAAX;AACD,KAFD,MAEO;AACLG,MAAAA,IAAI,CAACC,WAAL,GAAmBJ,IAAnB;AACD;AACF;AACF;;AAGD,SAAShB,WAAT,CACEF,MADF,EAEEuB,eAFF,EAGEC,eAHF,EAIElB,IAJF,EAKgC;AAC9B,QAAMH,SAAuC,GAAG,EAAhD;;AAEA,OAAK,MAAMiB,IAAX,IAAmBpB,MAAnB,EAA2B;AACzB,UAAMyB,IAAI,GAAGzB,MAAM,CAACoB,IAAD,CAAnB;AAGA,UAAMM,QAAQ,GAAG,CAACD,IAAI,CAACE,QAAvB;AACA,UAAMC,QAAQ,GAAGC,OAAO,CAACJ,IAAI,CAACG,QAAN,CAAxB;AACA,QAAIE,SAAS,GAAGP,eAAhB;AACA,QAAIQ,SAAS,GAAGP,eAAhB;AAEA,QAAIQ,cAA8B,GAAG,UAArC;;AACA,QAAI,CAACN,QAAL,EAAe;AACbM,MAAAA,cAAc,GAAG,UAAjB;AACAD,MAAAA,SAAS;AACV;;AACD,QAAIH,QAAJ,EAAc;AACZI,MAAAA,cAAc,GAAG,UAAjB;AACAF,MAAAA,SAAS;AACT,UAAIJ,QAAJ,EAAcK,SAAS;AACxB;;AAGD,QAAIN,IAAI,CAACxB,MAAT,EAAiB;AACf,YAAMgC,KAAK,GAAG3B,IAAI,CAAC4B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAjB,MAAAA,SAAS,CAACiB,IAAD,CAAT,GAAkB;AAChBA,QAAAA,IADgB;AAEhBd,QAAAA,IAAI,EAAE2B,KAFU;AAGhBE,QAAAA,
GAAG,EAAEF,KAAK,CAACG,IAAN,EAHW;AAIhBJ,QAAAA,cAJgB;AAKhBF,QAAAA,SALgB;AAMhBC,QAAAA,SANgB;AAOhBM,QAAAA,QAAQ,EAAE,IAPM;AAQhBC,QAAAA,UAAU,EAAEC,MAAM,CAACC,IAAP,CAAYf,IAAI,CAACxB,MAAjB,EAAyBS,MARrB;AAShBT,QAAAA,MAAM,EAAEC,WAAW,CAACuB,IAAI,CAACxB,MAAN,EAAc6B,SAAd,EAAyBC,SAAzB,EAAoCE,KAApC;AATH,OAAlB;AAWA;AACD;;AAED,UAAMQ,OAAY,GAAG5C,qBAAqB,CAAC4B,IAAI,CAACP,IAAN,CAA1C;;AACA,QAAI,CAACuB,OAAL,EAAc;AACZ,YAAM,IAAIC,KAAJ,CAAW,yBAAwBjB,IAAI,CAACP,IAAK,EAA7C,CAAN;AACD;;AAEDO,IAAAA,IAAI,CAACkB,QAAL,GAAgBlB,IAAI,CAACkB,QAAL,IAAiB,OAAjC;;AACA,QAAI,EAAElB,IAAI,CAACkB,QAAL,IAAiBnD,cAAnB,CAAJ,EAAwC;AACtC,YAAM,IAAIkD,KAAJ,CAAW,iCAAgCjB,IAAI,CAACkB,QAAS,EAAzD,CAAN;AACD;;AAEDlB,IAAAA,IAAI,CAACH,WAAL,GAAmBG,IAAI,CAACH,WAAL,IAAoB,cAAvC;;AACA,QAAI,EAAEG,IAAI,CAACH,WAAL,IAAoB7B,2BAAtB,CAAJ,EAAwD;AACtD,YAAM,IAAIiD,KAAJ,CAAW,mCAAkCjB,IAAI,CAACH,WAAY,EAA9D,CAAN;AACD;;AAGD,UAAMW,KAAK,GAAG3B,IAAI,CAAC4B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAjB,IAAAA,SAAS,CAACiB,IAAD,CAAT,GAAkB;AAChBA,MAAAA,IADgB;AAEhBwB,MAAAA,aAAa,EAAEH,OAAO,CAACG,aAFP;AAGhBC,MAAAA,YAAY,EAAEJ,OAAO,CAACI,YAHN;AAIhBvC,MAAAA,IAAI,EAAE2B,KAJU;AAKhBE,MAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EALW;AAMhBJ,MAAAA,cANgB;AAOhBW,MAAAA,QAAQ,EAAElB,IAAI,CAACkB,QAPC;AAQhBrB,MAAAA,WAAW,EAAEG,IAAI,CAACH,WARF;AAShBwB,MAAAA,UAAU,EAAErB,IAAI,CAACqB,UAAL,IAAmBL,OAAO,CAACK,UATvB;AAUhBhB,MAAAA,SAVgB;AAWhBC,MAAAA;AAXgB,KAAlB;AAaD;;AACD,SAAO5B,SAAP;AACD;;AAED,SAASC,UAAT,CAAoBH,MAApB,EAA0E;AACxE,MAAI8C,IAAoB,GAAG,EAA3B;;AACA,OAAK,MAAMC,CAAX,IAAgB/C,MAAhB,EAAwB;AACtB8C,IAAAA,IAAI,CAACjC,IAAL,CAAUb,MAAM,CAAC+C,CAAD,CAAhB;;AACA,QAAI/C,MAAM,CAAC+C,CAAD,CAAN,CAAUX,QAAd,EAAwB;AACtBU,MAAAA,IAAI,GAAGA,IAAI,CAACb,MAAL,CAAY9B,UAAU,CAACH,MAAM,CAAC+C,CAAD,CAAN,CAAU/C,MAAX,CAAtB,CAAP;AACD;AACF;;AACD,SAAO8C,IAAP;AACD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {PARQUET_CODECS} from '../codecs';\nimport {PARQUET_COMPRESSION_METHODS} from '../compression';\nimport {\n FieldDefinition,\n ParquetBuffer,\n ParquetCompression,\n ParquetField,\n ParquetRecord,\n RepetitionType,\n SchemaDefinition\n} from './declare';\nimport {materializeRecords, shredBuffer, shredRecord} from './shred';\nimport {PARQUET_LOGICAL_TYPES} from './types';\n\n/**\n * A parquet file schema\n */\nexport class ParquetSchema {\n public schema: Record<string, FieldDefinition>;\n public fields: Record<string, ParquetField>;\n public fieldList: ParquetField[];\n\n /**\n * Create a new schema from a JSON schema definition\n */\n constructor(schema: SchemaDefinition) {\n this.schema = schema;\n this.fields = buildFields(schema, 0, 0, []);\n this.fieldList = listFields(this.fields);\n }\n\n /**\n * Retrieve a field definition\n */\n findField(path: string | string[]): ParquetField {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n } else {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.slice(0); // clone array\n }\n\n let n = this.fields;\n for (; path.length > 1; path.shift()) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n\n return n[path[0]];\n }\n\n /**\n * Retrieve a field definition and all the field's ancestors\n */\n findFieldBranch(path: string | string[]): ParquetField[] {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n }\n const branch: ParquetField[] = [];\n let n = this.fields;\n for (; path.length > 0; path.shift()) {\n branch.push(n[path[0]]);\n if (path.length 
> 1) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n }\n return branch;\n }\n\n shredRecord(record: ParquetRecord, buffer: ParquetBuffer): void {\n shredRecord(this, record, buffer);\n }\n\n materializeRecords(buffer: ParquetBuffer): ParquetRecord[] {\n return materializeRecords(this, buffer);\n }\n\n compress(type: ParquetCompression): this {\n setCompress(this.schema, type);\n setCompress(this.fields, type);\n return this;\n }\n\n buffer(): ParquetBuffer {\n return shredBuffer(this);\n }\n}\n\nfunction setCompress(schema: any, type: ParquetCompression) {\n for (const name in schema) {\n const node = schema[name];\n if (node.fields) {\n setCompress(node.fields, type);\n } else {\n node.compression = type;\n }\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction buildFields(\n schema: SchemaDefinition,\n rLevelParentMax: number,\n dLevelParentMax: number,\n path: string[]\n): Record<string, ParquetField> {\n const fieldList: Record<string, ParquetField> = {};\n\n for (const name in schema) {\n const opts = schema[name];\n\n /* field repetition type */\n const required = !opts.optional;\n const repeated = Boolean(opts.repeated);\n let rLevelMax = rLevelParentMax;\n let dLevelMax = dLevelParentMax;\n\n let repetitionType: RepetitionType = 'REQUIRED';\n if (!required) {\n repetitionType = 'OPTIONAL';\n dLevelMax++;\n }\n if (repeated) {\n repetitionType = 'REPEATED';\n rLevelMax++;\n if (required) dLevelMax++;\n }\n\n /* nested field */\n if (opts.fields) {\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n rLevelMax,\n dLevelMax,\n isNested: true,\n fieldCount: Object.keys(opts.fields).length,\n fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)\n };\n continue; // eslint-disable-line no-continue\n }\n\n const typeDef: any = PARQUET_LOGICAL_TYPES[opts.type!];\n if (!typeDef) {\n throw new Error(`invalid parquet type: ${opts.type}`);\n }\n\n opts.encoding = opts.encoding || 'PLAIN';\n if (!(opts.encoding in PARQUET_CODECS)) {\n throw new Error(`unsupported parquet encoding: ${opts.encoding}`);\n }\n\n opts.compression = opts.compression || 'UNCOMPRESSED';\n if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`unsupported compression method: ${opts.compression}`);\n }\n\n /* add to schema */\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n primitiveType: typeDef.primitiveType,\n originalType: typeDef.originalType,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n encoding: opts.encoding,\n compression: opts.compression,\n typeLength: opts.typeLength || typeDef.typeLength,\n rLevelMax,\n dLevelMax\n };\n }\n return fieldList;\n}\n\nfunction listFields(fields: Record<string, ParquetField>): ParquetField[] {\n let list: ParquetField[] = [];\n for (const k in fields) {\n list.push(fields[k]);\n if (fields[k].isNested) {\n list = list.concat(listFields(fields[k].fields!));\n }\n }\n return list;\n}\n"],"file":"schema.js"}
+ {"version":3,"sources":["../../../../src/parquetjs/schema/schema.ts"],"names":["PARQUET_CODECS","PARQUET_COMPRESSION_METHODS","materializeRecords","shredBuffer","shredRecord","PARQUET_LOGICAL_TYPES","ParquetSchema","constructor","schema","fields","buildFields","fieldList","listFields","findField","path","split","slice","n","length","shift","findFieldBranch","branch","push","record","buffer","compress","type","setCompress","name","node","compression","rLevelParentMax","dLevelParentMax","opts","required","optional","repeated","Boolean","rLevelMax","dLevelMax","repetitionType","cpath","concat","key","join","isNested","fieldCount","Object","keys","typeDef","Error","encoding","primitiveType","originalType","typeLength","presision","scale","list","k"],"mappings":";AAEA,SAAQA,cAAR,QAA6B,WAA7B;AACA,SAAQC,2BAAR,QAA0C,gBAA1C;AAUA,SAAQC,kBAAR,EAA4BC,WAA5B,EAAyCC,WAAzC,QAA2D,SAA3D;AACA,SAAQC,qBAAR,QAAoC,SAApC;AAKA,OAAO,MAAMC,aAAN,CAAoB;AAQzBC,EAAAA,WAAW,CAACC,MAAD,EAA2B;AAAA;;AAAA;;AAAA;;AACpC,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKC,MAAL,GAAcC,WAAW,CAACF,MAAD,EAAS,CAAT,EAAY,CAAZ,EAAe,EAAf,CAAzB;AACA,SAAKG,SAAL,GAAiBC,UAAU,CAAC,KAAKH,MAAN,CAA3B;AACD;;AAKDI,EAAAA,SAAS,CAACC,IAAD,EAAwC;AAC/C,QAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,MAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD,KAHD,MAGO;AAELD,MAAAA,IAAI,GAAGA,IAAI,CAACE,KAAL,CAAW,CAAX,CAAP;AACD;;AAED,QAAIC,CAAC,GAAG,KAAKR,MAAb;;AACA,WAAOK,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCF,MAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWL,MAAf;AACD;;AAED,WAAOQ,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAR;AACD;;AAKDM,EAAAA,eAAe,CAACN,IAAD,EAA0C;AACvD,QAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,MAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD;;AACD,UAAMM,MAAsB,GAAG,EAA/B;AACA,QAAIJ,CAAC,GAAG,KAAKR,MAAb;;AACA,WAAOK,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCE,MAAAA,MAAM,CAACC,IAAP,CAAYL,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAb;;AACA,UAAIA,IAAI,CAACI,MAAL,GAAc,CAAlB,EAAqB;AACnBD,QAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWL,MAAf;AACD;AACF;;AACD,WAAOY,MAAP;AACD;;AAEDjB,EAAAA,WAAW,CAACmB,MAAD,EAAwBC,MAAxB,EAAqD;AAC9DpB,IAAAA,WAAW,CAAC,IAAD,EAAOmB,MAAP,EAAeC,MAAf,CAAX;AACD;;AAEDtB,EAAAA,kBAAkB,CAACsB,MAAD,EAAyC;AACzD,WAAOtB,kBAAkB,CAAC,IAAD,EAAOsB,MAAP,CAAzB;AACD;;AAEDC,EAAAA,QAAQ,CAACC,IAAD,EAAiC;AACvCC,IAAAA,WAAW,CAAC,KAAKnB,MAAN,EAAckB,IAAd,CAAX;AACAC,IAAAA,WAAW,CAAC,KAAKlB,MAAN,EAAciB,IAAd,CAAX;AACA,WAAO,IAAP;AACD;;AAEDF,EAAAA,MAAM,GAAkB;AACtB,WAAOrB,WAAW,CAAC,IAAD,CAAlB;AACD;;AArEwB;;AAwE3B,SAASwB,WAAT,CAAqBnB,MAArB,EAAkCkB,IAAlC,EAA4D;AAC1D,OAAK,MAAME,IAAX,IAAmBpB,MAAnB,EAA2B;AACzB,UAAMqB,IAAI,GAAGrB,MAAM,CAACoB,IAAD,CAAnB;;AACA,QAAIC,IAAI,CAACpB,MAAT,EAAiB;AACfkB,MAAAA,WAAW,CAACE,IAAI,CAACpB,MAAN,EAAciB,IAAd,CAAX;AACD,KAFD,MAEO;AACLG,MAAAA,IAAI,CAACC,WAAL,GAAmBJ,IAAnB;AACD;AACF;AACF;;AAGD,SAAShB,WAAT,CACEF,MADF,EAEEuB,eAFF,EAGEC,eAHF,EAIElB,IAJF,EAKgC;AAC9B,QAAMH,SAAuC,GAAG,EAAhD;;AAEA,OAAK,MAAMiB,IAAX,IAAmBpB,MAAnB,EAA2B;AACzB,UAAMyB,IAAI,GAAGzB,MAAM,CAACoB,IAAD,CAAnB;AAGA,UAAMM,QAAQ,GAAG,CAACD,IAAI,CAACE,QAAvB;AACA,UAAMC,QAAQ,GAAGC,OAAO,CAACJ,IAAI,CAACG,QAAN,CAAxB;AACA,QAAIE,SAAS,GAAGP,eAAhB;AACA,QAAIQ,SAAS,GAAGP,eAAhB;AAEA,QAAIQ,cAA8B,GAAG,UAArC;;AACA,QAAI,CAACN,QAAL,EAAe;AACbM,MAAAA,cAAc,GAAG,UAAjB;AACAD,MAAAA,SAAS;AACV;;AACD,QAAIH,QAAJ,EAAc;AACZI,MAAAA,cAAc,GAAG,UAAjB;AACAF,MAAAA,SAAS;AACT,UAAIJ,QAAJ,EAAcK,SAAS;AACxB;;AAGD,QAAIN,IAAI,CAACxB,MAAT,EAAiB;AACf,YAAMgC,KAAK,GAAG3B,IAAI,CAAC4B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAjB,MAAAA,SAAS,CAACiB,IAAD,CAAT,GAAkB;AAChBA,QAAAA,IADgB;AAEhBd,QAAAA,IAAI,EAAE2
B,KAFU;AAGhBE,QAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EAHW;AAIhBJ,QAAAA,cAJgB;AAKhBF,QAAAA,SALgB;AAMhBC,QAAAA,SANgB;AAOhBM,QAAAA,QAAQ,EAAE,IAPM;AAQhBC,QAAAA,UAAU,EAAEC,MAAM,CAACC,IAAP,CAAYf,IAAI,CAACxB,MAAjB,EAAyBS,MARrB;AAShBT,QAAAA,MAAM,EAAEC,WAAW,CAACuB,IAAI,CAACxB,MAAN,EAAc6B,SAAd,EAAyBC,SAAzB,EAAoCE,KAApC;AATH,OAAlB;AAWA;AACD;;AAED,UAAMQ,OAAY,GAAG5C,qBAAqB,CAAC4B,IAAI,CAACP,IAAN,CAA1C;;AACA,QAAI,CAACuB,OAAL,EAAc;AACZ,YAAM,IAAIC,KAAJ,CAAW,yBAAwBjB,IAAI,CAACP,IAAK,EAA7C,CAAN;AACD;;AAEDO,IAAAA,IAAI,CAACkB,QAAL,GAAgBlB,IAAI,CAACkB,QAAL,IAAiB,OAAjC;;AACA,QAAI,EAAElB,IAAI,CAACkB,QAAL,IAAiBnD,cAAnB,CAAJ,EAAwC;AACtC,YAAM,IAAIkD,KAAJ,CAAW,iCAAgCjB,IAAI,CAACkB,QAAS,EAAzD,CAAN;AACD;;AAEDlB,IAAAA,IAAI,CAACH,WAAL,GAAmBG,IAAI,CAACH,WAAL,IAAoB,cAAvC;;AACA,QAAI,EAAEG,IAAI,CAACH,WAAL,IAAoB7B,2BAAtB,CAAJ,EAAwD;AACtD,YAAM,IAAIiD,KAAJ,CAAW,mCAAkCjB,IAAI,CAACH,WAAY,EAA9D,CAAN;AACD;;AAGD,UAAMW,KAAK,GAAG3B,IAAI,CAAC4B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAjB,IAAAA,SAAS,CAACiB,IAAD,CAAT,GAAkB;AAChBA,MAAAA,IADgB;AAEhBwB,MAAAA,aAAa,EAAEH,OAAO,CAACG,aAFP;AAGhBC,MAAAA,YAAY,EAAEJ,OAAO,CAACI,YAHN;AAIhBvC,MAAAA,IAAI,EAAE2B,KAJU;AAKhBE,MAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EALW;AAMhBJ,MAAAA,cANgB;AAOhBW,MAAAA,QAAQ,EAAElB,IAAI,CAACkB,QAPC;AAQhBrB,MAAAA,WAAW,EAAEG,IAAI,CAACH,WARF;AAShBwB,MAAAA,UAAU,EAAErB,IAAI,CAACqB,UAAL,IAAmBL,OAAO,CAACK,UATvB;AAUhBC,MAAAA,SAAS,EAAEtB,IAAI,CAACsB,SAVA;AAWhBC,MAAAA,KAAK,EAAEvB,IAAI,CAACuB,KAXI;AAYhBlB,MAAAA,SAZgB;AAahBC,MAAAA;AAbgB,KAAlB;AAeD;;AACD,SAAO5B,SAAP;AACD;;AAED,SAASC,UAAT,CAAoBH,MAApB,EAA0E;AACxE,MAAIgD,IAAoB,GAAG,EAA3B;;AACA,OAAK,MAAMC,CAAX,IAAgBjD,MAAhB,EAAwB;AACtBgD,IAAAA,IAAI,CAACnC,IAAL,CAAUb,MAAM,CAACiD,CAAD,CAAhB;;AACA,QAAIjD,MAAM,CAACiD,CAAD,CAAN,CAAUb,QAAd,EAAwB;AACtBY,MAAAA,IAAI,GAAGA,IAAI,CAACf,MAAL,CAAY9B,UAAU,CAACH,MAAM,CAACiD,CAAD,CAAN,CAAUjD,MAAX,CAAtB,CAAP;AACD;AACF;;AACD,SAAOgD,IAAP;AACD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {PARQUET_CODECS} from '../codecs';\nimport {PARQUET_COMPRESSION_METHODS} from '../compression';\nimport {\n FieldDefinition,\n ParquetBuffer,\n ParquetCompression,\n ParquetField,\n ParquetRecord,\n RepetitionType,\n SchemaDefinition\n} from './declare';\nimport {materializeRecords, shredBuffer, shredRecord} from './shred';\nimport {PARQUET_LOGICAL_TYPES} from './types';\n\n/**\n * A parquet file schema\n */\nexport class ParquetSchema {\n public schema: Record<string, FieldDefinition>;\n public fields: Record<string, ParquetField>;\n public fieldList: ParquetField[];\n\n /**\n * Create a new schema from a JSON schema definition\n */\n constructor(schema: SchemaDefinition) {\n this.schema = schema;\n this.fields = buildFields(schema, 0, 0, []);\n this.fieldList = listFields(this.fields);\n }\n\n /**\n * Retrieve a field definition\n */\n findField(path: string | string[]): ParquetField {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n } else {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.slice(0); // clone array\n }\n\n let n = this.fields;\n for (; path.length > 1; path.shift()) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n\n return n[path[0]];\n }\n\n /**\n * Retrieve a field definition and all the field's ancestors\n */\n findFieldBranch(path: string | string[]): ParquetField[] {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n }\n const branch: ParquetField[] = [];\n let 
n = this.fields;\n for (; path.length > 0; path.shift()) {\n branch.push(n[path[0]]);\n if (path.length > 1) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n }\n return branch;\n }\n\n shredRecord(record: ParquetRecord, buffer: ParquetBuffer): void {\n shredRecord(this, record, buffer);\n }\n\n materializeRecords(buffer: ParquetBuffer): ParquetRecord[] {\n return materializeRecords(this, buffer);\n }\n\n compress(type: ParquetCompression): this {\n setCompress(this.schema, type);\n setCompress(this.fields, type);\n return this;\n }\n\n buffer(): ParquetBuffer {\n return shredBuffer(this);\n }\n}\n\nfunction setCompress(schema: any, type: ParquetCompression) {\n for (const name in schema) {\n const node = schema[name];\n if (node.fields) {\n setCompress(node.fields, type);\n } else {\n node.compression = type;\n }\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction buildFields(\n schema: SchemaDefinition,\n rLevelParentMax: number,\n dLevelParentMax: number,\n path: string[]\n): Record<string, ParquetField> {\n const fieldList: Record<string, ParquetField> = {};\n\n for (const name in schema) {\n const opts = schema[name];\n\n /* field repetition type */\n const required = !opts.optional;\n const repeated = Boolean(opts.repeated);\n let rLevelMax = rLevelParentMax;\n let dLevelMax = dLevelParentMax;\n\n let repetitionType: RepetitionType = 'REQUIRED';\n if (!required) {\n repetitionType = 'OPTIONAL';\n dLevelMax++;\n }\n if (repeated) {\n repetitionType = 'REPEATED';\n rLevelMax++;\n if (required) dLevelMax++;\n }\n\n /* nested field */\n if (opts.fields) {\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n rLevelMax,\n dLevelMax,\n isNested: true,\n fieldCount: Object.keys(opts.fields).length,\n fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)\n };\n continue; // eslint-disable-line no-continue\n }\n\n const typeDef: any = PARQUET_LOGICAL_TYPES[opts.type!];\n if (!typeDef) {\n throw new Error(`invalid parquet type: ${opts.type}`);\n }\n\n opts.encoding = opts.encoding || 'PLAIN';\n if (!(opts.encoding in PARQUET_CODECS)) {\n throw new Error(`unsupported parquet encoding: ${opts.encoding}`);\n }\n\n opts.compression = opts.compression || 'UNCOMPRESSED';\n if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`unsupported compression method: ${opts.compression}`);\n }\n\n /* add to schema */\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n primitiveType: typeDef.primitiveType,\n originalType: typeDef.originalType,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n encoding: opts.encoding,\n compression: opts.compression,\n typeLength: opts.typeLength || typeDef.typeLength,\n presision: opts.presision,\n scale: opts.scale,\n rLevelMax,\n dLevelMax\n };\n }\n return fieldList;\n}\n\nfunction listFields(fields: Record<string, ParquetField>): ParquetField[] {\n let list: ParquetField[] = [];\n for (const k in fields) {\n list.push(fields[k]);\n if (fields[k].isNested) {\n list = list.concat(listFields(fields[k].fields!));\n }\n }\n return list;\n}\n"],"file":"schema.js"}
@@ -9,6 +9,7 @@ export function shredBuffer(schema) {
   dlevels: [],
   rlevels: [],
   values: [],
+ pageHeaders: [],
   count: 0
   };
  }
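After this change every column scratch buffer created by shredBuffer carries a pageHeaders array alongside dlevels, rlevels and values, matching the extended ParquetData interface in declare.ts. A sketch of the resulting shape (import paths assumed):

import {ParquetSchema} from './parquetjs/schema/schema';
import {shredBuffer} from './parquetjs/schema/shred';

const schema = new ParquetSchema({name: {type: 'UTF8'}});
const buffer = shredBuffer(schema);

// buffer.columnData.name is {dlevels: [], rlevels: [], values: [],
//                            pageHeaders: [], count: 0}
console.log(buffer.columnData.name.pageHeaders.length); // 0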
@@ -137,7 +138,7 @@ function materializeColumn(schema, buffer, key, records) {
   }
 
   if (dLevel === field.dLevelMax) {
- const value = Types.fromPrimitive(field.originalType || field.primitiveType, data.values[vIndex]);
+ const value = Types.fromPrimitive(field.originalType || field.primitiveType, data.values[vIndex], field);
   vIndex++;
 
   if (field.repetitionType === 'REPEATED') {
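The one-line change passes the whole field into Types.fromPrimitive, which lets conversions that depend on per-field parameters (notably the new decimals with their presision/scale) see them during materialization. A sketch of the new call shape with hypothetical values (import paths assumed):

import {ParquetSchema} from './parquetjs/schema/schema';
import * as Types from './parquetjs/schema/types';

const schema = new ParquetSchema({
  amount: {type: 'DECIMAL_INT32', presision: 9, scale: 2, optional: true}
});
const field = schema.findField('amount');

// fromPrimitive now receives the field as a third argument, so a decimal
// conversion can consult field.scale (with scale 2, a stored 12345
// would denote 123.45).
const value = Types.fromPrimitive('DECIMAL_INT32', 12345, field);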
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/parquetjs/schema/shred.ts"],"names":["ParquetBuffer","Types","shredBuffer","schema","columnData","field","fieldList","key","dlevels","rlevels","values","count","rowCount","shredRecord","record","buffer","data","shredRecordFields","fields","Array","prototype","push","apply","rLevel","dLevel","name","undefined","constructor","length","Boolean","repetitionType","Error","isNested","i","rlvl","rLevelMax","dLevelMax","toPrimitive","originalType","primitiveType","materializeRecords","records","materializeColumn","findField","branch","findFieldBranch","rLevels","fill","vIndex","rIndex","step","ix","value","fromPrimitive"],"mappings":"AAEA,SAAQA,aAAR,QAAsE,WAAtE;AAEA,OAAO,KAAKC,KAAZ,MAAuB,SAAvB;AAEA,SAAQD,aAAR;AAEA,OAAO,SAASE,WAAT,CAAqBC,MAArB,EAA2D;AAChE,QAAMC,UAAuC,GAAG,EAAhD;;AACA,OAAK,MAAMC,KAAX,IAAoBF,MAAM,CAACG,SAA3B,EAAsC;AACpCF,IAAAA,UAAU,CAACC,KAAK,CAACE,GAAP,CAAV,GAAwB;AACtBC,MAAAA,OAAO,EAAE,EADa;AAEtBC,MAAAA,OAAO,EAAE,EAFa;AAGtBC,MAAAA,MAAM,EAAE,EAHc;AAItBC,MAAAA,KAAK,EAAE;AAJe,KAAxB;AAMD;;AACD,SAAO;AAACC,IAAAA,QAAQ,EAAE,CAAX;AAAcR,IAAAA;AAAd,GAAP;AACD;AAwBD,OAAO,SAASS,WAAT,CAAqBV,MAArB,EAA4CW,MAA5C,EAAyDC,MAAzD,EAAsF;AAE3F,QAAMC,IAAI,GAAGd,WAAW,CAACC,MAAD,CAAX,CAAoBC,UAAjC;AAEAa,EAAAA,iBAAiB,CAACd,MAAM,CAACe,MAAR,EAAgBJ,MAAhB,EAAwBE,IAAxB,EAA8B,CAA9B,EAAiC,CAAjC,CAAjB;;AAGA,MAAID,MAAM,CAACH,QAAP,KAAoB,CAAxB,EAA2B;AACzBG,IAAAA,MAAM,CAACH,QAAP,GAAkB,CAAlB;AACAG,IAAAA,MAAM,CAACX,UAAP,GAAoBY,IAApB;AACA;AACD;;AACDD,EAAAA,MAAM,CAACH,QAAP,IAAmB,CAAnB;;AACA,OAAK,MAAMP,KAAX,IAAoBF,MAAM,CAACG,SAA3B,EAAsC;AACpCa,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACX,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BE,OAAxD,EAAiEO,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAjF;AACAU,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACX,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BC,OAAxD,EAAiEQ,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAjF;AACAW,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACX,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BG,MAAxD,EAAgEM,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBG,MAAhF;AACAK,IAAAA,MAAM,CAACX,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BI,KAA7B,IAAsCK,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBI,KAAtD;AACD;AACF;;AAGD,SAASM,iBAAT,CACEC,MADF,EAEEJ,MAFF,EAGEE,IAHF,EAIEO,MAJF,EAKEC,MALF,EAME;AACA,OAAK,MAAMC,IAAX,IAAmBP,MAAnB,EAA2B;AACzB,UAAMb,KAAK,GAAGa,MAAM,CAACO,IAAD,CAApB;AAGA,QAAIf,MAAa,GAAG,EAApB;;AACA,QACEI,MAAM,IACNT,KAAK,CAACoB,IAAN,IAAcX,MADd,IAEAA,MAAM,CAACT,KAAK,CAACoB,IAAP,CAAN,KAAuBC,SAFvB,IAGAZ,MAAM,CAACT,KAAK,CAACoB,IAAP,CAAN,KAAuB,IAJzB,EAKE;AACA,UAAIX,MAAM,CAACT,KAAK,CAACoB,IAAP,CAAN,CAAmBE,WAAnB,KAAmCR,KAAvC,EAA8C;AAC5CT,QAAAA,MAAM,GAAGI,MAAM,CAACT,KAAK,CAACoB,IAAP,CAAf;AACD,OAFD,MAEO;AACLf,QAAAA,MAAM,CAACW,IAAP,CAAYP,MAAM,CAACT,KAAK,CAACoB,IAAP,CAAlB;AACD;AACF;;AAED,QAAIf,MAAM,CAACkB,MAAP,KAAkB,CAAlB,IAAuBC,OAAO,CAACf,MAAD,CAA9B,IAA0CT,KAAK,CAACyB,cAAN,KAAyB,UAAvE,EAAmF;AACjF,YAAM,IAAIC,KAAJ,CAAW,2BAA0B1B,KAAK,CAACoB,IAAK,EAAhD,CAAN;AACD;;AACD,QAAIf,MAAM,CAACkB,MAAP,GAAgB,CAAhB,IAAqBvB,KAAK,CAACyB,cAAN,KAAyB,UAAlD,EAA8D;AAC5D,YAAM,IAAIC,KAAJ,CAAW,8BAA6B1B,KAAK,CAACoB,IAAK,EAAnD,CAAN;AACD;;AAGD,QAAIf,MAAM,CAACkB,MAAP,KAAkB,CAAtB,EAAyB;AACvB,UAAIvB,KAAK,CAAC2B,QAAV,EAAoB;AAClBf,QAAAA,iBAAiB,CAACZ,KAAK,CAACa,MAAP,EAAgB,IAAhB,EAAsBF,IAAtB,EAA4BO,MAA5B,EAAoCC,MAApC,CAAjB;AACD,OAFD,MAEO;AACLR,QAAAA,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBI,KAAhB,IAAyB,CAAzB;AACAK,QAAAA,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAhB,CAAwBY,IAAxB,CAA6BE,MAA7B;AACAP,QAAAA,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAhB,CAAwBa,IAAxB,CAA6BG,MAA7B;AACD;;AACD;AACD;;AAGD,
SAAK,IAAIS,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGvB,MAAM,CAACkB,MAA3B,EAAmCK,CAAC,EAApC,EAAwC;AACtC,YAAMC,IAAI,GAAGD,CAAC,KAAK,CAAN,GAAUV,MAAV,GAAmBlB,KAAK,CAAC8B,SAAtC;;AACA,UAAI9B,KAAK,CAAC2B,QAAV,EAAoB;AAClBf,QAAAA,iBAAiB,CAACZ,KAAK,CAACa,MAAP,EAAgBR,MAAM,CAACuB,CAAD,CAAtB,EAA2BjB,IAA3B,EAAiCkB,IAAjC,EAAuC7B,KAAK,CAAC+B,SAA7C,CAAjB;AACD,OAFD,MAEO;AACLpB,QAAAA,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBI,KAAhB,IAAyB,CAAzB;AACAK,QAAAA,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAhB,CAAwBY,IAAxB,CAA6Ba,IAA7B;AACAlB,QAAAA,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAhB,CAAwBa,IAAxB,CAA6BhB,KAAK,CAAC+B,SAAnC;AACApB,QAAAA,IAAI,CAACX,KAAK,CAACE,GAAP,CAAJ,CAAgBG,MAAhB,CAAuBW,IAAvB,CACEpB,KAAK,CAACoC,WAAN,CAAmBhC,KAAK,CAACiC,YAAN,IAAsBjC,KAAK,CAACkC,aAA/C,EAAgE7B,MAAM,CAACuB,CAAD,CAAtE,CADF;AAGD;AACF;AACF;AACF;;AAqBD,OAAO,SAASO,kBAAT,CAA4BrC,MAA5B,EAAmDY,MAAnD,EAA2F;AAChG,QAAM0B,OAAwB,GAAG,EAAjC;;AACA,OAAK,IAAIR,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGlB,MAAM,CAACH,QAA3B,EAAqCqB,CAAC,EAAtC,EAA0CQ,OAAO,CAACpB,IAAR,CAAa,EAAb;;AAC1C,OAAK,MAAMd,GAAX,IAAkBQ,MAAM,CAACX,UAAzB,EAAqC;AACnCsC,IAAAA,iBAAiB,CAACvC,MAAD,EAASY,MAAT,EAAiBR,GAAjB,EAAsBkC,OAAtB,CAAjB;AACD;;AACD,SAAOA,OAAP;AACD;;AAGD,SAASC,iBAAT,CACEvC,MADF,EAEEY,MAFF,EAGER,GAHF,EAIEkC,OAJF,EAKE;AACA,QAAMzB,IAAI,GAAGD,MAAM,CAACX,UAAP,CAAkBG,GAAlB,CAAb;AACA,MAAI,CAACS,IAAI,CAACL,KAAV,EAAiB;AAEjB,QAAMN,KAAK,GAAGF,MAAM,CAACwC,SAAP,CAAiBpC,GAAjB,CAAd;AACA,QAAMqC,MAAM,GAAGzC,MAAM,CAAC0C,eAAP,CAAuBtC,GAAvB,CAAf;AAGA,QAAMuC,OAAiB,GAAG,IAAI3B,KAAJ,CAAUd,KAAK,CAAC8B,SAAN,GAAkB,CAA5B,EAA+BY,IAA/B,CAAoC,CAApC,CAA1B;AACA,MAAIC,MAAM,GAAG,CAAb;;AACA,OAAK,IAAIf,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGjB,IAAI,CAACL,KAAzB,EAAgCsB,CAAC,EAAjC,EAAqC;AACnC,UAAMT,MAAM,GAAGR,IAAI,CAACR,OAAL,CAAayB,CAAb,CAAf;AACA,UAAMV,MAAM,GAAGP,IAAI,CAACP,OAAL,CAAawB,CAAb,CAAf;AACAa,IAAAA,OAAO,CAACvB,MAAD,CAAP;AACAuB,IAAAA,OAAO,CAACC,IAAR,CAAa,CAAb,EAAgBxB,MAAM,GAAG,CAAzB;AAEA,QAAI0B,MAAM,GAAG,CAAb;AACA,QAAInC,MAAM,GAAG2B,OAAO,CAACK,OAAO,CAACG,MAAM,EAAP,CAAP,GAAoB,CAArB,CAApB;;AAGA,SAAK,MAAMC,IAAX,IAAmBN,MAAnB,EAA2B;AACzB,UAAIM,IAAI,KAAK7C,KAAb,EAAoB;AACpB,UAAImB,MAAM,GAAG0B,IAAI,CAACd,SAAlB,EAA6B;;AAC7B,UAAIc,IAAI,CAACpB,cAAL,KAAwB,UAA5B,EAAwC;AACtC,YAAI,EAAEoB,IAAI,CAACzB,IAAL,IAAaX,MAAf,CAAJ,EAA4B;AAE1BA,UAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,GAAoB,EAApB;AACD;;AACD,cAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAM,EAAP,CAAlB;;AACA,eAAOnC,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkBG,MAAlB,IAA4BuB,EAAnC,EAAuC;AAErCrC,UAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkBJ,IAAlB,CAAuB,EAAvB;AACD;;AACDP,QAAAA,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkB0B,EAAlB,CAAT;AACD,OAXD,MAWO;AACLrC,QAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,GAAoBX,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,IAAqB,EAAzC;AACAX,QAAAA,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAf;AACD;AACF;;AAGD,QAAID,MAAM,KAAKnB,KAAK,CAAC+B,SAArB,EAAgC;AAC9B,YAAMgB,KAAK,GAAGnD,KAAK,CAACoD,aAAN,CAEZhD,KAAK,CAACiC,YAAN,IAAsBjC,KAAK,CAACkC,aAFhB,EAGZvB,IAAI,CAACN,MAAL,CAAYsC,MAAZ,CAHY,CAAd;AAKAA,MAAAA,MAAM;;AACN,UAAI3C,KAAK,CAACyB,cAAN,KAAyB,UAA7B,EAAyC;AACvC,YAAI,EAAEzB,KAAK,CAACoB,IAAN,IAAcX,MAAhB,CAAJ,EAA6B;AAE3BA,UAAAA,MAAM,CAACT,KAAK,CAACoB,IAAP,CAAN,GAAqB,EAArB;AACD;;AACD,cAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAD,CAAlB;;AACA,eAAOnC,MAAM,CAACT,KAAK,CAACoB,IAAP,CAAN,CAAmBG,MAAnB,IAA6BuB,EAApC,EAAwC;AAEtCrC,UAAAA,MAAM,CAACT,KAAK,CAACoB,IAAP,CAAN,CAAmBJ,IAAnB,CAAwB,IAAxB;AACD;;AACDP,QAAAA,MAAM,CAACT,KAAK,CAACoB,IAAP,CAAN,CAAmB0B,EAAnB,IAAyBC,KAAzB;AACD,OAXD,MAWO;AACLtC,QAAAA,MAAM,CAACT,KAAK,CAACoB,IAAP,CAAN,GAAqB2B,KAArB;AACD;AACF;AACF;AACF","sourcesContent":["// Forked from https://github.com/kbajalc/parquets 
under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {ParquetBuffer, ParquetData, ParquetField, ParquetRecord} from './declare';\nimport {ParquetSchema} from './schema';\nimport * as Types from './types';\n\nexport {ParquetBuffer};\n\nexport function shredBuffer(schema: ParquetSchema): ParquetBuffer {\n const columnData: Record<string, ParquetData> = {};\n for (const field of schema.fieldList) {\n columnData[field.key] = {\n dlevels: [],\n rlevels: [],\n values: [],\n count: 0\n };\n }\n return {rowCount: 0, columnData};\n}\n\n/**\n * 'Shred' a record into a list of <value, repetition_level, definition_level>\n * tuples per column using the Google Dremel Algorithm..\n *\n * The buffer argument must point to an object into which the shredded record\n * will be returned. You may re-use the buffer for repeated calls to this function\n * to append to an existing buffer, as long as the schema is unchanged.\n *\n * The format in which the shredded records will be stored in the buffer is as\n * follows:\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function shredRecord(schema: ParquetSchema, record: any, buffer: ParquetBuffer): void {\n /* shred the record, this may raise an exception */\n const data = shredBuffer(schema).columnData;\n\n shredRecordFields(schema.fields, record, data, 0, 0);\n\n /* if no error during shredding, add the shredded record to the buffer */\n if (buffer.rowCount === 0) {\n buffer.rowCount = 1;\n buffer.columnData = data;\n return;\n }\n buffer.rowCount += 1;\n for (const field of schema.fieldList) {\n Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);\n buffer.columnData[field.key].count += data[field.key].count;\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction shredRecordFields(\n fields: Record<string, ParquetField>,\n record: any,\n data: Record<string, ParquetData>,\n rLevel: number,\n dLevel: number\n) {\n for (const name in fields) {\n const field = fields[name];\n\n // fetch values\n let values: any[] = [];\n if (\n record &&\n field.name in record &&\n record[field.name] !== undefined &&\n record[field.name] !== null\n ) {\n if (record[field.name].constructor === Array) {\n values = record[field.name];\n } else {\n values.push(record[field.name]);\n }\n }\n // check values\n if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {\n throw new Error(`missing required field: ${field.name}`);\n }\n if (values.length > 1 && field.repetitionType !== 'REPEATED') {\n throw new Error(`too many values for field: ${field.name}`);\n }\n\n // push null\n if (values.length === 0) {\n if (field.isNested) {\n shredRecordFields(field.fields!, null, data, rLevel, dLevel);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rLevel);\n data[field.key].dlevels.push(dLevel);\n }\n continue; // eslint-disable-line no-continue\n }\n\n // push values\n for (let i = 0; i < values.length; i++) {\n const rlvl = i === 0 ? 
rLevel : field.rLevelMax;\n if (field.isNested) {\n shredRecordFields(field.fields!, values[i], data, rlvl, field.dLevelMax);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rlvl);\n data[field.key].dlevels.push(field.dLevelMax);\n data[field.key].values.push(\n Types.toPrimitive((field.originalType || field.primitiveType)!, values[i])\n );\n }\n }\n }\n}\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {\n const records: ParquetRecord[] = [];\n for (let i = 0; i < buffer.rowCount; i++) records.push({});\n for (const key in buffer.columnData) {\n materializeColumn(schema, buffer, key, records);\n }\n return records;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction materializeColumn(\n schema: ParquetSchema,\n buffer: ParquetBuffer,\n key: string,\n records: ParquetRecord[]\n) {\n const data = buffer.columnData[key];\n if (!data.count) return;\n\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n for (let i = 0; i < data.count; i++) {\n const dLevel = data.dlevels[i];\n const rLevel = data.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes\n for (const step of branch) {\n if (step === field) break;\n if (dLevel < step.dLevelMax) break;\n if (step.repetitionType === 'REPEATED') {\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n } else {\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n data.values[vIndex]\n );\n vIndex++;\n if (field.repetitionType === 'REPEATED') {\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n } else {\n record[field.name] = value;\n }\n }\n }\n}\n"],"file":"shred.js"}
+ {"version":3,"sources":["../../../../src/parquetjs/schema/shred.ts"],"names":["ParquetBuffer","Types","shredBuffer","schema","columnData","field","fieldList","key","dlevels","rlevels","values","pageHeaders","count","rowCount","shredRecord","record","buffer","data","shredRecordFields","fields","Array","prototype","push","apply","rLevel","dLevel","name","undefined","constructor","length","Boolean","repetitionType","Error","isNested","i","rlvl","rLevelMax","dLevelMax","toPrimitive","originalType","primitiveType","materializeRecords","records","materializeColumn","findField","branch","findFieldBranch","rLevels","fill","vIndex","rIndex","step","ix","value","fromPrimitive"],"mappings":"AAEA,SAAQA,aAAR,QAAsE,WAAtE;AAEA,OAAO,KAAKC,KAAZ,MAAuB,SAAvB;AAEA,SAAQD,aAAR;AAEA,OAAO,SAASE,WAAT,CAAqBC,MAArB,EAA2D;AAChE,QAAMC,UAAuC,GAAG,EAAhD;;AACA,OAAK,MAAMC,KAAX,IAAoBF,MAAM,CAACG,SAA3B,EAAsC;AACpCF,IAAAA,UAAU,CAACC,KAAK,CAACE,GAAP,CAAV,GAAwB;AACtBC,MAAAA,OAAO,EAAE,EADa;AAEtBC,MAAAA,OAAO,EAAE,EAFa;AAGtBC,MAAAA,MAAM,EAAE,EAHc;AAItBC,MAAAA,WAAW,EAAE,EAJS;AAKtBC,MAAAA,KAAK,EAAE;AALe,KAAxB;AAOD;;AACD,SAAO;AAACC,IAAAA,QAAQ,EAAE,CAAX;AAAcT,IAAAA;AAAd,GAAP;AACD;AAwBD,OAAO,SAASU,WAAT,CAAqBX,MAArB,EAA4CY,MAA5C,EAAyDC,MAAzD,EAAsF;AAE3F,QAAMC,IAAI,GAAGf,WAAW,CAACC,MAAD,CAAX,CAAoBC,UAAjC;AAEAc,EAAAA,iBAAiB,CAACf,MAAM,CAACgB,MAAR,EAAgBJ,MAAhB,EAAwBE,IAAxB,EAA8B,CAA9B,EAAiC,CAAjC,CAAjB;;AAGA,MAAID,MAAM,CAACH,QAAP,KAAoB,CAAxB,EAA2B;AACzBG,IAAAA,MAAM,CAACH,QAAP,GAAkB,CAAlB;AACAG,IAAAA,MAAM,CAACZ,UAAP,GAAoBa,IAApB;AACA;AACD;;AACDD,EAAAA,MAAM,CAACH,QAAP,IAAmB,CAAnB;;AACA,OAAK,MAAMR,KAAX,IAAoBF,MAAM,CAACG,SAA3B,EAAsC;AACpCc,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BE,OAAxD,EAAiEQ,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAjF;AACAW,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BC,OAAxD,EAAiES,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAjF;AACAY,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BG,MAAxD,EAAgEO,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBG,MAAhF;AACAM,IAAAA,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BK,KAA7B,IAAsCK,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBK,KAAtD;AACD;AACF;;AAGD,SAASM,iBAAT,CACEC,MADF,EAEEJ,MAFF,EAGEE,IAHF,EAIEO,MAJF,EAKEC,MALF,EAME;AACA,OAAK,MAAMC,IAAX,IAAmBP,MAAnB,EAA2B;AACzB,UAAMd,KAAK,GAAGc,MAAM,CAACO,IAAD,CAApB;AAGA,QAAIhB,MAAa,GAAG,EAApB;;AACA,QACEK,MAAM,IACNV,KAAK,CAACqB,IAAN,IAAcX,MADd,IAEAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,KAAuBC,SAFvB,IAGAZ,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,KAAuB,IAJzB,EAKE;AACA,UAAIX,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmBE,WAAnB,KAAmCR,KAAvC,EAA8C;AAC5CV,QAAAA,MAAM,GAAGK,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAf;AACD,OAFD,MAEO;AACLhB,QAAAA,MAAM,CAACY,IAAP,CAAYP,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAlB;AACD;AACF;;AAED,QAAIhB,MAAM,CAACmB,MAAP,KAAkB,CAAlB,IAAuBC,OAAO,CAACf,MAAD,CAA9B,IAA0CV,KAAK,CAAC0B,cAAN,KAAyB,UAAvE,EAAmF;AACjF,YAAM,IAAIC,KAAJ,CAAW,2BAA0B3B,KAAK,CAACqB,IAAK,EAAhD,CAAN;AACD;;AACD,QAAIhB,MAAM,CAACmB,MAAP,GAAgB,CAAhB,IAAqBxB,KAAK,CAAC0B,cAAN,KAAyB,UAAlD,EAA8D;AAC5D,YAAM,IAAIC,KAAJ,CAAW,8BAA6B3B,KAAK,CAACqB,IAAK,EAAnD,CAAN;AACD;;AAGD,QAAIhB,MAAM,CAACmB,MAAP,KAAkB,CAAtB,EAAyB;AACvB,UAAIxB,KAAK,CAAC4B,QAAV,EAAoB;AAClBf,QAAAA,iBAAiB,CAACb,KAAK,CAACc,MAAP,EAAgB,IAAhB,EAAsBF,IAAtB,EAA4BO,MAA5B,EAAoCC,MAApC,CAAjB;AACD,OAFD,MAEO;AACLR,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBK,KAAhB,IAAyB,CAAzB;AACAK,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAhB,CAAwBa,IAAxB,CAA6BE,MAA7B;AACAP,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAhB,
CAAwBc,IAAxB,CAA6BG,MAA7B;AACD;;AACD;AACD;;AAGD,SAAK,IAAIS,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGxB,MAAM,CAACmB,MAA3B,EAAmCK,CAAC,EAApC,EAAwC;AACtC,YAAMC,IAAI,GAAGD,CAAC,KAAK,CAAN,GAAUV,MAAV,GAAmBnB,KAAK,CAAC+B,SAAtC;;AACA,UAAI/B,KAAK,CAAC4B,QAAV,EAAoB;AAClBf,QAAAA,iBAAiB,CAACb,KAAK,CAACc,MAAP,EAAgBT,MAAM,CAACwB,CAAD,CAAtB,EAA2BjB,IAA3B,EAAiCkB,IAAjC,EAAuC9B,KAAK,CAACgC,SAA7C,CAAjB;AACD,OAFD,MAEO;AACLpB,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBK,KAAhB,IAAyB,CAAzB;AACAK,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAhB,CAAwBa,IAAxB,CAA6Ba,IAA7B;AACAlB,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAhB,CAAwBc,IAAxB,CAA6BjB,KAAK,CAACgC,SAAnC;AACApB,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBG,MAAhB,CAAuBY,IAAvB,CACErB,KAAK,CAACqC,WAAN,CAAmBjC,KAAK,CAACkC,YAAN,IAAsBlC,KAAK,CAACmC,aAA/C,EAAgE9B,MAAM,CAACwB,CAAD,CAAtE,CADF;AAGD;AACF;AACF;AACF;;AAqBD,OAAO,SAASO,kBAAT,CAA4BtC,MAA5B,EAAmDa,MAAnD,EAA2F;AAChG,QAAM0B,OAAwB,GAAG,EAAjC;;AACA,OAAK,IAAIR,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGlB,MAAM,CAACH,QAA3B,EAAqCqB,CAAC,EAAtC,EAA0CQ,OAAO,CAACpB,IAAR,CAAa,EAAb;;AAC1C,OAAK,MAAMf,GAAX,IAAkBS,MAAM,CAACZ,UAAzB,EAAqC;AACnCuC,IAAAA,iBAAiB,CAACxC,MAAD,EAASa,MAAT,EAAiBT,GAAjB,EAAsBmC,OAAtB,CAAjB;AACD;;AACD,SAAOA,OAAP;AACD;;AAGD,SAASC,iBAAT,CACExC,MADF,EAEEa,MAFF,EAGET,GAHF,EAIEmC,OAJF,EAKE;AACA,QAAMzB,IAAI,GAAGD,MAAM,CAACZ,UAAP,CAAkBG,GAAlB,CAAb;AACA,MAAI,CAACU,IAAI,CAACL,KAAV,EAAiB;AAEjB,QAAMP,KAAK,GAAGF,MAAM,CAACyC,SAAP,CAAiBrC,GAAjB,CAAd;AACA,QAAMsC,MAAM,GAAG1C,MAAM,CAAC2C,eAAP,CAAuBvC,GAAvB,CAAf;AAGA,QAAMwC,OAAiB,GAAG,IAAI3B,KAAJ,CAAUf,KAAK,CAAC+B,SAAN,GAAkB,CAA5B,EAA+BY,IAA/B,CAAoC,CAApC,CAA1B;AACA,MAAIC,MAAM,GAAG,CAAb;;AACA,OAAK,IAAIf,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGjB,IAAI,CAACL,KAAzB,EAAgCsB,CAAC,EAAjC,EAAqC;AACnC,UAAMT,MAAM,GAAGR,IAAI,CAACT,OAAL,CAAa0B,CAAb,CAAf;AACA,UAAMV,MAAM,GAAGP,IAAI,CAACR,OAAL,CAAayB,CAAb,CAAf;AACAa,IAAAA,OAAO,CAACvB,MAAD,CAAP;AACAuB,IAAAA,OAAO,CAACC,IAAR,CAAa,CAAb,EAAgBxB,MAAM,GAAG,CAAzB;AAEA,QAAI0B,MAAM,GAAG,CAAb;AACA,QAAInC,MAAM,GAAG2B,OAAO,CAACK,OAAO,CAACG,MAAM,EAAP,CAAP,GAAoB,CAArB,CAApB;;AAGA,SAAK,MAAMC,IAAX,IAAmBN,MAAnB,EAA2B;AACzB,UAAIM,IAAI,KAAK9C,KAAb,EAAoB;AACpB,UAAIoB,MAAM,GAAG0B,IAAI,CAACd,SAAlB,EAA6B;;AAC7B,UAAIc,IAAI,CAACpB,cAAL,KAAwB,UAA5B,EAAwC;AACtC,YAAI,EAAEoB,IAAI,CAACzB,IAAL,IAAaX,MAAf,CAAJ,EAA4B;AAE1BA,UAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,GAAoB,EAApB;AACD;;AACD,cAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAM,EAAP,CAAlB;;AACA,eAAOnC,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkBG,MAAlB,IAA4BuB,EAAnC,EAAuC;AAErCrC,UAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkBJ,IAAlB,CAAuB,EAAvB;AACD;;AACDP,QAAAA,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkB0B,EAAlB,CAAT;AACD,OAXD,MAWO;AACLrC,QAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,GAAoBX,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,IAAqB,EAAzC;AACAX,QAAAA,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAf;AACD;AACF;;AAGD,QAAID,MAAM,KAAKpB,KAAK,CAACgC,SAArB,EAAgC;AAC9B,YAAMgB,KAAK,GAAGpD,KAAK,CAACqD,aAAN,CAEZjD,KAAK,CAACkC,YAAN,IAAsBlC,KAAK,CAACmC,aAFhB,EAGZvB,IAAI,CAACP,MAAL,CAAYuC,MAAZ,CAHY,EAIZ5C,KAJY,CAAd;AAMA4C,MAAAA,MAAM;;AACN,UAAI5C,KAAK,CAAC0B,cAAN,KAAyB,UAA7B,EAAyC;AACvC,YAAI,EAAE1B,KAAK,CAACqB,IAAN,IAAcX,MAAhB,CAAJ,EAA6B;AAE3BA,UAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,GAAqB,EAArB;AACD;;AACD,cAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAD,CAAlB;;AACA,eAAOnC,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmBG,MAAnB,IAA6BuB,EAApC,EAAwC;AAEtCrC,UAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmBJ,IAAnB,CAAwB,IAAxB;AACD;;AACDP,QAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmB0B,EAAnB,IAAyBC,KAAzB;AACD,OAXD,MAWO;AACLtC,QAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,GAAqB2B,KAArB;AACD;AACF;AACF;AACF","sourcesCont
ent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {ParquetBuffer, ParquetData, ParquetField, ParquetRecord} from './declare';\nimport {ParquetSchema} from './schema';\nimport * as Types from './types';\n\nexport {ParquetBuffer};\n\nexport function shredBuffer(schema: ParquetSchema): ParquetBuffer {\n const columnData: Record<string, ParquetData> = {};\n for (const field of schema.fieldList) {\n columnData[field.key] = {\n dlevels: [],\n rlevels: [],\n values: [],\n pageHeaders: [],\n count: 0\n };\n }\n return {rowCount: 0, columnData};\n}\n\n/**\n * 'Shred' a record into a list of <value, repetition_level, definition_level>\n * tuples per column using the Google Dremel Algorithm..\n *\n * The buffer argument must point to an object into which the shredded record\n * will be returned. You may re-use the buffer for repeated calls to this function\n * to append to an existing buffer, as long as the schema is unchanged.\n *\n * The format in which the shredded records will be stored in the buffer is as\n * follows:\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function shredRecord(schema: ParquetSchema, record: any, buffer: ParquetBuffer): void {\n /* shred the record, this may raise an exception */\n const data = shredBuffer(schema).columnData;\n\n shredRecordFields(schema.fields, record, data, 0, 0);\n\n /* if no error during shredding, add the shredded record to the buffer */\n if (buffer.rowCount === 0) {\n buffer.rowCount = 1;\n buffer.columnData = data;\n return;\n }\n buffer.rowCount += 1;\n for (const field of schema.fieldList) {\n Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);\n buffer.columnData[field.key].count += data[field.key].count;\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction shredRecordFields(\n fields: Record<string, ParquetField>,\n record: any,\n data: Record<string, ParquetData>,\n rLevel: number,\n dLevel: number\n) {\n for (const name in fields) {\n const field = fields[name];\n\n // fetch values\n let values: any[] = [];\n if (\n record &&\n field.name in record &&\n record[field.name] !== undefined &&\n record[field.name] !== null\n ) {\n if (record[field.name].constructor === Array) {\n values = record[field.name];\n } else {\n values.push(record[field.name]);\n }\n }\n // check values\n if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {\n throw new Error(`missing required field: ${field.name}`);\n }\n if (values.length > 1 && field.repetitionType !== 'REPEATED') {\n throw new Error(`too many values for field: ${field.name}`);\n }\n\n // push null\n if (values.length === 0) {\n if (field.isNested) {\n shredRecordFields(field.fields!, null, data, rLevel, dLevel);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rLevel);\n data[field.key].dlevels.push(dLevel);\n }\n continue; // eslint-disable-line no-continue\n }\n\n // push values\n for (let i = 0; i < values.length; i++) {\n const rlvl = i === 0 ? 
rLevel : field.rLevelMax;\n if (field.isNested) {\n shredRecordFields(field.fields!, values[i], data, rlvl, field.dLevelMax);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rlvl);\n data[field.key].dlevels.push(field.dLevelMax);\n data[field.key].values.push(\n Types.toPrimitive((field.originalType || field.primitiveType)!, values[i])\n );\n }\n }\n }\n}\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {\n const records: ParquetRecord[] = [];\n for (let i = 0; i < buffer.rowCount; i++) records.push({});\n for (const key in buffer.columnData) {\n materializeColumn(schema, buffer, key, records);\n }\n return records;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction materializeColumn(\n schema: ParquetSchema,\n buffer: ParquetBuffer,\n key: string,\n records: ParquetRecord[]\n) {\n const data = buffer.columnData[key];\n if (!data.count) return;\n\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n for (let i = 0; i < data.count; i++) {\n const dLevel = data.dlevels[i];\n const rLevel = data.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes\n for (const step of branch) {\n if (step === field) break;\n if (dLevel < step.dLevelMax) break;\n if (step.repetitionType === 'REPEATED') {\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n } else {\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n data.values[vIndex],\n field\n );\n vIndex++;\n if (field.repetitionType === 'REPEATED') {\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n } else {\n record[field.name] = value;\n }\n }\n }\n}\n"],"file":"shred.js"}
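The source map above inlines the forked shred.ts, whose shredRecord/materializeRecords pair implements Dremel-style record shredding and reassembly. As a rough illustration of the buffer shape its doc comment describes (a sketch only; the schema and record are hypothetical):

  // Hypothetical: shredding {name: 'ann', tags: ['x', 'y']} against a schema
  // where `name` is REQUIRED UTF8 and `tags` is REPEATED UTF8 yields roughly:
  const buffer = {
    rowCount: 1,
    columnData: {
      name: {dlevels: [0], rlevels: [0], values: [Buffer.from('ann')], pageHeaders: [], count: 1},
      // the second occurrence of a repeated value gets rlevel = rLevelMax = 1
      tags: {dlevels: [1, 1], rlevels: [0, 1], values: [Buffer.from('x'), Buffer.from('y')], pageHeaders: [], count: 2}
    }
  };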
@@ -125,16 +125,40 @@ export const PARQUET_LOGICAL_TYPES = {
     typeLength: 12,
     toPrimitive: toPrimitive_INTERVAL,
     fromPrimitive: fromPrimitive_INTERVAL
+  },
+  DECIMAL_INT32: {
+    primitiveType: 'INT32',
+    originalType: 'DECIMAL_INT32',
+    toPrimitive: decimalToPrimitive_INT32,
+    fromPrimitive: decimalFromPrimitive_INT
+  },
+  DECIMAL_INT64: {
+    primitiveType: 'INT64',
+    originalType: 'DECIMAL_INT64',
+    toPrimitive: decimalToPrimitive_INT64,
+    fromPrimitive: decimalFromPrimitive_INT
+  },
+  DECIMAL_BYTE_ARRAY: {
+    primitiveType: 'BYTE_ARRAY',
+    originalType: 'DECIMAL_BYTE_ARRAY',
+    toPrimitive: decimalToPrimitive_BYTE_ARRAY,
+    fromPrimitive: decimalFromPrimitive_BYTE_ARRAY
+  },
+  DECIMAL_FIXED_LEN_BYTE_ARRAY: {
+    primitiveType: 'FIXED_LEN_BYTE_ARRAY',
+    originalType: 'DECIMAL_FIXED_LEN_BYTE_ARRAY',
+    toPrimitive: decimalToPrimitive_BYTE_ARRAY,
+    fromPrimitive: decimalFromPrimitive_BYTE_ARRAY
   }
 };
-export function toPrimitive(type, value) {
+export function toPrimitive(type, value, field) {
   if (!(type in PARQUET_LOGICAL_TYPES)) {
     throw new Error(`invalid type: ${type}`);
   }
 
-  return PARQUET_LOGICAL_TYPES[type].toPrimitive(value);
+  return PARQUET_LOGICAL_TYPES[type].toPrimitive(value, field);
 }
-export function fromPrimitive(type, value) {
+export function fromPrimitive(type, value, field) {
   if (!(type in PARQUET_LOGICAL_TYPES)) {
     throw new Error(`invalid type: ${type}`);
   }
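The four new DECIMAL_* entries route the decimal logical type to each physical representation it can be stored in, and toPrimitive/fromPrimitive now thread the column's field descriptor through so the converters can read scale and precision. A minimal round-trip sketch (the field literal is hypothetical; note that `presision` is the property name the converters actually read):

  import {toPrimitive, fromPrimitive} from './parquetjs/schema/types'; // module path inside the package source

  // Hypothetical field descriptor for DECIMAL(9, 2)
  const field = {scale: 2, presision: 9};

  const unscaled = toPrimitive('DECIMAL_INT32', 12.34, field);     // 1234
  const decoded = fromPrimitive('DECIMAL_INT32', unscaled, field); // 12.34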
@@ -142,7 +166,7 @@ export function fromPrimitive(type, value) {
   if ('fromPrimitive' in PARQUET_LOGICAL_TYPES[type]) {
     var _PARQUET_LOGICAL_TYPE, _PARQUET_LOGICAL_TYPE2;
 
-    return (_PARQUET_LOGICAL_TYPE = (_PARQUET_LOGICAL_TYPE2 = PARQUET_LOGICAL_TYPES[type]).fromPrimitive) === null || _PARQUET_LOGICAL_TYPE === void 0 ? void 0 : _PARQUET_LOGICAL_TYPE.call(_PARQUET_LOGICAL_TYPE2, value);
+    return (_PARQUET_LOGICAL_TYPE = (_PARQUET_LOGICAL_TYPE2 = PARQUET_LOGICAL_TYPES[type]).fromPrimitive) === null || _PARQUET_LOGICAL_TYPE === void 0 ? void 0 : _PARQUET_LOGICAL_TYPE.call(_PARQUET_LOGICAL_TYPE2, value, field);
   }
 
   return value;
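The `_PARQUET_LOGICAL_TYPE` temporaries are Babel's ES5 expansion of optional chaining; the change simply passes `field` through to the converter. The transpiled return statement corresponds to:

  return PARQUET_LOGICAL_TYPES[type].fromPrimitive?.(value, field);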
@@ -226,6 +250,17 @@ function toPrimitive_INT32(value) {
   return v;
 }
 
+function decimalToPrimitive_INT32(value, field) {
+  const primitiveValue = value * 10 ** (field.scale || 0);
+  const v = Math.round(primitiveValue * 10 ** -field.presision % 1 * 10 ** field.presision);
+
+  if (v < -0x80000000 || v > 0x7fffffff || isNaN(v)) {
+    throw new Error(`invalid value for INT32: ${value}`);
+  }
+
+  return v;
+}
+
 function toPrimitive_UINT32(value) {
   const v = parseInt(value, 10);
 
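Reading the rounding expression with JavaScript precedence (`**` binds tighter than `%` and `*`, which associate left to right), it computes ((scaled * 10 ** -presision) % 1) * 10 ** presision, i.e. it keeps only the low `presision` decimal digits of the scaled value before the INT32 range check. A worked example, assuming scale = 2 and presision = 4:

  // value = 123.45, scale = 2, presision = 4
  const primitiveValue = 123.45 * 10 ** 2;                       // ~12345
  const v = Math.round(primitiveValue * 10 ** -4 % 1 * 10 ** 4); // 2345
  // the leading digit of 12345 is silently dropped: only the low
  // 4 (presision) digits survive the % 1 truncation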
@@ -246,6 +281,17 @@ function toPrimitive_INT64(value) {
   return v;
 }
 
+function decimalToPrimitive_INT64(value, field) {
+  const primitiveValue = value * 10 ** (field.scale || 0);
+  const v = Math.round(primitiveValue * 10 ** -field.presision % 1 * 10 ** field.presision);
+
+  if (isNaN(v)) {
+    throw new Error(`invalid value for INT64: ${value}`);
+  }
+
+  return v;
+}
+
 function toPrimitive_UINT64(value) {
   const v = parseInt(value, 10);
 
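Unlike the INT32 variant there is no range check here, and since the arithmetic stays in IEEE-754 doubles, unscaled values beyond Number.MAX_SAFE_INTEGER (2 ** 53 - 1) silently lose precision. A hedged sketch of an exact alternative using BigInt (not what the package does; assumes the input is a decimal string and a non-negative integer scale):

  // Sketch: exact DECIMAL -> unscaled INT64 via string digits and BigInt
  function decimalStringToUnscaled(value: string, scale: number): bigint {
    const [int, frac = ''] = value.split('.');
    const digits = (frac + '0'.repeat(scale)).slice(0, scale); // pad or truncate to scale
    return BigInt(int + digits);
  }
  // decimalStringToUnscaled('12345678901234.56', 2) === 1234567890123456n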
@@ -270,6 +316,10 @@ function toPrimitive_BYTE_ARRAY(value) {
   return Buffer.from(value);
 }
 
+function decimalToPrimitive_BYTE_ARRAY(value) {
+  return Buffer.from(value);
+}
+
 function toPrimitive_UTF8(value) {
   return Buffer.from(value, 'utf8');
 }
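Note the asymmetry: this "encoder" only wraps the input in a Buffer, so the caller is expected to supply bytes already in Parquet's big-endian unscaled-integer form; only the read path (decimalFromPrimitive_BYTE_ARRAY, below) interprets the bytes numerically. A sketch of producing such bytes for small non-negative unscaled values (a hypothetical helper, not part of the package):

  // Big-endian bytes for a small non-negative unscaled integer
  function unscaledToBytes(unscaled: number, byteLength: number): Buffer {
    const buf = Buffer.alloc(byteLength);
    buf.writeUIntBE(unscaled, 0, byteLength); // Node API; limited to 6 bytes / 2 ** 48
    return buf;
  }
  // unscaledToBytes(1234, 4) -> <Buffer 00 00 04 d2>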
@@ -399,4 +449,28 @@ function fromPrimitive_INTERVAL(value) {
     milliseconds: millis
   };
 }
+
+function decimalFromPrimitive_INT(value, field) {
+  const presisionInt = Math.round(value * 10 ** -field.presision % 1 * 10 ** field.presision);
+  return presisionInt * 10 ** -(field.scale || 0);
+}
+
+function decimalFromPrimitive_BYTE_ARRAY(value, field) {
+  let number = 0;
+
+  if (value.length <= 4) {
+    for (let i = 0; i < value.length; i++) {
+      const component = value[i] << 8 * (value.length - i - 1);
+      number += component;
+    }
+  } else {
+    for (let i = 0; i < value.length; i++) {
+      const component = value[i] * 2 ** (8 * (value.length - 1 - i));
+      number += component;
+    }
+  }
+
+  const presisionInt = Math.round(number * 10 ** -field.presision % 1 * 10 ** field.presision);
+  return presisionInt * 10 ** -(field.scale || 0);
+}
 //# sourceMappingURL=types.js.map
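decimalFromPrimitive_BYTE_ARRAY accumulates the bytes as a big-endian unsigned integer (32-bit shifts for buffers of up to 4 bytes, float multiplication above that), so two's-complement negatives and unscaled values past 2 ** 53 are not handled, and a 4-byte buffer with the high bit set goes negative through the signed `<<`. It then applies the same digit-truncating rounding before dividing by 10 ** scale. A worked decode, assuming scale = 2 and presision = 4:

  // <00 00 04 d2> is 1234 big-endian
  const bytes = Buffer.from([0x00, 0x00, 0x04, 0xd2]);
  let n = 0;
  for (let i = 0; i < bytes.length; i++) {
    n += bytes[i] << 8 * (bytes.length - i - 1); // big-endian accumulate
  }
  // ((1234 * 1e-4) % 1) * 1e4 -> 1234, then * 10 ** -2 -> 12.34
  const decoded = Math.round(n * 10 ** -4 % 1 * 10 ** 4) * 10 ** -2; // 12.34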