@loaders.gl/parquet 3.4.0-alpha.3 → 3.4.0-alpha.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
Files changed (124)
  1. package/dist/dist.min.js +13 -13
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -1
  4. package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -1
  5. package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -1
  6. package/dist/es5/lib/wasm/encode-parquet-wasm.js.map +1 -1
  7. package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
  8. package/dist/es5/parquet-loader.js +1 -1
  9. package/dist/es5/parquet-wasm-loader.js +1 -1
  10. package/dist/es5/parquet-wasm-writer.js +1 -1
  11. package/dist/es5/parquet-writer.js +1 -1
  12. package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
  13. package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
  14. package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
  15. package/dist/es5/parquetjs/compression.js +1 -6
  16. package/dist/es5/parquetjs/compression.js.map +1 -1
  17. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -1
  18. package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
  19. package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
  20. package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
  21. package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
  22. package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
  23. package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
  24. package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
  25. package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
  26. package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
  27. package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
  28. package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
  29. package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
  30. package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
  31. package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
  32. package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
  33. package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
  34. package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
  35. package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
  36. package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
  37. package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
  38. package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
  39. package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
  40. package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
  41. package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
  42. package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
  43. package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
  44. package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
  45. package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
  46. package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
  47. package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
  48. package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
  49. package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
  50. package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
  51. package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
  52. package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
  53. package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
  54. package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
  55. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  56. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  57. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  58. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  59. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  60. package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
  61. package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -1
  62. package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -1
  63. package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -1
  64. package/dist/esm/lib/wasm/encode-parquet-wasm.js.map +1 -1
  65. package/dist/esm/lib/wasm/parse-parquet-wasm.js.map +1 -1
  66. package/dist/esm/parquet-loader.js +1 -1
  67. package/dist/esm/parquet-wasm-loader.js +1 -1
  68. package/dist/esm/parquet-wasm-writer.js +1 -1
  69. package/dist/esm/parquet-writer.js +1 -1
  70. package/dist/esm/parquetjs/codecs/plain.js.map +1 -1
  71. package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
  72. package/dist/esm/parquetjs/compression.js +2 -7
  73. package/dist/esm/parquetjs/compression.js.map +1 -1
  74. package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -1
  75. package/dist/esm/parquetjs/parquet-thrift/BsonType.js.map +1 -1
  76. package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
  77. package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
  78. package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
  79. package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
  80. package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
  81. package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
  82. package/dist/esm/parquetjs/parquet-thrift/DateType.js.map +1 -1
  83. package/dist/esm/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
  84. package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
  85. package/dist/esm/parquetjs/parquet-thrift/EnumType.js.map +1 -1
  86. package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
  87. package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
  88. package/dist/esm/parquetjs/parquet-thrift/IntType.js.map +1 -1
  89. package/dist/esm/parquetjs/parquet-thrift/JsonType.js.map +1 -1
  90. package/dist/esm/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
  91. package/dist/esm/parquetjs/parquet-thrift/ListType.js.map +1 -1
  92. package/dist/esm/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
  93. package/dist/esm/parquetjs/parquet-thrift/MapType.js.map +1 -1
  94. package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
  95. package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
  96. package/dist/esm/parquetjs/parquet-thrift/NullType.js.map +1 -1
  97. package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
  98. package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
  99. package/dist/esm/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
  100. package/dist/esm/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
  101. package/dist/esm/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
  102. package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
  103. package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
  104. package/dist/esm/parquetjs/parquet-thrift/Statistics.js.map +1 -1
  105. package/dist/esm/parquetjs/parquet-thrift/StringType.js.map +1 -1
  106. package/dist/esm/parquetjs/parquet-thrift/TimeType.js.map +1 -1
  107. package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
  108. package/dist/esm/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
  109. package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
  110. package/dist/esm/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
  111. package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
  112. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  113. package/dist/esm/parquetjs/schema/schema.js.map +1 -1
  114. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  115. package/dist/esm/parquetjs/schema/types.js.map +1 -1
  116. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
  117. package/dist/esm/parquetjs/utils/read-utils.js.map +1 -1
  118. package/dist/parquet-worker.js +13 -13
  119. package/dist/parquet-worker.js.map +3 -3
  120. package/dist/parquetjs/compression.d.ts +3 -0
  121. package/dist/parquetjs/compression.d.ts.map +1 -1
  122. package/dist/parquetjs/compression.js +9 -5
  123. package/package.json +6 -8
  124. package/src/parquetjs/compression.ts +10 -6
@@ -1 +1 @@
- {"version":3,"file":"decode-geo-metadata.js","names":["getGeoMetadata","schema","stringifiedGeoMetadata","metadata","get","geoMetadata","JSON","parse","setGeoMetadata","stringify","set","unpackGeoMetadata","version","primary_column","columns","Object","keys","join","columnName","columnMetadata","entries","field","fields","find","name","unpackGeoFieldMetadata","key","value","concat"],"sources":["../../../../src/lib/geo/decode-geo-metadata.ts"],"sourcesContent":["// loaders.gl, MIT license\nimport {Schema, Field} from '@loaders.gl/schema';\n\n/* eslint-disable camelcase */\n\n/** A geoarrow / geoparquet geo metadata object (stored in stringified form in the top level metadata 'geo' key) */\nexport type GeoMetadata = {\n version?: string;\n primary_column?: string;\n columns: Record<string, GeoColumnMetadata>;\n [key: string]: unknown;\n};\n\n/** A geoarrow / geoparquet geo metadata for one geometry column */\nexport type GeoColumnMetadata = {\n bounding_box?:\n | [number, number, number, number]\n | [number, number, number, number, number, number];\n crs?: string;\n geometry_type?: string[];\n edges?: string;\n [key: string]: unknown;\n};\n\n/**\n * Reads the GeoMetadata object from the metadata\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata */\nexport function getGeoMetadata(schema: Schema): GeoMetadata | null {\n const stringifiedGeoMetadata = schema.metadata.get('geo');\n if (!stringifiedGeoMetadata) {\n return null;\n }\n\n try {\n const geoMetadata = JSON.parse(stringifiedGeoMetadata) as GeoMetadata;\n return geoMetadata;\n } catch {\n return null;\n }\n}\n\n/**\n * Stores a geoarrow / geoparquet geo metadata object in the schema\n * @note geoarrow / geoparquet geo metadata is a single stringified JSON field\n */\nexport function setGeoMetadata(schema: Schema, geoMetadata: GeoMetadata): void {\n const stringifiedGeoMetadata = JSON.stringify(geoMetadata);\n schema.metadata.set('geo', stringifiedGeoMetadata);\n}\n\n/**\n * Unpacks geo metadata into separate metadata fields (parses the long JSON string)\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata\n */\nexport function unpackGeoMetadata(schema: Schema): void {\n const geoMetadata = getGeoMetadata(schema);\n if (!geoMetadata) {\n return;\n }\n\n // Store Parquet Schema Level Metadata\n\n const {version, primary_column, columns} = geoMetadata;\n if (version) {\n schema.metadata.set('geo.version', version);\n }\n\n if (primary_column) {\n schema.metadata.set('geo.primary_column', primary_column);\n }\n\n // store column names as comma separated list\n schema.metadata.set('geo.columns', Object.keys(columns || {}).join(''));\n\n for (const [columnName, columnMetadata] of Object.entries(columns || {})) {\n const field = schema.fields.find((field) => field.name === columnName);\n if (field) {\n if (field.name === primary_column) {\n field.metadata.set('geo.primary_field', 'true');\n }\n unpackGeoFieldMetadata(field, columnMetadata);\n }\n }\n}\n\nfunction unpackGeoFieldMetadata(field: Field, columnMetadata): void {\n for (const [key, value] of Object.entries(columnMetadata || {})) {\n switch (key) {\n case 'geometry_type':\n field.metadata.set(`geo.${key}`, (value as string[]).join(','));\n break;\n case 'bbox':\n case 'crs':\n case 'edges':\n default:\n field.metadata.set(`geo.${key}`, typeof value === 'string' ? 
value : JSON.stringify(value));\n }\n }\n}\n"],"mappings":"AA2BA,OAAO,SAASA,cAAcA,CAACC,MAAc,EAAsB;EACjE,MAAMC,sBAAsB,GAAGD,MAAM,CAACE,QAAQ,CAACC,GAAG,CAAC,KAAK,CAAC;EACzD,IAAI,CAACF,sBAAsB,EAAE;IAC3B,OAAO,IAAI;EACb;EAEA,IAAI;IACF,MAAMG,WAAW,GAAGC,IAAI,CAACC,KAAK,CAACL,sBAAsB,CAAgB;IACrE,OAAOG,WAAW;EACpB,CAAC,CAAC,MAAM;IACN,OAAO,IAAI;EACb;AACF;AAMA,OAAO,SAASG,cAAcA,CAACP,MAAc,EAAEI,WAAwB,EAAQ;EAC7E,MAAMH,sBAAsB,GAAGI,IAAI,CAACG,SAAS,CAACJ,WAAW,CAAC;EAC1DJ,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,KAAK,EAAER,sBAAsB,CAAC;AACpD;AAMA,OAAO,SAASS,iBAAiBA,CAACV,MAAc,EAAQ;EACtD,MAAMI,WAAW,GAAGL,cAAc,CAACC,MAAM,CAAC;EAC1C,IAAI,CAACI,WAAW,EAAE;IAChB;EACF;EAIA,MAAM;IAACO,OAAO;IAAEC,cAAc;IAAEC;EAAO,CAAC,GAAGT,WAAW;EACtD,IAAIO,OAAO,EAAE;IACXX,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,aAAa,EAAEE,OAAO,CAAC;EAC7C;EAEA,IAAIC,cAAc,EAAE;IAClBZ,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,oBAAoB,EAAEG,cAAc,CAAC;EAC3D;EAGAZ,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,aAAa,EAAEK,MAAM,CAACC,IAAI,CAACF,OAAO,IAAI,CAAC,CAAC,CAAC,CAACG,IAAI,CAAC,EAAE,CAAC,CAAC;EAEvE,KAAK,MAAM,CAACC,UAAU,EAAEC,cAAc,CAAC,IAAIJ,MAAM,CAACK,OAAO,CAACN,OAAO,IAAI,CAAC,CAAC,CAAC,EAAE;IACxE,MAAMO,KAAK,GAAGpB,MAAM,CAACqB,MAAM,CAACC,IAAI,CAAEF,KAAK,IAAKA,KAAK,CAACG,IAAI,KAAKN,UAAU,CAAC;IACtE,IAAIG,KAAK,EAAE;MACT,IAAIA,KAAK,CAACG,IAAI,KAAKX,cAAc,EAAE;QACjCQ,KAAK,CAAClB,QAAQ,CAACO,GAAG,CAAC,mBAAmB,EAAE,MAAM,CAAC;MACjD;MACAe,sBAAsB,CAACJ,KAAK,EAAEF,cAAc,CAAC;IAC/C;EACF;AACF;AAEA,SAASM,sBAAsBA,CAACJ,KAAY,EAAEF,cAAc,EAAQ;EAClE,KAAK,MAAM,CAACO,GAAG,EAAEC,KAAK,CAAC,IAAIZ,MAAM,CAACK,OAAO,CAACD,cAAc,IAAI,CAAC,CAAC,CAAC,EAAE;IAC/D,QAAQO,GAAG;MACT,KAAK,eAAe;QAClBL,KAAK,CAAClB,QAAQ,CAACO,GAAG,QAAAkB,MAAA,CAAQF,GAAG,GAAKC,KAAK,CAAcV,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/D;MACF,KAAK,MAAM;MACX,KAAK,KAAK;MACV,KAAK,OAAO;MACZ;QACEI,KAAK,CAAClB,QAAQ,CAACO,GAAG,QAAAkB,MAAA,CAAQF,GAAG,GAAI,OAAOC,KAAK,KAAK,QAAQ,GAAGA,KAAK,GAAGrB,IAAI,CAACG,SAAS,CAACkB,KAAK,CAAC,CAAC;IAAC;EAElG;AACF"}
+ {"version":3,"file":"decode-geo-metadata.js","names":["getGeoMetadata","schema","stringifiedGeoMetadata","metadata","get","geoMetadata","JSON","parse","setGeoMetadata","stringify","set","unpackGeoMetadata","version","primary_column","columns","Object","keys","join","columnName","columnMetadata","entries","field","fields","find","name","unpackGeoFieldMetadata","key","value","concat"],"sources":["../../../../src/lib/geo/decode-geo-metadata.ts"],"sourcesContent":["// loaders.gl, MIT license\nimport {Schema, Field} from '@loaders.gl/schema';\n\n/* eslint-disable camelcase */\n\n/** A geoarrow / geoparquet geo metadata object (stored in stringified form in the top level metadata 'geo' key) */\nexport type GeoMetadata = {\n version?: string;\n primary_column?: string;\n columns: Record<string, GeoColumnMetadata>;\n [key: string]: unknown;\n};\n\n/** A geoarrow / geoparquet geo metadata for one geometry column */\nexport type GeoColumnMetadata = {\n bounding_box?:\n | [number, number, number, number]\n | [number, number, number, number, number, number];\n crs?: string;\n geometry_type?: string[];\n edges?: string;\n [key: string]: unknown;\n};\n\n/**\n * Reads the GeoMetadata object from the metadata\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata */\nexport function getGeoMetadata(schema: Schema): GeoMetadata | null {\n const stringifiedGeoMetadata = schema.metadata.get('geo');\n if (!stringifiedGeoMetadata) {\n return null;\n }\n\n try {\n const geoMetadata = JSON.parse(stringifiedGeoMetadata) as GeoMetadata;\n return geoMetadata;\n } catch {\n return null;\n }\n}\n\n/**\n * Stores a geoarrow / geoparquet geo metadata object in the schema\n * @note geoarrow / geoparquet geo metadata is a single stringified JSON field\n */\nexport function setGeoMetadata(schema: Schema, geoMetadata: GeoMetadata): void {\n const stringifiedGeoMetadata = JSON.stringify(geoMetadata);\n schema.metadata.set('geo', stringifiedGeoMetadata);\n}\n\n/**\n * Unpacks geo metadata into separate metadata fields (parses the long JSON string)\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata\n */\nexport function unpackGeoMetadata(schema: Schema): void {\n const geoMetadata = getGeoMetadata(schema);\n if (!geoMetadata) {\n return;\n }\n\n // Store Parquet Schema Level Metadata\n\n const {version, primary_column, columns} = geoMetadata;\n if (version) {\n schema.metadata.set('geo.version', version);\n }\n\n if (primary_column) {\n schema.metadata.set('geo.primary_column', primary_column);\n }\n\n // store column names as comma separated list\n schema.metadata.set('geo.columns', Object.keys(columns || {}).join(''));\n\n for (const [columnName, columnMetadata] of Object.entries(columns || {})) {\n const field = schema.fields.find((field) => field.name === columnName);\n if (field) {\n if (field.name === primary_column) {\n field.metadata.set('geo.primary_field', 'true');\n }\n unpackGeoFieldMetadata(field, columnMetadata);\n }\n }\n}\n\nfunction unpackGeoFieldMetadata(field: Field, columnMetadata): void {\n for (const [key, value] of Object.entries(columnMetadata || {})) {\n switch (key) {\n case 'geometry_type':\n field.metadata.set(`geo.${key}`, (value as string[]).join(','));\n break;\n case 'bbox':\n case 'crs':\n case 'edges':\n default:\n field.metadata.set(`geo.${key}`, typeof value === 'string' ? 
value : JSON.stringify(value));\n }\n }\n}\n"],"mappings":"AA2BA,OAAO,SAASA,cAAcA,CAACC,MAAc,EAAsB;EACjE,MAAMC,sBAAsB,GAAGD,MAAM,CAACE,QAAQ,CAACC,GAAG,CAAC,KAAK,CAAC;EACzD,IAAI,CAACF,sBAAsB,EAAE;IAC3B,OAAO,IAAI;EACb;EAEA,IAAI;IACF,MAAMG,WAAW,GAAGC,IAAI,CAACC,KAAK,CAACL,sBAAsB,CAAgB;IACrE,OAAOG,WAAW;EACpB,CAAC,CAAC,MAAM;IACN,OAAO,IAAI;EACb;AACF;AAMA,OAAO,SAASG,cAAcA,CAACP,MAAc,EAAEI,WAAwB,EAAQ;EAC7E,MAAMH,sBAAsB,GAAGI,IAAI,CAACG,SAAS,CAACJ,WAAW,CAAC;EAC1DJ,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,KAAK,EAAER,sBAAsB,CAAC;AACpD;AAMA,OAAO,SAASS,iBAAiBA,CAACV,MAAc,EAAQ;EACtD,MAAMI,WAAW,GAAGL,cAAc,CAACC,MAAM,CAAC;EAC1C,IAAI,CAACI,WAAW,EAAE;IAChB;EACF;EAIA,MAAM;IAACO,OAAO;IAAEC,cAAc;IAAEC;EAAO,CAAC,GAAGT,WAAW;EACtD,IAAIO,OAAO,EAAE;IACXX,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,aAAa,EAAEE,OAAO,CAAC;EAC7C;EAEA,IAAIC,cAAc,EAAE;IAClBZ,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,oBAAoB,EAAEG,cAAc,CAAC;EAC3D;EAGAZ,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,aAAa,EAAEK,MAAM,CAACC,IAAI,CAACF,OAAO,IAAI,CAAC,CAAC,CAAC,CAACG,IAAI,CAAC,EAAE,CAAC,CAAC;EAEvE,KAAK,MAAM,CAACC,UAAU,EAAEC,cAAc,CAAC,IAAIJ,MAAM,CAACK,OAAO,CAACN,OAAO,IAAI,CAAC,CAAC,CAAC,EAAE;IACxE,MAAMO,KAAK,GAAGpB,MAAM,CAACqB,MAAM,CAACC,IAAI,CAAEF,KAAK,IAAKA,KAAK,CAACG,IAAI,KAAKN,UAAU,CAAC;IACtE,IAAIG,KAAK,EAAE;MACT,IAAIA,KAAK,CAACG,IAAI,KAAKX,cAAc,EAAE;QACjCQ,KAAK,CAAClB,QAAQ,CAACO,GAAG,CAAC,mBAAmB,EAAE,MAAM,CAAC;MACjD;MACAe,sBAAsB,CAACJ,KAAK,EAAEF,cAAc,CAAC;IAC/C;EACF;AACF;AAEA,SAASM,sBAAsBA,CAACJ,KAAY,EAAEF,cAAc,EAAQ;EAClE,KAAK,MAAM,CAACO,GAAG,EAAEC,KAAK,CAAC,IAAIZ,MAAM,CAACK,OAAO,CAACD,cAAc,IAAI,CAAC,CAAC,CAAC,EAAE;IAC/D,QAAQO,GAAG;MACT,KAAK,eAAe;QAClBL,KAAK,CAAClB,QAAQ,CAACO,GAAG,QAAAkB,MAAA,CAAQF,GAAG,GAAKC,KAAK,CAAcV,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/D;MACF,KAAK,MAAM;MACX,KAAK,KAAK;MACV,KAAK,OAAO;MACZ;QACEI,KAAK,CAAClB,QAAQ,CAACO,GAAG,QAAAkB,MAAA,CAAQF,GAAG,GAAI,OAAOC,KAAK,KAAK,QAAQ,GAAGA,KAAK,GAAGrB,IAAI,CAACG,SAAS,CAACkB,KAAK,CAAC,CAAC;IAC/F;EACF;AACF"}
@@ -1 +1 @@
- {"version":3,"file":"parse-parquet-to-columns.js","names":["makeReadableFile","ParquetReader","convertSchemaFromParquet","convertParquetRowGroupToColumns","unpackGeoMetadata","parseParquetInColumns","arrayBuffer","options","blob","Blob","batch","parseParquetFileInColumnarBatches","file","reader","parquetSchema","getSchema","parquetMetadata","getFileMetadata","schema","rowGroups","rowGroupIterator","parquet","rowGroup","convertRowGroupToTableBatch","data","shape","batchType","length","rowCount"],"sources":["../../../../src/lib/parsers/parse-parquet-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {ColumnarTableBatch, Schema} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {ParquetBuffer} from '../../parquetjs/schema/declare';\nimport {convertSchemaFromParquet} from '../arrow/convert-schema-from-parquet';\nimport {convertParquetRowGroupToColumns} from '../arrow/convert-row-group-to-columns';\nimport {unpackGeoMetadata} from '../geo/decode-geo-metadata';\n\nexport async function parseParquetInColumns(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInColumnarBatches(\n blob: Blob,\n options?: ParquetLoaderOptions\n): AsyncIterable<ColumnarTableBatch> {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const parquetSchema = await reader.getSchema();\n const parquetMetadata = await reader.getFileMetadata();\n const schema = convertSchemaFromParquet(parquetSchema, parquetMetadata);\n unpackGeoMetadata(schema);\n const rowGroups = reader.rowGroupIterator(options?.parquet);\n for await (const rowGroup of rowGroups) {\n yield convertRowGroupToTableBatch(schema, rowGroup);\n }\n}\n\nfunction convertRowGroupToTableBatch(schema: Schema, rowGroup: ParquetBuffer): ColumnarTableBatch {\n const data = convertParquetRowGroupToColumns(schema, rowGroup);\n return {\n shape: 'columnar-table',\n batchType: 'data',\n schema,\n data,\n length: rowGroup.rowCount\n 
};\n}\n"],"mappings":"AAIA,SAAQA,gBAAgB,QAAO,0BAA0B;AAEzD,SAAQC,aAAa,QAAO,uCAAuC;AAEnE,SAAQC,wBAAwB,QAAO,sCAAsC;AAC7E,SAAQC,+BAA+B,QAAO,uCAAuC;AACrF,SAAQC,iBAAiB,QAAO,4BAA4B;AAE5D,OAAO,eAAeC,qBAAqBA,CACzCC,WAAwB,EACxBC,OAA8B,EAC9B;EACA,MAAMC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,WAAW,CAAC,CAAC;EACpC,WAAW,MAAMI,KAAK,IAAIC,iCAAiC,CAACH,IAAI,EAAED,OAAO,CAAC,EAAE;IAC1E,OAAOG,KAAK;EACd;EACA,OAAO,IAAI;AACb;AAEA,OAAO,gBAAgBC,iCAAiCA,CACtDH,IAAU,EACVD,OAA8B,EACK;EACnC,MAAMK,IAAI,GAAGZ,gBAAgB,CAACQ,IAAI,CAAC;EACnC,MAAMK,MAAM,GAAG,IAAIZ,aAAa,CAACW,IAAI,CAAC;EACtC,MAAME,aAAa,GAAG,MAAMD,MAAM,CAACE,SAAS,EAAE;EAC9C,MAAMC,eAAe,GAAG,MAAMH,MAAM,CAACI,eAAe,EAAE;EACtD,MAAMC,MAAM,GAAGhB,wBAAwB,CAACY,aAAa,EAAEE,eAAe,CAAC;EACvEZ,iBAAiB,CAACc,MAAM,CAAC;EACzB,MAAMC,SAAS,GAAGN,MAAM,CAACO,gBAAgB,CAACb,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEc,OAAO,CAAC;EAC3D,WAAW,MAAMC,QAAQ,IAAIH,SAAS,EAAE;IACtC,MAAMI,2BAA2B,CAACL,MAAM,EAAEI,QAAQ,CAAC;EACrD;AACF;AAEA,SAASC,2BAA2BA,CAACL,MAAc,EAAEI,QAAuB,EAAsB;EAChG,MAAME,IAAI,GAAGrB,+BAA+B,CAACe,MAAM,EAAEI,QAAQ,CAAC;EAC9D,OAAO;IACLG,KAAK,EAAE,gBAAgB;IACvBC,SAAS,EAAE,MAAM;IACjBR,MAAM;IACNM,IAAI;IACJG,MAAM,EAAEL,QAAQ,CAACM;EACnB,CAAC;AACH"}
+ {"version":3,"file":"parse-parquet-to-columns.js","names":["makeReadableFile","ParquetReader","convertSchemaFromParquet","convertParquetRowGroupToColumns","unpackGeoMetadata","parseParquetInColumns","arrayBuffer","options","blob","Blob","batch","parseParquetFileInColumnarBatches","file","reader","parquetSchema","getSchema","parquetMetadata","getFileMetadata","schema","rowGroups","rowGroupIterator","parquet","rowGroup","convertRowGroupToTableBatch","data","shape","batchType","length","rowCount"],"sources":["../../../../src/lib/parsers/parse-parquet-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {ColumnarTableBatch, Schema} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {ParquetBuffer} from '../../parquetjs/schema/declare';\nimport {convertSchemaFromParquet} from '../arrow/convert-schema-from-parquet';\nimport {convertParquetRowGroupToColumns} from '../arrow/convert-row-group-to-columns';\nimport {unpackGeoMetadata} from '../geo/decode-geo-metadata';\n\nexport async function parseParquetInColumns(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInColumnarBatches(\n blob: Blob,\n options?: ParquetLoaderOptions\n): AsyncIterable<ColumnarTableBatch> {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const parquetSchema = await reader.getSchema();\n const parquetMetadata = await reader.getFileMetadata();\n const schema = convertSchemaFromParquet(parquetSchema, parquetMetadata);\n unpackGeoMetadata(schema);\n const rowGroups = reader.rowGroupIterator(options?.parquet);\n for await (const rowGroup of rowGroups) {\n yield convertRowGroupToTableBatch(schema, rowGroup);\n }\n}\n\nfunction convertRowGroupToTableBatch(schema: Schema, rowGroup: ParquetBuffer): ColumnarTableBatch {\n const data = convertParquetRowGroupToColumns(schema, rowGroup);\n return {\n shape: 'columnar-table',\n batchType: 'data',\n schema,\n data,\n length: rowGroup.rowCount\n 
};\n}\n"],"mappings":"AAIA,SAAQA,gBAAgB,QAAO,0BAA0B;AAEzD,SAAQC,aAAa,QAAO,uCAAuC;AAEnE,SAAQC,wBAAwB,QAAO,sCAAsC;AAC7E,SAAQC,+BAA+B,QAAO,uCAAuC;AACrF,SAAQC,iBAAiB,QAAO,4BAA4B;AAE5D,OAAO,eAAeC,qBAAqBA,CACzCC,WAAwB,EACxBC,OAA8B,EAC9B;EACA,MAAMC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,WAAW,CAAC,CAAC;EACpC,WAAW,MAAMI,KAAK,IAAIC,iCAAiC,CAACH,IAAI,EAAED,OAAO,CAAC,EAAE;IAC1E,OAAOG,KAAK;EACd;EACA,OAAO,IAAI;AACb;AAEA,OAAO,gBAAgBC,iCAAiCA,CACtDH,IAAU,EACVD,OAA8B,EACK;EACnC,MAAMK,IAAI,GAAGZ,gBAAgB,CAACQ,IAAI,CAAC;EACnC,MAAMK,MAAM,GAAG,IAAIZ,aAAa,CAACW,IAAI,CAAC;EACtC,MAAME,aAAa,GAAG,MAAMD,MAAM,CAACE,SAAS,CAAC,CAAC;EAC9C,MAAMC,eAAe,GAAG,MAAMH,MAAM,CAACI,eAAe,CAAC,CAAC;EACtD,MAAMC,MAAM,GAAGhB,wBAAwB,CAACY,aAAa,EAAEE,eAAe,CAAC;EACvEZ,iBAAiB,CAACc,MAAM,CAAC;EACzB,MAAMC,SAAS,GAAGN,MAAM,CAACO,gBAAgB,CAACb,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEc,OAAO,CAAC;EAC3D,WAAW,MAAMC,QAAQ,IAAIH,SAAS,EAAE;IACtC,MAAMI,2BAA2B,CAACL,MAAM,EAAEI,QAAQ,CAAC;EACrD;AACF;AAEA,SAASC,2BAA2BA,CAACL,MAAc,EAAEI,QAAuB,EAAsB;EAChG,MAAME,IAAI,GAAGrB,+BAA+B,CAACe,MAAM,EAAEI,QAAQ,CAAC;EAC9D,OAAO;IACLG,KAAK,EAAE,gBAAgB;IACvBC,SAAS,EAAE,MAAM;IACjBR,MAAM;IACNM,IAAI;IACJG,MAAM,EAAEL,QAAQ,CAACM;EACnB,CAAC;AACH"}
@@ -1 +1 @@
- {"version":3,"file":"encode-parquet-wasm.js","names":["RecordBatchStreamWriter","loadWasm","encode","table","options","_options$parquet","wasmUrl","parquet","wasm","arrowIPCBytes","tableToIPC","writerProperties","WriterPropertiesBuilder","build","parquetBytes","writeParquet","buffer","slice","byteOffset","byteLength","writeAll","toUint8Array"],"sources":["../../../../src/lib/wasm/encode-parquet-wasm.ts"],"sourcesContent":["import type {Table} from 'apache-arrow';\nimport type {WriterOptions} from '@loaders.gl/loader-utils';\n\nimport {RecordBatchStreamWriter} from 'apache-arrow';\nimport {loadWasm} from './load-wasm';\n\nexport type ParquetWriterOptions = WriterOptions & {\n parquet?: {\n wasmUrl?: string;\n };\n};\n\n/**\n * Encode Arrow Table to Parquet buffer\n */\nexport async function encode(table: Table, options?: ParquetWriterOptions): Promise<ArrayBuffer> {\n const wasmUrl = options?.parquet?.wasmUrl;\n const wasm = await loadWasm(wasmUrl);\n\n const arrowIPCBytes = tableToIPC(table);\n // TODO: provide options for how to write table.\n const writerProperties = new wasm.WriterPropertiesBuilder().build();\n const parquetBytes = wasm.writeParquet(arrowIPCBytes, writerProperties);\n return parquetBytes.buffer.slice(\n parquetBytes.byteOffset,\n parquetBytes.byteLength + parquetBytes.byteOffset\n );\n}\n\n/**\n * Serialize a {@link Table} to the IPC format. This function is a convenience\n * wrapper for {@link RecordBatchStreamWriter} and {@link RecordBatchFileWriter}.\n * Opposite of {@link tableFromIPC}.\n *\n * @param table The Table to serialize.\n * @param type Whether to serialize the Table as a file or a stream.\n */\nexport function tableToIPC(table: Table): Uint8Array {\n return RecordBatchStreamWriter.writeAll(table).toUint8Array(true);\n}\n"],"mappings":"AAGA,SAAQA,uBAAuB,QAAO,cAAc;AACpD,SAAQC,QAAQ,QAAO,aAAa;AAWpC,OAAO,eAAeC,MAAMA,CAACC,KAAY,EAAEC,OAA8B,EAAwB;EAAA,IAAAC,gBAAA;EAC/F,MAAMC,OAAO,GAAGF,OAAO,aAAPA,OAAO,wBAAAC,gBAAA,GAAPD,OAAO,CAAEG,OAAO,cAAAF,gBAAA,uBAAhBA,gBAAA,CAAkBC,OAAO;EACzC,MAAME,IAAI,GAAG,MAAMP,QAAQ,CAACK,OAAO,CAAC;EAEpC,MAAMG,aAAa,GAAGC,UAAU,CAACP,KAAK,CAAC;EAEvC,MAAMQ,gBAAgB,GAAG,IAAIH,IAAI,CAACI,uBAAuB,EAAE,CAACC,KAAK,EAAE;EACnE,MAAMC,YAAY,GAAGN,IAAI,CAACO,YAAY,CAACN,aAAa,EAAEE,gBAAgB,CAAC;EACvE,OAAOG,YAAY,CAACE,MAAM,CAACC,KAAK,CAC9BH,YAAY,CAACI,UAAU,EACvBJ,YAAY,CAACK,UAAU,GAAGL,YAAY,CAACI,UAAU,CAClD;AACH;AAUA,OAAO,SAASR,UAAUA,CAACP,KAAY,EAAc;EACnD,OAAOH,uBAAuB,CAACoB,QAAQ,CAACjB,KAAK,CAAC,CAACkB,YAAY,CAAC,IAAI,CAAC;AACnE"}
+ {"version":3,"file":"encode-parquet-wasm.js","names":["RecordBatchStreamWriter","loadWasm","encode","table","options","_options$parquet","wasmUrl","parquet","wasm","arrowIPCBytes","tableToIPC","writerProperties","WriterPropertiesBuilder","build","parquetBytes","writeParquet","buffer","slice","byteOffset","byteLength","writeAll","toUint8Array"],"sources":["../../../../src/lib/wasm/encode-parquet-wasm.ts"],"sourcesContent":["import type {Table} from 'apache-arrow';\nimport type {WriterOptions} from '@loaders.gl/loader-utils';\n\nimport {RecordBatchStreamWriter} from 'apache-arrow';\nimport {loadWasm} from './load-wasm';\n\nexport type ParquetWriterOptions = WriterOptions & {\n parquet?: {\n wasmUrl?: string;\n };\n};\n\n/**\n * Encode Arrow Table to Parquet buffer\n */\nexport async function encode(table: Table, options?: ParquetWriterOptions): Promise<ArrayBuffer> {\n const wasmUrl = options?.parquet?.wasmUrl;\n const wasm = await loadWasm(wasmUrl);\n\n const arrowIPCBytes = tableToIPC(table);\n // TODO: provide options for how to write table.\n const writerProperties = new wasm.WriterPropertiesBuilder().build();\n const parquetBytes = wasm.writeParquet(arrowIPCBytes, writerProperties);\n return parquetBytes.buffer.slice(\n parquetBytes.byteOffset,\n parquetBytes.byteLength + parquetBytes.byteOffset\n );\n}\n\n/**\n * Serialize a {@link Table} to the IPC format. This function is a convenience\n * wrapper for {@link RecordBatchStreamWriter} and {@link RecordBatchFileWriter}.\n * Opposite of {@link tableFromIPC}.\n *\n * @param table The Table to serialize.\n * @param type Whether to serialize the Table as a file or a stream.\n */\nexport function tableToIPC(table: Table): Uint8Array {\n return RecordBatchStreamWriter.writeAll(table).toUint8Array(true);\n}\n"],"mappings":"AAGA,SAAQA,uBAAuB,QAAO,cAAc;AACpD,SAAQC,QAAQ,QAAO,aAAa;AAWpC,OAAO,eAAeC,MAAMA,CAACC,KAAY,EAAEC,OAA8B,EAAwB;EAAA,IAAAC,gBAAA;EAC/F,MAAMC,OAAO,GAAGF,OAAO,aAAPA,OAAO,wBAAAC,gBAAA,GAAPD,OAAO,CAAEG,OAAO,cAAAF,gBAAA,uBAAhBA,gBAAA,CAAkBC,OAAO;EACzC,MAAME,IAAI,GAAG,MAAMP,QAAQ,CAACK,OAAO,CAAC;EAEpC,MAAMG,aAAa,GAAGC,UAAU,CAACP,KAAK,CAAC;EAEvC,MAAMQ,gBAAgB,GAAG,IAAIH,IAAI,CAACI,uBAAuB,CAAC,CAAC,CAACC,KAAK,CAAC,CAAC;EACnE,MAAMC,YAAY,GAAGN,IAAI,CAACO,YAAY,CAACN,aAAa,EAAEE,gBAAgB,CAAC;EACvE,OAAOG,YAAY,CAACE,MAAM,CAACC,KAAK,CAC9BH,YAAY,CAACI,UAAU,EACvBJ,YAAY,CAACK,UAAU,GAAGL,YAAY,CAACI,UACzC,CAAC;AACH;AAUA,OAAO,SAASR,UAAUA,CAACP,KAAY,EAAc;EACnD,OAAOH,uBAAuB,CAACoB,QAAQ,CAACjB,KAAK,CAAC,CAACkB,YAAY,CAAC,IAAI,CAAC;AACnE"}
@@ -1 +1 @@
- {"version":3,"file":"parse-parquet-wasm.js","names":["Table","RecordBatchStreamReader","loadWasm","parseParquet","arrayBuffer","options","_options$parquet","wasmUrl","parquet","wasm","arr","Uint8Array","arrowIPCUint8Arr","readParquet","arrowIPCBuffer","buffer","slice","byteOffset","byteLength","arrowTable","tableFromIPC","input","reader","from","recordBatches","recordBatch","push"],"sources":["../../../../src/lib/wasm/parse-parquet-wasm.ts"],"sourcesContent":["// eslint-disable\nimport type {RecordBatch} from 'apache-arrow';\nimport type {LoaderOptions} from '@loaders.gl/loader-utils';\nimport {Table, RecordBatchStreamReader} from 'apache-arrow';\nimport {loadWasm} from './load-wasm/load-wasm-node';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'arrow-table';\n wasmUrl?: string;\n };\n};\n\nexport async function parseParquet(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n): Promise<Table> {\n const wasmUrl = options?.parquet?.wasmUrl;\n const wasm = await loadWasm(wasmUrl);\n\n const arr = new Uint8Array(arrayBuffer);\n const arrowIPCUint8Arr = wasm.readParquet(arr);\n const arrowIPCBuffer = arrowIPCUint8Arr.buffer.slice(\n arrowIPCUint8Arr.byteOffset,\n arrowIPCUint8Arr.byteLength + arrowIPCUint8Arr.byteOffset\n );\n const arrowTable = tableFromIPC(arrowIPCBuffer);\n return arrowTable;\n}\n\n/**\n * Deserialize the IPC format into a {@link Table}. This function is a\n * convenience wrapper for {@link RecordBatchReader}. Opposite of {@link tableToIPC}.\n */\nfunction tableFromIPC(input: ArrayBuffer): Table {\n const reader = RecordBatchStreamReader.from(input);\n const recordBatches: RecordBatch[] = [];\n for (const recordBatch of reader) {\n recordBatches.push(recordBatch);\n }\n return new Table(recordBatches);\n}\n"],"mappings":"AAGA,SAAQA,KAAK,EAAEC,uBAAuB,QAAO,cAAc;AAC3D,SAAQC,QAAQ,QAAO,4BAA4B;AASnD,OAAO,eAAeC,YAAYA,CAChCC,WAAwB,EACxBC,OAA8B,EACd;EAAA,IAAAC,gBAAA;EAChB,MAAMC,OAAO,GAAGF,OAAO,aAAPA,OAAO,wBAAAC,gBAAA,GAAPD,OAAO,CAAEG,OAAO,cAAAF,gBAAA,uBAAhBA,gBAAA,CAAkBC,OAAO;EACzC,MAAME,IAAI,GAAG,MAAMP,QAAQ,CAACK,OAAO,CAAC;EAEpC,MAAMG,GAAG,GAAG,IAAIC,UAAU,CAACP,WAAW,CAAC;EACvC,MAAMQ,gBAAgB,GAAGH,IAAI,CAACI,WAAW,CAACH,GAAG,CAAC;EAC9C,MAAMI,cAAc,GAAGF,gBAAgB,CAACG,MAAM,CAACC,KAAK,CAClDJ,gBAAgB,CAACK,UAAU,EAC3BL,gBAAgB,CAACM,UAAU,GAAGN,gBAAgB,CAACK,UAAU,CAC1D;EACD,MAAME,UAAU,GAAGC,YAAY,CAACN,cAAc,CAAC;EAC/C,OAAOK,UAAU;AACnB;AAMA,SAASC,YAAYA,CAACC,KAAkB,EAAS;EAC/C,MAAMC,MAAM,GAAGrB,uBAAuB,CAACsB,IAAI,CAACF,KAAK,CAAC;EAClD,MAAMG,aAA4B,GAAG,EAAE;EACvC,KAAK,MAAMC,WAAW,IAAIH,MAAM,EAAE;IAChCE,aAAa,CAACE,IAAI,CAACD,WAAW,CAAC;EACjC;EACA,OAAO,IAAIzB,KAAK,CAACwB,aAAa,CAAC;AACjC"}
+ {"version":3,"file":"parse-parquet-wasm.js","names":["Table","RecordBatchStreamReader","loadWasm","parseParquet","arrayBuffer","options","_options$parquet","wasmUrl","parquet","wasm","arr","Uint8Array","arrowIPCUint8Arr","readParquet","arrowIPCBuffer","buffer","slice","byteOffset","byteLength","arrowTable","tableFromIPC","input","reader","from","recordBatches","recordBatch","push"],"sources":["../../../../src/lib/wasm/parse-parquet-wasm.ts"],"sourcesContent":["// eslint-disable\nimport type {RecordBatch} from 'apache-arrow';\nimport type {LoaderOptions} from '@loaders.gl/loader-utils';\nimport {Table, RecordBatchStreamReader} from 'apache-arrow';\nimport {loadWasm} from './load-wasm/load-wasm-node';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'arrow-table';\n wasmUrl?: string;\n };\n};\n\nexport async function parseParquet(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n): Promise<Table> {\n const wasmUrl = options?.parquet?.wasmUrl;\n const wasm = await loadWasm(wasmUrl);\n\n const arr = new Uint8Array(arrayBuffer);\n const arrowIPCUint8Arr = wasm.readParquet(arr);\n const arrowIPCBuffer = arrowIPCUint8Arr.buffer.slice(\n arrowIPCUint8Arr.byteOffset,\n arrowIPCUint8Arr.byteLength + arrowIPCUint8Arr.byteOffset\n );\n const arrowTable = tableFromIPC(arrowIPCBuffer);\n return arrowTable;\n}\n\n/**\n * Deserialize the IPC format into a {@link Table}. This function is a\n * convenience wrapper for {@link RecordBatchReader}. Opposite of {@link tableToIPC}.\n */\nfunction tableFromIPC(input: ArrayBuffer): Table {\n const reader = RecordBatchStreamReader.from(input);\n const recordBatches: RecordBatch[] = [];\n for (const recordBatch of reader) {\n recordBatches.push(recordBatch);\n }\n return new Table(recordBatches);\n}\n"],"mappings":"AAGA,SAAQA,KAAK,EAAEC,uBAAuB,QAAO,cAAc;AAC3D,SAAQC,QAAQ,QAAO,4BAA4B;AASnD,OAAO,eAAeC,YAAYA,CAChCC,WAAwB,EACxBC,OAA8B,EACd;EAAA,IAAAC,gBAAA;EAChB,MAAMC,OAAO,GAAGF,OAAO,aAAPA,OAAO,wBAAAC,gBAAA,GAAPD,OAAO,CAAEG,OAAO,cAAAF,gBAAA,uBAAhBA,gBAAA,CAAkBC,OAAO;EACzC,MAAME,IAAI,GAAG,MAAMP,QAAQ,CAACK,OAAO,CAAC;EAEpC,MAAMG,GAAG,GAAG,IAAIC,UAAU,CAACP,WAAW,CAAC;EACvC,MAAMQ,gBAAgB,GAAGH,IAAI,CAACI,WAAW,CAACH,GAAG,CAAC;EAC9C,MAAMI,cAAc,GAAGF,gBAAgB,CAACG,MAAM,CAACC,KAAK,CAClDJ,gBAAgB,CAACK,UAAU,EAC3BL,gBAAgB,CAACM,UAAU,GAAGN,gBAAgB,CAACK,UACjD,CAAC;EACD,MAAME,UAAU,GAAGC,YAAY,CAACN,cAAc,CAAC;EAC/C,OAAOK,UAAU;AACnB;AAMA,SAASC,YAAYA,CAACC,KAAkB,EAAS;EAC/C,MAAMC,MAAM,GAAGrB,uBAAuB,CAACsB,IAAI,CAACF,KAAK,CAAC;EAClD,MAAMG,aAA4B,GAAG,EAAE;EACvC,KAAK,MAAMC,WAAW,IAAIH,MAAM,EAAE;IAChCE,aAAa,CAACE,IAAI,CAACD,WAAW,CAAC;EACjC;EACA,OAAO,IAAIzB,KAAK,CAACwB,aAAa,CAAC;AACjC"}
@@ -1,4 +1,4 @@
- const VERSION = typeof "3.4.0-alpha.3" !== 'undefined' ? "3.4.0-alpha.3" : 'latest';
+ const VERSION = typeof "3.4.0-alpha.5" !== 'undefined' ? "3.4.0-alpha.5" : 'latest';
  const DEFAULT_PARQUET_LOADER_OPTIONS = {
  parquet: {
  type: 'object-row-table',
@@ -1,4 +1,4 @@
- const VERSION = typeof "3.4.0-alpha.3" !== 'undefined' ? "3.4.0-alpha.3" : 'latest';
+ const VERSION = typeof "3.4.0-alpha.5" !== 'undefined' ? "3.4.0-alpha.5" : 'latest';
  const DEFAULT_PARQUET_LOADER_OPTIONS = {
  parquet: {
  type: 'arrow-table',
@@ -1,5 +1,5 @@
  import { encode } from './lib/wasm/encode-parquet-wasm';
- const VERSION = typeof "3.4.0-alpha.3" !== 'undefined' ? "3.4.0-alpha.3" : 'latest';
+ const VERSION = typeof "3.4.0-alpha.5" !== 'undefined' ? "3.4.0-alpha.5" : 'latest';
  const DEFAULT_PARQUET_WRITER_OPTIONS = {
  parquet: {
  wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'
@@ -1,4 +1,4 @@
- const VERSION = typeof "3.4.0-alpha.3" !== 'undefined' ? "3.4.0-alpha.3" : 'latest';
+ const VERSION = typeof "3.4.0-alpha.5" !== 'undefined' ? "3.4.0-alpha.5" : 'latest';
  const DEFAULT_PARQUET_LOADER_OPTIONS = {};
  export const ParquetWriter = {
  name: 'Apache Parquet',
@@ -1 +1 @@
- {"version":3,"file":"plain.js","names":["INT53","encodeValues","type","values","opts","encodeValues_BOOLEAN","encodeValues_INT32","encodeValues_INT64","encodeValues_INT96","encodeValues_FLOAT","encodeValues_DOUBLE","encodeValues_BYTE_ARRAY","encodeValues_FIXED_LEN_BYTE_ARRAY","Error","concat","decodeValues","cursor","count","decodeValues_BOOLEAN","decodeValues_INT32","decodeValues_INT64","decodeValues_INT96","decodeValues_FLOAT","decodeValues_DOUBLE","decodeValues_BYTE_ARRAY","decodeValues_FIXED_LEN_BYTE_ARRAY","buf","Buffer","alloc","Math","ceil","length","fill","i","floor","b","buffer","offset","push","writeInt32LE","readInt32LE","writeInt64LE","readInt64LE","writeUInt32LE","low","high","readUInt32LE","writeFloatLE","readFloatLE","writeDoubleLE","readDoubleLE","buf_len","from","buf_pos","copy","len","slice","typeLength"],"sources":["../../../../src/parquetjs/codecs/plain.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\nimport type {PrimitiveType} from '../schema/declare';\nimport type {CursorBuffer, ParquetCodecOptions} from './declare';\nimport INT53 from 'int53';\n\nexport function encodeValues(\n type: PrimitiveType,\n values: any[],\n opts: ParquetCodecOptions\n): Buffer {\n switch (type) {\n case 'BOOLEAN':\n return encodeValues_BOOLEAN(values);\n case 'INT32':\n return encodeValues_INT32(values);\n case 'INT64':\n return encodeValues_INT64(values);\n case 'INT96':\n return encodeValues_INT96(values);\n case 'FLOAT':\n return encodeValues_FLOAT(values);\n case 'DOUBLE':\n return encodeValues_DOUBLE(values);\n case 'BYTE_ARRAY':\n return encodeValues_BYTE_ARRAY(values);\n case 'FIXED_LEN_BYTE_ARRAY':\n return encodeValues_FIXED_LEN_BYTE_ARRAY(values, opts);\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n}\n\nexport function decodeValues(\n type: PrimitiveType,\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): any[] {\n switch (type) {\n case 'BOOLEAN':\n return decodeValues_BOOLEAN(cursor, count);\n case 'INT32':\n return decodeValues_INT32(cursor, count);\n case 'INT64':\n return decodeValues_INT64(cursor, count);\n case 'INT96':\n return decodeValues_INT96(cursor, count);\n case 'FLOAT':\n return decodeValues_FLOAT(cursor, count);\n case 'DOUBLE':\n return decodeValues_DOUBLE(cursor, count);\n case 'BYTE_ARRAY':\n return decodeValues_BYTE_ARRAY(cursor, count);\n case 'FIXED_LEN_BYTE_ARRAY':\n return decodeValues_FIXED_LEN_BYTE_ARRAY(cursor, count, opts);\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n}\n\nfunction encodeValues_BOOLEAN(values: boolean[]): Buffer {\n const buf = Buffer.alloc(Math.ceil(values.length / 8));\n buf.fill(0);\n for (let i = 0; i < values.length; i++) {\n if (values[i]) {\n buf[Math.floor(i / 8)] |= 1 << i % 8;\n }\n }\n return buf;\n}\n\nfunction decodeValues_BOOLEAN(cursor: CursorBuffer, count: number): boolean[] {\n const values: boolean[] = [];\n for (let i = 0; i < count; i++) {\n const b = cursor.buffer[cursor.offset + Math.floor(i / 8)];\n values.push((b & (1 << i % 8)) > 0);\n }\n cursor.offset += Math.ceil(count / 8);\n return values;\n}\n\nfunction encodeValues_INT32(values: number[]): Buffer {\n const buf = Buffer.alloc(4 * values.length);\n for (let i = 0; i < values.length; i++) {\n buf.writeInt32LE(values[i], i * 4);\n }\n return buf;\n}\n\nfunction decodeValues_INT32(cursor: CursorBuffer, count: number): number[] {\n const values: number[] = [];\n for (let i = 0; i < 
count; i++) {\n values.push(cursor.buffer.readInt32LE(cursor.offset));\n cursor.offset += 4;\n }\n return values;\n}\n\nfunction encodeValues_INT64(values: number[]): Buffer {\n const buf = Buffer.alloc(8 * values.length);\n for (let i = 0; i < values.length; i++) {\n INT53.writeInt64LE(values[i], buf, i * 8);\n }\n return buf;\n}\n\nfunction decodeValues_INT64(cursor: CursorBuffer, count: number): number[] {\n const values: number[] = [];\n for (let i = 0; i < count; i++) {\n values.push(INT53.readInt64LE(cursor.buffer, cursor.offset));\n cursor.offset += 8;\n }\n return values;\n}\n\nfunction encodeValues_INT96(values: number[]): Buffer {\n const buf = Buffer.alloc(12 * values.length);\n for (let i = 0; i < values.length; i++) {\n if (values[i] >= 0) {\n INT53.writeInt64LE(values[i], buf, i * 12);\n buf.writeUInt32LE(0, i * 12 + 8); // truncate to 64 actual precision\n } else {\n INT53.writeInt64LE(~-values[i] + 1, buf, i * 12);\n buf.writeUInt32LE(0xffffffff, i * 12 + 8); // truncate to 64 actual precision\n }\n }\n return buf;\n}\n\nfunction decodeValues_INT96(cursor: CursorBuffer, count: number): number[] {\n const values: number[] = [];\n for (let i = 0; i < count; i++) {\n const low = INT53.readInt64LE(cursor.buffer, cursor.offset);\n const high = cursor.buffer.readUInt32LE(cursor.offset + 8);\n if (high === 0xffffffff) {\n values.push(~-low + 1); // truncate to 64 actual precision\n } else {\n values.push(low); // truncate to 64 actual precision\n }\n cursor.offset += 12;\n }\n return values;\n}\n\nfunction encodeValues_FLOAT(values: number[]): Buffer {\n const buf = Buffer.alloc(4 * values.length);\n for (let i = 0; i < values.length; i++) {\n buf.writeFloatLE(values[i], i * 4);\n }\n return buf;\n}\n\nfunction decodeValues_FLOAT(cursor: CursorBuffer, count: number): number[] {\n const values: number[] = [];\n for (let i = 0; i < count; i++) {\n values.push(cursor.buffer.readFloatLE(cursor.offset));\n cursor.offset += 4;\n }\n return values;\n}\n\nfunction encodeValues_DOUBLE(values: number[]): Buffer {\n const buf = Buffer.alloc(8 * values.length);\n for (let i = 0; i < values.length; i++) {\n buf.writeDoubleLE(values[i], i * 8);\n }\n return buf;\n}\n\nfunction decodeValues_DOUBLE(cursor: CursorBuffer, count: number): number[] {\n const values: number[] = [];\n for (let i = 0; i < count; i++) {\n values.push(cursor.buffer.readDoubleLE(cursor.offset));\n cursor.offset += 8;\n }\n return values;\n}\n\nfunction encodeValues_BYTE_ARRAY(values: Buffer[]): Buffer {\n // tslint:disable-next-line:variable-name\n let buf_len = 0;\n for (let i = 0; i < values.length; i++) {\n values[i] = Buffer.from(values[i]);\n buf_len += 4 + values[i].length;\n }\n const buf = Buffer.alloc(buf_len);\n // tslint:disable-next-line:variable-name\n let buf_pos = 0;\n for (let i = 0; i < values.length; i++) {\n buf.writeUInt32LE(values[i].length, buf_pos);\n values[i].copy(buf, buf_pos + 4);\n buf_pos += 4 + values[i].length;\n }\n return buf;\n}\n\nfunction decodeValues_BYTE_ARRAY(cursor: CursorBuffer, count: number): Buffer[] {\n const values: Buffer[] = [];\n for (let i = 0; i < count; i++) {\n const len = cursor.buffer.readUInt32LE(cursor.offset);\n cursor.offset += 4;\n values.push(cursor.buffer.slice(cursor.offset, cursor.offset + len));\n cursor.offset += len;\n }\n return values;\n}\n\nfunction encodeValues_FIXED_LEN_BYTE_ARRAY(values: Buffer[], opts: ParquetCodecOptions): Buffer {\n if (!opts.typeLength) {\n throw new Error('missing option: typeLength (required for FIXED_LEN_BYTE_ARRAY)');\n }\n 
for (let i = 0; i < values.length; i++) {\n values[i] = Buffer.from(values[i]);\n if (values[i].length !== opts.typeLength) {\n throw new Error(`invalid value for FIXED_LEN_BYTE_ARRAY: ${values[i]}`);\n }\n }\n return Buffer.concat(values);\n}\n\nfunction decodeValues_FIXED_LEN_BYTE_ARRAY(\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): Buffer[] {\n const values: Buffer[] = [];\n if (!opts.typeLength) {\n throw new Error('missing option: typeLength (required for FIXED_LEN_BYTE_ARRAY)');\n }\n for (let i = 0; i < count; i++) {\n values.push(cursor.buffer.slice(cursor.offset, cursor.offset + opts.typeLength));\n cursor.offset += opts.typeLength;\n }\n return values;\n}\n"],"mappings":"AAIA,OAAOA,KAAK,MAAM,OAAO;AAEzB,OAAO,SAASC,YAAYA,CAC1BC,IAAmB,EACnBC,MAAa,EACbC,IAAyB,EACjB;EACR,QAAQF,IAAI;IACV,KAAK,SAAS;MACZ,OAAOG,oBAAoB,CAACF,MAAM,CAAC;IACrC,KAAK,OAAO;MACV,OAAOG,kBAAkB,CAACH,MAAM,CAAC;IACnC,KAAK,OAAO;MACV,OAAOI,kBAAkB,CAACJ,MAAM,CAAC;IACnC,KAAK,OAAO;MACV,OAAOK,kBAAkB,CAACL,MAAM,CAAC;IACnC,KAAK,OAAO;MACV,OAAOM,kBAAkB,CAACN,MAAM,CAAC;IACnC,KAAK,QAAQ;MACX,OAAOO,mBAAmB,CAACP,MAAM,CAAC;IACpC,KAAK,YAAY;MACf,OAAOQ,uBAAuB,CAACR,MAAM,CAAC;IACxC,KAAK,sBAAsB;MACzB,OAAOS,iCAAiC,CAACT,MAAM,EAAEC,IAAI,CAAC;IACxD;MACE,MAAM,IAAIS,KAAK,sBAAAC,MAAA,CAAsBZ,IAAI,EAAG;EAAC;AAEnD;AAEA,OAAO,SAASa,YAAYA,CAC1Bb,IAAmB,EACnBc,MAAoB,EACpBC,KAAa,EACbb,IAAyB,EAClB;EACP,QAAQF,IAAI;IACV,KAAK,SAAS;MACZ,OAAOgB,oBAAoB,CAACF,MAAM,EAAEC,KAAK,CAAC;IAC5C,KAAK,OAAO;MACV,OAAOE,kBAAkB,CAACH,MAAM,EAAEC,KAAK,CAAC;IAC1C,KAAK,OAAO;MACV,OAAOG,kBAAkB,CAACJ,MAAM,EAAEC,KAAK,CAAC;IAC1C,KAAK,OAAO;MACV,OAAOI,kBAAkB,CAACL,MAAM,EAAEC,KAAK,CAAC;IAC1C,KAAK,OAAO;MACV,OAAOK,kBAAkB,CAACN,MAAM,EAAEC,KAAK,CAAC;IAC1C,KAAK,QAAQ;MACX,OAAOM,mBAAmB,CAACP,MAAM,EAAEC,KAAK,CAAC;IAC3C,KAAK,YAAY;MACf,OAAOO,uBAAuB,CAACR,MAAM,EAAEC,KAAK,CAAC;IAC/C,KAAK,sBAAsB;MACzB,OAAOQ,iCAAiC,CAACT,MAAM,EAAEC,KAAK,EAAEb,IAAI,CAAC;IAC/D;MACE,MAAM,IAAIS,KAAK,sBAAAC,MAAA,CAAsBZ,IAAI,EAAG;EAAC;AAEnD;AAEA,SAASG,oBAAoBA,CAACF,MAAiB,EAAU;EACvD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,CAACC,IAAI,CAACC,IAAI,CAAC3B,MAAM,CAAC4B,MAAM,GAAG,CAAC,CAAC,CAAC;EACtDL,GAAG,CAACM,IAAI,CAAC,CAAC,CAAC;EACX,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtC,IAAI9B,MAAM,CAAC8B,CAAC,CAAC,EAAE;MACbP,GAAG,CAACG,IAAI,CAACK,KAAK,CAACD,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAIA,CAAC,GAAG,CAAC;IACtC;EACF;EACA,OAAOP,GAAG;AACZ;AAEA,SAASR,oBAAoBA,CAACF,MAAoB,EAAEC,KAAa,EAAa;EAC5E,MAAMd,MAAiB,GAAG,EAAE;EAC5B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B,MAAME,CAAC,GAAGnB,MAAM,CAACoB,MAAM,CAACpB,MAAM,CAACqB,MAAM,GAAGR,IAAI,CAACK,KAAK,CAACD,CAAC,GAAG,CAAC,CAAC,CAAC;IAC1D9B,MAAM,CAACmC,IAAI,CAAC,CAACH,CAAC,GAAI,CAAC,IAAIF,CAAC,GAAG,CAAE,IAAI,CAAC,CAAC;EACrC;EACAjB,MAAM,CAACqB,MAAM,IAAIR,IAAI,CAACC,IAAI,CAACb,KAAK,GAAG,CAAC,CAAC;EACrC,OAAOd,MAAM;AACf;AAEA,SAASG,kBAAkBA,CAACH,MAAgB,EAAU;EACpD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC,CAAC,GAAGzB,MAAM,CAAC4B,MAAM,CAAC;EAC3C,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtCP,GAAG,CAACa,YAAY,CAACpC,MAAM,CAAC8B,CAAC,CAAC,EAAEA,CAAC,GAAG,CAAC,CAAC;EACpC;EACA,OAAOP,GAAG;AACZ;AAEA,SAASP,kBAAkBA,CAACH,MAAoB,EAAEC,KAAa,EAAY;EACzE,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B9B,MAAM,CAACmC,IAAI,CAACtB,MAAM,CAACoB,MAAM,CAACI,WAAW,CAACxB,MAAM,CAACqB,MAAM,CAAC,CAAC;IACrDrB,MAAM,CAACqB,MAAM,IAAI,CAAC;EACpB;EACA,OAAOlC,MAAM;AACf;AAEA,SAASI,kBAAkBA,CAACJ,MAAgB,EAAU;EACpD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,C
AAC,CAAC,GAAGzB,MAAM,CAAC4B,MAAM,CAAC;EAC3C,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtCjC,KAAK,CAACyC,YAAY,CAACtC,MAAM,CAAC8B,CAAC,CAAC,EAAEP,GAAG,EAAEO,CAAC,GAAG,CAAC,CAAC;EAC3C;EACA,OAAOP,GAAG;AACZ;AAEA,SAASN,kBAAkBA,CAACJ,MAAoB,EAAEC,KAAa,EAAY;EACzE,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B9B,MAAM,CAACmC,IAAI,CAACtC,KAAK,CAAC0C,WAAW,CAAC1B,MAAM,CAACoB,MAAM,EAAEpB,MAAM,CAACqB,MAAM,CAAC,CAAC;IAC5DrB,MAAM,CAACqB,MAAM,IAAI,CAAC;EACpB;EACA,OAAOlC,MAAM;AACf;AAEA,SAASK,kBAAkBA,CAACL,MAAgB,EAAU;EACpD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC,EAAE,GAAGzB,MAAM,CAAC4B,MAAM,CAAC;EAC5C,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtC,IAAI9B,MAAM,CAAC8B,CAAC,CAAC,IAAI,CAAC,EAAE;MAClBjC,KAAK,CAACyC,YAAY,CAACtC,MAAM,CAAC8B,CAAC,CAAC,EAAEP,GAAG,EAAEO,CAAC,GAAG,EAAE,CAAC;MAC1CP,GAAG,CAACiB,aAAa,CAAC,CAAC,EAAEV,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IAClC,CAAC,MAAM;MACLjC,KAAK,CAACyC,YAAY,CAAC,CAAC,CAACtC,MAAM,CAAC8B,CAAC,CAAC,GAAG,CAAC,EAAEP,GAAG,EAAEO,CAAC,GAAG,EAAE,CAAC;MAChDP,GAAG,CAACiB,aAAa,CAAC,UAAU,EAAEV,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IAC3C;EACF;EACA,OAAOP,GAAG;AACZ;AAEA,SAASL,kBAAkBA,CAACL,MAAoB,EAAEC,KAAa,EAAY;EACzE,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B,MAAMW,GAAG,GAAG5C,KAAK,CAAC0C,WAAW,CAAC1B,MAAM,CAACoB,MAAM,EAAEpB,MAAM,CAACqB,MAAM,CAAC;IAC3D,MAAMQ,IAAI,GAAG7B,MAAM,CAACoB,MAAM,CAACU,YAAY,CAAC9B,MAAM,CAACqB,MAAM,GAAG,CAAC,CAAC;IAC1D,IAAIQ,IAAI,KAAK,UAAU,EAAE;MACvB1C,MAAM,CAACmC,IAAI,CAAC,CAAC,CAACM,GAAG,GAAG,CAAC,CAAC;IACxB,CAAC,MAAM;MACLzC,MAAM,CAACmC,IAAI,CAACM,GAAG,CAAC;IAClB;IACA5B,MAAM,CAACqB,MAAM,IAAI,EAAE;EACrB;EACA,OAAOlC,MAAM;AACf;AAEA,SAASM,kBAAkBA,CAACN,MAAgB,EAAU;EACpD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC,CAAC,GAAGzB,MAAM,CAAC4B,MAAM,CAAC;EAC3C,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtCP,GAAG,CAACqB,YAAY,CAAC5C,MAAM,CAAC8B,CAAC,CAAC,EAAEA,CAAC,GAAG,CAAC,CAAC;EACpC;EACA,OAAOP,GAAG;AACZ;AAEA,SAASJ,kBAAkBA,CAACN,MAAoB,EAAEC,KAAa,EAAY;EACzE,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B9B,MAAM,CAACmC,IAAI,CAACtB,MAAM,CAACoB,MAAM,CAACY,WAAW,CAAChC,MAAM,CAACqB,MAAM,CAAC,CAAC;IACrDrB,MAAM,CAACqB,MAAM,IAAI,CAAC;EACpB;EACA,OAAOlC,MAAM;AACf;AAEA,SAASO,mBAAmBA,CAACP,MAAgB,EAAU;EACrD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC,CAAC,GAAGzB,MAAM,CAAC4B,MAAM,CAAC;EAC3C,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtCP,GAAG,CAACuB,aAAa,CAAC9C,MAAM,CAAC8B,CAAC,CAAC,EAAEA,CAAC,GAAG,CAAC,CAAC;EACrC;EACA,OAAOP,GAAG;AACZ;AAEA,SAASH,mBAAmBA,CAACP,MAAoB,EAAEC,KAAa,EAAY;EAC1E,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B9B,MAAM,CAACmC,IAAI,CAACtB,MAAM,CAACoB,MAAM,CAACc,YAAY,CAAClC,MAAM,CAACqB,MAAM,CAAC,CAAC;IACtDrB,MAAM,CAACqB,MAAM,IAAI,CAAC;EACpB;EACA,OAAOlC,MAAM;AACf;AAEA,SAASQ,uBAAuBA,CAACR,MAAgB,EAAU;EAEzD,IAAIgD,OAAO,GAAG,CAAC;EACf,KAAK,IAAIlB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtC9B,MAAM,CAAC8B,CAAC,CAAC,GAAGN,MAAM,CAACyB,IAAI,CAACjD,MAAM,CAAC8B,CAAC,CAAC,CAAC;IAClCkB,OAAO,IAAI,CAAC,GAAGhD,MAAM,CAAC8B,CAAC,CAAC,CAACF,MAAM;EACjC;EACA,MAAML,GAAG,GAAGC,MAAM,CAACC,KAAK,CAACuB,OAAO,CAAC;EAEjC,IAAIE,OAAO,GAAG,CAAC;EACf,KAAK,IAAIpB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtCP,GAAG,CAACiB,aAAa,CAACxC,MAAM,CAAC8B,CAAC,CAAC,CAACF,MAAM,EAAEsB,OAAO
,CAAC;IAC5ClD,MAAM,CAAC8B,CAAC,CAAC,CAACqB,IAAI,CAAC5B,GAAG,EAAE2B,OAAO,GAAG,CAAC,CAAC;IAChCA,OAAO,IAAI,CAAC,GAAGlD,MAAM,CAAC8B,CAAC,CAAC,CAACF,MAAM;EACjC;EACA,OAAOL,GAAG;AACZ;AAEA,SAASF,uBAAuBA,CAACR,MAAoB,EAAEC,KAAa,EAAY;EAC9E,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B,MAAMsB,GAAG,GAAGvC,MAAM,CAACoB,MAAM,CAACU,YAAY,CAAC9B,MAAM,CAACqB,MAAM,CAAC;IACrDrB,MAAM,CAACqB,MAAM,IAAI,CAAC;IAClBlC,MAAM,CAACmC,IAAI,CAACtB,MAAM,CAACoB,MAAM,CAACoB,KAAK,CAACxC,MAAM,CAACqB,MAAM,EAAErB,MAAM,CAACqB,MAAM,GAAGkB,GAAG,CAAC,CAAC;IACpEvC,MAAM,CAACqB,MAAM,IAAIkB,GAAG;EACtB;EACA,OAAOpD,MAAM;AACf;AAEA,SAASS,iCAAiCA,CAACT,MAAgB,EAAEC,IAAyB,EAAU;EAC9F,IAAI,CAACA,IAAI,CAACqD,UAAU,EAAE;IACpB,MAAM,IAAI5C,KAAK,CAAC,gEAAgE,CAAC;EACnF;EACA,KAAK,IAAIoB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtC9B,MAAM,CAAC8B,CAAC,CAAC,GAAGN,MAAM,CAACyB,IAAI,CAACjD,MAAM,CAAC8B,CAAC,CAAC,CAAC;IAClC,IAAI9B,MAAM,CAAC8B,CAAC,CAAC,CAACF,MAAM,KAAK3B,IAAI,CAACqD,UAAU,EAAE;MACxC,MAAM,IAAI5C,KAAK,4CAAAC,MAAA,CAA4CX,MAAM,CAAC8B,CAAC,CAAC,EAAG;IACzE;EACF;EACA,OAAON,MAAM,CAACb,MAAM,CAACX,MAAM,CAAC;AAC9B;AAEA,SAASsB,iCAAiCA,CACxCT,MAAoB,EACpBC,KAAa,EACbb,IAAyB,EACf;EACV,MAAMD,MAAgB,GAAG,EAAE;EAC3B,IAAI,CAACC,IAAI,CAACqD,UAAU,EAAE;IACpB,MAAM,IAAI5C,KAAK,CAAC,gEAAgE,CAAC;EACnF;EACA,KAAK,IAAIoB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B9B,MAAM,CAACmC,IAAI,CAACtB,MAAM,CAACoB,MAAM,CAACoB,KAAK,CAACxC,MAAM,CAACqB,MAAM,EAAErB,MAAM,CAACqB,MAAM,GAAGjC,IAAI,CAACqD,UAAU,CAAC,CAAC;IAChFzC,MAAM,CAACqB,MAAM,IAAIjC,IAAI,CAACqD,UAAU;EAClC;EACA,OAAOtD,MAAM;AACf"}
+ {"version":3,"file":"plain.js","names":["INT53","encodeValues","type","values","opts","encodeValues_BOOLEAN","encodeValues_INT32","encodeValues_INT64","encodeValues_INT96","encodeValues_FLOAT","encodeValues_DOUBLE","encodeValues_BYTE_ARRAY","encodeValues_FIXED_LEN_BYTE_ARRAY","Error","concat","decodeValues","cursor","count","decodeValues_BOOLEAN","decodeValues_INT32","decodeValues_INT64","decodeValues_INT96","decodeValues_FLOAT","decodeValues_DOUBLE","decodeValues_BYTE_ARRAY","decodeValues_FIXED_LEN_BYTE_ARRAY","buf","Buffer","alloc","Math","ceil","length","fill","i","floor","b","buffer","offset","push","writeInt32LE","readInt32LE","writeInt64LE","readInt64LE","writeUInt32LE","low","high","readUInt32LE","writeFloatLE","readFloatLE","writeDoubleLE","readDoubleLE","buf_len","from","buf_pos","copy","len","slice","typeLength"],"sources":["../../../../src/parquetjs/codecs/plain.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\nimport type {PrimitiveType} from '../schema/declare';\nimport type {CursorBuffer, ParquetCodecOptions} from './declare';\nimport INT53 from 'int53';\n\nexport function encodeValues(\n type: PrimitiveType,\n values: any[],\n opts: ParquetCodecOptions\n): Buffer {\n switch (type) {\n case 'BOOLEAN':\n return encodeValues_BOOLEAN(values);\n case 'INT32':\n return encodeValues_INT32(values);\n case 'INT64':\n return encodeValues_INT64(values);\n case 'INT96':\n return encodeValues_INT96(values);\n case 'FLOAT':\n return encodeValues_FLOAT(values);\n case 'DOUBLE':\n return encodeValues_DOUBLE(values);\n case 'BYTE_ARRAY':\n return encodeValues_BYTE_ARRAY(values);\n case 'FIXED_LEN_BYTE_ARRAY':\n return encodeValues_FIXED_LEN_BYTE_ARRAY(values, opts);\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n}\n\nexport function decodeValues(\n type: PrimitiveType,\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): any[] {\n switch (type) {\n case 'BOOLEAN':\n return decodeValues_BOOLEAN(cursor, count);\n case 'INT32':\n return decodeValues_INT32(cursor, count);\n case 'INT64':\n return decodeValues_INT64(cursor, count);\n case 'INT96':\n return decodeValues_INT96(cursor, count);\n case 'FLOAT':\n return decodeValues_FLOAT(cursor, count);\n case 'DOUBLE':\n return decodeValues_DOUBLE(cursor, count);\n case 'BYTE_ARRAY':\n return decodeValues_BYTE_ARRAY(cursor, count);\n case 'FIXED_LEN_BYTE_ARRAY':\n return decodeValues_FIXED_LEN_BYTE_ARRAY(cursor, count, opts);\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n}\n\nfunction encodeValues_BOOLEAN(values: boolean[]): Buffer {\n const buf = Buffer.alloc(Math.ceil(values.length / 8));\n buf.fill(0);\n for (let i = 0; i < values.length; i++) {\n if (values[i]) {\n buf[Math.floor(i / 8)] |= 1 << i % 8;\n }\n }\n return buf;\n}\n\nfunction decodeValues_BOOLEAN(cursor: CursorBuffer, count: number): boolean[] {\n const values: boolean[] = [];\n for (let i = 0; i < count; i++) {\n const b = cursor.buffer[cursor.offset + Math.floor(i / 8)];\n values.push((b & (1 << i % 8)) > 0);\n }\n cursor.offset += Math.ceil(count / 8);\n return values;\n}\n\nfunction encodeValues_INT32(values: number[]): Buffer {\n const buf = Buffer.alloc(4 * values.length);\n for (let i = 0; i < values.length; i++) {\n buf.writeInt32LE(values[i], i * 4);\n }\n return buf;\n}\n\nfunction decodeValues_INT32(cursor: CursorBuffer, count: number): number[] {\n const values: number[] = [];\n for (let i = 0; i < 
count; i++) {\n values.push(cursor.buffer.readInt32LE(cursor.offset));\n cursor.offset += 4;\n }\n return values;\n}\n\nfunction encodeValues_INT64(values: number[]): Buffer {\n const buf = Buffer.alloc(8 * values.length);\n for (let i = 0; i < values.length; i++) {\n INT53.writeInt64LE(values[i], buf, i * 8);\n }\n return buf;\n}\n\nfunction decodeValues_INT64(cursor: CursorBuffer, count: number): number[] {\n const values: number[] = [];\n for (let i = 0; i < count; i++) {\n values.push(INT53.readInt64LE(cursor.buffer, cursor.offset));\n cursor.offset += 8;\n }\n return values;\n}\n\nfunction encodeValues_INT96(values: number[]): Buffer {\n const buf = Buffer.alloc(12 * values.length);\n for (let i = 0; i < values.length; i++) {\n if (values[i] >= 0) {\n INT53.writeInt64LE(values[i], buf, i * 12);\n buf.writeUInt32LE(0, i * 12 + 8); // truncate to 64 actual precision\n } else {\n INT53.writeInt64LE(~-values[i] + 1, buf, i * 12);\n buf.writeUInt32LE(0xffffffff, i * 12 + 8); // truncate to 64 actual precision\n }\n }\n return buf;\n}\n\nfunction decodeValues_INT96(cursor: CursorBuffer, count: number): number[] {\n const values: number[] = [];\n for (let i = 0; i < count; i++) {\n const low = INT53.readInt64LE(cursor.buffer, cursor.offset);\n const high = cursor.buffer.readUInt32LE(cursor.offset + 8);\n if (high === 0xffffffff) {\n values.push(~-low + 1); // truncate to 64 actual precision\n } else {\n values.push(low); // truncate to 64 actual precision\n }\n cursor.offset += 12;\n }\n return values;\n}\n\nfunction encodeValues_FLOAT(values: number[]): Buffer {\n const buf = Buffer.alloc(4 * values.length);\n for (let i = 0; i < values.length; i++) {\n buf.writeFloatLE(values[i], i * 4);\n }\n return buf;\n}\n\nfunction decodeValues_FLOAT(cursor: CursorBuffer, count: number): number[] {\n const values: number[] = [];\n for (let i = 0; i < count; i++) {\n values.push(cursor.buffer.readFloatLE(cursor.offset));\n cursor.offset += 4;\n }\n return values;\n}\n\nfunction encodeValues_DOUBLE(values: number[]): Buffer {\n const buf = Buffer.alloc(8 * values.length);\n for (let i = 0; i < values.length; i++) {\n buf.writeDoubleLE(values[i], i * 8);\n }\n return buf;\n}\n\nfunction decodeValues_DOUBLE(cursor: CursorBuffer, count: number): number[] {\n const values: number[] = [];\n for (let i = 0; i < count; i++) {\n values.push(cursor.buffer.readDoubleLE(cursor.offset));\n cursor.offset += 8;\n }\n return values;\n}\n\nfunction encodeValues_BYTE_ARRAY(values: Buffer[]): Buffer {\n // tslint:disable-next-line:variable-name\n let buf_len = 0;\n for (let i = 0; i < values.length; i++) {\n values[i] = Buffer.from(values[i]);\n buf_len += 4 + values[i].length;\n }\n const buf = Buffer.alloc(buf_len);\n // tslint:disable-next-line:variable-name\n let buf_pos = 0;\n for (let i = 0; i < values.length; i++) {\n buf.writeUInt32LE(values[i].length, buf_pos);\n values[i].copy(buf, buf_pos + 4);\n buf_pos += 4 + values[i].length;\n }\n return buf;\n}\n\nfunction decodeValues_BYTE_ARRAY(cursor: CursorBuffer, count: number): Buffer[] {\n const values: Buffer[] = [];\n for (let i = 0; i < count; i++) {\n const len = cursor.buffer.readUInt32LE(cursor.offset);\n cursor.offset += 4;\n values.push(cursor.buffer.slice(cursor.offset, cursor.offset + len));\n cursor.offset += len;\n }\n return values;\n}\n\nfunction encodeValues_FIXED_LEN_BYTE_ARRAY(values: Buffer[], opts: ParquetCodecOptions): Buffer {\n if (!opts.typeLength) {\n throw new Error('missing option: typeLength (required for FIXED_LEN_BYTE_ARRAY)');\n }\n 
for (let i = 0; i < values.length; i++) {\n values[i] = Buffer.from(values[i]);\n if (values[i].length !== opts.typeLength) {\n throw new Error(`invalid value for FIXED_LEN_BYTE_ARRAY: ${values[i]}`);\n }\n }\n return Buffer.concat(values);\n}\n\nfunction decodeValues_FIXED_LEN_BYTE_ARRAY(\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): Buffer[] {\n const values: Buffer[] = [];\n if (!opts.typeLength) {\n throw new Error('missing option: typeLength (required for FIXED_LEN_BYTE_ARRAY)');\n }\n for (let i = 0; i < count; i++) {\n values.push(cursor.buffer.slice(cursor.offset, cursor.offset + opts.typeLength));\n cursor.offset += opts.typeLength;\n }\n return values;\n}\n"],"mappings":"AAIA,OAAOA,KAAK,MAAM,OAAO;AAEzB,OAAO,SAASC,YAAYA,CAC1BC,IAAmB,EACnBC,MAAa,EACbC,IAAyB,EACjB;EACR,QAAQF,IAAI;IACV,KAAK,SAAS;MACZ,OAAOG,oBAAoB,CAACF,MAAM,CAAC;IACrC,KAAK,OAAO;MACV,OAAOG,kBAAkB,CAACH,MAAM,CAAC;IACnC,KAAK,OAAO;MACV,OAAOI,kBAAkB,CAACJ,MAAM,CAAC;IACnC,KAAK,OAAO;MACV,OAAOK,kBAAkB,CAACL,MAAM,CAAC;IACnC,KAAK,OAAO;MACV,OAAOM,kBAAkB,CAACN,MAAM,CAAC;IACnC,KAAK,QAAQ;MACX,OAAOO,mBAAmB,CAACP,MAAM,CAAC;IACpC,KAAK,YAAY;MACf,OAAOQ,uBAAuB,CAACR,MAAM,CAAC;IACxC,KAAK,sBAAsB;MACzB,OAAOS,iCAAiC,CAACT,MAAM,EAAEC,IAAI,CAAC;IACxD;MACE,MAAM,IAAIS,KAAK,sBAAAC,MAAA,CAAsBZ,IAAI,CAAE,CAAC;EAChD;AACF;AAEA,OAAO,SAASa,YAAYA,CAC1Bb,IAAmB,EACnBc,MAAoB,EACpBC,KAAa,EACbb,IAAyB,EAClB;EACP,QAAQF,IAAI;IACV,KAAK,SAAS;MACZ,OAAOgB,oBAAoB,CAACF,MAAM,EAAEC,KAAK,CAAC;IAC5C,KAAK,OAAO;MACV,OAAOE,kBAAkB,CAACH,MAAM,EAAEC,KAAK,CAAC;IAC1C,KAAK,OAAO;MACV,OAAOG,kBAAkB,CAACJ,MAAM,EAAEC,KAAK,CAAC;IAC1C,KAAK,OAAO;MACV,OAAOI,kBAAkB,CAACL,MAAM,EAAEC,KAAK,CAAC;IAC1C,KAAK,OAAO;MACV,OAAOK,kBAAkB,CAACN,MAAM,EAAEC,KAAK,CAAC;IAC1C,KAAK,QAAQ;MACX,OAAOM,mBAAmB,CAACP,MAAM,EAAEC,KAAK,CAAC;IAC3C,KAAK,YAAY;MACf,OAAOO,uBAAuB,CAACR,MAAM,EAAEC,KAAK,CAAC;IAC/C,KAAK,sBAAsB;MACzB,OAAOQ,iCAAiC,CAACT,MAAM,EAAEC,KAAK,EAAEb,IAAI,CAAC;IAC/D;MACE,MAAM,IAAIS,KAAK,sBAAAC,MAAA,CAAsBZ,IAAI,CAAE,CAAC;EAChD;AACF;AAEA,SAASG,oBAAoBA,CAACF,MAAiB,EAAU;EACvD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,CAACC,IAAI,CAACC,IAAI,CAAC3B,MAAM,CAAC4B,MAAM,GAAG,CAAC,CAAC,CAAC;EACtDL,GAAG,CAACM,IAAI,CAAC,CAAC,CAAC;EACX,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtC,IAAI9B,MAAM,CAAC8B,CAAC,CAAC,EAAE;MACbP,GAAG,CAACG,IAAI,CAACK,KAAK,CAACD,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAIA,CAAC,GAAG,CAAC;IACtC;EACF;EACA,OAAOP,GAAG;AACZ;AAEA,SAASR,oBAAoBA,CAACF,MAAoB,EAAEC,KAAa,EAAa;EAC5E,MAAMd,MAAiB,GAAG,EAAE;EAC5B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B,MAAME,CAAC,GAAGnB,MAAM,CAACoB,MAAM,CAACpB,MAAM,CAACqB,MAAM,GAAGR,IAAI,CAACK,KAAK,CAACD,CAAC,GAAG,CAAC,CAAC,CAAC;IAC1D9B,MAAM,CAACmC,IAAI,CAAC,CAACH,CAAC,GAAI,CAAC,IAAIF,CAAC,GAAG,CAAE,IAAI,CAAC,CAAC;EACrC;EACAjB,MAAM,CAACqB,MAAM,IAAIR,IAAI,CAACC,IAAI,CAACb,KAAK,GAAG,CAAC,CAAC;EACrC,OAAOd,MAAM;AACf;AAEA,SAASG,kBAAkBA,CAACH,MAAgB,EAAU;EACpD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC,CAAC,GAAGzB,MAAM,CAAC4B,MAAM,CAAC;EAC3C,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtCP,GAAG,CAACa,YAAY,CAACpC,MAAM,CAAC8B,CAAC,CAAC,EAAEA,CAAC,GAAG,CAAC,CAAC;EACpC;EACA,OAAOP,GAAG;AACZ;AAEA,SAASP,kBAAkBA,CAACH,MAAoB,EAAEC,KAAa,EAAY;EACzE,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B9B,MAAM,CAACmC,IAAI,CAACtB,MAAM,CAACoB,MAAM,CAACI,WAAW,CAACxB,MAAM,CAACqB,MAAM,CAAC,CAAC;IACrDrB,MAAM,CAACqB,MAAM,IAAI,CAAC;EACpB;EACA,OAAOlC,MAAM;AACf;AAEA,SAASI,kBAAkBA,CAACJ,MAAgB,EAAU;EACpD,MAAMuB,GAAG,GAAGC,MAAM,CA
ACC,KAAK,CAAC,CAAC,GAAGzB,MAAM,CAAC4B,MAAM,CAAC;EAC3C,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtCjC,KAAK,CAACyC,YAAY,CAACtC,MAAM,CAAC8B,CAAC,CAAC,EAAEP,GAAG,EAAEO,CAAC,GAAG,CAAC,CAAC;EAC3C;EACA,OAAOP,GAAG;AACZ;AAEA,SAASN,kBAAkBA,CAACJ,MAAoB,EAAEC,KAAa,EAAY;EACzE,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B9B,MAAM,CAACmC,IAAI,CAACtC,KAAK,CAAC0C,WAAW,CAAC1B,MAAM,CAACoB,MAAM,EAAEpB,MAAM,CAACqB,MAAM,CAAC,CAAC;IAC5DrB,MAAM,CAACqB,MAAM,IAAI,CAAC;EACpB;EACA,OAAOlC,MAAM;AACf;AAEA,SAASK,kBAAkBA,CAACL,MAAgB,EAAU;EACpD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC,EAAE,GAAGzB,MAAM,CAAC4B,MAAM,CAAC;EAC5C,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtC,IAAI9B,MAAM,CAAC8B,CAAC,CAAC,IAAI,CAAC,EAAE;MAClBjC,KAAK,CAACyC,YAAY,CAACtC,MAAM,CAAC8B,CAAC,CAAC,EAAEP,GAAG,EAAEO,CAAC,GAAG,EAAE,CAAC;MAC1CP,GAAG,CAACiB,aAAa,CAAC,CAAC,EAAEV,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IAClC,CAAC,MAAM;MACLjC,KAAK,CAACyC,YAAY,CAAC,CAAC,CAACtC,MAAM,CAAC8B,CAAC,CAAC,GAAG,CAAC,EAAEP,GAAG,EAAEO,CAAC,GAAG,EAAE,CAAC;MAChDP,GAAG,CAACiB,aAAa,CAAC,UAAU,EAAEV,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IAC3C;EACF;EACA,OAAOP,GAAG;AACZ;AAEA,SAASL,kBAAkBA,CAACL,MAAoB,EAAEC,KAAa,EAAY;EACzE,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B,MAAMW,GAAG,GAAG5C,KAAK,CAAC0C,WAAW,CAAC1B,MAAM,CAACoB,MAAM,EAAEpB,MAAM,CAACqB,MAAM,CAAC;IAC3D,MAAMQ,IAAI,GAAG7B,MAAM,CAACoB,MAAM,CAACU,YAAY,CAAC9B,MAAM,CAACqB,MAAM,GAAG,CAAC,CAAC;IAC1D,IAAIQ,IAAI,KAAK,UAAU,EAAE;MACvB1C,MAAM,CAACmC,IAAI,CAAC,CAAC,CAACM,GAAG,GAAG,CAAC,CAAC;IACxB,CAAC,MAAM;MACLzC,MAAM,CAACmC,IAAI,CAACM,GAAG,CAAC;IAClB;IACA5B,MAAM,CAACqB,MAAM,IAAI,EAAE;EACrB;EACA,OAAOlC,MAAM;AACf;AAEA,SAASM,kBAAkBA,CAACN,MAAgB,EAAU;EACpD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC,CAAC,GAAGzB,MAAM,CAAC4B,MAAM,CAAC;EAC3C,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtCP,GAAG,CAACqB,YAAY,CAAC5C,MAAM,CAAC8B,CAAC,CAAC,EAAEA,CAAC,GAAG,CAAC,CAAC;EACpC;EACA,OAAOP,GAAG;AACZ;AAEA,SAASJ,kBAAkBA,CAACN,MAAoB,EAAEC,KAAa,EAAY;EACzE,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B9B,MAAM,CAACmC,IAAI,CAACtB,MAAM,CAACoB,MAAM,CAACY,WAAW,CAAChC,MAAM,CAACqB,MAAM,CAAC,CAAC;IACrDrB,MAAM,CAACqB,MAAM,IAAI,CAAC;EACpB;EACA,OAAOlC,MAAM;AACf;AAEA,SAASO,mBAAmBA,CAACP,MAAgB,EAAU;EACrD,MAAMuB,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC,CAAC,GAAGzB,MAAM,CAAC4B,MAAM,CAAC;EAC3C,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtCP,GAAG,CAACuB,aAAa,CAAC9C,MAAM,CAAC8B,CAAC,CAAC,EAAEA,CAAC,GAAG,CAAC,CAAC;EACrC;EACA,OAAOP,GAAG;AACZ;AAEA,SAASH,mBAAmBA,CAACP,MAAoB,EAAEC,KAAa,EAAY;EAC1E,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B9B,MAAM,CAACmC,IAAI,CAACtB,MAAM,CAACoB,MAAM,CAACc,YAAY,CAAClC,MAAM,CAACqB,MAAM,CAAC,CAAC;IACtDrB,MAAM,CAACqB,MAAM,IAAI,CAAC;EACpB;EACA,OAAOlC,MAAM;AACf;AAEA,SAASQ,uBAAuBA,CAACR,MAAgB,EAAU;EAEzD,IAAIgD,OAAO,GAAG,CAAC;EACf,KAAK,IAAIlB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtC9B,MAAM,CAAC8B,CAAC,CAAC,GAAGN,MAAM,CAACyB,IAAI,CAACjD,MAAM,CAAC8B,CAAC,CAAC,CAAC;IAClCkB,OAAO,IAAI,CAAC,GAAGhD,MAAM,CAAC8B,CAAC,CAAC,CAACF,MAAM;EACjC;EACA,MAAML,GAAG,GAAGC,MAAM,CAACC,KAAK,CAACuB,OAAO,CAAC;EAEjC,IAAIE,OAAO,GAAG,CAAC;EACf,KAAK,IAAIpB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtCP,GAAG,CAACiB,aAAa,CAACxC,MAAM,CAAC8B,CAAC,CAAC,CAACF,MAAM,E
AAEsB,OAAO,CAAC;IAC5ClD,MAAM,CAAC8B,CAAC,CAAC,CAACqB,IAAI,CAAC5B,GAAG,EAAE2B,OAAO,GAAG,CAAC,CAAC;IAChCA,OAAO,IAAI,CAAC,GAAGlD,MAAM,CAAC8B,CAAC,CAAC,CAACF,MAAM;EACjC;EACA,OAAOL,GAAG;AACZ;AAEA,SAASF,uBAAuBA,CAACR,MAAoB,EAAEC,KAAa,EAAY;EAC9E,MAAMd,MAAgB,GAAG,EAAE;EAC3B,KAAK,IAAI8B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B,MAAMsB,GAAG,GAAGvC,MAAM,CAACoB,MAAM,CAACU,YAAY,CAAC9B,MAAM,CAACqB,MAAM,CAAC;IACrDrB,MAAM,CAACqB,MAAM,IAAI,CAAC;IAClBlC,MAAM,CAACmC,IAAI,CAACtB,MAAM,CAACoB,MAAM,CAACoB,KAAK,CAACxC,MAAM,CAACqB,MAAM,EAAErB,MAAM,CAACqB,MAAM,GAAGkB,GAAG,CAAC,CAAC;IACpEvC,MAAM,CAACqB,MAAM,IAAIkB,GAAG;EACtB;EACA,OAAOpD,MAAM;AACf;AAEA,SAASS,iCAAiCA,CAACT,MAAgB,EAAEC,IAAyB,EAAU;EAC9F,IAAI,CAACA,IAAI,CAACqD,UAAU,EAAE;IACpB,MAAM,IAAI5C,KAAK,CAAC,gEAAgE,CAAC;EACnF;EACA,KAAK,IAAIoB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,MAAM,CAAC4B,MAAM,EAAEE,CAAC,EAAE,EAAE;IACtC9B,MAAM,CAAC8B,CAAC,CAAC,GAAGN,MAAM,CAACyB,IAAI,CAACjD,MAAM,CAAC8B,CAAC,CAAC,CAAC;IAClC,IAAI9B,MAAM,CAAC8B,CAAC,CAAC,CAACF,MAAM,KAAK3B,IAAI,CAACqD,UAAU,EAAE;MACxC,MAAM,IAAI5C,KAAK,4CAAAC,MAAA,CAA4CX,MAAM,CAAC8B,CAAC,CAAC,CAAE,CAAC;IACzE;EACF;EACA,OAAON,MAAM,CAACb,MAAM,CAACX,MAAM,CAAC;AAC9B;AAEA,SAASsB,iCAAiCA,CACxCT,MAAoB,EACpBC,KAAa,EACbb,IAAyB,EACf;EACV,MAAMD,MAAgB,GAAG,EAAE;EAC3B,IAAI,CAACC,IAAI,CAACqD,UAAU,EAAE;IACpB,MAAM,IAAI5C,KAAK,CAAC,gEAAgE,CAAC;EACnF;EACA,KAAK,IAAIoB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGhB,KAAK,EAAEgB,CAAC,EAAE,EAAE;IAC9B9B,MAAM,CAACmC,IAAI,CAACtB,MAAM,CAACoB,MAAM,CAACoB,KAAK,CAACxC,MAAM,CAACqB,MAAM,EAAErB,MAAM,CAACqB,MAAM,GAAGjC,IAAI,CAACqD,UAAU,CAAC,CAAC;IAChFzC,MAAM,CAACqB,MAAM,IAAIjC,IAAI,CAACqD,UAAU;EAClC;EACA,OAAOtD,MAAM;AACf"}
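For orientation, the plain codec mapped above exposes encodeValues and decodeValues, which write and read raw little-endian values against a {buffer, offset} cursor. The TypeScript sketch below is illustrative only and is not part of this diff: the deep import path is hypothetical, and the cursor shape is inferred from the embedded source rather than from a documented public API.

import {encodeValues, decodeValues} from '@loaders.gl/parquet/src/parquetjs/codecs/plain'; // hypothetical deep import

// PLAIN-encode three INT32 values: 4 bytes each, little-endian, 12 bytes total.
const encoded = encodeValues('INT32', [1, 2, 3], {});

// Decoding consumes a cursor object and advances its offset as values are read.
const cursor = {buffer: encoded, offset: 0};
const decoded = decodeValues('INT32', cursor, 3, {}); // [1, 2, 3]; cursor.offset is now 12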
@@ -1 +1 @@
- {"version":3,"file":"rle.js","names":["varint","encodeValues","type","values","opts","Error","map","x","parseInt","concat","buf","Buffer","alloc","run","repeats","i","length","encodeRunBitpacked","encodeRunRepeated","push","disableEnvelope","envelope","writeUInt32LE","undefined","copy","decodeValues","cursor","count","offset","header","decode","buffer","encodingLength","decodeRunBitpacked","decodeRunRepeated","slice","bitWidth","Array","fill","b","Math","floor","value","ceil","from","encode","writeUInt8"],"sources":["../../../../src/parquetjs/codecs/rle.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport type {PrimitiveType} from '../schema/declare';\nimport type {CursorBuffer, ParquetCodecOptions} from './declare';\nimport varint from 'varint';\n\n// eslint-disable-next-line max-statements, complexity\nexport function encodeValues(\n type: PrimitiveType,\n values: any[],\n opts: ParquetCodecOptions\n): Buffer {\n if (!('bitWidth' in opts)) {\n throw new Error('bitWidth is required');\n }\n\n switch (type) {\n case 'BOOLEAN':\n case 'INT32':\n case 'INT64':\n // tslint:disable-next-line:no-parameter-reassignment\n values = values.map((x) => parseInt(x, 10));\n break;\n\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n\n let buf = Buffer.alloc(0);\n let run: any[] = [];\n let repeats = 0;\n\n for (let i = 0; i < values.length; i++) {\n // If we are at the beginning of a run and the next value is same we start\n // collecting repeated values\n if (repeats === 0 && run.length % 8 === 0 && values[i] === values[i + 1]) {\n // If we have any data in runs we need to encode them\n if (run.length) {\n buf = Buffer.concat([buf, encodeRunBitpacked(run, opts)]);\n run = [];\n }\n repeats = 1;\n } else if (repeats > 0 && values[i] === values[i - 1]) {\n repeats += 1;\n } else {\n // If values changes we need to post any previous repeated values\n if (repeats) {\n buf = Buffer.concat([buf, encodeRunRepeated(values[i - 1], repeats, opts)]);\n repeats = 0;\n }\n run.push(values[i]);\n }\n }\n\n if (repeats) {\n buf = Buffer.concat([buf, encodeRunRepeated(values[values.length - 1], repeats, opts)]);\n } else if (run.length) {\n buf = Buffer.concat([buf, encodeRunBitpacked(run, opts)]);\n }\n\n if (opts.disableEnvelope) {\n return buf;\n }\n\n const envelope = Buffer.alloc(buf.length + 4);\n envelope.writeUInt32LE(buf.length, undefined);\n buf.copy(envelope, 4);\n\n return envelope;\n}\n\nexport function decodeValues(\n type: PrimitiveType,\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): number[] {\n if (!('bitWidth' in opts)) {\n throw new Error('bitWidth is required');\n }\n\n if (!opts.disableEnvelope) {\n cursor.offset += 4;\n }\n\n let values: number[] = [];\n while (values.length < count) {\n const header = varint.decode(cursor.buffer, cursor.offset);\n cursor.offset += varint.encodingLength(header);\n if (header & 1) {\n const count = (header >> 1) * 8;\n values.push(...decodeRunBitpacked(cursor, count, opts));\n } else {\n const count = header >> 1;\n values.push(...decodeRunRepeated(cursor, count, opts));\n }\n }\n values = values.slice(0, count);\n\n if (values.length !== count) {\n throw new Error('invalid RLE encoding');\n }\n\n return values;\n}\n\nfunction decodeRunBitpacked(\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): number[] {\n // @ts-ignore\n const bitWidth: number = opts.bitWidth;\n\n if (count % 8 !== 0) {\n throw new Error('must 
be a multiple of 8');\n }\n\n // tslint:disable-next-line:prefer-array-literal\n const values = new Array(count).fill(0);\n for (let b = 0; b < bitWidth * count; b++) {\n if (cursor.buffer[cursor.offset + Math.floor(b / 8)] & (1 << b % 8)) {\n values[Math.floor(b / bitWidth)] |= 1 << b % bitWidth;\n }\n }\n\n cursor.offset += bitWidth * (count / 8);\n return values;\n}\n\nfunction decodeRunRepeated(\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): number[] {\n // @ts-ignore\n const bitWidth: number = opts.bitWidth;\n\n let value = 0;\n for (let i = 0; i < Math.ceil(bitWidth / 8); i++) {\n // eslint-disable-next-line\n value << 8; // TODO - this looks wrong\n value += cursor.buffer[cursor.offset];\n cursor.offset += 1;\n }\n\n // tslint:disable-next-line:prefer-array-literal\n return new Array(count).fill(value);\n}\n\nfunction encodeRunBitpacked(values: number[], opts: ParquetCodecOptions): Buffer {\n // @ts-ignore\n const bitWidth: number = opts.bitWidth;\n\n for (let i = 0; i < values.length % 8; i++) {\n values.push(0);\n }\n\n const buf = Buffer.alloc(Math.ceil(bitWidth * (values.length / 8)));\n for (let b = 0; b < bitWidth * values.length; b++) {\n if ((values[Math.floor(b / bitWidth)] & (1 << b % bitWidth)) > 0) {\n buf[Math.floor(b / 8)] |= 1 << b % 8;\n }\n }\n\n return Buffer.concat([Buffer.from(varint.encode(((values.length / 8) << 1) | 1)), buf]);\n}\n\nfunction encodeRunRepeated(value: number, count: number, opts: ParquetCodecOptions): Buffer {\n // @ts-ignore\n const bitWidth: number = opts.bitWidth;\n\n const buf = Buffer.alloc(Math.ceil(bitWidth / 8));\n\n for (let i = 0; i < buf.length; i++) {\n buf.writeUInt8(value & 0xff, i);\n // eslint-disable-next-line\n value >> 8; // TODO - this looks wrong\n }\n\n return Buffer.concat([Buffer.from(varint.encode(count << 1)), 
buf]);\n}\n"],"mappings":"AAIA,OAAOA,MAAM,MAAM,QAAQ;AAG3B,OAAO,SAASC,YAAYA,CAC1BC,IAAmB,EACnBC,MAAa,EACbC,IAAyB,EACjB;EACR,IAAI,EAAE,UAAU,IAAIA,IAAI,CAAC,EAAE;IACzB,MAAM,IAAIC,KAAK,CAAC,sBAAsB,CAAC;EACzC;EAEA,QAAQH,IAAI;IACV,KAAK,SAAS;IACd,KAAK,OAAO;IACZ,KAAK,OAAO;MAEVC,MAAM,GAAGA,MAAM,CAACG,GAAG,CAAEC,CAAC,IAAKC,QAAQ,CAACD,CAAC,EAAE,EAAE,CAAC,CAAC;MAC3C;IAEF;MACE,MAAM,IAAIF,KAAK,sBAAAI,MAAA,CAAsBP,IAAI,EAAG;EAAC;EAGjD,IAAIQ,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC,CAAC,CAAC;EACzB,IAAIC,GAAU,GAAG,EAAE;EACnB,IAAIC,OAAO,GAAG,CAAC;EAEf,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGZ,MAAM,CAACa,MAAM,EAAED,CAAC,EAAE,EAAE;IAGtC,IAAID,OAAO,KAAK,CAAC,IAAID,GAAG,CAACG,MAAM,GAAG,CAAC,KAAK,CAAC,IAAIb,MAAM,CAACY,CAAC,CAAC,KAAKZ,MAAM,CAACY,CAAC,GAAG,CAAC,CAAC,EAAE;MAExE,IAAIF,GAAG,CAACG,MAAM,EAAE;QACdN,GAAG,GAAGC,MAAM,CAACF,MAAM,CAAC,CAACC,GAAG,EAAEO,kBAAkB,CAACJ,GAAG,EAAET,IAAI,CAAC,CAAC,CAAC;QACzDS,GAAG,GAAG,EAAE;MACV;MACAC,OAAO,GAAG,CAAC;IACb,CAAC,MAAM,IAAIA,OAAO,GAAG,CAAC,IAAIX,MAAM,CAACY,CAAC,CAAC,KAAKZ,MAAM,CAACY,CAAC,GAAG,CAAC,CAAC,EAAE;MACrDD,OAAO,IAAI,CAAC;IACd,CAAC,MAAM;MAEL,IAAIA,OAAO,EAAE;QACXJ,GAAG,GAAGC,MAAM,CAACF,MAAM,CAAC,CAACC,GAAG,EAAEQ,iBAAiB,CAACf,MAAM,CAACY,CAAC,GAAG,CAAC,CAAC,EAAED,OAAO,EAAEV,IAAI,CAAC,CAAC,CAAC;QAC3EU,OAAO,GAAG,CAAC;MACb;MACAD,GAAG,CAACM,IAAI,CAAChB,MAAM,CAACY,CAAC,CAAC,CAAC;IACrB;EACF;EAEA,IAAID,OAAO,EAAE;IACXJ,GAAG,GAAGC,MAAM,CAACF,MAAM,CAAC,CAACC,GAAG,EAAEQ,iBAAiB,CAACf,MAAM,CAACA,MAAM,CAACa,MAAM,GAAG,CAAC,CAAC,EAAEF,OAAO,EAAEV,IAAI,CAAC,CAAC,CAAC;EACzF,CAAC,MAAM,IAAIS,GAAG,CAACG,MAAM,EAAE;IACrBN,GAAG,GAAGC,MAAM,CAACF,MAAM,CAAC,CAACC,GAAG,EAAEO,kBAAkB,CAACJ,GAAG,EAAET,IAAI,CAAC,CAAC,CAAC;EAC3D;EAEA,IAAIA,IAAI,CAACgB,eAAe,EAAE;IACxB,OAAOV,GAAG;EACZ;EAEA,MAAMW,QAAQ,GAAGV,MAAM,CAACC,KAAK,CAACF,GAAG,CAACM,MAAM,GAAG,CAAC,CAAC;EAC7CK,QAAQ,CAACC,aAAa,CAACZ,GAAG,CAACM,MAAM,EAAEO,SAAS,CAAC;EAC7Cb,GAAG,CAACc,IAAI,CAACH,QAAQ,EAAE,CAAC,CAAC;EAErB,OAAOA,QAAQ;AACjB;AAEA,OAAO,SAASI,YAAYA,CAC1BvB,IAAmB,EACnBwB,MAAoB,EACpBC,KAAa,EACbvB,IAAyB,EACf;EACV,IAAI,EAAE,UAAU,IAAIA,IAAI,CAAC,EAAE;IACzB,MAAM,IAAIC,KAAK,CAAC,sBAAsB,CAAC;EACzC;EAEA,IAAI,CAACD,IAAI,CAACgB,eAAe,EAAE;IACzBM,MAAM,CAACE,MAAM,IAAI,CAAC;EACpB;EAEA,IAAIzB,MAAgB,GAAG,EAAE;EACzB,OAAOA,MAAM,CAACa,MAAM,GAAGW,KAAK,EAAE;IAC5B,MAAME,MAAM,GAAG7B,MAAM,CAAC8B,MAAM,CAACJ,MAAM,CAACK,MAAM,EAAEL,MAAM,CAACE,MAAM,CAAC;IAC1DF,MAAM,CAACE,MAAM,IAAI5B,MAAM,CAACgC,cAAc,CAACH,MAAM,CAAC;IAC9C,IAAIA,MAAM,GAAG,CAAC,EAAE;MACd,MAAMF,KAAK,GAAG,CAACE,MAAM,IAAI,CAAC,IAAI,CAAC;MAC/B1B,MAAM,CAACgB,IAAI,CAAC,GAAGc,kBAAkB,CAACP,MAAM,EAAEC,KAAK,EAAEvB,IAAI,CAAC,CAAC;IACzD,CAAC,MAAM;MACL,MAAMuB,KAAK,GAAGE,MAAM,IAAI,CAAC;MACzB1B,MAAM,CAACgB,IAAI,CAAC,GAAGe,iBAAiB,CAACR,MAAM,EAAEC,KAAK,EAAEvB,IAAI,CAAC,CAAC;IACxD;EACF;EACAD,MAAM,GAAGA,MAAM,CAACgC,KAAK,CAAC,CAAC,EAAER,KAAK,CAAC;EAE/B,IAAIxB,MAAM,CAACa,MAAM,KAAKW,KAAK,EAAE;IAC3B,MAAM,IAAItB,KAAK,CAAC,sBAAsB,CAAC;EACzC;EAEA,OAAOF,MAAM;AACf;AAEA,SAAS8B,kBAAkBA,CACzBP,MAAoB,EACpBC,KAAa,EACbvB,IAAyB,EACf;EAEV,MAAMgC,QAAgB,GAAGhC,IAAI,CAACgC,QAAQ;EAEtC,IAAIT,KAAK,GAAG,CAAC,KAAK,CAAC,EAAE;IACnB,MAAM,IAAItB,KAAK,CAAC,yBAAyB,CAAC;EAC5C;EAGA,MAAMF,MAAM,GAAG,IAAIkC,KAAK,CAACV,KAAK,CAAC,CAACW,IAAI,CAAC,CAAC,CAAC;EACvC,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,QAAQ,GAAGT,KAAK,EAAEY,CAAC,EAAE,EAAE;IACzC,IAAIb,MAAM,CAACK,MAAM,CAACL,MAAM,CAACE,MAAM,GAAGY,IAAI,CAACC,KAAK,CAACF,CAAC,GAAG,CAAC,CAAC,CAAC,GAAI,CAAC,IAAIA,CAAC,GAAG,CAAE,EAAE;MACnEpC,MAAM,CAACqC,IAAI,CAACC,KAAK,CAACF,CAAC,GAAGH,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAIG,CAAC,GAAGH,QAAQ;IACvD;EACF;EAEAV,MAAM,CAACE,MAAM,IAAIQ,QAAQ,IAAIT,KAAK,GAAG,CAAC,CAAC;EACvC,OAAOxB,MAAM;AACf;AAEA,S
AAS+B,iBAAiBA,CACxBR,MAAoB,EACpBC,KAAa,EACbvB,IAAyB,EACf;EAEV,MAAMgC,QAAgB,GAAGhC,IAAI,CAACgC,QAAQ;EAEtC,IAAIM,KAAK,GAAG,CAAC;EACb,KAAK,IAAI3B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGyB,IAAI,CAACG,IAAI,CAACP,QAAQ,GAAG,CAAC,CAAC,EAAErB,CAAC,EAAE,EAAE;IAEhD2B,KAAK,IAAI,CAAC;IACVA,KAAK,IAAIhB,MAAM,CAACK,MAAM,CAACL,MAAM,CAACE,MAAM,CAAC;IACrCF,MAAM,CAACE,MAAM,IAAI,CAAC;EACpB;EAGA,OAAO,IAAIS,KAAK,CAACV,KAAK,CAAC,CAACW,IAAI,CAACI,KAAK,CAAC;AACrC;AAEA,SAASzB,kBAAkBA,CAACd,MAAgB,EAAEC,IAAyB,EAAU;EAE/E,MAAMgC,QAAgB,GAAGhC,IAAI,CAACgC,QAAQ;EAEtC,KAAK,IAAIrB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGZ,MAAM,CAACa,MAAM,GAAG,CAAC,EAAED,CAAC,EAAE,EAAE;IAC1CZ,MAAM,CAACgB,IAAI,CAAC,CAAC,CAAC;EAChB;EAEA,MAAMT,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC4B,IAAI,CAACG,IAAI,CAACP,QAAQ,IAAIjC,MAAM,CAACa,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;EACnE,KAAK,IAAIuB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,QAAQ,GAAGjC,MAAM,CAACa,MAAM,EAAEuB,CAAC,EAAE,EAAE;IACjD,IAAI,CAACpC,MAAM,CAACqC,IAAI,CAACC,KAAK,CAACF,CAAC,GAAGH,QAAQ,CAAC,CAAC,GAAI,CAAC,IAAIG,CAAC,GAAGH,QAAS,IAAI,CAAC,EAAE;MAChE1B,GAAG,CAAC8B,IAAI,CAACC,KAAK,CAACF,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAIA,CAAC,GAAG,CAAC;IACtC;EACF;EAEA,OAAO5B,MAAM,CAACF,MAAM,CAAC,CAACE,MAAM,CAACiC,IAAI,CAAC5C,MAAM,CAAC6C,MAAM,CAAG1C,MAAM,CAACa,MAAM,GAAG,CAAC,IAAK,CAAC,GAAI,CAAC,CAAC,CAAC,EAAEN,GAAG,CAAC,CAAC;AACzF;AAEA,SAASQ,iBAAiBA,CAACwB,KAAa,EAAEf,KAAa,EAAEvB,IAAyB,EAAU;EAE1F,MAAMgC,QAAgB,GAAGhC,IAAI,CAACgC,QAAQ;EAEtC,MAAM1B,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC4B,IAAI,CAACG,IAAI,CAACP,QAAQ,GAAG,CAAC,CAAC,CAAC;EAEjD,KAAK,IAAIrB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGL,GAAG,CAACM,MAAM,EAAED,CAAC,EAAE,EAAE;IACnCL,GAAG,CAACoC,UAAU,CAACJ,KAAK,GAAG,IAAI,EAAE3B,CAAC,CAAC;IAE/B2B,KAAK,IAAI,CAAC;EACZ;EAEA,OAAO/B,MAAM,CAACF,MAAM,CAAC,CAACE,MAAM,CAACiC,IAAI,CAAC5C,MAAM,CAAC6C,MAAM,CAAClB,KAAK,IAAI,CAAC,CAAC,CAAC,EAAEjB,GAAG,CAAC,CAAC;AACrE"}
+ {"version":3,"file":"rle.js","names":["varint","encodeValues","type","values","opts","Error","map","x","parseInt","concat","buf","Buffer","alloc","run","repeats","i","length","encodeRunBitpacked","encodeRunRepeated","push","disableEnvelope","envelope","writeUInt32LE","undefined","copy","decodeValues","cursor","count","offset","header","decode","buffer","encodingLength","decodeRunBitpacked","decodeRunRepeated","slice","bitWidth","Array","fill","b","Math","floor","value","ceil","from","encode","writeUInt8"],"sources":["../../../../src/parquetjs/codecs/rle.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport type {PrimitiveType} from '../schema/declare';\nimport type {CursorBuffer, ParquetCodecOptions} from './declare';\nimport varint from 'varint';\n\n// eslint-disable-next-line max-statements, complexity\nexport function encodeValues(\n type: PrimitiveType,\n values: any[],\n opts: ParquetCodecOptions\n): Buffer {\n if (!('bitWidth' in opts)) {\n throw new Error('bitWidth is required');\n }\n\n switch (type) {\n case 'BOOLEAN':\n case 'INT32':\n case 'INT64':\n // tslint:disable-next-line:no-parameter-reassignment\n values = values.map((x) => parseInt(x, 10));\n break;\n\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n\n let buf = Buffer.alloc(0);\n let run: any[] = [];\n let repeats = 0;\n\n for (let i = 0; i < values.length; i++) {\n // If we are at the beginning of a run and the next value is same we start\n // collecting repeated values\n if (repeats === 0 && run.length % 8 === 0 && values[i] === values[i + 1]) {\n // If we have any data in runs we need to encode them\n if (run.length) {\n buf = Buffer.concat([buf, encodeRunBitpacked(run, opts)]);\n run = [];\n }\n repeats = 1;\n } else if (repeats > 0 && values[i] === values[i - 1]) {\n repeats += 1;\n } else {\n // If values changes we need to post any previous repeated values\n if (repeats) {\n buf = Buffer.concat([buf, encodeRunRepeated(values[i - 1], repeats, opts)]);\n repeats = 0;\n }\n run.push(values[i]);\n }\n }\n\n if (repeats) {\n buf = Buffer.concat([buf, encodeRunRepeated(values[values.length - 1], repeats, opts)]);\n } else if (run.length) {\n buf = Buffer.concat([buf, encodeRunBitpacked(run, opts)]);\n }\n\n if (opts.disableEnvelope) {\n return buf;\n }\n\n const envelope = Buffer.alloc(buf.length + 4);\n envelope.writeUInt32LE(buf.length, undefined);\n buf.copy(envelope, 4);\n\n return envelope;\n}\n\nexport function decodeValues(\n type: PrimitiveType,\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): number[] {\n if (!('bitWidth' in opts)) {\n throw new Error('bitWidth is required');\n }\n\n if (!opts.disableEnvelope) {\n cursor.offset += 4;\n }\n\n let values: number[] = [];\n while (values.length < count) {\n const header = varint.decode(cursor.buffer, cursor.offset);\n cursor.offset += varint.encodingLength(header);\n if (header & 1) {\n const count = (header >> 1) * 8;\n values.push(...decodeRunBitpacked(cursor, count, opts));\n } else {\n const count = header >> 1;\n values.push(...decodeRunRepeated(cursor, count, opts));\n }\n }\n values = values.slice(0, count);\n\n if (values.length !== count) {\n throw new Error('invalid RLE encoding');\n }\n\n return values;\n}\n\nfunction decodeRunBitpacked(\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): number[] {\n // @ts-ignore\n const bitWidth: number = opts.bitWidth;\n\n if (count % 8 !== 0) {\n throw new Error('must 
be a multiple of 8');\n }\n\n // tslint:disable-next-line:prefer-array-literal\n const values = new Array(count).fill(0);\n for (let b = 0; b < bitWidth * count; b++) {\n if (cursor.buffer[cursor.offset + Math.floor(b / 8)] & (1 << b % 8)) {\n values[Math.floor(b / bitWidth)] |= 1 << b % bitWidth;\n }\n }\n\n cursor.offset += bitWidth * (count / 8);\n return values;\n}\n\nfunction decodeRunRepeated(\n cursor: CursorBuffer,\n count: number,\n opts: ParquetCodecOptions\n): number[] {\n // @ts-ignore\n const bitWidth: number = opts.bitWidth;\n\n let value = 0;\n for (let i = 0; i < Math.ceil(bitWidth / 8); i++) {\n // eslint-disable-next-line\n value << 8; // TODO - this looks wrong\n value += cursor.buffer[cursor.offset];\n cursor.offset += 1;\n }\n\n // tslint:disable-next-line:prefer-array-literal\n return new Array(count).fill(value);\n}\n\nfunction encodeRunBitpacked(values: number[], opts: ParquetCodecOptions): Buffer {\n // @ts-ignore\n const bitWidth: number = opts.bitWidth;\n\n for (let i = 0; i < values.length % 8; i++) {\n values.push(0);\n }\n\n const buf = Buffer.alloc(Math.ceil(bitWidth * (values.length / 8)));\n for (let b = 0; b < bitWidth * values.length; b++) {\n if ((values[Math.floor(b / bitWidth)] & (1 << b % bitWidth)) > 0) {\n buf[Math.floor(b / 8)] |= 1 << b % 8;\n }\n }\n\n return Buffer.concat([Buffer.from(varint.encode(((values.length / 8) << 1) | 1)), buf]);\n}\n\nfunction encodeRunRepeated(value: number, count: number, opts: ParquetCodecOptions): Buffer {\n // @ts-ignore\n const bitWidth: number = opts.bitWidth;\n\n const buf = Buffer.alloc(Math.ceil(bitWidth / 8));\n\n for (let i = 0; i < buf.length; i++) {\n buf.writeUInt8(value & 0xff, i);\n // eslint-disable-next-line\n value >> 8; // TODO - this looks wrong\n }\n\n return Buffer.concat([Buffer.from(varint.encode(count << 1)), 
buf]);\n}\n"],"mappings":"AAIA,OAAOA,MAAM,MAAM,QAAQ;AAG3B,OAAO,SAASC,YAAYA,CAC1BC,IAAmB,EACnBC,MAAa,EACbC,IAAyB,EACjB;EACR,IAAI,EAAE,UAAU,IAAIA,IAAI,CAAC,EAAE;IACzB,MAAM,IAAIC,KAAK,CAAC,sBAAsB,CAAC;EACzC;EAEA,QAAQH,IAAI;IACV,KAAK,SAAS;IACd,KAAK,OAAO;IACZ,KAAK,OAAO;MAEVC,MAAM,GAAGA,MAAM,CAACG,GAAG,CAAEC,CAAC,IAAKC,QAAQ,CAACD,CAAC,EAAE,EAAE,CAAC,CAAC;MAC3C;IAEF;MACE,MAAM,IAAIF,KAAK,sBAAAI,MAAA,CAAsBP,IAAI,CAAE,CAAC;EAChD;EAEA,IAAIQ,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC,CAAC,CAAC;EACzB,IAAIC,GAAU,GAAG,EAAE;EACnB,IAAIC,OAAO,GAAG,CAAC;EAEf,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGZ,MAAM,CAACa,MAAM,EAAED,CAAC,EAAE,EAAE;IAGtC,IAAID,OAAO,KAAK,CAAC,IAAID,GAAG,CAACG,MAAM,GAAG,CAAC,KAAK,CAAC,IAAIb,MAAM,CAACY,CAAC,CAAC,KAAKZ,MAAM,CAACY,CAAC,GAAG,CAAC,CAAC,EAAE;MAExE,IAAIF,GAAG,CAACG,MAAM,EAAE;QACdN,GAAG,GAAGC,MAAM,CAACF,MAAM,CAAC,CAACC,GAAG,EAAEO,kBAAkB,CAACJ,GAAG,EAAET,IAAI,CAAC,CAAC,CAAC;QACzDS,GAAG,GAAG,EAAE;MACV;MACAC,OAAO,GAAG,CAAC;IACb,CAAC,MAAM,IAAIA,OAAO,GAAG,CAAC,IAAIX,MAAM,CAACY,CAAC,CAAC,KAAKZ,MAAM,CAACY,CAAC,GAAG,CAAC,CAAC,EAAE;MACrDD,OAAO,IAAI,CAAC;IACd,CAAC,MAAM;MAEL,IAAIA,OAAO,EAAE;QACXJ,GAAG,GAAGC,MAAM,CAACF,MAAM,CAAC,CAACC,GAAG,EAAEQ,iBAAiB,CAACf,MAAM,CAACY,CAAC,GAAG,CAAC,CAAC,EAAED,OAAO,EAAEV,IAAI,CAAC,CAAC,CAAC;QAC3EU,OAAO,GAAG,CAAC;MACb;MACAD,GAAG,CAACM,IAAI,CAAChB,MAAM,CAACY,CAAC,CAAC,CAAC;IACrB;EACF;EAEA,IAAID,OAAO,EAAE;IACXJ,GAAG,GAAGC,MAAM,CAACF,MAAM,CAAC,CAACC,GAAG,EAAEQ,iBAAiB,CAACf,MAAM,CAACA,MAAM,CAACa,MAAM,GAAG,CAAC,CAAC,EAAEF,OAAO,EAAEV,IAAI,CAAC,CAAC,CAAC;EACzF,CAAC,MAAM,IAAIS,GAAG,CAACG,MAAM,EAAE;IACrBN,GAAG,GAAGC,MAAM,CAACF,MAAM,CAAC,CAACC,GAAG,EAAEO,kBAAkB,CAACJ,GAAG,EAAET,IAAI,CAAC,CAAC,CAAC;EAC3D;EAEA,IAAIA,IAAI,CAACgB,eAAe,EAAE;IACxB,OAAOV,GAAG;EACZ;EAEA,MAAMW,QAAQ,GAAGV,MAAM,CAACC,KAAK,CAACF,GAAG,CAACM,MAAM,GAAG,CAAC,CAAC;EAC7CK,QAAQ,CAACC,aAAa,CAACZ,GAAG,CAACM,MAAM,EAAEO,SAAS,CAAC;EAC7Cb,GAAG,CAACc,IAAI,CAACH,QAAQ,EAAE,CAAC,CAAC;EAErB,OAAOA,QAAQ;AACjB;AAEA,OAAO,SAASI,YAAYA,CAC1BvB,IAAmB,EACnBwB,MAAoB,EACpBC,KAAa,EACbvB,IAAyB,EACf;EACV,IAAI,EAAE,UAAU,IAAIA,IAAI,CAAC,EAAE;IACzB,MAAM,IAAIC,KAAK,CAAC,sBAAsB,CAAC;EACzC;EAEA,IAAI,CAACD,IAAI,CAACgB,eAAe,EAAE;IACzBM,MAAM,CAACE,MAAM,IAAI,CAAC;EACpB;EAEA,IAAIzB,MAAgB,GAAG,EAAE;EACzB,OAAOA,MAAM,CAACa,MAAM,GAAGW,KAAK,EAAE;IAC5B,MAAME,MAAM,GAAG7B,MAAM,CAAC8B,MAAM,CAACJ,MAAM,CAACK,MAAM,EAAEL,MAAM,CAACE,MAAM,CAAC;IAC1DF,MAAM,CAACE,MAAM,IAAI5B,MAAM,CAACgC,cAAc,CAACH,MAAM,CAAC;IAC9C,IAAIA,MAAM,GAAG,CAAC,EAAE;MACd,MAAMF,KAAK,GAAG,CAACE,MAAM,IAAI,CAAC,IAAI,CAAC;MAC/B1B,MAAM,CAACgB,IAAI,CAAC,GAAGc,kBAAkB,CAACP,MAAM,EAAEC,KAAK,EAAEvB,IAAI,CAAC,CAAC;IACzD,CAAC,MAAM;MACL,MAAMuB,KAAK,GAAGE,MAAM,IAAI,CAAC;MACzB1B,MAAM,CAACgB,IAAI,CAAC,GAAGe,iBAAiB,CAACR,MAAM,EAAEC,KAAK,EAAEvB,IAAI,CAAC,CAAC;IACxD;EACF;EACAD,MAAM,GAAGA,MAAM,CAACgC,KAAK,CAAC,CAAC,EAAER,KAAK,CAAC;EAE/B,IAAIxB,MAAM,CAACa,MAAM,KAAKW,KAAK,EAAE;IAC3B,MAAM,IAAItB,KAAK,CAAC,sBAAsB,CAAC;EACzC;EAEA,OAAOF,MAAM;AACf;AAEA,SAAS8B,kBAAkBA,CACzBP,MAAoB,EACpBC,KAAa,EACbvB,IAAyB,EACf;EAEV,MAAMgC,QAAgB,GAAGhC,IAAI,CAACgC,QAAQ;EAEtC,IAAIT,KAAK,GAAG,CAAC,KAAK,CAAC,EAAE;IACnB,MAAM,IAAItB,KAAK,CAAC,yBAAyB,CAAC;EAC5C;EAGA,MAAMF,MAAM,GAAG,IAAIkC,KAAK,CAACV,KAAK,CAAC,CAACW,IAAI,CAAC,CAAC,CAAC;EACvC,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,QAAQ,GAAGT,KAAK,EAAEY,CAAC,EAAE,EAAE;IACzC,IAAIb,MAAM,CAACK,MAAM,CAACL,MAAM,CAACE,MAAM,GAAGY,IAAI,CAACC,KAAK,CAACF,CAAC,GAAG,CAAC,CAAC,CAAC,GAAI,CAAC,IAAIA,CAAC,GAAG,CAAE,EAAE;MACnEpC,MAAM,CAACqC,IAAI,CAACC,KAAK,CAACF,CAAC,GAAGH,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAIG,CAAC,GAAGH,QAAQ;IACvD;EACF;EAEAV,MAAM,CAACE,MAAM,IAAIQ,QAAQ,IAAIT,KAAK,GAAG,CAAC,CAAC;EACvC,OAAOxB,MAAM;AACf;A
AEA,SAAS+B,iBAAiBA,CACxBR,MAAoB,EACpBC,KAAa,EACbvB,IAAyB,EACf;EAEV,MAAMgC,QAAgB,GAAGhC,IAAI,CAACgC,QAAQ;EAEtC,IAAIM,KAAK,GAAG,CAAC;EACb,KAAK,IAAI3B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGyB,IAAI,CAACG,IAAI,CAACP,QAAQ,GAAG,CAAC,CAAC,EAAErB,CAAC,EAAE,EAAE;IAEhD2B,KAAK,IAAI,CAAC;IACVA,KAAK,IAAIhB,MAAM,CAACK,MAAM,CAACL,MAAM,CAACE,MAAM,CAAC;IACrCF,MAAM,CAACE,MAAM,IAAI,CAAC;EACpB;EAGA,OAAO,IAAIS,KAAK,CAACV,KAAK,CAAC,CAACW,IAAI,CAACI,KAAK,CAAC;AACrC;AAEA,SAASzB,kBAAkBA,CAACd,MAAgB,EAAEC,IAAyB,EAAU;EAE/E,MAAMgC,QAAgB,GAAGhC,IAAI,CAACgC,QAAQ;EAEtC,KAAK,IAAIrB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGZ,MAAM,CAACa,MAAM,GAAG,CAAC,EAAED,CAAC,EAAE,EAAE;IAC1CZ,MAAM,CAACgB,IAAI,CAAC,CAAC,CAAC;EAChB;EAEA,MAAMT,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC4B,IAAI,CAACG,IAAI,CAACP,QAAQ,IAAIjC,MAAM,CAACa,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC;EACnE,KAAK,IAAIuB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,QAAQ,GAAGjC,MAAM,CAACa,MAAM,EAAEuB,CAAC,EAAE,EAAE;IACjD,IAAI,CAACpC,MAAM,CAACqC,IAAI,CAACC,KAAK,CAACF,CAAC,GAAGH,QAAQ,CAAC,CAAC,GAAI,CAAC,IAAIG,CAAC,GAAGH,QAAS,IAAI,CAAC,EAAE;MAChE1B,GAAG,CAAC8B,IAAI,CAACC,KAAK,CAACF,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAIA,CAAC,GAAG,CAAC;IACtC;EACF;EAEA,OAAO5B,MAAM,CAACF,MAAM,CAAC,CAACE,MAAM,CAACiC,IAAI,CAAC5C,MAAM,CAAC6C,MAAM,CAAG1C,MAAM,CAACa,MAAM,GAAG,CAAC,IAAK,CAAC,GAAI,CAAC,CAAC,CAAC,EAAEN,GAAG,CAAC,CAAC;AACzF;AAEA,SAASQ,iBAAiBA,CAACwB,KAAa,EAAEf,KAAa,EAAEvB,IAAyB,EAAU;EAE1F,MAAMgC,QAAgB,GAAGhC,IAAI,CAACgC,QAAQ;EAEtC,MAAM1B,GAAG,GAAGC,MAAM,CAACC,KAAK,CAAC4B,IAAI,CAACG,IAAI,CAACP,QAAQ,GAAG,CAAC,CAAC,CAAC;EAEjD,KAAK,IAAIrB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGL,GAAG,CAACM,MAAM,EAAED,CAAC,EAAE,EAAE;IACnCL,GAAG,CAACoC,UAAU,CAACJ,KAAK,GAAG,IAAI,EAAE3B,CAAC,CAAC;IAE/B2B,KAAK,IAAI,CAAC;EACZ;EAEA,OAAO/B,MAAM,CAACF,MAAM,CAAC,CAACE,MAAM,CAACiC,IAAI,CAAC5C,MAAM,CAAC6C,MAAM,CAAClB,KAAK,IAAI,CAAC,CAAC,CAAC,EAAEjB,GAAG,CAAC,CAAC;AACrE"}
@@ -1,4 +1,4 @@
- import { NoCompression, GZipCompression, SnappyCompression, BrotliCompression, LZOCompression, LZ4Compression, ZstdCompression } from '@loaders.gl/compression';
+ import { NoCompression, GZipCompression, SnappyCompression, BrotliCompression, LZ4Compression, ZstdCompression } from '@loaders.gl/compression';
  function toBuffer(arrayBuffer) {
  return Buffer.from(arrayBuffer);
  }
@@ -10,10 +10,8 @@ function toArrayBuffer(buffer) {
  return buffer;
  }
  import lz4js from 'lz4js';
- import lzo from 'lzo';
  const modules = {
- lz4js,
- lzo
+ lz4js
  };
  export const PARQUET_COMPRESSION_METHODS = {
  UNCOMPRESSED: new NoCompression(),
@@ -28,9 +26,6 @@ export const PARQUET_COMPRESSION_METHODS = {
  LZ4_RAW: new LZ4Compression({
  modules
  }),
- LZO: new LZOCompression({
- modules
- }),
  ZSTD: new ZstdCompression({
  modules
  })
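For context, the registry diffed above is consumed through the deflate and decompress helpers visible in the accompanying compression.ts source. The TypeScript sketch below is illustrative only and is not part of this diff; the deep import path is hypothetical and may not match the package's public entry point.

import {preloadCompressions, deflate, decompress} from '@loaders.gl/parquet/src/parquetjs/compression'; // hypothetical deep import

async function snappyRoundtrip(input: Buffer): Promise<Buffer> {
  // Codecs backed by large external libraries are registered up front.
  await preloadCompressions();
  // 'SNAPPY' remains registered in PARQUET_COMPRESSION_METHODS; as of this version
  // 'LZO' is not, so deflate('LZO', ...) rejects with
  // "parquet: invalid compression method: LZO".
  const compressed = await deflate('SNAPPY', input);
  return decompress('SNAPPY', compressed, input.byteLength);
}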
@@ -1 +1 @@
- {"version":3,"file":"compression.js","names":["NoCompression","GZipCompression","SnappyCompression","BrotliCompression","LZOCompression","LZ4Compression","ZstdCompression","toBuffer","arrayBuffer","Buffer","from","toArrayBuffer","buffer","isBuffer","typedArray","Uint8Array","byteOffset","length","slice","lz4js","lzo","modules","PARQUET_COMPRESSION_METHODS","UNCOMPRESSED","GZIP","SNAPPY","BROTLI","LZ4","LZ4_RAW","LZO","ZSTD","preloadCompressions","options","compressions","Object","values","Promise","all","map","compression","preload","deflate","method","value","Error","concat","inputArrayBuffer","compressedArrayBuffer","compress","decompress","size","inflate"],"sources":["../../../src/parquetjs/compression.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\n// Forked from https://github.com/ironSource/parquetjs under MIT license\n\nimport {\n Compression,\n NoCompression,\n GZipCompression,\n SnappyCompression,\n BrotliCompression,\n LZOCompression,\n LZ4Compression,\n ZstdCompression\n} from '@loaders.gl/compression';\n\nimport {ParquetCompression} from './schema/declare';\n\n/** We can't use loaders-util buffer handling since we are dependent on buffers even in the browser */\nfunction toBuffer(arrayBuffer: ArrayBuffer): Buffer {\n return Buffer.from(arrayBuffer);\n}\n\nfunction toArrayBuffer(buffer: Buffer): ArrayBuffer {\n // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n if (Buffer.isBuffer(buffer)) {\n const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);\n return typedArray.slice().buffer;\n }\n return buffer;\n}\n\n// TODO switch to worker compression to avoid bundling...\n\n// import brotli from 'brotli'; - brotli has problems with decompress in browsers\n// import brotliDecompress from 'brotli/decompress';\nimport lz4js from 'lz4js';\nimport lzo from 'lzo';\n// import {ZstdCodec} from 'zstd-codec';\n\n// Inject large dependencies through Compression constructor options\nconst modules = {\n // brotli has problems with decompress in browsers\n // brotli: {\n // decompress: brotliDecompress,\n // compress: () => {\n // throw new Error('brotli compress');\n // }\n // },\n lz4js,\n lzo\n // 'zstd-codec': ZstdCodec\n};\n\n// See https://github.com/apache/parquet-format/blob/master/Compression.md\nexport const PARQUET_COMPRESSION_METHODS: Record<ParquetCompression, Compression> = {\n UNCOMPRESSED: new NoCompression(),\n GZIP: new GZipCompression(),\n SNAPPY: new SnappyCompression(),\n BROTLI: new BrotliCompression({modules}),\n // TODO: Understand difference between LZ4 and LZ4_RAW\n LZ4: new LZ4Compression({modules}),\n LZ4_RAW: new LZ4Compression({modules}),\n LZO: new LZOCompression({modules}),\n ZSTD: new ZstdCompression({modules})\n};\n\n/**\n * Register compressions that have big external libraries\n * @param options.modules External library dependencies\n */\nexport async function preloadCompressions(options?: {modules: {[key: string]: any}}) {\n const compressions = Object.values(PARQUET_COMPRESSION_METHODS);\n return await Promise.all(compressions.map((compression) => compression.preload()));\n}\n\n/**\n * Deflate a value using compression method `method`\n */\nexport async function deflate(method: ParquetCompression, value: Buffer): Promise<Buffer> {\n const compression = PARQUET_COMPRESSION_METHODS[method];\n if (!compression) {\n throw new Error(`parquet: invalid compression method: ${method}`);\n 
}\n const inputArrayBuffer = toArrayBuffer(value);\n const compressedArrayBuffer = await compression.compress(inputArrayBuffer);\n return toBuffer(compressedArrayBuffer);\n}\n\n/**\n * Inflate a value using compression method `method`\n */\nexport async function decompress(\n method: ParquetCompression,\n value: Buffer,\n size: number\n): Promise<Buffer> {\n const compression = PARQUET_COMPRESSION_METHODS[method];\n if (!compression) {\n throw new Error(`parquet: invalid compression method: ${method}`);\n }\n const inputArrayBuffer = toArrayBuffer(value);\n const compressedArrayBuffer = await compression.decompress(inputArrayBuffer, size);\n return toBuffer(compressedArrayBuffer);\n}\n\n/*\n * Inflate a value using compression method `method`\n */\nexport function inflate(method: ParquetCompression, value: Buffer, size: number): Buffer {\n if (!(method in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`invalid compression method: ${method}`);\n }\n // @ts-ignore\n return PARQUET_COMPRESSION_METHODS[method].inflate(value, size);\n}\n\n/*\nfunction deflate_identity(value: Buffer): Buffer {\n return value;\n}\n\nfunction deflate_gzip(value: Buffer): Buffer {\n return zlib.gzipSync(value);\n}\n\nfunction deflate_snappy(value: Buffer): Buffer {\n return snappyjs.compress(value);\n}\n\nfunction deflate_lzo(value: Buffer): Buffer {\n lzo = lzo || Util.load('lzo');\n return lzo.compress(value);\n}\n\nfunction deflate_brotli(value: Buffer): Buffer {\n brotli = brotli || Util.load('brotli');\n const result = brotli.compress(value, {\n mode: 0,\n quality: 8,\n lgwin: 22\n });\n return result ? Buffer.from(result) : Buffer.alloc(0);\n}\n\nfunction deflate_lz4(value: Buffer): Buffer {\n lz4js = lz4js || Util.load('lz4js');\n try {\n // let result = Buffer.alloc(lz4js.encodeBound(value.length));\n // const compressedSize = lz4.encodeBlock(value, result);\n // // remove unnecessary bytes\n // result = result.slice(0, compressedSize);\n // return result;\n return Buffer.from(lz4js.compress(value));\n } catch (err) {\n throw err;\n }\n}\nfunction inflate_identity(value: Buffer): Buffer {\n return value;\n}\n\nfunction inflate_gzip(value: Buffer): Buffer {\n return zlib.gunzipSync(value);\n}\n\nfunction inflate_snappy(value: Buffer): Buffer {\n return snappyjs.uncompress(value);\n}\n\nfunction inflate_lzo(value: Buffer, size: number): Buffer {\n lzo = lzo || Util.load('lzo');\n return lzo.decompress(value, size);\n}\n\nfunction inflate_lz4(value: Buffer, size: number): Buffer {\n lz4js = lz4js || Util.load('lz4js');\n try {\n // let result = Buffer.alloc(size);\n // const uncompressedSize = lz4js.decodeBlock(value, result);\n // // remove unnecessary bytes\n // result = result.slice(0, uncompressedSize);\n // return result;\n return Buffer.from(lz4js.decompress(value, size));\n } catch (err) {\n throw err;\n }\n}\n\nfunction inflate_brotli(value: Buffer): Buffer {\n brotli = brotli || Util.load('brotli');\n if (!value.length) {\n return Buffer.alloc(0);\n }\n return 
Buffer.from(brotli.decompress(value));\n}\n*/\n"],"mappings":"AAIA,SAEEA,aAAa,EACbC,eAAe,EACfC,iBAAiB,EACjBC,iBAAiB,EACjBC,cAAc,EACdC,cAAc,EACdC,eAAe,QACV,yBAAyB;AAKhC,SAASC,QAAQA,CAACC,WAAwB,EAAU;EAClD,OAAOC,MAAM,CAACC,IAAI,CAACF,WAAW,CAAC;AACjC;AAEA,SAASG,aAAaA,CAACC,MAAc,EAAe;EAElD,IAAIH,MAAM,CAACI,QAAQ,CAACD,MAAM,CAAC,EAAE;IAC3B,MAAME,UAAU,GAAG,IAAIC,UAAU,CAACH,MAAM,CAACA,MAAM,EAAEA,MAAM,CAACI,UAAU,EAAEJ,MAAM,CAACK,MAAM,CAAC;IAClF,OAAOH,UAAU,CAACI,KAAK,EAAE,CAACN,MAAM;EAClC;EACA,OAAOA,MAAM;AACf;AAMA,OAAOO,KAAK,MAAM,OAAO;AACzB,OAAOC,GAAG,MAAM,KAAK;AAIrB,MAAMC,OAAO,GAAG;EAQdF,KAAK;EACLC;AAEF,CAAC;AAGD,OAAO,MAAME,2BAAoE,GAAG;EAClFC,YAAY,EAAE,IAAIvB,aAAa,EAAE;EACjCwB,IAAI,EAAE,IAAIvB,eAAe,EAAE;EAC3BwB,MAAM,EAAE,IAAIvB,iBAAiB,EAAE;EAC/BwB,MAAM,EAAE,IAAIvB,iBAAiB,CAAC;IAACkB;EAAO,CAAC,CAAC;EAExCM,GAAG,EAAE,IAAItB,cAAc,CAAC;IAACgB;EAAO,CAAC,CAAC;EAClCO,OAAO,EAAE,IAAIvB,cAAc,CAAC;IAACgB;EAAO,CAAC,CAAC;EACtCQ,GAAG,EAAE,IAAIzB,cAAc,CAAC;IAACiB;EAAO,CAAC,CAAC;EAClCS,IAAI,EAAE,IAAIxB,eAAe,CAAC;IAACe;EAAO,CAAC;AACrC,CAAC;AAMD,OAAO,eAAeU,mBAAmBA,CAACC,OAAyC,EAAE;EACnF,MAAMC,YAAY,GAAGC,MAAM,CAACC,MAAM,CAACb,2BAA2B,CAAC;EAC/D,OAAO,MAAMc,OAAO,CAACC,GAAG,CAACJ,YAAY,CAACK,GAAG,CAAEC,WAAW,IAAKA,WAAW,CAACC,OAAO,EAAE,CAAC,CAAC;AACpF;AAKA,OAAO,eAAeC,OAAOA,CAACC,MAA0B,EAAEC,KAAa,EAAmB;EACxF,MAAMJ,WAAW,GAAGjB,2BAA2B,CAACoB,MAAM,CAAC;EACvD,IAAI,CAACH,WAAW,EAAE;IAChB,MAAM,IAAIK,KAAK,yCAAAC,MAAA,CAAyCH,MAAM,EAAG;EACnE;EACA,MAAMI,gBAAgB,GAAGnC,aAAa,CAACgC,KAAK,CAAC;EAC7C,MAAMI,qBAAqB,GAAG,MAAMR,WAAW,CAACS,QAAQ,CAACF,gBAAgB,CAAC;EAC1E,OAAOvC,QAAQ,CAACwC,qBAAqB,CAAC;AACxC;AAKA,OAAO,eAAeE,UAAUA,CAC9BP,MAA0B,EAC1BC,KAAa,EACbO,IAAY,EACK;EACjB,MAAMX,WAAW,GAAGjB,2BAA2B,CAACoB,MAAM,CAAC;EACvD,IAAI,CAACH,WAAW,EAAE;IAChB,MAAM,IAAIK,KAAK,yCAAAC,MAAA,CAAyCH,MAAM,EAAG;EACnE;EACA,MAAMI,gBAAgB,GAAGnC,aAAa,CAACgC,KAAK,CAAC;EAC7C,MAAMI,qBAAqB,GAAG,MAAMR,WAAW,CAACU,UAAU,CAACH,gBAAgB,EAAEI,IAAI,CAAC;EAClF,OAAO3C,QAAQ,CAACwC,qBAAqB,CAAC;AACxC;AAKA,OAAO,SAASI,OAAOA,CAACT,MAA0B,EAAEC,KAAa,EAAEO,IAAY,EAAU;EACvF,IAAI,EAAER,MAAM,IAAIpB,2BAA2B,CAAC,EAAE;IAC5C,MAAM,IAAIsB,KAAK,gCAAAC,MAAA,CAAgCH,MAAM,EAAG;EAC1D;EAEA,OAAOpB,2BAA2B,CAACoB,MAAM,CAAC,CAACS,OAAO,CAACR,KAAK,EAAEO,IAAI,CAAC;AACjE"}
+ {"version":3,"file":"compression.js","names":["NoCompression","GZipCompression","SnappyCompression","BrotliCompression","LZ4Compression","ZstdCompression","toBuffer","arrayBuffer","Buffer","from","toArrayBuffer","buffer","isBuffer","typedArray","Uint8Array","byteOffset","length","slice","lz4js","modules","PARQUET_COMPRESSION_METHODS","UNCOMPRESSED","GZIP","SNAPPY","BROTLI","LZ4","LZ4_RAW","ZSTD","preloadCompressions","options","compressions","Object","values","Promise","all","map","compression","preload","deflate","method","value","Error","concat","inputArrayBuffer","compressedArrayBuffer","compress","decompress","size","inflate"],"sources":["../../../src/parquetjs/compression.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\n// Forked from https://github.com/ironSource/parquetjs under MIT license\n\nimport {\n Compression,\n NoCompression,\n GZipCompression,\n SnappyCompression,\n BrotliCompression,\n // LZOCompression,\n LZ4Compression,\n ZstdCompression\n} from '@loaders.gl/compression';\n\nimport {ParquetCompression} from './schema/declare';\n\n/** We can't use loaders-util buffer handling since we are dependent on buffers even in the browser */\nfunction toBuffer(arrayBuffer: ArrayBuffer): Buffer {\n return Buffer.from(arrayBuffer);\n}\n\nfunction toArrayBuffer(buffer: Buffer): ArrayBuffer {\n // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n if (Buffer.isBuffer(buffer)) {\n const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);\n return typedArray.slice().buffer;\n }\n return buffer;\n}\n\n// TODO switch to worker compression to avoid bundling...\n\n// import brotli from 'brotli'; - brotli has problems with decompress in browsers\n// import brotliDecompress from 'brotli/decompress';\nimport lz4js from 'lz4js';\n// import lzo from 'lzo';\n// import {ZstdCodec} from 'zstd-codec';\n\n// Inject large dependencies through Compression constructor options\nconst modules = {\n // brotli has problems with decompress in browsers\n // brotli: {\n // decompress: brotliDecompress,\n // compress: () => {\n // throw new Error('brotli compress');\n // }\n // },\n lz4js\n // lzo\n // 'zstd-codec': ZstdCodec\n};\n\n/**\n * See https://github.com/apache/parquet-format/blob/master/Compression.md\n */\n// @ts-expect-error\nexport const PARQUET_COMPRESSION_METHODS: Record<ParquetCompression, Compression> = {\n UNCOMPRESSED: new NoCompression(),\n GZIP: new GZipCompression(),\n SNAPPY: new SnappyCompression(),\n BROTLI: new BrotliCompression({modules}),\n // TODO: Understand difference between LZ4 and LZ4_RAW\n LZ4: new LZ4Compression({modules}),\n LZ4_RAW: new LZ4Compression({modules}),\n //\n // LZO: new LZOCompression({modules}),\n ZSTD: new ZstdCompression({modules})\n};\n\n/**\n * Register compressions that have big external libraries\n * @param options.modules External library dependencies\n */\nexport async function preloadCompressions(options?: {modules: {[key: string]: any}}) {\n const compressions = Object.values(PARQUET_COMPRESSION_METHODS);\n return await Promise.all(compressions.map((compression) => compression.preload()));\n}\n\n/**\n * Deflate a value using compression method `method`\n */\nexport async function deflate(method: ParquetCompression, value: Buffer): Promise<Buffer> {\n const compression = PARQUET_COMPRESSION_METHODS[method];\n if (!compression) {\n throw new Error(`parquet: invalid compression 
method: ${method}`);\n }\n const inputArrayBuffer = toArrayBuffer(value);\n const compressedArrayBuffer = await compression.compress(inputArrayBuffer);\n return toBuffer(compressedArrayBuffer);\n}\n\n/**\n * Inflate a value using compression method `method`\n */\nexport async function decompress(\n method: ParquetCompression,\n value: Buffer,\n size: number\n): Promise<Buffer> {\n const compression = PARQUET_COMPRESSION_METHODS[method];\n if (!compression) {\n throw new Error(`parquet: invalid compression method: ${method}`);\n }\n const inputArrayBuffer = toArrayBuffer(value);\n const compressedArrayBuffer = await compression.decompress(inputArrayBuffer, size);\n return toBuffer(compressedArrayBuffer);\n}\n\n/*\n * Inflate a value using compression method `method`\n */\nexport function inflate(method: ParquetCompression, value: Buffer, size: number): Buffer {\n if (!(method in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`invalid compression method: ${method}`);\n }\n // @ts-ignore\n return PARQUET_COMPRESSION_METHODS[method].inflate(value, size);\n}\n\n/*\nfunction deflate_identity(value: Buffer): Buffer {\n return value;\n}\n\nfunction deflate_gzip(value: Buffer): Buffer {\n return zlib.gzipSync(value);\n}\n\nfunction deflate_snappy(value: Buffer): Buffer {\n return snappyjs.compress(value);\n}\n\nfunction deflate_lzo(value: Buffer): Buffer {\n lzo = lzo || Util.load('lzo');\n return lzo.compress(value);\n}\n\nfunction deflate_brotli(value: Buffer): Buffer {\n brotli = brotli || Util.load('brotli');\n const result = brotli.compress(value, {\n mode: 0,\n quality: 8,\n lgwin: 22\n });\n return result ? Buffer.from(result) : Buffer.alloc(0);\n}\n\nfunction deflate_lz4(value: Buffer): Buffer {\n lz4js = lz4js || Util.load('lz4js');\n try {\n // let result = Buffer.alloc(lz4js.encodeBound(value.length));\n // const compressedSize = lz4.encodeBlock(value, result);\n // // remove unnecessary bytes\n // result = result.slice(0, compressedSize);\n // return result;\n return Buffer.from(lz4js.compress(value));\n } catch (err) {\n throw err;\n }\n}\nfunction inflate_identity(value: Buffer): Buffer {\n return value;\n}\n\nfunction inflate_gzip(value: Buffer): Buffer {\n return zlib.gunzipSync(value);\n}\n\nfunction inflate_snappy(value: Buffer): Buffer {\n return snappyjs.uncompress(value);\n}\n\nfunction inflate_lzo(value: Buffer, size: number): Buffer {\n lzo = lzo || Util.load('lzo');\n return lzo.decompress(value, size);\n}\n\nfunction inflate_lz4(value: Buffer, size: number): Buffer {\n lz4js = lz4js || Util.load('lz4js');\n try {\n // let result = Buffer.alloc(size);\n // const uncompressedSize = lz4js.decodeBlock(value, result);\n // // remove unnecessary bytes\n // result = result.slice(0, uncompressedSize);\n // return result;\n return Buffer.from(lz4js.decompress(value, size));\n } catch (err) {\n throw err;\n }\n}\n\nfunction inflate_brotli(value: Buffer): Buffer {\n brotli = brotli || Util.load('brotli');\n if (!value.length) {\n return Buffer.alloc(0);\n }\n return 
Buffer.from(brotli.decompress(value));\n}\n*/\n"],"mappings":"AAIA,SAEEA,aAAa,EACbC,eAAe,EACfC,iBAAiB,EACjBC,iBAAiB,EAEjBC,cAAc,EACdC,eAAe,QACV,yBAAyB;AAKhC,SAASC,QAAQA,CAACC,WAAwB,EAAU;EAClD,OAAOC,MAAM,CAACC,IAAI,CAACF,WAAW,CAAC;AACjC;AAEA,SAASG,aAAaA,CAACC,MAAc,EAAe;EAElD,IAAIH,MAAM,CAACI,QAAQ,CAACD,MAAM,CAAC,EAAE;IAC3B,MAAME,UAAU,GAAG,IAAIC,UAAU,CAACH,MAAM,CAACA,MAAM,EAAEA,MAAM,CAACI,UAAU,EAAEJ,MAAM,CAACK,MAAM,CAAC;IAClF,OAAOH,UAAU,CAACI,KAAK,CAAC,CAAC,CAACN,MAAM;EAClC;EACA,OAAOA,MAAM;AACf;AAMA,OAAOO,KAAK,MAAM,OAAO;AAKzB,MAAMC,OAAO,GAAG;EAQdD;AAGF,CAAC;AAMD,OAAO,MAAME,2BAAoE,GAAG;EAClFC,YAAY,EAAE,IAAIrB,aAAa,CAAC,CAAC;EACjCsB,IAAI,EAAE,IAAIrB,eAAe,CAAC,CAAC;EAC3BsB,MAAM,EAAE,IAAIrB,iBAAiB,CAAC,CAAC;EAC/BsB,MAAM,EAAE,IAAIrB,iBAAiB,CAAC;IAACgB;EAAO,CAAC,CAAC;EAExCM,GAAG,EAAE,IAAIrB,cAAc,CAAC;IAACe;EAAO,CAAC,CAAC;EAClCO,OAAO,EAAE,IAAItB,cAAc,CAAC;IAACe;EAAO,CAAC,CAAC;EAGtCQ,IAAI,EAAE,IAAItB,eAAe,CAAC;IAACc;EAAO,CAAC;AACrC,CAAC;AAMD,OAAO,eAAeS,mBAAmBA,CAACC,OAAyC,EAAE;EACnF,MAAMC,YAAY,GAAGC,MAAM,CAACC,MAAM,CAACZ,2BAA2B,CAAC;EAC/D,OAAO,MAAMa,OAAO,CAACC,GAAG,CAACJ,YAAY,CAACK,GAAG,CAAEC,WAAW,IAAKA,WAAW,CAACC,OAAO,CAAC,CAAC,CAAC,CAAC;AACpF;AAKA,OAAO,eAAeC,OAAOA,CAACC,MAA0B,EAAEC,KAAa,EAAmB;EACxF,MAAMJ,WAAW,GAAGhB,2BAA2B,CAACmB,MAAM,CAAC;EACvD,IAAI,CAACH,WAAW,EAAE;IAChB,MAAM,IAAIK,KAAK,yCAAAC,MAAA,CAAyCH,MAAM,CAAE,CAAC;EACnE;EACA,MAAMI,gBAAgB,GAAGjC,aAAa,CAAC8B,KAAK,CAAC;EAC7C,MAAMI,qBAAqB,GAAG,MAAMR,WAAW,CAACS,QAAQ,CAACF,gBAAgB,CAAC;EAC1E,OAAOrC,QAAQ,CAACsC,qBAAqB,CAAC;AACxC;AAKA,OAAO,eAAeE,UAAUA,CAC9BP,MAA0B,EAC1BC,KAAa,EACbO,IAAY,EACK;EACjB,MAAMX,WAAW,GAAGhB,2BAA2B,CAACmB,MAAM,CAAC;EACvD,IAAI,CAACH,WAAW,EAAE;IAChB,MAAM,IAAIK,KAAK,yCAAAC,MAAA,CAAyCH,MAAM,CAAE,CAAC;EACnE;EACA,MAAMI,gBAAgB,GAAGjC,aAAa,CAAC8B,KAAK,CAAC;EAC7C,MAAMI,qBAAqB,GAAG,MAAMR,WAAW,CAACU,UAAU,CAACH,gBAAgB,EAAEI,IAAI,CAAC;EAClF,OAAOzC,QAAQ,CAACsC,qBAAqB,CAAC;AACxC;AAKA,OAAO,SAASI,OAAOA,CAACT,MAA0B,EAAEC,KAAa,EAAEO,IAAY,EAAU;EACvF,IAAI,EAAER,MAAM,IAAInB,2BAA2B,CAAC,EAAE;IAC5C,MAAM,IAAIqB,KAAK,gCAAAC,MAAA,CAAgCH,MAAM,CAAE,CAAC;EAC1D;EAEA,OAAOnB,2BAA2B,CAACmB,MAAM,CAAC,CAACS,OAAO,CAACR,KAAK,EAAEO,IAAI,CAAC;AACjE"}
@@ -1 +1 @@
- {"version":3,"file":"parquet-encoder.js","names":["PARQUET_CODECS","Compression","Shred","ColumnChunk","ColumnMetaData","CompressionCodec","ConvertedType","DataPageHeader","DataPageHeaderV2","Encoding","FieldRepetitionType","FileMetaData","KeyValue","PageHeader","PageType","RowGroup","SchemaElement","Type","osopen","oswrite","osclose","getBitWidth","serializeThrift","Int64","PARQUET_MAGIC","PARQUET_VERSION","PARQUET_DEFAULT_PAGE_SIZE","PARQUET_DEFAULT_ROW_GROUP_SIZE","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING","ParquetEncoder","openFile","schema","path","opts","outputStream","openStream","arguments","length","undefined","envelopeWriter","ParquetEnvelopeWriter","constructor","_defineProperty","rowBuffer","rowGroupSize","closed","userMetadata","writeHeader","err","close","appendRow","row","Error","shredRecord","rowCount","callback","writeFooter","setMetadata","key","value","String","setRowGroupSize","cnt","setPageSize","writeFn","bind","closeFn","fileOffset","write","offset","rowGroups","pageSize","useDataPageV2","Boolean","writeSection","buf","Buffer","from","writeRowGroup","records","rgroup","encodeRowGroup","baseOffset","push","metadata","body","encodeFooter","encodeValues","type","encoding","values","concat","encodeDataPage","column","data","rLevelsBuf","alloc","rLevelMax","rlevels","bitWidth","dLevelsBuf","dLevelMax","dlevels","valuesBuf","primitiveType","typeLength","dataBuf","compressedBuf","deflate","compression","header","DATA_PAGE","data_page_header","num_values","count","definition_level_encoding","repetition_level_encoding","uncompressed_page_size","compressed_page_size","headerBuf","page","headerSize","encodeDataPageV2","disableEnvelope","DATA_PAGE_V2","data_page_header_v2","num_nulls","num_rows","definition_levels_byte_length","repetition_levels_byte_length","is_compressed","encodeColumnChunk","buffer","columnData","join","pageBuf","total_uncompressed_size","total_compressed_size","result","path_in_schema","data_page_offset","encodings","codec","metadataOffset","columns","total_byte_size","field","fieldList","isNested","cchunkData","cchunk","file_offset","meta_data","Number","version","created_by","row_groups","key_value_metadata","_metadata$key_value_m","_metadata$key_value_m2","_metadata$key_value_m3","kv","call","schemaRoot","name","num_children","Object","keys","fields","relt","repetitionType","schemaElem","repetition_type","fieldCount","originalType","converted_type","type_length","metadataEncoded","footerEncoded","copy","writeUInt32LE"],"sources":["../../../../src/parquetjs/encoder/parquet-encoder.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\nimport {stream} from '@loaders.gl/loader-utils';\nimport {ParquetCodecOptions, PARQUET_CODECS} from '../codecs';\nimport * as Compression from '../compression';\nimport {\n ParquetBuffer,\n ParquetCodec,\n ParquetData,\n ParquetField,\n PrimitiveType\n} from '../schema/declare';\nimport {ParquetSchema} from '../schema/schema';\nimport * as Shred from '../schema/shred';\nimport {\n ColumnChunk,\n ColumnMetaData,\n CompressionCodec,\n ConvertedType,\n DataPageHeader,\n DataPageHeaderV2,\n Encoding,\n FieldRepetitionType,\n FileMetaData,\n KeyValue,\n PageHeader,\n PageType,\n RowGroup,\n SchemaElement,\n Type\n} from '../parquet-thrift';\nimport {osopen, oswrite, osclose} from '../utils/file-utils';\nimport {getBitWidth, serializeThrift} from '../utils/read-utils';\nimport Int64 from 'node-int64';\n\n/**\n * Parquet 
File Magic String\n */\nconst PARQUET_MAGIC = 'PAR1';\n\n/**\n * Parquet File Format Version\n */\nconst PARQUET_VERSION = 1;\n\n/**\n * Default Page and Row Group sizes\n */\nconst PARQUET_DEFAULT_PAGE_SIZE = 8192;\nconst PARQUET_DEFAULT_ROW_GROUP_SIZE = 4096;\n\n/**\n * Repetition and Definition Level Encoding\n */\nconst PARQUET_RDLVL_TYPE = 'INT32';\nconst PARQUET_RDLVL_ENCODING = 'RLE';\n\nexport interface ParquetEncoderOptions {\n baseOffset?: number;\n rowGroupSize?: number;\n pageSize?: number;\n useDataPageV2?: boolean;\n\n // Write Stream Options\n flags?: string;\n encoding?: string;\n fd?: number;\n mode?: number;\n autoClose?: boolean;\n start?: number;\n}\n\n/**\n * Write a parquet file to an output stream. The ParquetEncoder will perform\n * buffering/batching for performance, so close() must be called after all rows\n * are written.\n */\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nexport class ParquetEncoder<T> {\n /**\n * Convenience method to create a new buffered parquet writer that writes to\n * the specified file\n */\n static async openFile<T>(\n schema: ParquetSchema,\n path: string,\n opts?: ParquetEncoderOptions\n ): Promise<ParquetEncoder<T>> {\n const outputStream = await osopen(path, opts);\n return ParquetEncoder.openStream(schema, outputStream, opts);\n }\n\n /**\n * Convenience method to create a new buffered parquet writer that writes to\n * the specified stream\n */\n static async openStream<T>(\n schema: ParquetSchema,\n outputStream: stream.Writable,\n opts: ParquetEncoderOptions = {}\n ): Promise<ParquetEncoder<T>> {\n const envelopeWriter = await ParquetEnvelopeWriter.openStream(schema, outputStream, opts);\n return new ParquetEncoder(schema, envelopeWriter, opts);\n }\n\n public schema: ParquetSchema;\n public envelopeWriter: ParquetEnvelopeWriter;\n public rowBuffer: ParquetBuffer;\n public rowGroupSize: number;\n public closed: boolean;\n public userMetadata: Record<string, string>;\n\n /**\n * Create a new buffered parquet writer for a given envelope writer\n */\n constructor(\n schema: ParquetSchema,\n envelopeWriter: ParquetEnvelopeWriter,\n opts: ParquetEncoderOptions\n ) {\n this.schema = schema;\n this.envelopeWriter = envelopeWriter;\n // @ts-ignore Row buffer typings...\n this.rowBuffer = {};\n this.rowGroupSize = opts.rowGroupSize || PARQUET_DEFAULT_ROW_GROUP_SIZE;\n this.closed = false;\n this.userMetadata = {};\n\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.writeHeader();\n }\n\n async writeHeader(): Promise<void> {\n // TODO - better not mess with promises in the constructor\n try {\n await this.envelopeWriter.writeHeader();\n } catch (err) {\n await this.envelopeWriter.close();\n throw err;\n }\n }\n\n /**\n * Append a single row to the parquet file. Rows are buffered in memory until\n * rowGroupSize rows are in the buffer or close() is called\n */\n async appendRow<T>(row: T): Promise<void> {\n if (this.closed) {\n throw new Error('writer was closed');\n }\n Shred.shredRecord(this.schema, row, this.rowBuffer);\n if (this.rowBuffer.rowCount >= this.rowGroupSize) {\n // @ts-ignore\n this.rowBuffer = {};\n }\n }\n\n /**\n * Finish writing the parquet file and commit the footer to disk. This method\n * MUST be called after you are finished adding rows. 
You must not call this\n * method twice on the same object or add any rows after the close() method has\n * been called\n */\n async close(callback?: () => void): Promise<void> {\n if (this.closed) {\n throw new Error('writer was closed');\n }\n\n this.closed = true;\n\n if (this.rowBuffer.rowCount > 0 || this.rowBuffer.rowCount >= this.rowGroupSize) {\n // @ts-ignore\n this.rowBuffer = {};\n }\n\n await this.envelopeWriter.writeFooter(this.userMetadata);\n await this.envelopeWriter.close();\n // this.envelopeWriter = null;\n\n if (callback) {\n callback();\n }\n }\n\n /**\n * Add key<>value metadata to the file\n */\n setMetadata(key: string, value: string): void {\n // TODO: value to be any, obj -> JSON\n this.userMetadata[String(key)] = String(value);\n }\n\n /**\n * Set the parquet row group size. This values controls the maximum number\n * of rows that are buffered in memory at any given time as well as the number\n * of rows that are co-located on disk. A higher value is generally better for\n * read-time I/O performance at the tradeoff of write-time memory usage.\n */\n setRowGroupSize(cnt: number): void {\n this.rowGroupSize = cnt;\n }\n\n /**\n * Set the parquet data page size. The data page size controls the maximum\n * number of column values that are written to disk as a consecutive array\n */\n setPageSize(cnt: number): void {\n this.envelopeWriter.setPageSize(cnt);\n }\n}\n\n/**\n * Create a parquet file from a schema and a number of row groups. This class\n * performs direct, unbuffered writes to the underlying output stream and is\n * intendend for advanced and internal users; the writeXXX methods must be\n * called in the correct order to produce a valid file.\n */\nexport class ParquetEnvelopeWriter {\n /**\n * Create a new parquet envelope writer that writes to the specified stream\n */\n static async openStream(\n schema: ParquetSchema,\n outputStream: stream.Writable,\n opts: ParquetEncoderOptions\n ): Promise<ParquetEnvelopeWriter> {\n const writeFn = oswrite.bind(undefined, outputStream);\n const closeFn = osclose.bind(undefined, outputStream);\n return new ParquetEnvelopeWriter(schema, writeFn, closeFn, 0, opts);\n }\n\n public schema: ParquetSchema;\n public write: (buf: Buffer) => Promise<void>;\n public close: () => Promise<void>;\n public offset: number;\n public rowCount: number;\n public rowGroups: RowGroup[];\n public pageSize: number;\n public useDataPageV2: boolean;\n\n constructor(\n schema: ParquetSchema,\n writeFn: (buf: Buffer) => Promise<void>,\n closeFn: () => Promise<void>,\n fileOffset: number,\n opts: ParquetEncoderOptions\n ) {\n this.schema = schema;\n this.write = writeFn;\n this.close = closeFn;\n this.offset = fileOffset;\n this.rowCount = 0;\n this.rowGroups = [];\n this.pageSize = opts.pageSize || PARQUET_DEFAULT_PAGE_SIZE;\n this.useDataPageV2 = 'useDataPageV2' in opts ? Boolean(opts.useDataPageV2) : false;\n }\n\n writeSection(buf: Buffer): Promise<void> {\n this.offset += buf.length;\n return this.write(buf);\n }\n\n /**\n * Encode the parquet file header\n */\n writeHeader(): Promise<void> {\n return this.writeSection(Buffer.from(PARQUET_MAGIC));\n }\n\n /**\n * Encode a parquet row group. 
The records object should be created using the\n * shredRecord method\n */\n async writeRowGroup(records: ParquetBuffer): Promise<void> {\n const rgroup = await encodeRowGroup(this.schema, records, {\n baseOffset: this.offset,\n pageSize: this.pageSize,\n useDataPageV2: this.useDataPageV2\n });\n\n this.rowCount += records.rowCount;\n this.rowGroups.push(rgroup.metadata);\n return await this.writeSection(rgroup.body);\n }\n\n /**\n * Write the parquet file footer\n */\n writeFooter(userMetadata: Record<string, string>): Promise<void> {\n if (!userMetadata) {\n // tslint:disable-next-line:no-parameter-reassignment\n userMetadata = {};\n }\n\n return this.writeSection(\n encodeFooter(this.schema, this.rowCount, this.rowGroups, userMetadata)\n );\n }\n\n /**\n * Set the parquet data page size. The data page size controls the maximum\n * number of column values that are written to disk as a consecutive array\n */\n setPageSize(cnt: number): void {\n this.pageSize = cnt;\n }\n}\n\n/**\n * Create a parquet transform stream\nexport class ParquetTransformer<T> extends stream.Transform {\n public writer: ParquetEncoder<T>;\n\n constructor(schema: ParquetSchema, opts: ParquetEncoderOptions = {}) {\n super({objectMode: true});\n\n const writeProxy = (function (t: ParquetTransformer<any>) {\n return async function (b: any): Promise<void> {\n t.push(b);\n };\n })(this);\n\n this.writer = new ParquetEncoder(\n schema,\n new ParquetEnvelopeWriter(schema, writeProxy, async () => {}, 0, opts),\n opts\n );\n }\n\n // tslint:disable-next-line:function-name\n _transform(row: any, encoding: string, callback: (val?: any) => void): Promise<void> {\n if (row) {\n return this.writer.appendRow(row).then(callback);\n }\n callback();\n return Promise.resolve();\n }\n\n // tslint:disable-next-line:function-name\n async _flush(callback: (val?: any) => void) {\n await this.writer.close(callback);\n }\n}\n */\n\n/**\n * Encode a consecutive array of data using one of the parquet encodings\n */\nfunction encodeValues(\n type: PrimitiveType,\n encoding: ParquetCodec,\n values: any[],\n opts: ParquetCodecOptions\n) {\n if (!(encoding in PARQUET_CODECS)) {\n throw new Error(`invalid encoding: ${encoding}`);\n }\n return PARQUET_CODECS[encoding].encodeValues(type, values, opts);\n}\n\n/**\n * Encode a parquet data page\n */\nasync function encodeDataPage(\n column: ParquetField,\n data: ParquetData\n): Promise<{\n header: PageHeader;\n headerSize: number;\n page: Buffer;\n}> {\n /* encode repetition and definition levels */\n let rLevelsBuf = Buffer.alloc(0);\n if (column.rLevelMax > 0) {\n rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {\n bitWidth: getBitWidth(column.rLevelMax)\n // disableEnvelope: false\n });\n }\n\n let dLevelsBuf = Buffer.alloc(0);\n if (column.dLevelMax > 0) {\n dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {\n bitWidth: getBitWidth(column.dLevelMax)\n // disableEnvelope: false\n });\n }\n\n /* encode values */\n const valuesBuf = encodeValues(column.primitiveType!, column.encoding!, data.values, {\n typeLength: column.typeLength,\n bitWidth: column.typeLength\n });\n\n const dataBuf = Buffer.concat([rLevelsBuf, dLevelsBuf, valuesBuf]);\n\n // compression = column.compression === 'UNCOMPRESSED' ? 
(compression || 'UNCOMPRESSED') : column.compression;\n const compressedBuf = await Compression.deflate(column.compression!, dataBuf);\n\n /* build page header */\n const header = new PageHeader({\n type: PageType.DATA_PAGE,\n data_page_header: new DataPageHeader({\n num_values: data.count,\n encoding: Encoding[column.encoding!] as any,\n definition_level_encoding: Encoding[PARQUET_RDLVL_ENCODING], // [PARQUET_RDLVL_ENCODING],\n repetition_level_encoding: Encoding[PARQUET_RDLVL_ENCODING] // [PARQUET_RDLVL_ENCODING]\n }),\n uncompressed_page_size: dataBuf.length,\n compressed_page_size: compressedBuf.length\n });\n\n /* concat page header, repetition and definition levels and values */\n const headerBuf = serializeThrift(header);\n const page = Buffer.concat([headerBuf, compressedBuf]);\n\n return {header, headerSize: headerBuf.length, page};\n}\n\n/**\n * Encode a parquet data page (v2)\n */\nasync function encodeDataPageV2(\n column: ParquetField,\n data: ParquetData,\n rowCount: number\n): Promise<{\n header: PageHeader;\n headerSize: number;\n page: Buffer;\n}> {\n /* encode values */\n const valuesBuf = encodeValues(column.primitiveType!, column.encoding!, data.values, {\n typeLength: column.typeLength,\n bitWidth: column.typeLength\n });\n\n // compression = column.compression === 'UNCOMPRESSED' ? (compression || 'UNCOMPRESSED') : column.compression;\n const compressedBuf = await Compression.deflate(column.compression!, valuesBuf);\n\n /* encode repetition and definition levels */\n let rLevelsBuf = Buffer.alloc(0);\n if (column.rLevelMax > 0) {\n rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {\n bitWidth: getBitWidth(column.rLevelMax),\n disableEnvelope: true\n });\n }\n\n let dLevelsBuf = Buffer.alloc(0);\n if (column.dLevelMax > 0) {\n dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {\n bitWidth: getBitWidth(column.dLevelMax),\n disableEnvelope: true\n });\n }\n\n /* build page header */\n const header = new PageHeader({\n type: PageType.DATA_PAGE_V2,\n data_page_header_v2: new DataPageHeaderV2({\n num_values: data.count,\n num_nulls: data.count - data.values.length,\n num_rows: rowCount,\n encoding: Encoding[column.encoding!] as any,\n definition_levels_byte_length: dLevelsBuf.length,\n repetition_levels_byte_length: rLevelsBuf.length,\n is_compressed: column.compression !== 'UNCOMPRESSED'\n }),\n uncompressed_page_size: rLevelsBuf.length + dLevelsBuf.length + valuesBuf.length,\n compressed_page_size: rLevelsBuf.length + dLevelsBuf.length + compressedBuf.length\n });\n\n /* concat page header, repetition and definition levels and values */\n const headerBuf = serializeThrift(header);\n const page = Buffer.concat([headerBuf, rLevelsBuf, dLevelsBuf, compressedBuf]);\n return {header, headerSize: headerBuf.length, page};\n}\n\n/**\n * Encode an array of values into a parquet column chunk\n */\nasync function encodeColumnChunk(\n column: ParquetField,\n buffer: ParquetBuffer,\n offset: number,\n opts: ParquetEncoderOptions\n): Promise<{\n body: Buffer;\n metadata: ColumnMetaData;\n metadataOffset: number;\n}> {\n const data = buffer.columnData[column.path.join()];\n const baseOffset = (opts.baseOffset || 0) + offset;\n /* encode data page(s) */\n // const pages: Buffer[] = [];\n let pageBuf: Buffer;\n // tslint:disable-next-line:variable-name\n let total_uncompressed_size = 0;\n // tslint:disable-next-line:variable-name\n let total_compressed_size = 0;\n {\n const result = opts.useDataPageV2\n ? 
await encodeDataPageV2(column, data, buffer.rowCount)\n : await encodeDataPage(column, data);\n // pages.push(result.page);\n pageBuf = result.page;\n total_uncompressed_size += result.header.uncompressed_page_size + result.headerSize;\n total_compressed_size += result.header.compressed_page_size + result.headerSize;\n }\n\n // const pagesBuf = Buffer.concat(pages);\n // const compression = column.compression === 'UNCOMPRESSED' ? (opts.compression || 'UNCOMPRESSED') : column.compression;\n\n /* prepare metadata header */\n const metadata = new ColumnMetaData({\n path_in_schema: column.path,\n num_values: data.count,\n data_page_offset: baseOffset,\n encodings: [],\n total_uncompressed_size, // : pagesBuf.length,\n total_compressed_size,\n type: Type[column.primitiveType!],\n codec: CompressionCodec[column.compression!]\n });\n\n /* list encodings */\n metadata.encodings.push(Encoding[PARQUET_RDLVL_ENCODING]);\n metadata.encodings.push(Encoding[column.encoding!]);\n\n /* concat metadata header and data pages */\n const metadataOffset = baseOffset + pageBuf.length;\n const body = Buffer.concat([pageBuf, serializeThrift(metadata)]);\n return {body, metadata, metadataOffset};\n}\n\n/**\n * Encode a list of column values into a parquet row group\n */\nasync function encodeRowGroup(\n schema: ParquetSchema,\n data: ParquetBuffer,\n opts: ParquetEncoderOptions\n): Promise<{\n body: Buffer;\n metadata: RowGroup;\n}> {\n const metadata = new RowGroup({\n num_rows: data.rowCount,\n columns: [],\n total_byte_size: 0\n });\n\n let body = Buffer.alloc(0);\n for (const field of schema.fieldList) {\n if (field.isNested) {\n continue; // eslint-disable-line no-continue\n }\n\n const cchunkData = await encodeColumnChunk(field, data, body.length, opts);\n\n const cchunk = new ColumnChunk({\n file_offset: cchunkData.metadataOffset,\n meta_data: cchunkData.metadata\n });\n\n metadata.columns.push(cchunk);\n metadata.total_byte_size = new Int64(Number(metadata.total_byte_size) + cchunkData.body.length);\n\n body = Buffer.concat([body, cchunkData.body]);\n }\n\n return {body, metadata};\n}\n\n/**\n * Encode a parquet file metadata footer\n */\nfunction encodeFooter(\n schema: ParquetSchema,\n rowCount: number,\n rowGroups: RowGroup[],\n userMetadata: Record<string, string>\n): Buffer {\n const metadata = new FileMetaData({\n version: PARQUET_VERSION,\n created_by: 'parquets',\n num_rows: rowCount,\n row_groups: rowGroups,\n schema: [],\n key_value_metadata: []\n });\n\n for (const key in userMetadata) {\n const kv = new KeyValue({\n key,\n value: userMetadata[key]\n });\n metadata.key_value_metadata?.push?.(kv);\n }\n\n {\n const schemaRoot = new SchemaElement({\n name: 'root',\n num_children: Object.keys(schema.fields).length\n });\n metadata.schema.push(schemaRoot);\n }\n\n for (const field of schema.fieldList) {\n const relt = FieldRepetitionType[field.repetitionType];\n const schemaElem = new SchemaElement({\n name: field.name,\n repetition_type: relt as any\n });\n\n if (field.isNested) {\n schemaElem.num_children = field.fieldCount;\n } else {\n schemaElem.type = Type[field.primitiveType!] 
as Type;\n }\n\n if (field.originalType) {\n schemaElem.converted_type = ConvertedType[field.originalType] as ConvertedType;\n }\n\n schemaElem.type_length = field.typeLength;\n\n metadata.schema.push(schemaElem);\n }\n\n const metadataEncoded = serializeThrift(metadata);\n const footerEncoded = Buffer.alloc(metadataEncoded.length + 8);\n metadataEncoded.copy(footerEncoded);\n footerEncoded.writeUInt32LE(metadataEncoded.length, metadataEncoded.length);\n footerEncoded.write(PARQUET_MAGIC, metadataEncoded.length + 4);\n return footerEncoded;\n}\n"],"mappings":";AAGA,SAA6BA,cAAc,QAAO,WAAW;AAC7D,OAAO,KAAKC,WAAW,MAAM,gBAAgB;AAS7C,OAAO,KAAKC,KAAK,MAAM,iBAAiB;AACxC,SACEC,WAAW,EACXC,cAAc,EACdC,gBAAgB,EAChBC,aAAa,EACbC,cAAc,EACdC,gBAAgB,EAChBC,QAAQ,EACRC,mBAAmB,EACnBC,YAAY,EACZC,QAAQ,EACRC,UAAU,EACVC,QAAQ,EACRC,QAAQ,EACRC,aAAa,EACbC,IAAI,QACC,mBAAmB;AAC1B,SAAQC,MAAM,EAAEC,OAAO,EAAEC,OAAO,QAAO,qBAAqB;AAC5D,SAAQC,WAAW,EAAEC,eAAe,QAAO,qBAAqB;AAChE,OAAOC,KAAK,MAAM,YAAY;AAK9B,MAAMC,aAAa,GAAG,MAAM;AAK5B,MAAMC,eAAe,GAAG,CAAC;AAKzB,MAAMC,yBAAyB,GAAG,IAAI;AACtC,MAAMC,8BAA8B,GAAG,IAAI;AAK3C,MAAMC,kBAAkB,GAAG,OAAO;AAClC,MAAMC,sBAAsB,GAAG,KAAK;AAuBpC,OAAO,MAAMC,cAAc,CAAI;EAK7B,aAAaC,QAAQA,CACnBC,MAAqB,EACrBC,IAAY,EACZC,IAA4B,EACA;IAC5B,MAAMC,YAAY,GAAG,MAAMjB,MAAM,CAACe,IAAI,EAAEC,IAAI,CAAC;IAC7C,OAAOJ,cAAc,CAACM,UAAU,CAACJ,MAAM,EAAEG,YAAY,EAAED,IAAI,CAAC;EAC9D;EAMA,aAAaE,UAAUA,CACrBJ,MAAqB,EACrBG,YAA6B,EAED;IAAA,IAD5BD,IAA2B,GAAAG,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC,CAAC;IAEhC,MAAMG,cAAc,GAAG,MAAMC,qBAAqB,CAACL,UAAU,CAACJ,MAAM,EAAEG,YAAY,EAAED,IAAI,CAAC;IACzF,OAAO,IAAIJ,cAAc,CAACE,MAAM,EAAEQ,cAAc,EAAEN,IAAI,CAAC;EACzD;EAYAQ,WAAWA,CACTV,MAAqB,EACrBQ,cAAqC,EACrCN,IAA2B,EAC3B;IAAAS,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IACA,IAAI,CAACX,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACQ,cAAc,GAAGA,cAAc;IAEpC,IAAI,CAACI,SAAS,GAAG,CAAC,CAAC;IACnB,IAAI,CAACC,YAAY,GAAGX,IAAI,CAACW,YAAY,IAAIlB,8BAA8B;IACvE,IAAI,CAACmB,MAAM,GAAG,KAAK;IACnB,IAAI,CAACC,YAAY,GAAG,CAAC,CAAC;IAGtB,IAAI,CAACC,WAAW,EAAE;EACpB;EAEA,MAAMA,WAAWA,CAAA,EAAkB;IAEjC,IAAI;MACF,MAAM,IAAI,CAACR,cAAc,CAACQ,WAAW,EAAE;IACzC,CAAC,CAAC,OAAOC,GAAG,EAAE;MACZ,MAAM,IAAI,CAACT,cAAc,CAACU,KAAK,EAAE;MACjC,MAAMD,GAAG;IACX;EACF;EAMA,MAAME,SAASA,CAAIC,GAAM,EAAiB;IACxC,IAAI,IAAI,CAACN,MAAM,EAAE;MACf,MAAM,IAAIO,KAAK,CAAC,mBAAmB,CAAC;IACtC;IACAnD,KAAK,CAACoD,WAAW,CAAC,IAAI,CAACtB,MAAM,EAAEoB,GAAG,EAAE,IAAI,CAACR,SAAS,CAAC;IACnD,IAAI,IAAI,CAACA,SAAS,CAACW,QAAQ,IAAI,IAAI,CAACV,YAAY,EAAE;MAEhD,IAAI,CAACD,SAAS,GAAG,CAAC,CAAC;IACrB;EACF;EAQA,MAAMM,KAAKA,CAACM,QAAqB,EAAiB;IAChD,IAAI,IAAI,CAACV,MAAM,EAAE;MACf,MAAM,IAAIO,KAAK,CAAC,mBAAmB,CAAC;IACtC;IAEA,IAAI,CAACP,MAAM,GAAG,IAAI;IAElB,IAAI,IAAI,CAACF,SAAS,CAACW,QAAQ,GAAG,CAAC,IAAI,IAAI,CAACX,SAAS,CAACW,QAAQ,IAAI,IAAI,CAACV,YAAY,EAAE;MAE/E,IAAI,CAACD,SAAS,GAAG,CAAC,CAAC;IACrB;IAEA,MAAM,IAAI,CAACJ,cAAc,CAACiB,WAAW,CAAC,IAAI,CAACV,YAAY,CAAC;IACxD,MAAM,IAAI,CAACP,cAAc,CAACU,KAAK,EAAE;IAGjC,IAAIM,QAAQ,EAAE;MACZA,QAAQ,EAAE;IACZ;EACF;EAKAE,WAAWA,CAACC,GAAW,EAAEC,KAAa,EAAQ;IAE5C,IAAI,CAACb,YAAY,CAACc,MAAM,CAACF,GAAG,CAAC,CAAC,GAAGE,MAAM,CAACD,KAAK,CAAC;EAChD;EAQAE,eAAeA,CAACC,GAAW,EAAQ;IACjC,IAAI,CAAClB,YAAY,GAAGkB,GAAG;EACzB;EAMAC,WAAWA,CAACD,GAAW,EAAQ;IAC7B,IAAI,CAACvB,cAAc,CAACwB,WAAW,CAACD,GAAG,CAAC;EACtC;AACF;AAQA,OAAO,MAAMtB,qBAAqB,CAAC;EAIjC,aAAaL,UAAUA,CACrBJ,MAAqB,EACrBG,YAA6B,EAC7BD,IAA2B,EACK;IAChC,MAAM+B,OAAO,GAAG9C,OAAO,CAAC+C,IAAI,CAAC3B,SAAS,EAAEJ,YAAY,CAAC;IACrD,MAAMgC,OAAO,GAAG/C,OAAO,CAAC8C,IAAI,CAAC3B,SAAS,EAAEJ,YAAY,CAAC;IACrD,OAAO,IAAIM,qBAAqB,CAACT,MAAM,EAAEiC,OAAO,EAAEE,OAAO,EAAE,CAAC,EAA
EjC,IAAI,CAAC;EACrE;EAWAQ,WAAWA,CACTV,MAAqB,EACrBiC,OAAuC,EACvCE,OAA4B,EAC5BC,UAAkB,EAClBlC,IAA2B,EAC3B;IAAAS,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IACA,IAAI,CAACX,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACqC,KAAK,GAAGJ,OAAO;IACpB,IAAI,CAACf,KAAK,GAAGiB,OAAO;IACpB,IAAI,CAACG,MAAM,GAAGF,UAAU;IACxB,IAAI,CAACb,QAAQ,GAAG,CAAC;IACjB,IAAI,CAACgB,SAAS,GAAG,EAAE;IACnB,IAAI,CAACC,QAAQ,GAAGtC,IAAI,CAACsC,QAAQ,IAAI9C,yBAAyB;IAC1D,IAAI,CAAC+C,aAAa,GAAG,eAAe,IAAIvC,IAAI,GAAGwC,OAAO,CAACxC,IAAI,CAACuC,aAAa,CAAC,GAAG,KAAK;EACpF;EAEAE,YAAYA,CAACC,GAAW,EAAiB;IACvC,IAAI,CAACN,MAAM,IAAIM,GAAG,CAACtC,MAAM;IACzB,OAAO,IAAI,CAAC+B,KAAK,CAACO,GAAG,CAAC;EACxB;EAKA5B,WAAWA,CAAA,EAAkB;IAC3B,OAAO,IAAI,CAAC2B,YAAY,CAACE,MAAM,CAACC,IAAI,CAACtD,aAAa,CAAC,CAAC;EACtD;EAMA,MAAMuD,aAAaA,CAACC,OAAsB,EAAiB;IACzD,MAAMC,MAAM,GAAG,MAAMC,cAAc,CAAC,IAAI,CAAClD,MAAM,EAAEgD,OAAO,EAAE;MACxDG,UAAU,EAAE,IAAI,CAACb,MAAM;MACvBE,QAAQ,EAAE,IAAI,CAACA,QAAQ;MACvBC,aAAa,EAAE,IAAI,CAACA;IACtB,CAAC,CAAC;IAEF,IAAI,CAAClB,QAAQ,IAAIyB,OAAO,CAACzB,QAAQ;IACjC,IAAI,CAACgB,SAAS,CAACa,IAAI,CAACH,MAAM,CAACI,QAAQ,CAAC;IACpC,OAAO,MAAM,IAAI,CAACV,YAAY,CAACM,MAAM,CAACK,IAAI,CAAC;EAC7C;EAKA7B,WAAWA,CAACV,YAAoC,EAAiB;IAC/D,IAAI,CAACA,YAAY,EAAE;MAEjBA,YAAY,GAAG,CAAC,CAAC;IACnB;IAEA,OAAO,IAAI,CAAC4B,YAAY,CACtBY,YAAY,CAAC,IAAI,CAACvD,MAAM,EAAE,IAAI,CAACuB,QAAQ,EAAE,IAAI,CAACgB,SAAS,EAAExB,YAAY,CAAC,CACvE;EACH;EAMAiB,WAAWA,CAACD,GAAW,EAAQ;IAC7B,IAAI,CAACS,QAAQ,GAAGT,GAAG;EACrB;AACF;AA0CA,SAASyB,YAAYA,CACnBC,IAAmB,EACnBC,QAAsB,EACtBC,MAAa,EACbzD,IAAyB,EACzB;EACA,IAAI,EAAEwD,QAAQ,IAAI1F,cAAc,CAAC,EAAE;IACjC,MAAM,IAAIqD,KAAK,sBAAAuC,MAAA,CAAsBF,QAAQ,EAAG;EAClD;EACA,OAAO1F,cAAc,CAAC0F,QAAQ,CAAC,CAACF,YAAY,CAACC,IAAI,EAAEE,MAAM,EAAEzD,IAAI,CAAC;AAClE;AAKA,eAAe2D,cAAcA,CAC3BC,MAAoB,EACpBC,IAAiB,EAKhB;EAED,IAAIC,UAAU,GAAGnB,MAAM,CAACoB,KAAK,CAAC,CAAC,CAAC;EAChC,IAAIH,MAAM,CAACI,SAAS,GAAG,CAAC,EAAE;IACxBF,UAAU,GAAGR,YAAY,CAAC5D,kBAAkB,EAAEC,sBAAsB,EAAEkE,IAAI,CAACI,OAAO,EAAE;MAClFC,QAAQ,EAAE/E,WAAW,CAACyE,MAAM,CAACI,SAAS;IAExC,CAAC,CAAC;EACJ;EAEA,IAAIG,UAAU,GAAGxB,MAAM,CAACoB,KAAK,CAAC,CAAC,CAAC;EAChC,IAAIH,MAAM,CAACQ,SAAS,GAAG,CAAC,EAAE;IACxBD,UAAU,GAAGb,YAAY,CAAC5D,kBAAkB,EAAEC,sBAAsB,EAAEkE,IAAI,CAACQ,OAAO,EAAE;MAClFH,QAAQ,EAAE/E,WAAW,CAACyE,MAAM,CAACQ,SAAS;IAExC,CAAC,CAAC;EACJ;EAGA,MAAME,SAAS,GAAGhB,YAAY,CAACM,MAAM,CAACW,aAAa,EAAGX,MAAM,CAACJ,QAAQ,EAAGK,IAAI,CAACJ,MAAM,EAAE;IACnFe,UAAU,EAAEZ,MAAM,CAACY,UAAU;IAC7BN,QAAQ,EAAEN,MAAM,CAACY;EACnB,CAAC,CAAC;EAEF,MAAMC,OAAO,GAAG9B,MAAM,CAACe,MAAM,CAAC,CAACI,UAAU,EAAEK,UAAU,EAAEG,SAAS,CAAC,CAAC;EAGlE,MAAMI,aAAa,GAAG,MAAM3G,WAAW,CAAC4G,OAAO,CAACf,MAAM,CAACgB,WAAW,EAAGH,OAAO,CAAC;EAG7E,MAAMI,MAAM,GAAG,IAAIlG,UAAU,CAAC;IAC5B4E,IAAI,EAAE3E,QAAQ,CAACkG,SAAS;IACxBC,gBAAgB,EAAE,IAAI1G,cAAc,CAAC;MACnC2G,UAAU,EAAEnB,IAAI,CAACoB,KAAK;MACtBzB,QAAQ,EAAEjF,QAAQ,CAACqF,MAAM,CAACJ,QAAQ,CAAS;MAC3C0B,yBAAyB,EAAE3G,QAAQ,CAACoB,sBAAsB,CAAC;MAC3DwF,yBAAyB,EAAE5G,QAAQ,CAACoB,sBAAsB;IAC5D,CAAC,CAAC;IACFyF,sBAAsB,EAAEX,OAAO,CAACrE,MAAM;IACtCiF,oBAAoB,EAAEX,aAAa,CAACtE;EACtC,CAAC,CAAC;EAGF,MAAMkF,SAAS,GAAGlG,eAAe,CAACyF,MAAM,CAAC;EACzC,MAAMU,IAAI,GAAG5C,MAAM,CAACe,MAAM,CAAC,CAAC4B,SAAS,EAAEZ,aAAa,CAAC,CAAC;EAEtD,OAAO;IAACG,MAAM;IAAEW,UAAU,EAAEF,SAAS,CAAClF,MAAM;IAAEmF;EAAI,CAAC;AACrD;AAKA,eAAeE,gBAAgBA,CAC7B7B,MAAoB,EACpBC,IAAiB,EACjBxC,QAAgB,EAKf;EAED,MAAMiD,SAAS,GAAGhB,YAAY,CAACM,MAAM,CAACW,aAAa,EAAGX,MAAM,CAACJ,QAAQ,EAAGK,IAAI,CAACJ,MAAM,EAAE;IACnFe,UAAU,EAAEZ,MAAM,CAACY,UAAU;IAC7BN,QAAQ,EAAEN,MAAM,CAACY;EACnB,CAAC,CAAC;EAGF,MAAME,aAAa,GAAG,MAAM3G,WAAW,CAAC4G,OAAO,CAACf,MAAM,CAACgB,WAAW,EAAGN,SAAS,CAAC;EAG/E,IAAIR
,UAAU,GAAGnB,MAAM,CAACoB,KAAK,CAAC,CAAC,CAAC;EAChC,IAAIH,MAAM,CAACI,SAAS,GAAG,CAAC,EAAE;IACxBF,UAAU,GAAGR,YAAY,CAAC5D,kBAAkB,EAAEC,sBAAsB,EAAEkE,IAAI,CAACI,OAAO,EAAE;MAClFC,QAAQ,EAAE/E,WAAW,CAACyE,MAAM,CAACI,SAAS,CAAC;MACvC0B,eAAe,EAAE;IACnB,CAAC,CAAC;EACJ;EAEA,IAAIvB,UAAU,GAAGxB,MAAM,CAACoB,KAAK,CAAC,CAAC,CAAC;EAChC,IAAIH,MAAM,CAACQ,SAAS,GAAG,CAAC,EAAE;IACxBD,UAAU,GAAGb,YAAY,CAAC5D,kBAAkB,EAAEC,sBAAsB,EAAEkE,IAAI,CAACQ,OAAO,EAAE;MAClFH,QAAQ,EAAE/E,WAAW,CAACyE,MAAM,CAACQ,SAAS,CAAC;MACvCsB,eAAe,EAAE;IACnB,CAAC,CAAC;EACJ;EAGA,MAAMb,MAAM,GAAG,IAAIlG,UAAU,CAAC;IAC5B4E,IAAI,EAAE3E,QAAQ,CAAC+G,YAAY;IAC3BC,mBAAmB,EAAE,IAAItH,gBAAgB,CAAC;MACxC0G,UAAU,EAAEnB,IAAI,CAACoB,KAAK;MACtBY,SAAS,EAAEhC,IAAI,CAACoB,KAAK,GAAGpB,IAAI,CAACJ,MAAM,CAACrD,MAAM;MAC1C0F,QAAQ,EAAEzE,QAAQ;MAClBmC,QAAQ,EAAEjF,QAAQ,CAACqF,MAAM,CAACJ,QAAQ,CAAS;MAC3CuC,6BAA6B,EAAE5B,UAAU,CAAC/D,MAAM;MAChD4F,6BAA6B,EAAElC,UAAU,CAAC1D,MAAM;MAChD6F,aAAa,EAAErC,MAAM,CAACgB,WAAW,KAAK;IACxC,CAAC,CAAC;IACFQ,sBAAsB,EAAEtB,UAAU,CAAC1D,MAAM,GAAG+D,UAAU,CAAC/D,MAAM,GAAGkE,SAAS,CAAClE,MAAM;IAChFiF,oBAAoB,EAAEvB,UAAU,CAAC1D,MAAM,GAAG+D,UAAU,CAAC/D,MAAM,GAAGsE,aAAa,CAACtE;EAC9E,CAAC,CAAC;EAGF,MAAMkF,SAAS,GAAGlG,eAAe,CAACyF,MAAM,CAAC;EACzC,MAAMU,IAAI,GAAG5C,MAAM,CAACe,MAAM,CAAC,CAAC4B,SAAS,EAAExB,UAAU,EAAEK,UAAU,EAAEO,aAAa,CAAC,CAAC;EAC9E,OAAO;IAACG,MAAM;IAAEW,UAAU,EAAEF,SAAS,CAAClF,MAAM;IAAEmF;EAAI,CAAC;AACrD;AAKA,eAAeW,iBAAiBA,CAC9BtC,MAAoB,EACpBuC,MAAqB,EACrB/D,MAAc,EACdpC,IAA2B,EAK1B;EACD,MAAM6D,IAAI,GAAGsC,MAAM,CAACC,UAAU,CAACxC,MAAM,CAAC7D,IAAI,CAACsG,IAAI,EAAE,CAAC;EAClD,MAAMpD,UAAU,GAAG,CAACjD,IAAI,CAACiD,UAAU,IAAI,CAAC,IAAIb,MAAM;EAGlD,IAAIkE,OAAe;EAEnB,IAAIC,uBAAuB,GAAG,CAAC;EAE/B,IAAIC,qBAAqB,GAAG,CAAC;EAC7B;IACE,MAAMC,MAAM,GAAGzG,IAAI,CAACuC,aAAa,GAC7B,MAAMkD,gBAAgB,CAAC7B,MAAM,EAAEC,IAAI,EAAEsC,MAAM,CAAC9E,QAAQ,CAAC,GACrD,MAAMsC,cAAc,CAACC,MAAM,EAAEC,IAAI,CAAC;IAEtCyC,OAAO,GAAGG,MAAM,CAAClB,IAAI;IACrBgB,uBAAuB,IAAIE,MAAM,CAAC5B,MAAM,CAACO,sBAAsB,GAAGqB,MAAM,CAACjB,UAAU;IACnFgB,qBAAqB,IAAIC,MAAM,CAAC5B,MAAM,CAACQ,oBAAoB,GAAGoB,MAAM,CAACjB,UAAU;EACjF;EAMA,MAAMrC,QAAQ,GAAG,IAAIjF,cAAc,CAAC;IAClCwI,cAAc,EAAE9C,MAAM,CAAC7D,IAAI;IAC3BiF,UAAU,EAAEnB,IAAI,CAACoB,KAAK;IACtB0B,gBAAgB,EAAE1D,UAAU;IAC5B2D,SAAS,EAAE,EAAE;IACbL,uBAAuB;IACvBC,qBAAqB;IACrBjD,IAAI,EAAExE,IAAI,CAAC6E,MAAM,CAACW,aAAa,CAAE;IACjCsC,KAAK,EAAE1I,gBAAgB,CAACyF,MAAM,CAACgB,WAAW;EAC5C,CAAC,CAAC;EAGFzB,QAAQ,CAACyD,SAAS,CAAC1D,IAAI,CAAC3E,QAAQ,CAACoB,sBAAsB,CAAC,CAAC;EACzDwD,QAAQ,CAACyD,SAAS,CAAC1D,IAAI,CAAC3E,QAAQ,CAACqF,MAAM,CAACJ,QAAQ,CAAE,CAAC;EAGnD,MAAMsD,cAAc,GAAG7D,UAAU,GAAGqD,OAAO,CAAClG,MAAM;EAClD,MAAMgD,IAAI,GAAGT,MAAM,CAACe,MAAM,CAAC,CAAC4C,OAAO,EAAElH,eAAe,CAAC+D,QAAQ,CAAC,CAAC,CAAC;EAChE,OAAO;IAACC,IAAI;IAAED,QAAQ;IAAE2D;EAAc,CAAC;AACzC;AAKA,eAAe9D,cAAcA,CAC3BlD,MAAqB,EACrB+D,IAAmB,EACnB7D,IAA2B,EAI1B;EACD,MAAMmD,QAAQ,GAAG,IAAItE,QAAQ,CAAC;IAC5BiH,QAAQ,EAAEjC,IAAI,CAACxC,QAAQ;IACvB0F,OAAO,EAAE,EAAE;IACXC,eAAe,EAAE;EACnB,CAAC,CAAC;EAEF,IAAI5D,IAAI,GAAGT,MAAM,CAACoB,KAAK,CAAC,CAAC,CAAC;EAC1B,KAAK,MAAMkD,KAAK,IAAInH,MAAM,CAACoH,SAAS,EAAE;IACpC,IAAID,KAAK,CAACE,QAAQ,EAAE;MAClB;IACF;IAEA,MAAMC,UAAU,GAAG,MAAMlB,iBAAiB,CAACe,KAAK,EAAEpD,IAAI,EAAET,IAAI,CAAChD,MAAM,EAAEJ,IAAI,CAAC;IAE1E,MAAMqH,MAAM,GAAG,IAAIpJ,WAAW,CAAC;MAC7BqJ,WAAW,EAAEF,UAAU,CAACN,cAAc;MACtCS,SAAS,EAAEH,UAAU,CAACjE;IACxB,CAAC,CAAC;IAEFA,QAAQ,CAAC4D,OAAO,CAAC7D,IAAI,CAACmE,MAAM,CAAC;IAC7BlE,QAAQ,CAAC6D,eAAe,GAAG,IAAI3H,KAAK,CAACmI,MAAM,CAACrE,QAAQ,CAAC6D,eAAe,CAAC,GAAGI,UAAU,CAAChE,IAAI,CAAChD,MAAM,CAAC;IAE/FgD,IAAI,GAAGT,MAAM,CAACe,MAAM,CAAC,CAACN,IAAI,EAAEgE,UAAU,CAAChE,IAAI,CAAC,CAAC;EAC/C;EAEA,OAAO;IAACA,IAAI
;IAAED;EAAQ,CAAC;AACzB;AAKA,SAASE,YAAYA,CACnBvD,MAAqB,EACrBuB,QAAgB,EAChBgB,SAAqB,EACrBxB,YAAoC,EAC5B;EACR,MAAMsC,QAAQ,GAAG,IAAI1E,YAAY,CAAC;IAChCgJ,OAAO,EAAElI,eAAe;IACxBmI,UAAU,EAAE,UAAU;IACtB5B,QAAQ,EAAEzE,QAAQ;IAClBsG,UAAU,EAAEtF,SAAS;IACrBvC,MAAM,EAAE,EAAE;IACV8H,kBAAkB,EAAE;EACtB,CAAC,CAAC;EAEF,KAAK,MAAMnG,GAAG,IAAIZ,YAAY,EAAE;IAAA,IAAAgH,qBAAA,EAAAC,sBAAA,EAAAC,sBAAA;IAC9B,MAAMC,EAAE,GAAG,IAAItJ,QAAQ,CAAC;MACtB+C,GAAG;MACHC,KAAK,EAAEb,YAAY,CAACY,GAAG;IACzB,CAAC,CAAC;IACF,CAAAoG,qBAAA,GAAA1E,QAAQ,CAACyE,kBAAkB,cAAAC,qBAAA,wBAAAC,sBAAA,GAA3B,CAAAC,sBAAA,GAAAF,qBAAA,EAA6B3E,IAAI,cAAA4E,sBAAA,uBAAjCA,sBAAA,CAAAG,IAAA,CAAAF,sBAAA,EAAoCC,EAAE,CAAC;EACzC;EAEA;IACE,MAAME,UAAU,GAAG,IAAIpJ,aAAa,CAAC;MACnCqJ,IAAI,EAAE,MAAM;MACZC,YAAY,EAAEC,MAAM,CAACC,IAAI,CAACxI,MAAM,CAACyI,MAAM,CAAC,CAACnI;IAC3C,CAAC,CAAC;IACF+C,QAAQ,CAACrD,MAAM,CAACoD,IAAI,CAACgF,UAAU,CAAC;EAClC;EAEA,KAAK,MAAMjB,KAAK,IAAInH,MAAM,CAACoH,SAAS,EAAE;IACpC,MAAMsB,IAAI,GAAGhK,mBAAmB,CAACyI,KAAK,CAACwB,cAAc,CAAC;IACtD,MAAMC,UAAU,GAAG,IAAI5J,aAAa,CAAC;MACnCqJ,IAAI,EAAElB,KAAK,CAACkB,IAAI;MAChBQ,eAAe,EAAEH;IACnB,CAAC,CAAC;IAEF,IAAIvB,KAAK,CAACE,QAAQ,EAAE;MAClBuB,UAAU,CAACN,YAAY,GAAGnB,KAAK,CAAC2B,UAAU;IAC5C,CAAC,MAAM;MACLF,UAAU,CAACnF,IAAI,GAAGxE,IAAI,CAACkI,KAAK,CAAC1C,aAAa,CAAU;IACtD;IAEA,IAAI0C,KAAK,CAAC4B,YAAY,EAAE;MACtBH,UAAU,CAACI,cAAc,GAAG1K,aAAa,CAAC6I,KAAK,CAAC4B,YAAY,CAAkB;IAChF;IAEAH,UAAU,CAACK,WAAW,GAAG9B,KAAK,CAACzC,UAAU;IAEzCrB,QAAQ,CAACrD,MAAM,CAACoD,IAAI,CAACwF,UAAU,CAAC;EAClC;EAEA,MAAMM,eAAe,GAAG5J,eAAe,CAAC+D,QAAQ,CAAC;EACjD,MAAM8F,aAAa,GAAGtG,MAAM,CAACoB,KAAK,CAACiF,eAAe,CAAC5I,MAAM,GAAG,CAAC,CAAC;EAC9D4I,eAAe,CAACE,IAAI,CAACD,aAAa,CAAC;EACnCA,aAAa,CAACE,aAAa,CAACH,eAAe,CAAC5I,MAAM,EAAE4I,eAAe,CAAC5I,MAAM,CAAC;EAC3E6I,aAAa,CAAC9G,KAAK,CAAC7C,aAAa,EAAE0J,eAAe,CAAC5I,MAAM,GAAG,CAAC,CAAC;EAC9D,OAAO6I,aAAa;AACtB"}
+ {"version":3,"file":"parquet-encoder.js","names":["PARQUET_CODECS","Compression","Shred","ColumnChunk","ColumnMetaData","CompressionCodec","ConvertedType","DataPageHeader","DataPageHeaderV2","Encoding","FieldRepetitionType","FileMetaData","KeyValue","PageHeader","PageType","RowGroup","SchemaElement","Type","osopen","oswrite","osclose","getBitWidth","serializeThrift","Int64","PARQUET_MAGIC","PARQUET_VERSION","PARQUET_DEFAULT_PAGE_SIZE","PARQUET_DEFAULT_ROW_GROUP_SIZE","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING","ParquetEncoder","openFile","schema","path","opts","outputStream","openStream","arguments","length","undefined","envelopeWriter","ParquetEnvelopeWriter","constructor","_defineProperty","rowBuffer","rowGroupSize","closed","userMetadata","writeHeader","err","close","appendRow","row","Error","shredRecord","rowCount","callback","writeFooter","setMetadata","key","value","String","setRowGroupSize","cnt","setPageSize","writeFn","bind","closeFn","fileOffset","write","offset","rowGroups","pageSize","useDataPageV2","Boolean","writeSection","buf","Buffer","from","writeRowGroup","records","rgroup","encodeRowGroup","baseOffset","push","metadata","body","encodeFooter","encodeValues","type","encoding","values","concat","encodeDataPage","column","data","rLevelsBuf","alloc","rLevelMax","rlevels","bitWidth","dLevelsBuf","dLevelMax","dlevels","valuesBuf","primitiveType","typeLength","dataBuf","compressedBuf","deflate","compression","header","DATA_PAGE","data_page_header","num_values","count","definition_level_encoding","repetition_level_encoding","uncompressed_page_size","compressed_page_size","headerBuf","page","headerSize","encodeDataPageV2","disableEnvelope","DATA_PAGE_V2","data_page_header_v2","num_nulls","num_rows","definition_levels_byte_length","repetition_levels_byte_length","is_compressed","encodeColumnChunk","buffer","columnData","join","pageBuf","total_uncompressed_size","total_compressed_size","result","path_in_schema","data_page_offset","encodings","codec","metadataOffset","columns","total_byte_size","field","fieldList","isNested","cchunkData","cchunk","file_offset","meta_data","Number","version","created_by","row_groups","key_value_metadata","_metadata$key_value_m","_metadata$key_value_m2","_metadata$key_value_m3","kv","call","schemaRoot","name","num_children","Object","keys","fields","relt","repetitionType","schemaElem","repetition_type","fieldCount","originalType","converted_type","type_length","metadataEncoded","footerEncoded","copy","writeUInt32LE"],"sources":["../../../../src/parquetjs/encoder/parquet-encoder.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\nimport {stream} from '@loaders.gl/loader-utils';\nimport {ParquetCodecOptions, PARQUET_CODECS} from '../codecs';\nimport * as Compression from '../compression';\nimport {\n ParquetBuffer,\n ParquetCodec,\n ParquetData,\n ParquetField,\n PrimitiveType\n} from '../schema/declare';\nimport {ParquetSchema} from '../schema/schema';\nimport * as Shred from '../schema/shred';\nimport {\n ColumnChunk,\n ColumnMetaData,\n CompressionCodec,\n ConvertedType,\n DataPageHeader,\n DataPageHeaderV2,\n Encoding,\n FieldRepetitionType,\n FileMetaData,\n KeyValue,\n PageHeader,\n PageType,\n RowGroup,\n SchemaElement,\n Type\n} from '../parquet-thrift';\nimport {osopen, oswrite, osclose} from '../utils/file-utils';\nimport {getBitWidth, serializeThrift} from '../utils/read-utils';\nimport Int64 from 'node-int64';\n\n/**\n * Parquet 
File Magic String\n */\nconst PARQUET_MAGIC = 'PAR1';\n\n/**\n * Parquet File Format Version\n */\nconst PARQUET_VERSION = 1;\n\n/**\n * Default Page and Row Group sizes\n */\nconst PARQUET_DEFAULT_PAGE_SIZE = 8192;\nconst PARQUET_DEFAULT_ROW_GROUP_SIZE = 4096;\n\n/**\n * Repetition and Definition Level Encoding\n */\nconst PARQUET_RDLVL_TYPE = 'INT32';\nconst PARQUET_RDLVL_ENCODING = 'RLE';\n\nexport interface ParquetEncoderOptions {\n baseOffset?: number;\n rowGroupSize?: number;\n pageSize?: number;\n useDataPageV2?: boolean;\n\n // Write Stream Options\n flags?: string;\n encoding?: string;\n fd?: number;\n mode?: number;\n autoClose?: boolean;\n start?: number;\n}\n\n/**\n * Write a parquet file to an output stream. The ParquetEncoder will perform\n * buffering/batching for performance, so close() must be called after all rows\n * are written.\n */\n// eslint-disable-next-line @typescript-eslint/no-unused-vars\nexport class ParquetEncoder<T> {\n /**\n * Convenience method to create a new buffered parquet writer that writes to\n * the specified file\n */\n static async openFile<T>(\n schema: ParquetSchema,\n path: string,\n opts?: ParquetEncoderOptions\n ): Promise<ParquetEncoder<T>> {\n const outputStream = await osopen(path, opts);\n return ParquetEncoder.openStream(schema, outputStream, opts);\n }\n\n /**\n * Convenience method to create a new buffered parquet writer that writes to\n * the specified stream\n */\n static async openStream<T>(\n schema: ParquetSchema,\n outputStream: stream.Writable,\n opts: ParquetEncoderOptions = {}\n ): Promise<ParquetEncoder<T>> {\n const envelopeWriter = await ParquetEnvelopeWriter.openStream(schema, outputStream, opts);\n return new ParquetEncoder(schema, envelopeWriter, opts);\n }\n\n public schema: ParquetSchema;\n public envelopeWriter: ParquetEnvelopeWriter;\n public rowBuffer: ParquetBuffer;\n public rowGroupSize: number;\n public closed: boolean;\n public userMetadata: Record<string, string>;\n\n /**\n * Create a new buffered parquet writer for a given envelope writer\n */\n constructor(\n schema: ParquetSchema,\n envelopeWriter: ParquetEnvelopeWriter,\n opts: ParquetEncoderOptions\n ) {\n this.schema = schema;\n this.envelopeWriter = envelopeWriter;\n // @ts-ignore Row buffer typings...\n this.rowBuffer = {};\n this.rowGroupSize = opts.rowGroupSize || PARQUET_DEFAULT_ROW_GROUP_SIZE;\n this.closed = false;\n this.userMetadata = {};\n\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.writeHeader();\n }\n\n async writeHeader(): Promise<void> {\n // TODO - better not mess with promises in the constructor\n try {\n await this.envelopeWriter.writeHeader();\n } catch (err) {\n await this.envelopeWriter.close();\n throw err;\n }\n }\n\n /**\n * Append a single row to the parquet file. Rows are buffered in memory until\n * rowGroupSize rows are in the buffer or close() is called\n */\n async appendRow<T>(row: T): Promise<void> {\n if (this.closed) {\n throw new Error('writer was closed');\n }\n Shred.shredRecord(this.schema, row, this.rowBuffer);\n if (this.rowBuffer.rowCount >= this.rowGroupSize) {\n // @ts-ignore\n this.rowBuffer = {};\n }\n }\n\n /**\n * Finish writing the parquet file and commit the footer to disk. This method\n * MUST be called after you are finished adding rows. 
You must not call this\n * method twice on the same object or add any rows after the close() method has\n * been called\n */\n async close(callback?: () => void): Promise<void> {\n if (this.closed) {\n throw new Error('writer was closed');\n }\n\n this.closed = true;\n\n if (this.rowBuffer.rowCount > 0 || this.rowBuffer.rowCount >= this.rowGroupSize) {\n // @ts-ignore\n this.rowBuffer = {};\n }\n\n await this.envelopeWriter.writeFooter(this.userMetadata);\n await this.envelopeWriter.close();\n // this.envelopeWriter = null;\n\n if (callback) {\n callback();\n }\n }\n\n /**\n * Add key<>value metadata to the file\n */\n setMetadata(key: string, value: string): void {\n // TODO: value to be any, obj -> JSON\n this.userMetadata[String(key)] = String(value);\n }\n\n /**\n * Set the parquet row group size. This values controls the maximum number\n * of rows that are buffered in memory at any given time as well as the number\n * of rows that are co-located on disk. A higher value is generally better for\n * read-time I/O performance at the tradeoff of write-time memory usage.\n */\n setRowGroupSize(cnt: number): void {\n this.rowGroupSize = cnt;\n }\n\n /**\n * Set the parquet data page size. The data page size controls the maximum\n * number of column values that are written to disk as a consecutive array\n */\n setPageSize(cnt: number): void {\n this.envelopeWriter.setPageSize(cnt);\n }\n}\n\n/**\n * Create a parquet file from a schema and a number of row groups. This class\n * performs direct, unbuffered writes to the underlying output stream and is\n * intendend for advanced and internal users; the writeXXX methods must be\n * called in the correct order to produce a valid file.\n */\nexport class ParquetEnvelopeWriter {\n /**\n * Create a new parquet envelope writer that writes to the specified stream\n */\n static async openStream(\n schema: ParquetSchema,\n outputStream: stream.Writable,\n opts: ParquetEncoderOptions\n ): Promise<ParquetEnvelopeWriter> {\n const writeFn = oswrite.bind(undefined, outputStream);\n const closeFn = osclose.bind(undefined, outputStream);\n return new ParquetEnvelopeWriter(schema, writeFn, closeFn, 0, opts);\n }\n\n public schema: ParquetSchema;\n public write: (buf: Buffer) => Promise<void>;\n public close: () => Promise<void>;\n public offset: number;\n public rowCount: number;\n public rowGroups: RowGroup[];\n public pageSize: number;\n public useDataPageV2: boolean;\n\n constructor(\n schema: ParquetSchema,\n writeFn: (buf: Buffer) => Promise<void>,\n closeFn: () => Promise<void>,\n fileOffset: number,\n opts: ParquetEncoderOptions\n ) {\n this.schema = schema;\n this.write = writeFn;\n this.close = closeFn;\n this.offset = fileOffset;\n this.rowCount = 0;\n this.rowGroups = [];\n this.pageSize = opts.pageSize || PARQUET_DEFAULT_PAGE_SIZE;\n this.useDataPageV2 = 'useDataPageV2' in opts ? Boolean(opts.useDataPageV2) : false;\n }\n\n writeSection(buf: Buffer): Promise<void> {\n this.offset += buf.length;\n return this.write(buf);\n }\n\n /**\n * Encode the parquet file header\n */\n writeHeader(): Promise<void> {\n return this.writeSection(Buffer.from(PARQUET_MAGIC));\n }\n\n /**\n * Encode a parquet row group. 
The records object should be created using the\n * shredRecord method\n */\n async writeRowGroup(records: ParquetBuffer): Promise<void> {\n const rgroup = await encodeRowGroup(this.schema, records, {\n baseOffset: this.offset,\n pageSize: this.pageSize,\n useDataPageV2: this.useDataPageV2\n });\n\n this.rowCount += records.rowCount;\n this.rowGroups.push(rgroup.metadata);\n return await this.writeSection(rgroup.body);\n }\n\n /**\n * Write the parquet file footer\n */\n writeFooter(userMetadata: Record<string, string>): Promise<void> {\n if (!userMetadata) {\n // tslint:disable-next-line:no-parameter-reassignment\n userMetadata = {};\n }\n\n return this.writeSection(\n encodeFooter(this.schema, this.rowCount, this.rowGroups, userMetadata)\n );\n }\n\n /**\n * Set the parquet data page size. The data page size controls the maximum\n * number of column values that are written to disk as a consecutive array\n */\n setPageSize(cnt: number): void {\n this.pageSize = cnt;\n }\n}\n\n/**\n * Create a parquet transform stream\nexport class ParquetTransformer<T> extends stream.Transform {\n public writer: ParquetEncoder<T>;\n\n constructor(schema: ParquetSchema, opts: ParquetEncoderOptions = {}) {\n super({objectMode: true});\n\n const writeProxy = (function (t: ParquetTransformer<any>) {\n return async function (b: any): Promise<void> {\n t.push(b);\n };\n })(this);\n\n this.writer = new ParquetEncoder(\n schema,\n new ParquetEnvelopeWriter(schema, writeProxy, async () => {}, 0, opts),\n opts\n );\n }\n\n // tslint:disable-next-line:function-name\n _transform(row: any, encoding: string, callback: (val?: any) => void): Promise<void> {\n if (row) {\n return this.writer.appendRow(row).then(callback);\n }\n callback();\n return Promise.resolve();\n }\n\n // tslint:disable-next-line:function-name\n async _flush(callback: (val?: any) => void) {\n await this.writer.close(callback);\n }\n}\n */\n\n/**\n * Encode a consecutive array of data using one of the parquet encodings\n */\nfunction encodeValues(\n type: PrimitiveType,\n encoding: ParquetCodec,\n values: any[],\n opts: ParquetCodecOptions\n) {\n if (!(encoding in PARQUET_CODECS)) {\n throw new Error(`invalid encoding: ${encoding}`);\n }\n return PARQUET_CODECS[encoding].encodeValues(type, values, opts);\n}\n\n/**\n * Encode a parquet data page\n */\nasync function encodeDataPage(\n column: ParquetField,\n data: ParquetData\n): Promise<{\n header: PageHeader;\n headerSize: number;\n page: Buffer;\n}> {\n /* encode repetition and definition levels */\n let rLevelsBuf = Buffer.alloc(0);\n if (column.rLevelMax > 0) {\n rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {\n bitWidth: getBitWidth(column.rLevelMax)\n // disableEnvelope: false\n });\n }\n\n let dLevelsBuf = Buffer.alloc(0);\n if (column.dLevelMax > 0) {\n dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {\n bitWidth: getBitWidth(column.dLevelMax)\n // disableEnvelope: false\n });\n }\n\n /* encode values */\n const valuesBuf = encodeValues(column.primitiveType!, column.encoding!, data.values, {\n typeLength: column.typeLength,\n bitWidth: column.typeLength\n });\n\n const dataBuf = Buffer.concat([rLevelsBuf, dLevelsBuf, valuesBuf]);\n\n // compression = column.compression === 'UNCOMPRESSED' ? 
(compression || 'UNCOMPRESSED') : column.compression;\n const compressedBuf = await Compression.deflate(column.compression!, dataBuf);\n\n /* build page header */\n const header = new PageHeader({\n type: PageType.DATA_PAGE,\n data_page_header: new DataPageHeader({\n num_values: data.count,\n encoding: Encoding[column.encoding!] as any,\n definition_level_encoding: Encoding[PARQUET_RDLVL_ENCODING], // [PARQUET_RDLVL_ENCODING],\n repetition_level_encoding: Encoding[PARQUET_RDLVL_ENCODING] // [PARQUET_RDLVL_ENCODING]\n }),\n uncompressed_page_size: dataBuf.length,\n compressed_page_size: compressedBuf.length\n });\n\n /* concat page header, repetition and definition levels and values */\n const headerBuf = serializeThrift(header);\n const page = Buffer.concat([headerBuf, compressedBuf]);\n\n return {header, headerSize: headerBuf.length, page};\n}\n\n/**\n * Encode a parquet data page (v2)\n */\nasync function encodeDataPageV2(\n column: ParquetField,\n data: ParquetData,\n rowCount: number\n): Promise<{\n header: PageHeader;\n headerSize: number;\n page: Buffer;\n}> {\n /* encode values */\n const valuesBuf = encodeValues(column.primitiveType!, column.encoding!, data.values, {\n typeLength: column.typeLength,\n bitWidth: column.typeLength\n });\n\n // compression = column.compression === 'UNCOMPRESSED' ? (compression || 'UNCOMPRESSED') : column.compression;\n const compressedBuf = await Compression.deflate(column.compression!, valuesBuf);\n\n /* encode repetition and definition levels */\n let rLevelsBuf = Buffer.alloc(0);\n if (column.rLevelMax > 0) {\n rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {\n bitWidth: getBitWidth(column.rLevelMax),\n disableEnvelope: true\n });\n }\n\n let dLevelsBuf = Buffer.alloc(0);\n if (column.dLevelMax > 0) {\n dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {\n bitWidth: getBitWidth(column.dLevelMax),\n disableEnvelope: true\n });\n }\n\n /* build page header */\n const header = new PageHeader({\n type: PageType.DATA_PAGE_V2,\n data_page_header_v2: new DataPageHeaderV2({\n num_values: data.count,\n num_nulls: data.count - data.values.length,\n num_rows: rowCount,\n encoding: Encoding[column.encoding!] as any,\n definition_levels_byte_length: dLevelsBuf.length,\n repetition_levels_byte_length: rLevelsBuf.length,\n is_compressed: column.compression !== 'UNCOMPRESSED'\n }),\n uncompressed_page_size: rLevelsBuf.length + dLevelsBuf.length + valuesBuf.length,\n compressed_page_size: rLevelsBuf.length + dLevelsBuf.length + compressedBuf.length\n });\n\n /* concat page header, repetition and definition levels and values */\n const headerBuf = serializeThrift(header);\n const page = Buffer.concat([headerBuf, rLevelsBuf, dLevelsBuf, compressedBuf]);\n return {header, headerSize: headerBuf.length, page};\n}\n\n/**\n * Encode an array of values into a parquet column chunk\n */\nasync function encodeColumnChunk(\n column: ParquetField,\n buffer: ParquetBuffer,\n offset: number,\n opts: ParquetEncoderOptions\n): Promise<{\n body: Buffer;\n metadata: ColumnMetaData;\n metadataOffset: number;\n}> {\n const data = buffer.columnData[column.path.join()];\n const baseOffset = (opts.baseOffset || 0) + offset;\n /* encode data page(s) */\n // const pages: Buffer[] = [];\n let pageBuf: Buffer;\n // tslint:disable-next-line:variable-name\n let total_uncompressed_size = 0;\n // tslint:disable-next-line:variable-name\n let total_compressed_size = 0;\n {\n const result = opts.useDataPageV2\n ? 
await encodeDataPageV2(column, data, buffer.rowCount)\n : await encodeDataPage(column, data);\n // pages.push(result.page);\n pageBuf = result.page;\n total_uncompressed_size += result.header.uncompressed_page_size + result.headerSize;\n total_compressed_size += result.header.compressed_page_size + result.headerSize;\n }\n\n // const pagesBuf = Buffer.concat(pages);\n // const compression = column.compression === 'UNCOMPRESSED' ? (opts.compression || 'UNCOMPRESSED') : column.compression;\n\n /* prepare metadata header */\n const metadata = new ColumnMetaData({\n path_in_schema: column.path,\n num_values: data.count,\n data_page_offset: baseOffset,\n encodings: [],\n total_uncompressed_size, // : pagesBuf.length,\n total_compressed_size,\n type: Type[column.primitiveType!],\n codec: CompressionCodec[column.compression!]\n });\n\n /* list encodings */\n metadata.encodings.push(Encoding[PARQUET_RDLVL_ENCODING]);\n metadata.encodings.push(Encoding[column.encoding!]);\n\n /* concat metadata header and data pages */\n const metadataOffset = baseOffset + pageBuf.length;\n const body = Buffer.concat([pageBuf, serializeThrift(metadata)]);\n return {body, metadata, metadataOffset};\n}\n\n/**\n * Encode a list of column values into a parquet row group\n */\nasync function encodeRowGroup(\n schema: ParquetSchema,\n data: ParquetBuffer,\n opts: ParquetEncoderOptions\n): Promise<{\n body: Buffer;\n metadata: RowGroup;\n}> {\n const metadata = new RowGroup({\n num_rows: data.rowCount,\n columns: [],\n total_byte_size: 0\n });\n\n let body = Buffer.alloc(0);\n for (const field of schema.fieldList) {\n if (field.isNested) {\n continue; // eslint-disable-line no-continue\n }\n\n const cchunkData = await encodeColumnChunk(field, data, body.length, opts);\n\n const cchunk = new ColumnChunk({\n file_offset: cchunkData.metadataOffset,\n meta_data: cchunkData.metadata\n });\n\n metadata.columns.push(cchunk);\n metadata.total_byte_size = new Int64(Number(metadata.total_byte_size) + cchunkData.body.length);\n\n body = Buffer.concat([body, cchunkData.body]);\n }\n\n return {body, metadata};\n}\n\n/**\n * Encode a parquet file metadata footer\n */\nfunction encodeFooter(\n schema: ParquetSchema,\n rowCount: number,\n rowGroups: RowGroup[],\n userMetadata: Record<string, string>\n): Buffer {\n const metadata = new FileMetaData({\n version: PARQUET_VERSION,\n created_by: 'parquets',\n num_rows: rowCount,\n row_groups: rowGroups,\n schema: [],\n key_value_metadata: []\n });\n\n for (const key in userMetadata) {\n const kv = new KeyValue({\n key,\n value: userMetadata[key]\n });\n metadata.key_value_metadata?.push?.(kv);\n }\n\n {\n const schemaRoot = new SchemaElement({\n name: 'root',\n num_children: Object.keys(schema.fields).length\n });\n metadata.schema.push(schemaRoot);\n }\n\n for (const field of schema.fieldList) {\n const relt = FieldRepetitionType[field.repetitionType];\n const schemaElem = new SchemaElement({\n name: field.name,\n repetition_type: relt as any\n });\n\n if (field.isNested) {\n schemaElem.num_children = field.fieldCount;\n } else {\n schemaElem.type = Type[field.primitiveType!] 
as Type;\n }\n\n if (field.originalType) {\n schemaElem.converted_type = ConvertedType[field.originalType] as ConvertedType;\n }\n\n schemaElem.type_length = field.typeLength;\n\n metadata.schema.push(schemaElem);\n }\n\n const metadataEncoded = serializeThrift(metadata);\n const footerEncoded = Buffer.alloc(metadataEncoded.length + 8);\n metadataEncoded.copy(footerEncoded);\n footerEncoded.writeUInt32LE(metadataEncoded.length, metadataEncoded.length);\n footerEncoded.write(PARQUET_MAGIC, metadataEncoded.length + 4);\n return footerEncoded;\n}\n"],"mappings":";AAGA,SAA6BA,cAAc,QAAO,WAAW;AAC7D,OAAO,KAAKC,WAAW,MAAM,gBAAgB;AAS7C,OAAO,KAAKC,KAAK,MAAM,iBAAiB;AACxC,SACEC,WAAW,EACXC,cAAc,EACdC,gBAAgB,EAChBC,aAAa,EACbC,cAAc,EACdC,gBAAgB,EAChBC,QAAQ,EACRC,mBAAmB,EACnBC,YAAY,EACZC,QAAQ,EACRC,UAAU,EACVC,QAAQ,EACRC,QAAQ,EACRC,aAAa,EACbC,IAAI,QACC,mBAAmB;AAC1B,SAAQC,MAAM,EAAEC,OAAO,EAAEC,OAAO,QAAO,qBAAqB;AAC5D,SAAQC,WAAW,EAAEC,eAAe,QAAO,qBAAqB;AAChE,OAAOC,KAAK,MAAM,YAAY;AAK9B,MAAMC,aAAa,GAAG,MAAM;AAK5B,MAAMC,eAAe,GAAG,CAAC;AAKzB,MAAMC,yBAAyB,GAAG,IAAI;AACtC,MAAMC,8BAA8B,GAAG,IAAI;AAK3C,MAAMC,kBAAkB,GAAG,OAAO;AAClC,MAAMC,sBAAsB,GAAG,KAAK;AAuBpC,OAAO,MAAMC,cAAc,CAAI;EAK7B,aAAaC,QAAQA,CACnBC,MAAqB,EACrBC,IAAY,EACZC,IAA4B,EACA;IAC5B,MAAMC,YAAY,GAAG,MAAMjB,MAAM,CAACe,IAAI,EAAEC,IAAI,CAAC;IAC7C,OAAOJ,cAAc,CAACM,UAAU,CAACJ,MAAM,EAAEG,YAAY,EAAED,IAAI,CAAC;EAC9D;EAMA,aAAaE,UAAUA,CACrBJ,MAAqB,EACrBG,YAA6B,EAED;IAAA,IAD5BD,IAA2B,GAAAG,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC,CAAC;IAEhC,MAAMG,cAAc,GAAG,MAAMC,qBAAqB,CAACL,UAAU,CAACJ,MAAM,EAAEG,YAAY,EAAED,IAAI,CAAC;IACzF,OAAO,IAAIJ,cAAc,CAACE,MAAM,EAAEQ,cAAc,EAAEN,IAAI,CAAC;EACzD;EAYAQ,WAAWA,CACTV,MAAqB,EACrBQ,cAAqC,EACrCN,IAA2B,EAC3B;IAAAS,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IACA,IAAI,CAACX,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACQ,cAAc,GAAGA,cAAc;IAEpC,IAAI,CAACI,SAAS,GAAG,CAAC,CAAC;IACnB,IAAI,CAACC,YAAY,GAAGX,IAAI,CAACW,YAAY,IAAIlB,8BAA8B;IACvE,IAAI,CAACmB,MAAM,GAAG,KAAK;IACnB,IAAI,CAACC,YAAY,GAAG,CAAC,CAAC;IAGtB,IAAI,CAACC,WAAW,CAAC,CAAC;EACpB;EAEA,MAAMA,WAAWA,CAAA,EAAkB;IAEjC,IAAI;MACF,MAAM,IAAI,CAACR,cAAc,CAACQ,WAAW,CAAC,CAAC;IACzC,CAAC,CAAC,OAAOC,GAAG,EAAE;MACZ,MAAM,IAAI,CAACT,cAAc,CAACU,KAAK,CAAC,CAAC;MACjC,MAAMD,GAAG;IACX;EACF;EAMA,MAAME,SAASA,CAAIC,GAAM,EAAiB;IACxC,IAAI,IAAI,CAACN,MAAM,EAAE;MACf,MAAM,IAAIO,KAAK,CAAC,mBAAmB,CAAC;IACtC;IACAnD,KAAK,CAACoD,WAAW,CAAC,IAAI,CAACtB,MAAM,EAAEoB,GAAG,EAAE,IAAI,CAACR,SAAS,CAAC;IACnD,IAAI,IAAI,CAACA,SAAS,CAACW,QAAQ,IAAI,IAAI,CAACV,YAAY,EAAE;MAEhD,IAAI,CAACD,SAAS,GAAG,CAAC,CAAC;IACrB;EACF;EAQA,MAAMM,KAAKA,CAACM,QAAqB,EAAiB;IAChD,IAAI,IAAI,CAACV,MAAM,EAAE;MACf,MAAM,IAAIO,KAAK,CAAC,mBAAmB,CAAC;IACtC;IAEA,IAAI,CAACP,MAAM,GAAG,IAAI;IAElB,IAAI,IAAI,CAACF,SAAS,CAACW,QAAQ,GAAG,CAAC,IAAI,IAAI,CAACX,SAAS,CAACW,QAAQ,IAAI,IAAI,CAACV,YAAY,EAAE;MAE/E,IAAI,CAACD,SAAS,GAAG,CAAC,CAAC;IACrB;IAEA,MAAM,IAAI,CAACJ,cAAc,CAACiB,WAAW,CAAC,IAAI,CAACV,YAAY,CAAC;IACxD,MAAM,IAAI,CAACP,cAAc,CAACU,KAAK,CAAC,CAAC;IAGjC,IAAIM,QAAQ,EAAE;MACZA,QAAQ,CAAC,CAAC;IACZ;EACF;EAKAE,WAAWA,CAACC,GAAW,EAAEC,KAAa,EAAQ;IAE5C,IAAI,CAACb,YAAY,CAACc,MAAM,CAACF,GAAG,CAAC,CAAC,GAAGE,MAAM,CAACD,KAAK,CAAC;EAChD;EAQAE,eAAeA,CAACC,GAAW,EAAQ;IACjC,IAAI,CAAClB,YAAY,GAAGkB,GAAG;EACzB;EAMAC,WAAWA,CAACD,GAAW,EAAQ;IAC7B,IAAI,CAACvB,cAAc,CAACwB,WAAW,CAACD,GAAG,CAAC;EACtC;AACF;AAQA,OAAO,MAAMtB,qBAAqB,CAAC;EAIjC,aAAaL,UAAUA,CACrBJ,MAAqB,EACrBG,YAA6B,EAC7BD,IAA2B,EACK;IAChC,MAAM+B,OAAO,GAAG9C,OAAO,CAAC+C,IAAI,CAAC3B,SAAS,EAAEJ,YAAY,CAAC;IACrD,MAAMgC,OAAO,GAAG/C,OAAO,CAAC8C,IAAI,CAAC3B,SAAS,EAAEJ,YAAY,CAAC;IACrD,OAAO,IAAIM,qBAAqB,CAACT,MAAM,EAAEiC,OAAO
,EAAEE,OAAO,EAAE,CAAC,EAAEjC,IAAI,CAAC;EACrE;EAWAQ,WAAWA,CACTV,MAAqB,EACrBiC,OAAuC,EACvCE,OAA4B,EAC5BC,UAAkB,EAClBlC,IAA2B,EAC3B;IAAAS,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IAAAA,eAAA;IACA,IAAI,CAACX,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACqC,KAAK,GAAGJ,OAAO;IACpB,IAAI,CAACf,KAAK,GAAGiB,OAAO;IACpB,IAAI,CAACG,MAAM,GAAGF,UAAU;IACxB,IAAI,CAACb,QAAQ,GAAG,CAAC;IACjB,IAAI,CAACgB,SAAS,GAAG,EAAE;IACnB,IAAI,CAACC,QAAQ,GAAGtC,IAAI,CAACsC,QAAQ,IAAI9C,yBAAyB;IAC1D,IAAI,CAAC+C,aAAa,GAAG,eAAe,IAAIvC,IAAI,GAAGwC,OAAO,CAACxC,IAAI,CAACuC,aAAa,CAAC,GAAG,KAAK;EACpF;EAEAE,YAAYA,CAACC,GAAW,EAAiB;IACvC,IAAI,CAACN,MAAM,IAAIM,GAAG,CAACtC,MAAM;IACzB,OAAO,IAAI,CAAC+B,KAAK,CAACO,GAAG,CAAC;EACxB;EAKA5B,WAAWA,CAAA,EAAkB;IAC3B,OAAO,IAAI,CAAC2B,YAAY,CAACE,MAAM,CAACC,IAAI,CAACtD,aAAa,CAAC,CAAC;EACtD;EAMA,MAAMuD,aAAaA,CAACC,OAAsB,EAAiB;IACzD,MAAMC,MAAM,GAAG,MAAMC,cAAc,CAAC,IAAI,CAAClD,MAAM,EAAEgD,OAAO,EAAE;MACxDG,UAAU,EAAE,IAAI,CAACb,MAAM;MACvBE,QAAQ,EAAE,IAAI,CAACA,QAAQ;MACvBC,aAAa,EAAE,IAAI,CAACA;IACtB,CAAC,CAAC;IAEF,IAAI,CAAClB,QAAQ,IAAIyB,OAAO,CAACzB,QAAQ;IACjC,IAAI,CAACgB,SAAS,CAACa,IAAI,CAACH,MAAM,CAACI,QAAQ,CAAC;IACpC,OAAO,MAAM,IAAI,CAACV,YAAY,CAACM,MAAM,CAACK,IAAI,CAAC;EAC7C;EAKA7B,WAAWA,CAACV,YAAoC,EAAiB;IAC/D,IAAI,CAACA,YAAY,EAAE;MAEjBA,YAAY,GAAG,CAAC,CAAC;IACnB;IAEA,OAAO,IAAI,CAAC4B,YAAY,CACtBY,YAAY,CAAC,IAAI,CAACvD,MAAM,EAAE,IAAI,CAACuB,QAAQ,EAAE,IAAI,CAACgB,SAAS,EAAExB,YAAY,CACvE,CAAC;EACH;EAMAiB,WAAWA,CAACD,GAAW,EAAQ;IAC7B,IAAI,CAACS,QAAQ,GAAGT,GAAG;EACrB;AACF;AA0CA,SAASyB,YAAYA,CACnBC,IAAmB,EACnBC,QAAsB,EACtBC,MAAa,EACbzD,IAAyB,EACzB;EACA,IAAI,EAAEwD,QAAQ,IAAI1F,cAAc,CAAC,EAAE;IACjC,MAAM,IAAIqD,KAAK,sBAAAuC,MAAA,CAAsBF,QAAQ,CAAE,CAAC;EAClD;EACA,OAAO1F,cAAc,CAAC0F,QAAQ,CAAC,CAACF,YAAY,CAACC,IAAI,EAAEE,MAAM,EAAEzD,IAAI,CAAC;AAClE;AAKA,eAAe2D,cAAcA,CAC3BC,MAAoB,EACpBC,IAAiB,EAKhB;EAED,IAAIC,UAAU,GAAGnB,MAAM,CAACoB,KAAK,CAAC,CAAC,CAAC;EAChC,IAAIH,MAAM,CAACI,SAAS,GAAG,CAAC,EAAE;IACxBF,UAAU,GAAGR,YAAY,CAAC5D,kBAAkB,EAAEC,sBAAsB,EAAEkE,IAAI,CAACI,OAAO,EAAE;MAClFC,QAAQ,EAAE/E,WAAW,CAACyE,MAAM,CAACI,SAAS;IAExC,CAAC,CAAC;EACJ;EAEA,IAAIG,UAAU,GAAGxB,MAAM,CAACoB,KAAK,CAAC,CAAC,CAAC;EAChC,IAAIH,MAAM,CAACQ,SAAS,GAAG,CAAC,EAAE;IACxBD,UAAU,GAAGb,YAAY,CAAC5D,kBAAkB,EAAEC,sBAAsB,EAAEkE,IAAI,CAACQ,OAAO,EAAE;MAClFH,QAAQ,EAAE/E,WAAW,CAACyE,MAAM,CAACQ,SAAS;IAExC,CAAC,CAAC;EACJ;EAGA,MAAME,SAAS,GAAGhB,YAAY,CAACM,MAAM,CAACW,aAAa,EAAGX,MAAM,CAACJ,QAAQ,EAAGK,IAAI,CAACJ,MAAM,EAAE;IACnFe,UAAU,EAAEZ,MAAM,CAACY,UAAU;IAC7BN,QAAQ,EAAEN,MAAM,CAACY;EACnB,CAAC,CAAC;EAEF,MAAMC,OAAO,GAAG9B,MAAM,CAACe,MAAM,CAAC,CAACI,UAAU,EAAEK,UAAU,EAAEG,SAAS,CAAC,CAAC;EAGlE,MAAMI,aAAa,GAAG,MAAM3G,WAAW,CAAC4G,OAAO,CAACf,MAAM,CAACgB,WAAW,EAAGH,OAAO,CAAC;EAG7E,MAAMI,MAAM,GAAG,IAAIlG,UAAU,CAAC;IAC5B4E,IAAI,EAAE3E,QAAQ,CAACkG,SAAS;IACxBC,gBAAgB,EAAE,IAAI1G,cAAc,CAAC;MACnC2G,UAAU,EAAEnB,IAAI,CAACoB,KAAK;MACtBzB,QAAQ,EAAEjF,QAAQ,CAACqF,MAAM,CAACJ,QAAQ,CAAS;MAC3C0B,yBAAyB,EAAE3G,QAAQ,CAACoB,sBAAsB,CAAC;MAC3DwF,yBAAyB,EAAE5G,QAAQ,CAACoB,sBAAsB;IAC5D,CAAC,CAAC;IACFyF,sBAAsB,EAAEX,OAAO,CAACrE,MAAM;IACtCiF,oBAAoB,EAAEX,aAAa,CAACtE;EACtC,CAAC,CAAC;EAGF,MAAMkF,SAAS,GAAGlG,eAAe,CAACyF,MAAM,CAAC;EACzC,MAAMU,IAAI,GAAG5C,MAAM,CAACe,MAAM,CAAC,CAAC4B,SAAS,EAAEZ,aAAa,CAAC,CAAC;EAEtD,OAAO;IAACG,MAAM;IAAEW,UAAU,EAAEF,SAAS,CAAClF,MAAM;IAAEmF;EAAI,CAAC;AACrD;AAKA,eAAeE,gBAAgBA,CAC7B7B,MAAoB,EACpBC,IAAiB,EACjBxC,QAAgB,EAKf;EAED,MAAMiD,SAAS,GAAGhB,YAAY,CAACM,MAAM,CAACW,aAAa,EAAGX,MAAM,CAACJ,QAAQ,EAAGK,IAAI,CAACJ,MAAM,EAAE;IACnFe,UAAU,EAAEZ,MAAM,CAACY,UAAU;IAC7BN,QAAQ,EAAEN,MAAM,CAACY;EACnB,CAAC,CAAC;EAGF,MAAME,aAAa,GAAG,MAAM3G,WAAW,CAAC4G,OAAO,CAACf,MAAM,CAACgB,WA
AW,EAAGN,SAAS,CAAC;EAG/E,IAAIR,UAAU,GAAGnB,MAAM,CAACoB,KAAK,CAAC,CAAC,CAAC;EAChC,IAAIH,MAAM,CAACI,SAAS,GAAG,CAAC,EAAE;IACxBF,UAAU,GAAGR,YAAY,CAAC5D,kBAAkB,EAAEC,sBAAsB,EAAEkE,IAAI,CAACI,OAAO,EAAE;MAClFC,QAAQ,EAAE/E,WAAW,CAACyE,MAAM,CAACI,SAAS,CAAC;MACvC0B,eAAe,EAAE;IACnB,CAAC,CAAC;EACJ;EAEA,IAAIvB,UAAU,GAAGxB,MAAM,CAACoB,KAAK,CAAC,CAAC,CAAC;EAChC,IAAIH,MAAM,CAACQ,SAAS,GAAG,CAAC,EAAE;IACxBD,UAAU,GAAGb,YAAY,CAAC5D,kBAAkB,EAAEC,sBAAsB,EAAEkE,IAAI,CAACQ,OAAO,EAAE;MAClFH,QAAQ,EAAE/E,WAAW,CAACyE,MAAM,CAACQ,SAAS,CAAC;MACvCsB,eAAe,EAAE;IACnB,CAAC,CAAC;EACJ;EAGA,MAAMb,MAAM,GAAG,IAAIlG,UAAU,CAAC;IAC5B4E,IAAI,EAAE3E,QAAQ,CAAC+G,YAAY;IAC3BC,mBAAmB,EAAE,IAAItH,gBAAgB,CAAC;MACxC0G,UAAU,EAAEnB,IAAI,CAACoB,KAAK;MACtBY,SAAS,EAAEhC,IAAI,CAACoB,KAAK,GAAGpB,IAAI,CAACJ,MAAM,CAACrD,MAAM;MAC1C0F,QAAQ,EAAEzE,QAAQ;MAClBmC,QAAQ,EAAEjF,QAAQ,CAACqF,MAAM,CAACJ,QAAQ,CAAS;MAC3CuC,6BAA6B,EAAE5B,UAAU,CAAC/D,MAAM;MAChD4F,6BAA6B,EAAElC,UAAU,CAAC1D,MAAM;MAChD6F,aAAa,EAAErC,MAAM,CAACgB,WAAW,KAAK;IACxC,CAAC,CAAC;IACFQ,sBAAsB,EAAEtB,UAAU,CAAC1D,MAAM,GAAG+D,UAAU,CAAC/D,MAAM,GAAGkE,SAAS,CAAClE,MAAM;IAChFiF,oBAAoB,EAAEvB,UAAU,CAAC1D,MAAM,GAAG+D,UAAU,CAAC/D,MAAM,GAAGsE,aAAa,CAACtE;EAC9E,CAAC,CAAC;EAGF,MAAMkF,SAAS,GAAGlG,eAAe,CAACyF,MAAM,CAAC;EACzC,MAAMU,IAAI,GAAG5C,MAAM,CAACe,MAAM,CAAC,CAAC4B,SAAS,EAAExB,UAAU,EAAEK,UAAU,EAAEO,aAAa,CAAC,CAAC;EAC9E,OAAO;IAACG,MAAM;IAAEW,UAAU,EAAEF,SAAS,CAAClF,MAAM;IAAEmF;EAAI,CAAC;AACrD;AAKA,eAAeW,iBAAiBA,CAC9BtC,MAAoB,EACpBuC,MAAqB,EACrB/D,MAAc,EACdpC,IAA2B,EAK1B;EACD,MAAM6D,IAAI,GAAGsC,MAAM,CAACC,UAAU,CAACxC,MAAM,CAAC7D,IAAI,CAACsG,IAAI,CAAC,CAAC,CAAC;EAClD,MAAMpD,UAAU,GAAG,CAACjD,IAAI,CAACiD,UAAU,IAAI,CAAC,IAAIb,MAAM;EAGlD,IAAIkE,OAAe;EAEnB,IAAIC,uBAAuB,GAAG,CAAC;EAE/B,IAAIC,qBAAqB,GAAG,CAAC;EAC7B;IACE,MAAMC,MAAM,GAAGzG,IAAI,CAACuC,aAAa,GAC7B,MAAMkD,gBAAgB,CAAC7B,MAAM,EAAEC,IAAI,EAAEsC,MAAM,CAAC9E,QAAQ,CAAC,GACrD,MAAMsC,cAAc,CAACC,MAAM,EAAEC,IAAI,CAAC;IAEtCyC,OAAO,GAAGG,MAAM,CAAClB,IAAI;IACrBgB,uBAAuB,IAAIE,MAAM,CAAC5B,MAAM,CAACO,sBAAsB,GAAGqB,MAAM,CAACjB,UAAU;IACnFgB,qBAAqB,IAAIC,MAAM,CAAC5B,MAAM,CAACQ,oBAAoB,GAAGoB,MAAM,CAACjB,UAAU;EACjF;EAMA,MAAMrC,QAAQ,GAAG,IAAIjF,cAAc,CAAC;IAClCwI,cAAc,EAAE9C,MAAM,CAAC7D,IAAI;IAC3BiF,UAAU,EAAEnB,IAAI,CAACoB,KAAK;IACtB0B,gBAAgB,EAAE1D,UAAU;IAC5B2D,SAAS,EAAE,EAAE;IACbL,uBAAuB;IACvBC,qBAAqB;IACrBjD,IAAI,EAAExE,IAAI,CAAC6E,MAAM,CAACW,aAAa,CAAE;IACjCsC,KAAK,EAAE1I,gBAAgB,CAACyF,MAAM,CAACgB,WAAW;EAC5C,CAAC,CAAC;EAGFzB,QAAQ,CAACyD,SAAS,CAAC1D,IAAI,CAAC3E,QAAQ,CAACoB,sBAAsB,CAAC,CAAC;EACzDwD,QAAQ,CAACyD,SAAS,CAAC1D,IAAI,CAAC3E,QAAQ,CAACqF,MAAM,CAACJ,QAAQ,CAAE,CAAC;EAGnD,MAAMsD,cAAc,GAAG7D,UAAU,GAAGqD,OAAO,CAAClG,MAAM;EAClD,MAAMgD,IAAI,GAAGT,MAAM,CAACe,MAAM,CAAC,CAAC4C,OAAO,EAAElH,eAAe,CAAC+D,QAAQ,CAAC,CAAC,CAAC;EAChE,OAAO;IAACC,IAAI;IAAED,QAAQ;IAAE2D;EAAc,CAAC;AACzC;AAKA,eAAe9D,cAAcA,CAC3BlD,MAAqB,EACrB+D,IAAmB,EACnB7D,IAA2B,EAI1B;EACD,MAAMmD,QAAQ,GAAG,IAAItE,QAAQ,CAAC;IAC5BiH,QAAQ,EAAEjC,IAAI,CAACxC,QAAQ;IACvB0F,OAAO,EAAE,EAAE;IACXC,eAAe,EAAE;EACnB,CAAC,CAAC;EAEF,IAAI5D,IAAI,GAAGT,MAAM,CAACoB,KAAK,CAAC,CAAC,CAAC;EAC1B,KAAK,MAAMkD,KAAK,IAAInH,MAAM,CAACoH,SAAS,EAAE;IACpC,IAAID,KAAK,CAACE,QAAQ,EAAE;MAClB;IACF;IAEA,MAAMC,UAAU,GAAG,MAAMlB,iBAAiB,CAACe,KAAK,EAAEpD,IAAI,EAAET,IAAI,CAAChD,MAAM,EAAEJ,IAAI,CAAC;IAE1E,MAAMqH,MAAM,GAAG,IAAIpJ,WAAW,CAAC;MAC7BqJ,WAAW,EAAEF,UAAU,CAACN,cAAc;MACtCS,SAAS,EAAEH,UAAU,CAACjE;IACxB,CAAC,CAAC;IAEFA,QAAQ,CAAC4D,OAAO,CAAC7D,IAAI,CAACmE,MAAM,CAAC;IAC7BlE,QAAQ,CAAC6D,eAAe,GAAG,IAAI3H,KAAK,CAACmI,MAAM,CAACrE,QAAQ,CAAC6D,eAAe,CAAC,GAAGI,UAAU,CAAChE,IAAI,CAAChD,MAAM,CAAC;IAE/FgD,IAAI,GAAGT,MAAM,CAACe,MAAM,CAAC,CAACN,IAAI,EAAEgE,UAAU,CAAChE,IAAI,C
AAC,CAAC;EAC/C;EAEA,OAAO;IAACA,IAAI;IAAED;EAAQ,CAAC;AACzB;AAKA,SAASE,YAAYA,CACnBvD,MAAqB,EACrBuB,QAAgB,EAChBgB,SAAqB,EACrBxB,YAAoC,EAC5B;EACR,MAAMsC,QAAQ,GAAG,IAAI1E,YAAY,CAAC;IAChCgJ,OAAO,EAAElI,eAAe;IACxBmI,UAAU,EAAE,UAAU;IACtB5B,QAAQ,EAAEzE,QAAQ;IAClBsG,UAAU,EAAEtF,SAAS;IACrBvC,MAAM,EAAE,EAAE;IACV8H,kBAAkB,EAAE;EACtB,CAAC,CAAC;EAEF,KAAK,MAAMnG,GAAG,IAAIZ,YAAY,EAAE;IAAA,IAAAgH,qBAAA,EAAAC,sBAAA,EAAAC,sBAAA;IAC9B,MAAMC,EAAE,GAAG,IAAItJ,QAAQ,CAAC;MACtB+C,GAAG;MACHC,KAAK,EAAEb,YAAY,CAACY,GAAG;IACzB,CAAC,CAAC;IACF,CAAAoG,qBAAA,GAAA1E,QAAQ,CAACyE,kBAAkB,cAAAC,qBAAA,wBAAAC,sBAAA,GAA3B,CAAAC,sBAAA,GAAAF,qBAAA,EAA6B3E,IAAI,cAAA4E,sBAAA,uBAAjCA,sBAAA,CAAAG,IAAA,CAAAF,sBAAA,EAAoCC,EAAE,CAAC;EACzC;EAEA;IACE,MAAME,UAAU,GAAG,IAAIpJ,aAAa,CAAC;MACnCqJ,IAAI,EAAE,MAAM;MACZC,YAAY,EAAEC,MAAM,CAACC,IAAI,CAACxI,MAAM,CAACyI,MAAM,CAAC,CAACnI;IAC3C,CAAC,CAAC;IACF+C,QAAQ,CAACrD,MAAM,CAACoD,IAAI,CAACgF,UAAU,CAAC;EAClC;EAEA,KAAK,MAAMjB,KAAK,IAAInH,MAAM,CAACoH,SAAS,EAAE;IACpC,MAAMsB,IAAI,GAAGhK,mBAAmB,CAACyI,KAAK,CAACwB,cAAc,CAAC;IACtD,MAAMC,UAAU,GAAG,IAAI5J,aAAa,CAAC;MACnCqJ,IAAI,EAAElB,KAAK,CAACkB,IAAI;MAChBQ,eAAe,EAAEH;IACnB,CAAC,CAAC;IAEF,IAAIvB,KAAK,CAACE,QAAQ,EAAE;MAClBuB,UAAU,CAACN,YAAY,GAAGnB,KAAK,CAAC2B,UAAU;IAC5C,CAAC,MAAM;MACLF,UAAU,CAACnF,IAAI,GAAGxE,IAAI,CAACkI,KAAK,CAAC1C,aAAa,CAAU;IACtD;IAEA,IAAI0C,KAAK,CAAC4B,YAAY,EAAE;MACtBH,UAAU,CAACI,cAAc,GAAG1K,aAAa,CAAC6I,KAAK,CAAC4B,YAAY,CAAkB;IAChF;IAEAH,UAAU,CAACK,WAAW,GAAG9B,KAAK,CAACzC,UAAU;IAEzCrB,QAAQ,CAACrD,MAAM,CAACoD,IAAI,CAACwF,UAAU,CAAC;EAClC;EAEA,MAAMM,eAAe,GAAG5J,eAAe,CAAC+D,QAAQ,CAAC;EACjD,MAAM8F,aAAa,GAAGtG,MAAM,CAACoB,KAAK,CAACiF,eAAe,CAAC5I,MAAM,GAAG,CAAC,CAAC;EAC9D4I,eAAe,CAACE,IAAI,CAACD,aAAa,CAAC;EACnCA,aAAa,CAACE,aAAa,CAACH,eAAe,CAAC5I,MAAM,EAAE4I,eAAe,CAAC5I,MAAM,CAAC;EAC3E6I,aAAa,CAAC9G,KAAK,CAAC7C,aAAa,EAAE0J,eAAe,CAAC5I,MAAM,GAAG,CAAC,CAAC;EAC9D,OAAO6I,aAAa;AACtB"}
@@ -1 +1 @@
- {"version":3,"file":"BsonType.js","names":["thrift","BsonType","constructor","write","output","writeStructBegin","writeFieldStop","writeStructEnd","read","input","readStructBegin","ret","readFieldBegin","fieldType","ftype","fieldId","fid","Thrift","Type","STOP","skip","readFieldEnd","readStructEnd"],"sources":["../../../../src/parquetjs/parquet-thrift/BsonType.ts"],"sourcesContent":["/* tslint:disable */\n/* eslint-disable */\n/*\n * Autogenerated by @creditkarma/thrift-typescript v3.7.2\n * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING\n */\nimport * as thrift from 'thrift';\nexport interface IBsonTypeArgs {}\nexport class BsonType {\n constructor() {}\n public write(output: thrift.TProtocol): void {\n output.writeStructBegin('BsonType');\n output.writeFieldStop();\n output.writeStructEnd();\n return;\n }\n public static read(input: thrift.TProtocol): BsonType {\n input.readStructBegin();\n while (true) {\n const ret: thrift.TField = input.readFieldBegin();\n const fieldType: thrift.Thrift.Type = ret.ftype;\n const fieldId: number = ret.fid;\n if (fieldType === thrift.Thrift.Type.STOP) {\n break;\n }\n switch (fieldId) {\n default: {\n input.skip(fieldType);\n }\n }\n input.readFieldEnd();\n }\n input.readStructEnd();\n return new BsonType();\n }\n}\n"],"mappings":"AAMA,OAAO,KAAKA,MAAM,MAAM,QAAQ;AAEhC,OAAO,MAAMC,QAAQ,CAAC;EACpBC,WAAWA,CAAA,EAAG,CAAC;EACRC,KAAKA,CAACC,MAAwB,EAAQ;IAC3CA,MAAM,CAACC,gBAAgB,CAAC,UAAU,CAAC;IACnCD,MAAM,CAACE,cAAc,EAAE;IACvBF,MAAM,CAACG,cAAc,EAAE;IACvB;EACF;EACA,OAAcC,IAAIA,CAACC,KAAuB,EAAY;IACpDA,KAAK,CAACC,eAAe,EAAE;IACvB,OAAO,IAAI,EAAE;MACX,MAAMC,GAAkB,GAAGF,KAAK,CAACG,cAAc,EAAE;MACjD,MAAMC,SAA6B,GAAGF,GAAG,CAACG,KAAK;MAC/C,MAAMC,OAAe,GAAGJ,GAAG,CAACK,GAAG;MAC/B,IAAIH,SAAS,KAAKb,MAAM,CAACiB,MAAM,CAACC,IAAI,CAACC,IAAI,EAAE;QACzC;MACF;MACA,QAAQJ,OAAO;QACb;UAAS;YACPN,KAAK,CAACW,IAAI,CAACP,SAAS,CAAC;UACvB;MAAC;MAEHJ,KAAK,CAACY,YAAY,EAAE;IACtB;IACAZ,KAAK,CAACa,aAAa,EAAE;IACrB,OAAO,IAAIrB,QAAQ,EAAE;EACvB;AACF"}
+ {"version":3,"file":"BsonType.js","names":["thrift","BsonType","constructor","write","output","writeStructBegin","writeFieldStop","writeStructEnd","read","input","readStructBegin","ret","readFieldBegin","fieldType","ftype","fieldId","fid","Thrift","Type","STOP","skip","readFieldEnd","readStructEnd"],"sources":["../../../../src/parquetjs/parquet-thrift/BsonType.ts"],"sourcesContent":["/* tslint:disable */\n/* eslint-disable */\n/*\n * Autogenerated by @creditkarma/thrift-typescript v3.7.2\n * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING\n */\nimport * as thrift from 'thrift';\nexport interface IBsonTypeArgs {}\nexport class BsonType {\n constructor() {}\n public write(output: thrift.TProtocol): void {\n output.writeStructBegin('BsonType');\n output.writeFieldStop();\n output.writeStructEnd();\n return;\n }\n public static read(input: thrift.TProtocol): BsonType {\n input.readStructBegin();\n while (true) {\n const ret: thrift.TField = input.readFieldBegin();\n const fieldType: thrift.Thrift.Type = ret.ftype;\n const fieldId: number = ret.fid;\n if (fieldType === thrift.Thrift.Type.STOP) {\n break;\n }\n switch (fieldId) {\n default: {\n input.skip(fieldType);\n }\n }\n input.readFieldEnd();\n }\n input.readStructEnd();\n return new BsonType();\n }\n}\n"],"mappings":"AAMA,OAAO,KAAKA,MAAM,MAAM,QAAQ;AAEhC,OAAO,MAAMC,QAAQ,CAAC;EACpBC,WAAWA,CAAA,EAAG,CAAC;EACRC,KAAKA,CAACC,MAAwB,EAAQ;IAC3CA,MAAM,CAACC,gBAAgB,CAAC,UAAU,CAAC;IACnCD,MAAM,CAACE,cAAc,CAAC,CAAC;IACvBF,MAAM,CAACG,cAAc,CAAC,CAAC;IACvB;EACF;EACA,OAAcC,IAAIA,CAACC,KAAuB,EAAY;IACpDA,KAAK,CAACC,eAAe,CAAC,CAAC;IACvB,OAAO,IAAI,EAAE;MACX,MAAMC,GAAkB,GAAGF,KAAK,CAACG,cAAc,CAAC,CAAC;MACjD,MAAMC,SAA6B,GAAGF,GAAG,CAACG,KAAK;MAC/C,MAAMC,OAAe,GAAGJ,GAAG,CAACK,GAAG;MAC/B,IAAIH,SAAS,KAAKb,MAAM,CAACiB,MAAM,CAACC,IAAI,CAACC,IAAI,EAAE;QACzC;MACF;MACA,QAAQJ,OAAO;QACb;UAAS;YACPN,KAAK,CAACW,IAAI,CAACP,SAAS,CAAC;UACvB;MACF;MACAJ,KAAK,CAACY,YAAY,CAAC,CAAC;IACtB;IACAZ,KAAK,CAACa,aAAa,CAAC,CAAC;IACrB,OAAO,IAAIrB,QAAQ,CAAC,CAAC;EACvB;AACF"}