@loaders.gl/parquet 3.3.0-alpha.7 → 3.3.0

This diff compares the publicly available contents of the two package versions as published to their registry. It is provided for informational purposes only.
Files changed (139)
  1. package/dist/dist.min.js +17 -26
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/index.js +3 -3
  4. package/dist/es5/index.js.map +1 -1
  5. package/dist/es5/lib/convert-schema-deep.ts.disabled +910 -0
  6. package/dist/es5/lib/parse-parquet.js +49 -25
  7. package/dist/es5/lib/parse-parquet.js.map +1 -1
  8. package/dist/es5/parquet-loader.js +3 -2
  9. package/dist/es5/parquet-loader.js.map +1 -1
  10. package/dist/es5/parquet-wasm-loader.js +1 -1
  11. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  12. package/dist/es5/parquet-wasm-writer.js +1 -1
  13. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  14. package/dist/es5/parquet-writer.js +1 -1
  15. package/dist/es5/parquet-writer.js.map +1 -1
  16. package/dist/es5/parquetjs/compression.js +15 -5
  17. package/dist/es5/parquetjs/compression.js.map +1 -1
  18. package/dist/es5/parquetjs/encoder/{writer.js → parquet-encoder.js} +70 -158
  19. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
  20. package/dist/es5/parquetjs/parser/parquet-reader.js +553 -222
  21. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  22. package/dist/es5/parquetjs/schema/declare.js +3 -1
  23. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  24. package/dist/es5/parquetjs/schema/shred.js +39 -33
  25. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  26. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  27. package/dist/es5/parquetjs/utils/file-utils.js +2 -3
  28. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  29. package/dist/esm/index.js +1 -1
  30. package/dist/esm/index.js.map +1 -1
  31. package/dist/esm/lib/convert-schema-deep.ts.disabled +910 -0
  32. package/dist/esm/lib/parse-parquet.js +6 -12
  33. package/dist/esm/lib/parse-parquet.js.map +1 -1
  34. package/dist/esm/parquet-loader.js +3 -2
  35. package/dist/esm/parquet-loader.js.map +1 -1
  36. package/dist/esm/parquet-wasm-loader.js +1 -1
  37. package/dist/esm/parquet-wasm-loader.js.map +1 -1
  38. package/dist/esm/parquet-wasm-writer.js +1 -1
  39. package/dist/esm/parquet-wasm-writer.js.map +1 -1
  40. package/dist/esm/parquet-writer.js +1 -1
  41. package/dist/esm/parquet-writer.js.map +1 -1
  42. package/dist/esm/parquetjs/compression.js +10 -1
  43. package/dist/esm/parquetjs/compression.js.map +1 -1
  44. package/dist/esm/parquetjs/encoder/{writer.js → parquet-encoder.js} +7 -37
  45. package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
  46. package/dist/esm/parquetjs/parser/parquet-reader.js +158 -72
  47. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  48. package/dist/esm/parquetjs/schema/declare.js +1 -0
  49. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  50. package/dist/esm/parquetjs/schema/shred.js +42 -34
  51. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  52. package/dist/esm/parquetjs/schema/types.js.map +1 -1
  53. package/dist/esm/parquetjs/utils/file-utils.js +1 -1
  54. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
  55. package/dist/index.d.ts +1 -1
  56. package/dist/index.d.ts.map +1 -1
  57. package/dist/index.js +3 -4
  58. package/dist/lib/parse-parquet.d.ts +2 -2
  59. package/dist/lib/parse-parquet.d.ts.map +1 -1
  60. package/dist/lib/parse-parquet.js +24 -12
  61. package/dist/lib/wasm/encode-parquet-wasm.d.ts +1 -1
  62. package/dist/lib/wasm/encode-parquet-wasm.d.ts.map +1 -1
  63. package/dist/lib/wasm/parse-parquet-wasm.d.ts +1 -1
  64. package/dist/lib/wasm/parse-parquet-wasm.d.ts.map +1 -1
  65. package/dist/parquet-loader.d.ts +2 -1
  66. package/dist/parquet-loader.d.ts.map +1 -1
  67. package/dist/parquet-loader.js +2 -1
  68. package/dist/parquet-wasm-loader.d.ts +1 -1
  69. package/dist/parquet-wasm-loader.d.ts.map +1 -1
  70. package/dist/parquet-worker.js +15 -24
  71. package/dist/parquet-worker.js.map +3 -3
  72. package/dist/parquet-writer.d.ts +1 -1
  73. package/dist/parquet-writer.d.ts.map +1 -1
  74. package/dist/parquetjs/compression.d.ts.map +1 -1
  75. package/dist/parquetjs/compression.js +16 -5
  76. package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +10 -19
  77. package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
  78. package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js} +60 -58
  79. package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
  80. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
  81. package/dist/parquetjs/parser/parquet-reader.js +168 -102
  82. package/dist/parquetjs/schema/declare.d.ts +21 -14
  83. package/dist/parquetjs/schema/declare.d.ts.map +1 -1
  84. package/dist/parquetjs/schema/declare.js +2 -0
  85. package/dist/parquetjs/schema/shred.d.ts +115 -0
  86. package/dist/parquetjs/schema/shred.d.ts.map +1 -1
  87. package/dist/parquetjs/schema/shred.js +161 -43
  88. package/dist/parquetjs/schema/types.d.ts +2 -2
  89. package/dist/parquetjs/schema/types.d.ts.map +1 -1
  90. package/dist/parquetjs/utils/file-utils.d.ts +3 -4
  91. package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
  92. package/dist/parquetjs/utils/file-utils.js +2 -5
  93. package/package.json +7 -5
  94. package/src/index.ts +2 -2
  95. package/src/lib/convert-schema-deep.ts.disabled +910 -0
  96. package/src/lib/parse-parquet.ts +25 -12
  97. package/src/parquet-loader.ts +3 -1
  98. package/src/parquetjs/compression.ts +14 -1
  99. package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +22 -28
  100. package/src/parquetjs/parser/parquet-reader.ts +239 -122
  101. package/src/parquetjs/schema/declare.ts +17 -9
  102. package/src/parquetjs/schema/shred.ts +157 -28
  103. package/src/parquetjs/schema/types.ts +21 -27
  104. package/src/parquetjs/utils/file-utils.ts +3 -4
  105. package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
  106. package/dist/es5/parquetjs/file.js +0 -94
  107. package/dist/es5/parquetjs/file.js.map +0 -1
  108. package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -183
  109. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
  110. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -327
  111. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
  112. package/dist/es5/parquetjs/utils/buffer-utils.js +0 -19
  113. package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
  114. package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
  115. package/dist/esm/parquetjs/file.js +0 -81
  116. package/dist/esm/parquetjs/file.js.map +0 -1
  117. package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -78
  118. package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
  119. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -129
  120. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
  121. package/dist/esm/parquetjs/utils/buffer-utils.js +0 -13
  122. package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
  123. package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
  124. package/dist/parquetjs/file.d.ts +0 -10
  125. package/dist/parquetjs/file.d.ts.map +0 -1
  126. package/dist/parquetjs/file.js +0 -99
  127. package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
  128. package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
  129. package/dist/parquetjs/parser/parquet-cursor.js +0 -74
  130. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
  131. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
  132. package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -136
  133. package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
  134. package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
  135. package/dist/parquetjs/utils/buffer-utils.js +0 -22
  136. package/src/parquetjs/file.ts +0 -90
  137. package/src/parquetjs/parser/parquet-cursor.ts +0 -94
  138. package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
  139. package/src/parquetjs/utils/buffer-utils.ts +0 -18
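Two structural changes dominate the list: the encoder module is renamed (`writer.*` → `parquet-encoder.*`) in `src` and every `dist` variant, and the reader stack is consolidated, with `file`, `parquet-cursor`, `parquet-envelope-reader`, and `buffer-utils` deleted while `parquet-reader` grows correspondingly. Published `dist/` paths are not a documented public API, but any code that deep-imported the old encoder module would have to follow the rename. A hypothetical sketch (the `ParquetWriter` name is taken from the old source embedded in the map below; the new module's export surface is not shown in this diff):

```ts
// Hypothetical deep-import migration for the writer → parquet-encoder rename.
// Before (3.3.0-alpha.7):
//   import {ParquetWriter} from '@loaders.gl/parquet/dist/esm/parquetjs/encoder/writer';
// After (3.3.0):
import {ParquetWriter} from '@loaders.gl/parquet/dist/esm/parquetjs/encoder/parquet-encoder';
```

The hunks that follow are the `dist/esm` deletions (items 114–120 in the list above).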
package/dist/esm/parquetjs/encoder/writer.js.map
@@ -1 +0,0 @@
- [source map: one minified JSON line (Babel mappings plus the embedded writer.ts source); omitted for readability]
package/dist/esm/parquetjs/file.js
@@ -1,81 +0,0 @@
-
- import fs from 'fs';
- export function fopen(filePath) {
-   return new Promise((resolve, reject) => {
-     fs.open(filePath, 'r', (err, fd) => {
-       if (err) {
-         reject(err);
-       } else {
-         resolve(fd);
-       }
-     });
-   });
- }
- export function fstat(filePath) {
-   return new Promise((resolve, reject) => {
-     fs.stat(filePath, (err, stat) => {
-       if (err) {
-         reject(err);
-       } else {
-         resolve(stat);
-       }
-     });
-   });
- }
- export function fread(fd, position, length) {
-   const buffer = Buffer.alloc(length);
-   return new Promise((resolve, reject) => {
-     fs.read(fd, buffer, 0, length, position, (err, bytesRead, buf) => {
-       if (err || bytesRead !== length) {
-         reject(err || Error('read failed'));
-       } else {
-         resolve(buf);
-       }
-     });
-   });
- }
- export function fclose(fd) {
-   return new Promise((resolve, reject) => {
-     fs.close(fd, err => {
-       if (err) {
-         reject(err);
-       } else {
-         resolve(err);
-       }
-     });
-   });
- }
- export function oswrite(os, buf) {
-   return new Promise((resolve, reject) => {
-     os.write(buf, err => {
-       if (err) {
-         reject(err);
-       } else {
-         resolve();
-       }
-     });
-   });
- }
- export function osclose(os) {
-   return new Promise((resolve, reject) => {
-     os.close(err => {
-       if (err) {
-         reject(err);
-       } else {
-         resolve();
-       }
-     });
-   });
- }
- export function osopen(path, opts) {
-   return new Promise((resolve, reject) => {
-     const outputStream = fs.createWriteStream(path, opts);
-     outputStream.on('open', function (fd) {
-       resolve(outputStream);
-     });
-     outputStream.on('error', function (err) {
-       reject(err);
-     });
-   });
- }
- //# sourceMappingURL=file.js.map
package/dist/esm/parquetjs/file.js.map
@@ -1 +0,0 @@
- [source map: one minified JSON line (mappings plus the embedded file.ts source, which duplicates the file.js shown above); omitted for readability]
package/dist/esm/parquetjs/parser/parquet-cursor.js
@@ -1,78 +0,0 @@
- import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
- let _Symbol$asyncIterator;
-
- import { materializeRecords } from '../schema/shred';
-
- _Symbol$asyncIterator = Symbol.asyncIterator;
- export class ParquetCursor {
-   constructor(metadata, envelopeReader, schema, columnList) {
-     _defineProperty(this, "metadata", void 0);
-     _defineProperty(this, "envelopeReader", void 0);
-     _defineProperty(this, "schema", void 0);
-     _defineProperty(this, "columnList", void 0);
-     _defineProperty(this, "rowGroup", []);
-     _defineProperty(this, "rowGroupIndex", void 0);
-     this.metadata = metadata;
-     this.envelopeReader = envelopeReader;
-     this.schema = schema;
-     this.columnList = columnList;
-     this.rowGroupIndex = 0;
-   }
-
-   async next() {
-     if (this.rowGroup.length === 0) {
-       if (this.rowGroupIndex >= this.metadata.row_groups.length) {
-         return null;
-       }
-       const rowBuffer = await this.envelopeReader.readRowGroup(this.schema, this.metadata.row_groups[this.rowGroupIndex], this.columnList);
-       this.rowGroup = materializeRecords(this.schema, rowBuffer);
-       this.rowGroupIndex++;
-     }
-     return this.rowGroup.shift();
-   }
-
-   rewind() {
-     this.rowGroup = [];
-     this.rowGroupIndex = 0;
-   }
-
-   [_Symbol$asyncIterator]() {
-     let done = false;
-     return {
-       next: async () => {
-         if (done) {
-           return {
-             done,
-             value: null
-           };
-         }
-         const value = await this.next();
-         if (value === null) {
-           return {
-             done: true,
-             value
-           };
-         }
-         return {
-           done: false,
-           value
-         };
-       },
-       return: async () => {
-         done = true;
-         return {
-           done,
-           value: null
-         };
-       },
-       throw: async () => {
-         done = true;
-         return {
-           done: true,
-           value: null
-         };
-       }
-     };
-   }
- }
- //# sourceMappingURL=parquet-cursor.js.map
package/dist/esm/parquetjs/parser/parquet-cursor.js.map
@@ -1 +0,0 @@
- [source map: one minified JSON line (mappings plus the embedded parquet-cursor.ts source); omitted for readability]
@@ -1,129 +0,0 @@
- import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
-
- import { PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED } from '../../constants';
- import { CompressionCodec, Type } from '../parquet-thrift';
- import { decodeFileMetadata, getThriftEnum, fieldIndexOf } from '../utils/read-utils';
- import { decodeDataPages, decodePage } from './decoders';
- const DEFAULT_DICTIONARY_SIZE = 1e6;
-
- export class ParquetEnvelopeReader {
-
- static async openBuffer(buffer) {
- const readFn = (position, length) => Promise.resolve(buffer.slice(position, position + length));
- const closeFn = () => Promise.resolve();
- return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);
- }
- constructor(read, close, fileSize, options) {
- _defineProperty(this, "read", void 0);
- _defineProperty(this, "close", void 0);
- _defineProperty(this, "fileSize", void 0);
- _defineProperty(this, "defaultDictionarySize", void 0);
- this.read = read;
- this.close = close;
- this.fileSize = fileSize;
- this.defaultDictionarySize = (options === null || options === void 0 ? void 0 : options.defaultDictionarySize) || DEFAULT_DICTIONARY_SIZE;
- }
- async readHeader() {
- const buffer = await this.read(0, PARQUET_MAGIC.length);
- const magic = buffer.toString();
- switch (magic) {
- case PARQUET_MAGIC:
- break;
- case PARQUET_MAGIC_ENCRYPTED:
- throw new Error('Encrypted parquet file not supported');
- default:
- throw new Error("Invalid parquet file (magic=".concat(magic, ")"));
- }
- }
- async readRowGroup(schema, rowGroup, columnList) {
- const buffer = {
- rowCount: Number(rowGroup.num_rows),
- columnData: {}
- };
- for (const colChunk of rowGroup.columns) {
- const colMetadata = colChunk.meta_data;
- const colKey = colMetadata === null || colMetadata === void 0 ? void 0 : colMetadata.path_in_schema;
- if (columnList.length > 0 && fieldIndexOf(columnList, colKey) < 0) {
- continue;
- }
-
- buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
- }
- return buffer;
- }
-
- async readColumnChunk(schema, colChunk) {
- var _colChunk$meta_data, _colChunk$meta_data2, _colChunk$meta_data3, _colChunk$meta_data4, _colChunk$meta_data5, _colChunk$meta_data7, _colChunk$meta_data8, _options$dictionary;
- if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
- throw new Error('external references are not supported');
- }
- const field = schema.findField((_colChunk$meta_data = colChunk.meta_data) === null || _colChunk$meta_data === void 0 ? void 0 : _colChunk$meta_data.path_in_schema);
- const type = getThriftEnum(Type, (_colChunk$meta_data2 = colChunk.meta_data) === null || _colChunk$meta_data2 === void 0 ? void 0 : _colChunk$meta_data2.type);
- if (type !== field.primitiveType) {
- throw new Error("chunk type not matching schema: ".concat(type));
- }
- const compression = getThriftEnum(CompressionCodec, (_colChunk$meta_data3 = colChunk.meta_data) === null || _colChunk$meta_data3 === void 0 ? void 0 : _colChunk$meta_data3.codec);
- const pagesOffset = Number((_colChunk$meta_data4 = colChunk.meta_data) === null || _colChunk$meta_data4 === void 0 ? void 0 : _colChunk$meta_data4.data_page_offset);
- let pagesSize = Number((_colChunk$meta_data5 = colChunk.meta_data) === null || _colChunk$meta_data5 === void 0 ? void 0 : _colChunk$meta_data5.total_compressed_size);
- if (!colChunk.file_path) {
- var _colChunk$meta_data6;
- pagesSize = Math.min(this.fileSize - pagesOffset, Number((_colChunk$meta_data6 = colChunk.meta_data) === null || _colChunk$meta_data6 === void 0 ? void 0 : _colChunk$meta_data6.total_compressed_size));
- }
- const options = {
- type,
- rLevelMax: field.rLevelMax,
- dLevelMax: field.dLevelMax,
- compression,
- column: field,
- numValues: (_colChunk$meta_data7 = colChunk.meta_data) === null || _colChunk$meta_data7 === void 0 ? void 0 : _colChunk$meta_data7.num_values,
- dictionary: []
- };
- let dictionary;
- const dictionaryPageOffset = colChunk === null || colChunk === void 0 ? void 0 : (_colChunk$meta_data8 = colChunk.meta_data) === null || _colChunk$meta_data8 === void 0 ? void 0 : _colChunk$meta_data8.dictionary_page_offset;
- if (dictionaryPageOffset) {
- const dictionaryOffset = Number(dictionaryPageOffset);
- dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
- }
- dictionary = (_options$dictionary = options.dictionary) !== null && _options$dictionary !== void 0 && _options$dictionary.length ? options.dictionary : dictionary;
- const pagesBuf = await this.read(pagesOffset, pagesSize);
- return await decodeDataPages(pagesBuf, {
- ...options,
- dictionary
- });
- }
-
- async getDictionary(dictionaryPageOffset, options, pagesOffset) {
- if (dictionaryPageOffset === 0) {
-
- return [];
- }
- const dictionarySize = Math.min(this.fileSize - dictionaryPageOffset, this.defaultDictionarySize);
- const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);
- const cursor = {
- buffer: pagesBuf,
- offset: 0,
- size: pagesBuf.length
- };
- const decodedPage = await decodePage(cursor, options);
- return decodedPage.dictionary;
- }
- async readFooter() {
- const trailerLen = PARQUET_MAGIC.length + 4;
- const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);
- const magic = trailerBuf.slice(4).toString();
- if (magic !== PARQUET_MAGIC) {
- throw new Error("Not a valid parquet file (magic=".concat(magic, ")"));
- }
- const metadataSize = trailerBuf.readUInt32LE(0);
- const metadataOffset = this.fileSize - metadataSize - trailerLen;
- if (metadataOffset < PARQUET_MAGIC.length) {
- throw new Error("Invalid metadata size ".concat(metadataOffset));
- }
- const metadataBuf = await this.read(metadataOffset, metadataSize);
- const {
- metadata
- } = decodeFileMetadata(metadataBuf);
- return metadata;
- }
- }
- //# sourceMappingURL=parquet-envelope-reader.js.map
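
Note on the removed readFooter() above: Parquet stores its metadata in a fixed-size trailer, with a little-endian uint32 metadata length followed by the "PAR1" magic as the last 8 bytes of the file. A minimal TypeScript sketch of that trailer walk, using a hypothetical readBytes() helper in place of ParquetEnvelopeReader.read():

  const PARQUET_MAGIC = 'PAR1';

  // Locate and return the Thrift-encoded FileMetaData bytes from the trailer.
  // `readBytes` is a stand-in for the reader's injected read function.
  async function readFooterSketch(
    readBytes: (position: number, length: number) => Promise<Buffer>,
    fileSize: number
  ): Promise<Buffer> {
    const trailerLen = PARQUET_MAGIC.length + 4; // 4-byte length + 4-byte magic
    const trailer = await readBytes(fileSize - trailerLen, trailerLen);
    if (trailer.slice(4).toString() !== PARQUET_MAGIC) {
      throw new Error('Not a valid parquet file');
    }
    const metadataSize = trailer.readUInt32LE(0);
    const metadataOffset = fileSize - metadataSize - trailerLen;
    if (metadataOffset < PARQUET_MAGIC.length) {
      // Metadata cannot start before the 4-byte header magic.
      throw new Error(`Invalid metadata size ${metadataOffset}`);
    }
    return readBytes(metadataOffset, metadataSize); // still Thrift-encoded
  }

The returned bytes would then be handed to a Thrift decoder such as the decodeFileMetadata() imported above.
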
@@ -1 +0,0 @@
- {"version":3,"file":"parquet-envelope-reader.js","names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","CompressionCodec","Type","decodeFileMetadata","getThriftEnum","fieldIndexOf","decodeDataPages","decodePage","DEFAULT_DICTIONARY_SIZE","ParquetEnvelopeReader","openBuffer","buffer","readFn","position","length","Promise","resolve","slice","closeFn","constructor","read","close","fileSize","options","defaultDictionarySize","readHeader","magic","toString","Error","readRowGroup","schema","rowGroup","columnList","rowCount","Number","num_rows","columnData","colChunk","columns","colMetadata","meta_data","colKey","path_in_schema","join","readColumnChunk","file_path","undefined","field","findField","type","primitiveType","compression","codec","pagesOffset","data_page_offset","pagesSize","total_compressed_size","Math","min","rLevelMax","dLevelMax","column","numValues","num_values","dictionary","dictionaryPageOffset","dictionary_page_offset","dictionaryOffset","getDictionary","pagesBuf","dictionarySize","cursor","offset","size","decodedPage","readFooter","trailerLen","trailerBuf","metadataSize","readUInt32LE","metadataOffset","metadataBuf","metadata"],"sources":["../../../../src/parquetjs/parser/parquet-envelope-reader.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetSchema} from '../schema/schema';\nimport {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';\nimport {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';\nimport {\n ParquetBuffer,\n ParquetCompression,\n ParquetData,\n PrimitiveType,\n ParquetOptions\n} from '../schema/declare';\nimport {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';\nimport {decodeDataPages, decodePage} from './decoders';\n\nconst DEFAULT_DICTIONARY_SIZE = 1e6;\n\n/**\n * The parquet envelope reader allows direct, unbuffered access to the individual\n * sections of the parquet file, namely the header, footer and the row groups.\n * This class is intended for advanced/internal users; if you just want to retrieve\n * rows from a parquet file use the ParquetReader instead\n */\nexport class ParquetEnvelopeReader {\n public read: (position: number, length: number) => Promise<Buffer>;\n /**\n * Close this parquet reader. 
You MUST call this method once you're finished\n * reading rows\n */\n public close: () => Promise<void>;\n public fileSize: number;\n public defaultDictionarySize: number;\n\n static async openBuffer(buffer: Buffer): Promise<ParquetEnvelopeReader> {\n const readFn = (position: number, length: number) =>\n Promise.resolve(buffer.slice(position, position + length));\n const closeFn = () => Promise.resolve();\n return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);\n }\n\n constructor(\n read: (position: number, length: number) => Promise<Buffer>,\n close: () => Promise<void>,\n fileSize: number,\n options?: any\n ) {\n this.read = read;\n this.close = close;\n this.fileSize = fileSize;\n this.defaultDictionarySize = options?.defaultDictionarySize || DEFAULT_DICTIONARY_SIZE;\n }\n\n async readHeader(): Promise<void> {\n const buffer = await this.read(0, PARQUET_MAGIC.length);\n\n const magic = buffer.toString();\n switch (magic) {\n case PARQUET_MAGIC:\n break;\n case PARQUET_MAGIC_ENCRYPTED:\n throw new Error('Encrypted parquet file not supported');\n default:\n throw new Error(`Invalid parquet file (magic=${magic})`);\n }\n }\n\n async readRowGroup(\n schema: ParquetSchema,\n rowGroup: RowGroup,\n columnList: string[][]\n ): Promise<ParquetBuffer> {\n const buffer: ParquetBuffer = {\n rowCount: Number(rowGroup.num_rows),\n columnData: {}\n };\n for (const colChunk of rowGroup.columns) {\n const colMetadata = colChunk.meta_data;\n const colKey = colMetadata?.path_in_schema;\n if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {\n continue; // eslint-disable-line no-continue\n }\n buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);\n }\n return buffer;\n }\n\n /**\n * Do reading of parquet file's column chunk\n * @param schema\n * @param colChunk\n */\n async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {\n if (colChunk.file_path !== undefined && colChunk.file_path !== null) {\n throw new Error('external references are not supported');\n }\n\n const field = schema.findField(colChunk.meta_data?.path_in_schema!);\n const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) as any;\n\n if (type !== field.primitiveType) {\n throw new Error(`chunk type not matching schema: ${type}`);\n }\n\n const compression: ParquetCompression = getThriftEnum(\n CompressionCodec,\n colChunk.meta_data?.codec!\n ) as any;\n\n const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);\n let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);\n\n if (!colChunk.file_path) {\n pagesSize = Math.min(\n this.fileSize - pagesOffset,\n Number(colChunk.meta_data?.total_compressed_size)\n );\n }\n\n const options: ParquetOptions = {\n type,\n rLevelMax: field.rLevelMax,\n dLevelMax: field.dLevelMax,\n compression,\n column: field,\n numValues: colChunk.meta_data?.num_values,\n dictionary: []\n };\n\n let dictionary;\n\n const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;\n\n if (dictionaryPageOffset) {\n const dictionaryOffset = Number(dictionaryPageOffset);\n // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.\n dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);\n }\n\n dictionary = options.dictionary?.length ? 
options.dictionary : dictionary;\n const pagesBuf = await this.read(pagesOffset, pagesSize);\n return await decodeDataPages(pagesBuf, {...options, dictionary});\n }\n\n /**\n * Getting dictionary for allows to flatten values by indices.\n * @param dictionaryPageOffset\n * @param options\n * @param pagesOffset\n * @returns\n */\n async getDictionary(\n dictionaryPageOffset: number,\n options: ParquetOptions,\n pagesOffset: number\n ): Promise<string[]> {\n if (dictionaryPageOffset === 0) {\n // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);\n // pagesBuf = await this.read(pagesOffset, dictionarySize);\n\n // In this case we are working with parquet-mr files format. Problem is described below:\n // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding\n // We need to get dictionary page from column chunk if it exists.\n // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.\n return [];\n }\n\n const dictionarySize = Math.min(\n this.fileSize - dictionaryPageOffset,\n this.defaultDictionarySize\n );\n const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);\n\n const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};\n const decodedPage = await decodePage(cursor, options);\n\n return decodedPage.dictionary!;\n }\n\n async readFooter(): Promise<FileMetaData> {\n const trailerLen = PARQUET_MAGIC.length + 4;\n const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);\n\n const magic = trailerBuf.slice(4).toString();\n if (magic !== PARQUET_MAGIC) {\n throw new Error(`Not a valid parquet file (magic=\"${magic})`);\n }\n\n const metadataSize = trailerBuf.readUInt32LE(0);\n const metadataOffset = this.fileSize - metadataSize - trailerLen;\n if (metadataOffset < PARQUET_MAGIC.length) {\n throw new Error(`Invalid metadata size ${metadataOffset}`);\n }\n\n const metadataBuf = await this.read(metadataOffset, metadataSize);\n // let metadata = new parquet_thrift.FileMetaData();\n // parquet_util.decodeThrift(metadata, metadataBuf);\n const {metadata} = decodeFileMetadata(metadataBuf);\n return metadata;\n 
}\n}\n"],"mappings":";;AAEA,SAAQA,aAAa,EAAEC,uBAAuB,QAAO,iBAAiB;AACtE,SAAqBC,gBAAgB,EAA0BC,IAAI,QAAO,mBAAmB;AAQ7F,SAAQC,kBAAkB,EAAEC,aAAa,EAAEC,YAAY,QAAO,qBAAqB;AACnF,SAAQC,eAAe,EAAEC,UAAU,QAAO,YAAY;AAEtD,MAAMC,uBAAuB,GAAG,GAAG;;AAQnC,OAAO,MAAMC,qBAAqB,CAAC;;EAUjC,aAAaC,UAAU,CAACC,MAAc,EAAkC;IACtE,MAAMC,MAAM,GAAG,CAACC,QAAgB,EAAEC,MAAc,KAC9CC,OAAO,CAACC,OAAO,CAACL,MAAM,CAACM,KAAK,CAACJ,QAAQ,EAAEA,QAAQ,GAAGC,MAAM,CAAC,CAAC;IAC5D,MAAMI,OAAO,GAAG,MAAMH,OAAO,CAACC,OAAO,EAAE;IACvC,OAAO,IAAIP,qBAAqB,CAACG,MAAM,EAAEM,OAAO,EAAEP,MAAM,CAACG,MAAM,CAAC;EAClE;EAEAK,WAAW,CACTC,IAA2D,EAC3DC,KAA0B,EAC1BC,QAAgB,EAChBC,OAAa,EACb;IAAA;IAAA;IAAA;IAAA;IACA,IAAI,CAACH,IAAI,GAAGA,IAAI;IAChB,IAAI,CAACC,KAAK,GAAGA,KAAK;IAClB,IAAI,CAACC,QAAQ,GAAGA,QAAQ;IACxB,IAAI,CAACE,qBAAqB,GAAG,CAAAD,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEC,qBAAqB,KAAIhB,uBAAuB;EACxF;EAEA,MAAMiB,UAAU,GAAkB;IAChC,MAAMd,MAAM,GAAG,MAAM,IAAI,CAACS,IAAI,CAAC,CAAC,EAAErB,aAAa,CAACe,MAAM,CAAC;IAEvD,MAAMY,KAAK,GAAGf,MAAM,CAACgB,QAAQ,EAAE;IAC/B,QAAQD,KAAK;MACX,KAAK3B,aAAa;QAChB;MACF,KAAKC,uBAAuB;QAC1B,MAAM,IAAI4B,KAAK,CAAC,sCAAsC,CAAC;MACzD;QACE,MAAM,IAAIA,KAAK,uCAAgCF,KAAK,OAAI;IAAC;EAE/D;EAEA,MAAMG,YAAY,CAChBC,MAAqB,EACrBC,QAAkB,EAClBC,UAAsB,EACE;IACxB,MAAMrB,MAAqB,GAAG;MAC5BsB,QAAQ,EAAEC,MAAM,CAACH,QAAQ,CAACI,QAAQ,CAAC;MACnCC,UAAU,EAAE,CAAC;IACf,CAAC;IACD,KAAK,MAAMC,QAAQ,IAAIN,QAAQ,CAACO,OAAO,EAAE;MACvC,MAAMC,WAAW,GAAGF,QAAQ,CAACG,SAAS;MACtC,MAAMC,MAAM,GAAGF,WAAW,aAAXA,WAAW,uBAAXA,WAAW,CAAEG,cAAc;MAC1C,IAAIV,UAAU,CAAClB,MAAM,GAAG,CAAC,IAAIT,YAAY,CAAC2B,UAAU,EAAES,MAAM,CAAE,GAAG,CAAC,EAAE;QAClE;MACF;;MACA9B,MAAM,CAACyB,UAAU,CAACK,MAAM,CAAEE,IAAI,EAAE,CAAC,GAAG,MAAM,IAAI,CAACC,eAAe,CAACd,MAAM,EAAEO,QAAQ,CAAC;IAClF;IACA,OAAO1B,MAAM;EACf;;EAOA,MAAMiC,eAAe,CAACd,MAAqB,EAAEO,QAAqB,EAAwB;IAAA;IACxF,IAAIA,QAAQ,CAACQ,SAAS,KAAKC,SAAS,IAAIT,QAAQ,CAACQ,SAAS,KAAK,IAAI,EAAE;MACnE,MAAM,IAAIjB,KAAK,CAAC,uCAAuC,CAAC;IAC1D;IAEA,MAAMmB,KAAK,GAAGjB,MAAM,CAACkB,SAAS,wBAACX,QAAQ,CAACG,SAAS,wDAAlB,oBAAoBE,cAAc,CAAE;IACnE,MAAMO,IAAmB,GAAG7C,aAAa,CAACF,IAAI,0BAAEmC,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBS,IAAI,CAAS;IAEjF,IAAIA,IAAI,KAAKF,KAAK,CAACG,aAAa,EAAE;MAChC,MAAM,IAAItB,KAAK,2CAAoCqB,IAAI,EAAG;IAC5D;IAEA,MAAME,WAA+B,GAAG/C,aAAa,CACnDH,gBAAgB,0BAChBoC,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBY,KAAK,CACnB;IAER,MAAMC,WAAW,GAAGnB,MAAM,yBAACG,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBc,gBAAgB,CAAE;IACjE,IAAIC,SAAS,GAAGrB,MAAM,yBAACG,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBgB,qBAAqB,CAAE;IAElE,IAAI,CAACnB,QAAQ,CAACQ,SAAS,EAAE;MAAA;MACvBU,SAAS,GAAGE,IAAI,CAACC,GAAG,CAClB,IAAI,CAACpC,QAAQ,GAAG+B,WAAW,EAC3BnB,MAAM,yBAACG,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBgB,qBAAqB,CAAC,CAClD;IACH;IAEA,MAAMjC,OAAuB,GAAG;MAC9B0B,IAAI;MACJU,SAAS,EAAEZ,KAAK,CAACY,SAAS;MAC1BC,SAAS,EAAEb,KAAK,CAACa,SAAS;MAC1BT,WAAW;MACXU,MAAM,EAAEd,KAAK;MACbe,SAAS,0BAAEzB,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBuB,UAAU;MACzCC,UAAU,EAAE;IACd,CAAC;IAED,IAAIA,UAAU;IAEd,MAAMC,oBAAoB,GAAG5B,QAAQ,aAARA,QAAQ,+CAARA,QAAQ,CAAEG,SAAS,yDAAnB,qBAAqB0B,sBAAsB;IAExE,IAAID,oBAAoB,EAAE;MACxB,MAAME,gBAAgB,GAAGjC,MAAM,CAAC+B,oBAAoB,CAAC;MAErDD,UAAU,GAAG,MAAM,IAAI,CAACI,aAAa,CAACD,gBAAgB,EAAE5C,OAAO,EAAE8B,WAAW,CAAC;IAC/E;IAEAW,UAAU,GAAG,uBAAAzC,OAAO,CAACyC,UAAU,gDAAlB,oBAAoBlD,MAAM,GAAGS,OAAO,CAACyC,UAAU,GAAGA,UAAU;IACzE,MAAMK,QAAQ,GAAG,MAAM,IAAI,CAACjD,IAAI,CAACiC,WAAW,EAAEE,SAAS,CAAC;IACxD,OAAO,MAAMjD,eAAe,CAAC+D,QAAQ,EAAE;MAAC,GAAG9C,OAAO;MAAEyC;IAAU,CAAC,CAAC;EAClE;;EASA,MAAMI,aAAa,CACjBH,oBAA4B,EAC5B1C,OAAuB,EACvB8B,WAAmB,EACA;IACnB,IAAIY,oBAAoB,KAAK,CAAC,EAAE;;MAQ9B,OAAO,EAAE;IACX;IAEA,MAAMK,cAAc,GAAGb,IAAI,CAACC,GAAG,CAC7B,IAAI,CAACpC,QAAQ,GAAG2C,oBAAoB,EACpC,IAAI,CAACzC,qBAAqB,CAC3B;IACD,MAAM6C,
QAAQ,GAAG,MAAM,IAAI,CAACjD,IAAI,CAAC6C,oBAAoB,EAAEK,cAAc,CAAC;IAEtE,MAAMC,MAAM,GAAG;MAAC5D,MAAM,EAAE0D,QAAQ;MAAEG,MAAM,EAAE,CAAC;MAAEC,IAAI,EAAEJ,QAAQ,CAACvD;IAAM,CAAC;IACnE,MAAM4D,WAAW,GAAG,MAAMnE,UAAU,CAACgE,MAAM,EAAEhD,OAAO,CAAC;IAErD,OAAOmD,WAAW,CAACV,UAAU;EAC/B;EAEA,MAAMW,UAAU,GAA0B;IACxC,MAAMC,UAAU,GAAG7E,aAAa,CAACe,MAAM,GAAG,CAAC;IAC3C,MAAM+D,UAAU,GAAG,MAAM,IAAI,CAACzD,IAAI,CAAC,IAAI,CAACE,QAAQ,GAAGsD,UAAU,EAAEA,UAAU,CAAC;IAE1E,MAAMlD,KAAK,GAAGmD,UAAU,CAAC5D,KAAK,CAAC,CAAC,CAAC,CAACU,QAAQ,EAAE;IAC5C,IAAID,KAAK,KAAK3B,aAAa,EAAE;MAC3B,MAAM,IAAI6B,KAAK,6CAAqCF,KAAK,OAAI;IAC/D;IAEA,MAAMoD,YAAY,GAAGD,UAAU,CAACE,YAAY,CAAC,CAAC,CAAC;IAC/C,MAAMC,cAAc,GAAG,IAAI,CAAC1D,QAAQ,GAAGwD,YAAY,GAAGF,UAAU;IAChE,IAAII,cAAc,GAAGjF,aAAa,CAACe,MAAM,EAAE;MACzC,MAAM,IAAIc,KAAK,iCAA0BoD,cAAc,EAAG;IAC5D;IAEA,MAAMC,WAAW,GAAG,MAAM,IAAI,CAAC7D,IAAI,CAAC4D,cAAc,EAAEF,YAAY,CAAC;IAGjE,MAAM;MAACI;IAAQ,CAAC,GAAG/E,kBAAkB,CAAC8E,WAAW,CAAC;IAClD,OAAOC,QAAQ;EACjB;AACF"}
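
The getDictionary() path in the removed reader is worth spelling out: a dictionary page offset of 0 (as written by parquet-mr, per the Stack Overflow link in the source comment) means there is no separate dictionary page to fetch, and any real read is clamped to both end-of-file and defaultDictionarySize. A sketch under those assumptions, with decodePageSketch standing in for the package's decodePage():

  // Hypothetical stand-in for decodePage() from './decoders'.
  declare function decodePageSketch(cursor: {
    buffer: Buffer;
    offset: number;
    size: number;
  }): Promise<{dictionary?: string[]}>;

  async function getDictionarySketch(
    readBytes: (position: number, length: number) => Promise<Buffer>,
    fileSize: number,
    dictionaryPageOffset: number,
    maxDictionarySize = 1e6 // DEFAULT_DICTIONARY_SIZE in the removed module
  ): Promise<string[]> {
    if (dictionaryPageOffset === 0) {
      // parquet-mr files store 0 here; the dictionary page, if any,
      // sits at the data page offset instead, so fall back to [].
      return [];
    }
    // Clamp the speculative read: the true page length is only known
    // once the page header has been decoded.
    const size = Math.min(fileSize - dictionaryPageOffset, maxDictionarySize);
    const buffer = await readBytes(dictionaryPageOffset, size);
    const page = await decodePageSketch({buffer, offset: 0, size: buffer.length});
    return page.dictionary ?? [];
  }
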
@@ -1,13 +0,0 @@
-
- export function toArrayBuffer(buffer) {
- if (Buffer.isBuffer(buffer)) {
- const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
- return typedArray.slice().buffer;
- }
- return buffer;
- }
-
- export function toBuffer(arrayBuffer) {
- return Buffer.from(arrayBuffer);
- }
- //# sourceMappingURL=buffer-utils.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"buffer-utils.js","names":["toArrayBuffer","buffer","Buffer","isBuffer","typedArray","Uint8Array","byteOffset","length","slice","toBuffer","arrayBuffer","from"],"sources":["../../../../src/parquetjs/utils/buffer-utils.ts"],"sourcesContent":["/**\n * Convert Buffer to ArrayBuffer\n */\nexport function toArrayBuffer(buffer: Buffer): ArrayBuffer {\n // TODO - per docs we should just be able to call buffer.buffer, but there are issues\n if (Buffer.isBuffer(buffer)) {\n const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);\n return typedArray.slice().buffer;\n }\n return buffer;\n}\n\n/**\n * Convert (copy) ArrayBuffer to Buffer\n */\nexport function toBuffer(arrayBuffer: ArrayBuffer): Buffer {\n return Buffer.from(arrayBuffer);\n}\n"],"mappings":";AAGA,OAAO,SAASA,aAAa,CAACC,MAAc,EAAe;EAEzD,IAAIC,MAAM,CAACC,QAAQ,CAACF,MAAM,CAAC,EAAE;IAC3B,MAAMG,UAAU,GAAG,IAAIC,UAAU,CAACJ,MAAM,CAACA,MAAM,EAAEA,MAAM,CAACK,UAAU,EAAEL,MAAM,CAACM,MAAM,CAAC;IAClF,OAAOH,UAAU,CAACI,KAAK,EAAE,CAACP,MAAM;EAClC;EACA,OAAOA,MAAM;AACf;;AAKA,OAAO,SAASQ,QAAQ,CAACC,WAAwB,EAAU;EACzD,OAAOR,MAAM,CAACS,IAAI,CAACD,WAAW,CAAC;AACjC"}
@@ -1 +0,0 @@
- {"version":3,"file":"writer.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/encoder/writer.ts"],"names":[],"mappings":";;AAEA,OAAO,EAAC,SAAS,EAAE,QAAQ,EAAC,MAAM,QAAQ,CAAC;AAG3C,OAAO,EACL,aAAa,EAKd,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAE/C,OAAO,EAaL,QAAQ,EAGT,MAAM,mBAAmB,CAAC;AA2B3B,MAAM,WAAW,oBAAoB;IACnC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,OAAO,CAAC;IAGxB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED;;;;GAIG;AAEH,qBAAa,aAAa,CAAC,CAAC;IAC1B;;;OAGG;WACU,QAAQ,CAAC,CAAC,EACrB,MAAM,EAAE,aAAa,EACrB,IAAI,EAAE,MAAM,EACZ,IAAI,CAAC,EAAE,oBAAoB,GAC1B,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;IAK5B;;;OAGG;WACU,UAAU,CAAC,CAAC,EACvB,MAAM,EAAE,aAAa,EACrB,YAAY,EAAE,QAAQ,EACtB,IAAI,CAAC,EAAE,oBAAoB,GAC1B,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;IAWrB,MAAM,EAAE,aAAa,CAAC;IACtB,cAAc,EAAE,qBAAqB,CAAC;IACtC,SAAS,EAAE,aAAa,CAAC;IACzB,YAAY,EAAE,MAAM,CAAC;IACrB,MAAM,EAAE,OAAO,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE5C;;OAEG;gBAED,MAAM,EAAE,aAAa,EACrB,cAAc,EAAE,qBAAqB,EACrC,IAAI,EAAE,oBAAoB;IActB,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC;IAUlC;;;OAGG;IACG,SAAS,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAWzC;;;;;OAKG;IACG,KAAK,CAAC,QAAQ,CAAC,EAAE,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAqBjD;;OAEG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,IAAI;IAK7C;;;;;OAKG;IACH,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;IAIlC;;;OAGG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;CAG/B;AAED;;;;;GAKG;AACH,qBAAa,qBAAqB;IAChC;;OAEG;WACU,UAAU,CACrB,MAAM,EAAE,aAAa,EACrB,YAAY,EAAE,QAAQ,EACtB,IAAI,EAAE,oBAAoB,GACzB,OAAO,CAAC,qBAAqB,CAAC;IAM1B,MAAM,EAAE,aAAa,CAAC;IACtB,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,KAAK,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,QAAQ,EAAE,CAAC;IACtB,QAAQ,EAAE,MAAM,CAAC;IACjB,aAAa,EAAE,OAAO,CAAC;gBAG5B,MAAM,EAAE,aAAa,EACrB,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,EACvC,OAAO,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,EAC5B,UAAU,EAAE,MAAM,EAClB,IAAI,EAAE,oBAAoB;IAY5B,YAAY,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAKxC;;OAEG;IACH,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC;IAI5B;;;OAGG;IACG,aAAa,CAAC,OAAO,EAAE,aAAa,GAAG,OAAO,CAAC,IAAI,CAAC;IAY1D;;OAEG;IACH,WAAW,CAAC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAWhE;;;OAGG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;CAG/B;AAED;;GAEG;AACH,qBAAa,kBAAkB,CAAC,CAAC,CAAE,SAAQ,SAAS;IAC3C,MAAM,EAAE,aAAa,CAAC,CAAC,CAAC,CAAC;gBAEpB,MAAM,EAAE,aAAa,EAAE,IAAI,GAAE,oBAAyB;IAiBlE,UAAU,CAAC,GAAG,EAAE,GAAG,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,GAAG,CAAC,EAAE,GAAG,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAS9E,MAAM,CAAC,QAAQ,EAAE,CAAC,GAAG,CAAC,EAAE,GAAG,KAAK,IAAI;CAG3C"}
@@ -1,10 +0,0 @@
- /// <reference types="node" />
- import fs from 'fs';
- export declare function fopen(filePath: any): Promise<unknown>;
- export declare function fstat(filePath: any): Promise<fs.Stats>;
- export declare function fread(fd: any, position: any, length: any): Promise<unknown>;
- export declare function fclose(fd: any): Promise<unknown>;
- export declare function oswrite(os: any, buf: any): Promise<void>;
- export declare function osclose(os: any): Promise<void>;
- export declare function osopen(path: any, opts: any): Promise<unknown>;
- //# sourceMappingURL=file.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../src/parquetjs/file.ts"],"names":[],"mappings":";AACA,OAAO,EAAE,MAAM,IAAI,CAAC;AAEpB,wBAAgB,KAAK,CAAC,QAAQ,KAAA,oBAU7B;AAED,wBAAgB,KAAK,CAAC,QAAQ,KAAA,qBAU7B;AAED,wBAAgB,KAAK,CAAC,EAAE,KAAA,EAAE,QAAQ,KAAA,EAAE,MAAM,KAAA,oBAYzC;AAED,wBAAgB,MAAM,CAAC,EAAE,KAAA,oBAUxB;AAED,wBAAgB,OAAO,CAAC,EAAE,KAAA,EAAE,GAAG,KAAA,GAAG,OAAO,CAAC,IAAI,CAAC,CAU9C;AAED,wBAAgB,OAAO,CAAC,EAAE,KAAA,GAAG,OAAO,CAAC,IAAI,CAAC,CAUzC;AAED,wBAAgB,MAAM,CAAC,IAAI,KAAA,EAAE,IAAI,KAAA,oBAYhC"}
@@ -1,99 +0,0 @@
- "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
- Object.defineProperty(exports, "__esModule", { value: true });
- exports.osopen = exports.osclose = exports.oswrite = exports.fclose = exports.fread = exports.fstat = exports.fopen = void 0;
- // Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
- const fs_1 = __importDefault(require("fs"));
- function fopen(filePath) {
- return new Promise((resolve, reject) => {
- fs_1.default.open(filePath, 'r', (err, fd) => {
- if (err) {
- reject(err);
- }
- else {
- resolve(fd);
- }
- });
- });
- }
- exports.fopen = fopen;
- function fstat(filePath) {
- return new Promise((resolve, reject) => {
- fs_1.default.stat(filePath, (err, stat) => {
- if (err) {
- reject(err);
- }
- else {
- resolve(stat);
- }
- });
- });
- }
- exports.fstat = fstat;
- function fread(fd, position, length) {
- const buffer = Buffer.alloc(length);
- return new Promise((resolve, reject) => {
- fs_1.default.read(fd, buffer, 0, length, position, (err, bytesRead, buf) => {
- if (err || bytesRead !== length) {
- reject(err || Error('read failed'));
- }
- else {
- resolve(buf);
- }
- });
- });
- }
- exports.fread = fread;
- function fclose(fd) {
- return new Promise((resolve, reject) => {
- fs_1.default.close(fd, (err) => {
- if (err) {
- reject(err);
- }
- else {
- resolve(err);
- }
- });
- });
- }
- exports.fclose = fclose;
- function oswrite(os, buf) {
- return new Promise((resolve, reject) => {
- os.write(buf, (err) => {
- if (err) {
- reject(err);
- }
- else {
- resolve();
- }
- });
- });
- }
- exports.oswrite = oswrite;
- function osclose(os) {
- return new Promise((resolve, reject) => {
- os.close((err) => {
- if (err) {
- reject(err);
- }
- else {
- resolve();
- }
- });
- });
- }
- exports.osclose = osclose;
- function osopen(path, opts) {
- return new Promise((resolve, reject) => {
- const outputStream = fs_1.default.createWriteStream(path, opts);
- outputStream.on('open', function (fd) {
- resolve(outputStream);
- });
- outputStream.on('error', function (err) {
- reject(err);
- });
- });
- }
- exports.osopen = osopen;
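
These hand-rolled Promise wrappers predate wide use of fs.promises; on Node 10+ the same read-at-position behavior could plausibly be written against FileHandle instead. A sketch (hypothetical freadSketch) combining the removed fopen/fread/fclose:

  import {promises as fsPromises} from 'fs';

  async function freadSketch(path: string, position: number, length: number): Promise<Buffer> {
    const handle = await fsPromises.open(path, 'r');
    try {
      const buffer = Buffer.alloc(length);
      // FileHandle.read(buffer, offset, length, position) mirrors fs.read().
      const {bytesRead} = await handle.read(buffer, 0, length, position);
      if (bytesRead !== length) {
        throw new Error('read failed'); // same failure mode as the removed fread()
      }
      return buffer;
    } finally {
      await handle.close();
    }
  }
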
@@ -1,36 +0,0 @@
- import { FileMetaData } from '../parquet-thrift';
- import { ParquetEnvelopeReader } from './parquet-envelope-reader';
- import { ParquetSchema } from '../schema/schema';
- import { ParquetRecord } from '../schema/declare';
- /**
- * A parquet cursor is used to retrieve rows from a parquet file in order
- */
- export declare class ParquetCursor<T> implements AsyncIterable<T> {
- metadata: FileMetaData;
- envelopeReader: ParquetEnvelopeReader;
- schema: ParquetSchema;
- columnList: string[][];
- rowGroup: ParquetRecord[];
- rowGroupIndex: number;
- /**
- * Create a new parquet reader from the file metadata and an envelope reader.
- * It is usually not recommended to call this constructor directly except for
- * advanced and internal use cases. Consider using getCursor() on the
- * ParquetReader instead
- */
- constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader, schema: ParquetSchema, columnList: string[][]);
- /**
- * Retrieve the next row from the cursor. Returns a row or NULL if the end
- * of the file was reached
- */
- next<T = any>(): Promise<T>;
- /**
- * Rewind the cursor to the beginning of the file
- */
- rewind(): void;
- /**
- * Implement AsyncIterable
- */
- [Symbol.asyncIterator](): AsyncIterator<T>;
- }
- //# sourceMappingURL=parquet-cursor.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"parquet-cursor.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/parquet-cursor.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,YAAY,EAAC,MAAM,mBAAmB,CAAC;AAC/C,OAAO,EAAC,qBAAqB,EAAC,MAAM,2BAA2B,CAAC;AAChE,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAC,aAAa,EAAC,MAAM,mBAAmB,CAAC;AAGhD;;GAEG;AACH,qBAAa,aAAa,CAAC,CAAC,CAAE,YAAW,aAAa,CAAC,CAAC,CAAC;IAChD,QAAQ,EAAE,YAAY,CAAC;IACvB,cAAc,EAAE,qBAAqB,CAAC;IACtC,MAAM,EAAE,aAAa,CAAC;IACtB,UAAU,EAAE,MAAM,EAAE,EAAE,CAAC;IACvB,QAAQ,EAAE,aAAa,EAAE,CAAM;IAC/B,aAAa,EAAE,MAAM,CAAC;IAE7B;;;;;OAKG;gBAED,QAAQ,EAAE,YAAY,EACtB,cAAc,EAAE,qBAAqB,EACrC,MAAM,EAAE,aAAa,EACrB,UAAU,EAAE,MAAM,EAAE,EAAE;IASxB;;;OAGG;IACG,IAAI,CAAC,CAAC,GAAG,GAAG,KAAK,OAAO,CAAC,CAAC,CAAC;IAiBjC;;OAEG;IACH,MAAM,IAAI,IAAI;IAKd;;OAEG;IAEH,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,CAAC,CAAC;CAuB3C"}