@loaders.gl/parquet 3.3.0-alpha.8 → 3.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. package/dist/dist.min.js +17 -26
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/index.js +3 -3
  4. package/dist/es5/index.js.map +1 -1
  5. package/dist/es5/lib/convert-schema-deep.ts.disabled +910 -0
  6. package/dist/es5/lib/parse-parquet.js +49 -25
  7. package/dist/es5/lib/parse-parquet.js.map +1 -1
  8. package/dist/es5/parquet-loader.js +3 -2
  9. package/dist/es5/parquet-loader.js.map +1 -1
  10. package/dist/es5/parquet-wasm-loader.js +1 -1
  11. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  12. package/dist/es5/parquet-wasm-writer.js +1 -1
  13. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  14. package/dist/es5/parquet-writer.js +1 -1
  15. package/dist/es5/parquet-writer.js.map +1 -1
  16. package/dist/es5/parquetjs/compression.js +15 -5
  17. package/dist/es5/parquetjs/compression.js.map +1 -1
  18. package/dist/es5/parquetjs/encoder/{writer.js → parquet-encoder.js} +70 -158
  19. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
  20. package/dist/es5/parquetjs/parser/parquet-reader.js +553 -222
  21. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  22. package/dist/es5/parquetjs/schema/declare.js +3 -1
  23. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  24. package/dist/es5/parquetjs/schema/shred.js +39 -33
  25. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  26. package/dist/es5/parquetjs/schema/types.js.map +1 -1
  27. package/dist/es5/parquetjs/utils/file-utils.js +2 -3
  28. package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
  29. package/dist/esm/index.js +1 -1
  30. package/dist/esm/index.js.map +1 -1
  31. package/dist/esm/lib/convert-schema-deep.ts.disabled +910 -0
  32. package/dist/esm/lib/parse-parquet.js +6 -12
  33. package/dist/esm/lib/parse-parquet.js.map +1 -1
  34. package/dist/esm/parquet-loader.js +3 -2
  35. package/dist/esm/parquet-loader.js.map +1 -1
  36. package/dist/esm/parquet-wasm-loader.js +1 -1
  37. package/dist/esm/parquet-wasm-loader.js.map +1 -1
  38. package/dist/esm/parquet-wasm-writer.js +1 -1
  39. package/dist/esm/parquet-wasm-writer.js.map +1 -1
  40. package/dist/esm/parquet-writer.js +1 -1
  41. package/dist/esm/parquet-writer.js.map +1 -1
  42. package/dist/esm/parquetjs/compression.js +10 -1
  43. package/dist/esm/parquetjs/compression.js.map +1 -1
  44. package/dist/esm/parquetjs/encoder/{writer.js → parquet-encoder.js} +7 -37
  45. package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
  46. package/dist/esm/parquetjs/parser/parquet-reader.js +158 -72
  47. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  48. package/dist/esm/parquetjs/schema/declare.js +1 -0
  49. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  50. package/dist/esm/parquetjs/schema/shred.js +42 -34
  51. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  52. package/dist/esm/parquetjs/schema/types.js.map +1 -1
  53. package/dist/esm/parquetjs/utils/file-utils.js +1 -1
  54. package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
  55. package/dist/index.d.ts +1 -1
  56. package/dist/index.d.ts.map +1 -1
  57. package/dist/index.js +3 -4
  58. package/dist/lib/parse-parquet.d.ts +2 -2
  59. package/dist/lib/parse-parquet.d.ts.map +1 -1
  60. package/dist/lib/parse-parquet.js +24 -12
  61. package/dist/parquet-loader.d.ts +1 -0
  62. package/dist/parquet-loader.d.ts.map +1 -1
  63. package/dist/parquet-loader.js +2 -1
  64. package/dist/parquet-worker.js +15 -24
  65. package/dist/parquet-worker.js.map +3 -3
  66. package/dist/parquetjs/compression.d.ts.map +1 -1
  67. package/dist/parquetjs/compression.js +16 -5
  68. package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +10 -19
  69. package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
  70. package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js} +39 -37
  71. package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
  72. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
  73. package/dist/parquetjs/parser/parquet-reader.js +168 -102
  74. package/dist/parquetjs/schema/declare.d.ts +14 -7
  75. package/dist/parquetjs/schema/declare.d.ts.map +1 -1
  76. package/dist/parquetjs/schema/declare.js +2 -0
  77. package/dist/parquetjs/schema/shred.d.ts +115 -0
  78. package/dist/parquetjs/schema/shred.d.ts.map +1 -1
  79. package/dist/parquetjs/schema/shred.js +161 -43
  80. package/dist/parquetjs/schema/types.d.ts +2 -2
  81. package/dist/parquetjs/schema/types.d.ts.map +1 -1
  82. package/dist/parquetjs/utils/file-utils.d.ts +3 -4
  83. package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
  84. package/dist/parquetjs/utils/file-utils.js +2 -5
  85. package/package.json +7 -5
  86. package/src/index.ts +2 -2
  87. package/src/lib/convert-schema-deep.ts.disabled +910 -0
  88. package/src/lib/parse-parquet.ts +25 -12
  89. package/src/parquet-loader.ts +3 -1
  90. package/src/parquetjs/compression.ts +14 -1
  91. package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +22 -28
  92. package/src/parquetjs/parser/parquet-reader.ts +239 -122
  93. package/src/parquetjs/schema/declare.ts +17 -9
  94. package/src/parquetjs/schema/shred.ts +157 -28
  95. package/src/parquetjs/schema/types.ts +21 -27
  96. package/src/parquetjs/utils/file-utils.ts +3 -4
  97. package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
  98. package/dist/es5/parquetjs/file.js +0 -94
  99. package/dist/es5/parquetjs/file.js.map +0 -1
  100. package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -183
  101. package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
  102. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -327
  103. package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
  104. package/dist/es5/parquetjs/utils/buffer-utils.js +0 -19
  105. package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
  106. package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
  107. package/dist/esm/parquetjs/file.js +0 -81
  108. package/dist/esm/parquetjs/file.js.map +0 -1
  109. package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -78
  110. package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
  111. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -129
  112. package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
  113. package/dist/esm/parquetjs/utils/buffer-utils.js +0 -13
  114. package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
  115. package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
  116. package/dist/parquetjs/file.d.ts +0 -10
  117. package/dist/parquetjs/file.d.ts.map +0 -1
  118. package/dist/parquetjs/file.js +0 -99
  119. package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
  120. package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
  121. package/dist/parquetjs/parser/parquet-cursor.js +0 -74
  122. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
  123. package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
  124. package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -136
  125. package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
  126. package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
  127. package/dist/parquetjs/utils/buffer-utils.js +0 -22
  128. package/src/parquetjs/file.ts +0 -90
  129. package/src/parquetjs/parser/parquet-cursor.ts +0 -94
  130. package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
  131. package/src/parquetjs/utils/buffer-utils.ts +0 -18
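The rewritten ParquetReader (its full parquet-reader.ts source is embedded in the source-map hunks below) replaces the removed ParquetEnvelopeReader/ParquetCursor pair with a single class that wraps a ReadableFile and exposes async row iterators. A minimal usage sketch, assuming an in-memory Buffer-backed file handle; the bufferFile helper and the import path for ParquetReader are illustrative assumptions, not part of the published API:

import type {ReadableFile} from '@loaders.gl/loader-utils';
// Import path is an assumption; the class lives in src/parquetjs/parser/parquet-reader.ts
import {ParquetReader} from '@loaders.gl/parquet';

// Hypothetical ReadableFile backed by a Node Buffer, for illustration only
const bufferFile = (buffer: Buffer): ReadableFile =>
  ({
    size: buffer.length,
    read: async (start: number, length: number) => buffer.slice(start, start + length),
    close: async () => {}
  } as unknown as ReadableFile);

async function readAllRows(data: Buffer): Promise<unknown[]> {
  const reader = new ParquetReader(bufferFile(data));
  const rows: unknown[] = [];
  // rowIterator() yields one row at a time; rowBatchIterator() yields one row group at a time
  for await (const row of reader.rowIterator()) {
    rows.push(row);
  }
  reader.close();
  return rows;
}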
@@ -1 +1 @@
- {"version":3,"file":"parquet-reader.js","names":["ParquetEnvelopeReader","ParquetSchema","ParquetCursor","PARQUET_VERSION","decodeSchema","Symbol","asyncIterator","ParquetReader","openBlob","blob","readFn","start","length","arrayBuffer","slice","Buffer","from","closeFn","size","envelopeReader","readHeader","metadata","readFooter","err","close","openArrayBuffer","byteLength","openBuffer","buffer","constructor","version","Error","root","schema","num_children","getCursor","columnList","map","x","Array","isArray","getRowCount","Number","num_rows","getSchema","getMetadata","md","kv","key_value_metadata","key","value"],"sources":["../../../../src/parquetjs/parser/parquet-reader.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetEnvelopeReader} from './parquet-envelope-reader';\nimport {FileMetaData} from '../parquet-thrift';\nimport {ParquetSchema} from '../schema/schema';\nimport {ParquetCursor} from './parquet-cursor';\nimport {PARQUET_VERSION} from '../../constants';\nimport {decodeSchema} from './decoders';\n\n/**\n * A parquet reader allows retrieving the rows from a parquet file in order.\n * The basic usage is to create a reader and then retrieve a cursor/iterator\n * which allows you to consume row after row until all rows have been read. It is\n * important that you call close() after you are finished reading the file to\n * avoid leaking file descriptors.\n */\nexport class ParquetReader<T> implements AsyncIterable<T> {\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openBlob<T>(blob: Blob): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => {\n const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();\n return Buffer.from(arrayBuffer);\n };\n const closeFn = async () => {};\n const size = blob.size;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => Buffer.from(arrayBuffer, start, length);\n const closeFn = async () => {};\n const size = arrayBuffer.byteLength;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n static async openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n public metadata: FileMetaData;\n public envelopeReader: ParquetEnvelopeReader;\n public schema: ParquetSchema;\n\n /**\n * Create a new parquet reader from the file metadata and an envelope reader.\n * It is not recommended to call this constructor directly except for advanced\n * and internal use cases. 
Consider using one of the open{File,Buffer} methods\n * instead\n */\n constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader) {\n if (metadata.version !== PARQUET_VERSION) {\n throw new Error('invalid parquet version');\n }\n\n this.metadata = metadata;\n this.envelopeReader = envelopeReader;\n const root = this.metadata.schema[0];\n const {schema} = decodeSchema(this.metadata.schema, 1, root.num_children!);\n this.schema = new ParquetSchema(schema);\n }\n\n /**\n * Close this parquet reader. You MUST call this method once you're finished\n * reading rows\n */\n async close(): Promise<void> {\n await this.envelopeReader.close();\n // this.envelopeReader = null;\n // this.metadata = null;\n }\n\n /**\n * Return a cursor to the file. You may open more than one cursor and use\n * them concurrently. All cursors become invalid once close() is called on\n * the reader object.\n *\n * The required_columns parameter controls which columns are actually read\n * from disk. An empty array or no value implies all columns. A list of column\n * names means that only those columns should be loaded from disk.\n */\n getCursor(): ParquetCursor<T>;\n // @ts-ignore\n getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;\n getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;\n getCursor(columnList?: (string | string[])[]): ParquetCursor<Partial<T>> {\n if (!columnList) {\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = [];\n }\n\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));\n\n return new ParquetCursor<T>(\n this.metadata,\n this.envelopeReader,\n this.schema,\n columnList as string[][]\n );\n }\n\n /**\n * Return the number of rows in this file. Note that the number of rows is\n * not neccessarily equal to the number of rows in each column.\n */\n getRowCount(): number {\n return Number(this.metadata.num_rows);\n }\n\n /**\n * Returns the ParquetSchema for this file\n */\n getSchema(): ParquetSchema {\n return this.schema;\n }\n\n /**\n * Returns the user (key/value) metadata for this file\n */\n getMetadata(): Record<string, string> {\n const md: Record<string, string> = {};\n for (const kv of this.metadata.key_value_metadata!) 
{\n md[kv.key] = kv.value!;\n }\n return md;\n }\n\n /**\n * Implement AsyncIterable\n */\n // tslint:disable-next-line:function-name\n [Symbol.asyncIterator](): AsyncIterator<T> {\n return this.getCursor()[Symbol.asyncIterator]();\n }\n}\n"],"mappings":";;AACA,SAAQA,qBAAqB,QAAO,2BAA2B;AAE/D,SAAQC,aAAa,QAAO,kBAAkB;AAC9C,SAAQC,aAAa,QAAO,kBAAkB;AAC9C,SAAQC,eAAe,QAAO,iBAAiB;AAC/C,SAAQC,YAAY,QAAO,YAAY;;AAAC,wBAyJrCC,MAAM,CAACC,aAAa;AAhJvB,OAAO,MAAMC,aAAa,CAAgC;EAIxD,aAAaC,QAAQ,CAAIC,IAAU,EAA6B;IAC9D,MAAMC,MAAM,GAAG,OAAOC,KAAa,EAAEC,MAAc,KAAK;MACtD,MAAMC,WAAW,GAAG,MAAMJ,IAAI,CAACK,KAAK,CAACH,KAAK,EAAEA,KAAK,GAAGC,MAAM,CAAC,CAACC,WAAW,EAAE;MACzE,OAAOE,MAAM,CAACC,IAAI,CAACH,WAAW,CAAC;IACjC,CAAC;IACD,MAAMI,OAAO,GAAG,YAAY,CAAC,CAAC;IAC9B,MAAMC,IAAI,GAAGT,IAAI,CAACS,IAAI;IACtB,MAAMC,cAAc,GAAG,IAAInB,qBAAqB,CAACU,MAAM,EAAEO,OAAO,EAAEC,IAAI,CAAC;IACvE,IAAI;MACF,MAAMC,cAAc,CAACC,UAAU,EAAE;MACjC,MAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAU,EAAE;MAClD,OAAO,IAAIf,aAAa,CAACc,QAAQ,EAAEF,cAAc,CAAC;IACpD,CAAC,CAAC,OAAOI,GAAG,EAAE;MACZ,MAAMJ,cAAc,CAACK,KAAK,EAAE;MAC5B,MAAMD,GAAG;IACX;EACF;;EAKA,aAAaE,eAAe,CAAIZ,WAAwB,EAA6B;IACnF,MAAMH,MAAM,GAAG,OAAOC,KAAa,EAAEC,MAAc,KAAKG,MAAM,CAACC,IAAI,CAACH,WAAW,EAAEF,KAAK,EAAEC,MAAM,CAAC;IAC/F,MAAMK,OAAO,GAAG,YAAY,CAAC,CAAC;IAC9B,MAAMC,IAAI,GAAGL,WAAW,CAACa,UAAU;IACnC,MAAMP,cAAc,GAAG,IAAInB,qBAAqB,CAACU,MAAM,EAAEO,OAAO,EAAEC,IAAI,CAAC;IACvE,IAAI;MACF,MAAMC,cAAc,CAACC,UAAU,EAAE;MACjC,MAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAU,EAAE;MAClD,OAAO,IAAIf,aAAa,CAACc,QAAQ,EAAEF,cAAc,CAAC;IACpD,CAAC,CAAC,OAAOI,GAAG,EAAE;MACZ,MAAMJ,cAAc,CAACK,KAAK,EAAE;MAC5B,MAAMD,GAAG;IACX;EACF;EAEA,aAAaI,UAAU,CAAIC,MAAc,EAA6B;IACpE,MAAMT,cAAc,GAAG,MAAMnB,qBAAqB,CAAC2B,UAAU,CAACC,MAAM,CAAC;IACrE,IAAI;MACF,MAAMT,cAAc,CAACC,UAAU,EAAE;MACjC,MAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAU,EAAE;MAClD,OAAO,IAAIf,aAAa,CAAIc,QAAQ,EAAEF,cAAc,CAAC;IACvD,CAAC,CAAC,OAAOI,GAAG,EAAE;MACZ,MAAMJ,cAAc,CAACK,KAAK,EAAE;MAC5B,MAAMD,GAAG;IACX;EACF;EAYAM,WAAW,CAACR,QAAsB,EAAEF,cAAqC,EAAE;IAAA;IAAA;IAAA;IACzE,IAAIE,QAAQ,CAACS,OAAO,KAAK3B,eAAe,EAAE;MACxC,MAAM,IAAI4B,KAAK,CAAC,yBAAyB,CAAC;IAC5C;IAEA,IAAI,CAACV,QAAQ,GAAGA,QAAQ;IACxB,IAAI,CAACF,cAAc,GAAGA,cAAc;IACpC,MAAMa,IAAI,GAAG,IAAI,CAACX,QAAQ,CAACY,MAAM,CAAC,CAAC,CAAC;IACpC,MAAM;MAACA;IAAM,CAAC,GAAG7B,YAAY,CAAC,IAAI,CAACiB,QAAQ,CAACY,MAAM,EAAE,CAAC,EAAED,IAAI,CAACE,YAAY,CAAE;IAC1E,IAAI,CAACD,MAAM,GAAG,IAAIhC,aAAa,CAACgC,MAAM,CAAC;EACzC;;EAMA,MAAMT,KAAK,GAAkB;IAC3B,MAAM,IAAI,CAACL,cAAc,CAACK,KAAK,EAAE;EAGnC;;EAeAW,SAAS,CAACC,UAAkC,EAA6B;IACvE,IAAI,CAACA,UAAU,EAAE;MAEfA,UAAU,GAAG,EAAE;IACjB;;IAGAA,UAAU,GAAGA,UAAU,CAACC,GAAG,CAAEC,CAAC,IAAMC,KAAK,CAACC,OAAO,CAACF,CAAC,CAAC,GAAGA,CAAC,GAAG,CAACA,CAAC,CAAE,CAAC;IAEhE,OAAO,IAAIpC,aAAa,CACtB,IAAI,CAACmB,QAAQ,EACb,IAAI,CAACF,cAAc,EACnB,IAAI,CAACc,MAAM,EACXG,UAAU,CACX;EACH;;EAMAK,WAAW,GAAW;IACpB,OAAOC,MAAM,CAAC,IAAI,CAACrB,QAAQ,CAACsB,QAAQ,CAAC;EACvC;;EAKAC,SAAS,GAAkB;IACzB,OAAO,IAAI,CAACX,MAAM;EACpB;;EAKAY,WAAW,GAA2B;IACpC,MAAMC,EAA0B,GAAG,CAAC,CAAC;IACrC,KAAK,MAAMC,EAAE,IAAI,IAAI,CAAC1B,QAAQ,CAAC2B,kBAAkB,EAAG;MAClDF,EAAE,CAACC,EAAE,CAACE,GAAG,CAAC,GAAGF,EAAE,CAACG,KAAM;IACxB;IACA,OAAOJ,EAAE;EACX;;EAMA,0BAA2C;IACzC,OAAO,IAAI,CAACX,SAAS,EAAE,CAAC9B,MAAM,CAACC,aAAa,CAAC,EAAE;EACjD;AACF"}
+ {"version":3,"file":"parquet-reader.js","names":["ParquetSchema","decodeSchema","materializeRecords","PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","CompressionCodec","Type","decodeFileMetadata","getThriftEnum","fieldIndexOf","decodeDataPages","decodePage","DEFAULT_PROPS","defaultDictionarySize","ParquetReader","constructor","file","props","close","rowIterator","rows","rowBatchIterator","row","schema","getSchema","rowGroup","rowGroupIterator","columnList","map","x","Array","isArray","metadata","getFileMetadata","rowGroupCount","row_groups","length","rowGroupIndex","readRowGroup","getRowCount","Number","num_rows","root","schemaDefinition","num_children","getSchemaMetadata","md","kv","key_value_metadata","key","value","readHeader","readFooter","buffer","read","magic","toString","Error","trailerLen","trailerBuf","size","slice","metadataSize","readUInt32LE","metadataOffset","metadataBuf","rowCount","columnData","colChunk","columns","colMetadata","meta_data","colKey","path_in_schema","join","readColumnChunk","file_path","undefined","field","findField","type","primitiveType","compression","codec","pagesOffset","data_page_offset","pagesSize","total_compressed_size","Math","min","options","rLevelMax","dLevelMax","column","numValues","num_values","dictionary","dictionaryPageOffset","dictionary_page_offset","dictionaryOffset","getDictionary","pagesBuf","dictionarySize","cursor","offset","decodedPage"],"sources":["../../../../src/parquetjs/parser/parquet-reader.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport type {ReadableFile} from '@loaders.gl/loader-utils';\n\nimport {ParquetSchema} from '../schema/schema';\nimport {decodeSchema} from './decoders';\nimport {materializeRecords} from '../schema/shred';\n\nimport {PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED} from '../../constants';\nimport {ColumnChunk, CompressionCodec, FileMetaData, RowGroup, Type} from '../parquet-thrift';\nimport {\n ParquetBuffer,\n ParquetCompression,\n ParquetData,\n PrimitiveType,\n ParquetOptions\n} from '../schema/declare';\nimport {decodeFileMetadata, getThriftEnum, fieldIndexOf} from '../utils/read-utils';\nimport {decodeDataPages, decodePage} from './decoders';\n\nexport type ParquetReaderProps = {\n defaultDictionarySize?: number;\n};\n\n/** Properties for initializing a ParquetRowGroupReader */\nexport type ParquetIterationProps = {\n /** Filter allowing some columns to be dropped */\n columnList?: string[] | string[][];\n};\n\nconst DEFAULT_PROPS: Required<ParquetReaderProps> = {\n defaultDictionarySize: 1e6\n};\n\n/**\n * The parquet envelope reader allows direct, unbuffered access to the individual\n * sections of the parquet file, namely the header, footer and the row groups.\n * This class is intended for advanced/internal users; if you just want to retrieve\n * rows from a parquet file use the ParquetReader instead\n */\nexport class ParquetReader {\n props: Required<ParquetReaderProps>;\n file: ReadableFile;\n metadata: Promise<FileMetaData> | null = null;\n\n constructor(file: ReadableFile, props?: ParquetReaderProps) {\n this.file = file;\n this.props = {...DEFAULT_PROPS, ...props};\n }\n\n close(): void {\n // eslint-disable-next-line @typescript-eslint/no-floating-promises\n this.file.close();\n }\n\n // HIGH LEVEL METHODS\n\n /** Yield one row at a time */\n async *rowIterator(props?: ParquetIterationProps) {\n for await (const rows of this.rowBatchIterator(props)) {\n // yield *rows\n for (const row of rows) {\n yield row;\n 
}\n }\n }\n\n /** Yield one batch of rows at a time */\n async *rowBatchIterator(props?: ParquetIterationProps) {\n const schema = await this.getSchema();\n for await (const rowGroup of this.rowGroupIterator(props)) {\n yield materializeRecords(schema, rowGroup);\n }\n }\n\n /** Iterate over the raw row groups */\n async *rowGroupIterator(props?: ParquetIterationProps) {\n // Ensure strings are nested in arrays\n const columnList: string[][] = (props?.columnList || []).map((x) =>\n Array.isArray(x) ? x : [x]\n );\n\n const metadata = await this.getFileMetadata();\n const schema = await this.getSchema();\n\n const rowGroupCount = metadata?.row_groups.length || 0;\n\n for (let rowGroupIndex = 0; rowGroupIndex < rowGroupCount; rowGroupIndex++) {\n const rowGroup = await this.readRowGroup(\n schema,\n metadata.row_groups[rowGroupIndex],\n columnList\n );\n yield rowGroup;\n }\n }\n\n async getRowCount(): Promise<number> {\n const metadata = await this.getFileMetadata();\n return Number(metadata.num_rows);\n }\n\n async getSchema(): Promise<ParquetSchema> {\n const metadata = await this.getFileMetadata();\n const root = metadata.schema[0];\n const {schema: schemaDefinition} = decodeSchema(metadata.schema, 1, root.num_children!);\n const schema = new ParquetSchema(schemaDefinition);\n return schema;\n }\n\n /**\n * Returns the user (key/value) metadata for this file\n * In parquet this is not stored on the schema like it is in arrow\n */\n async getSchemaMetadata(): Promise<Record<string, string>> {\n const metadata = await this.getFileMetadata();\n const md: Record<string, string> = {};\n for (const kv of metadata.key_value_metadata!) {\n md[kv.key] = kv.value!;\n }\n return md;\n }\n\n async getFileMetadata(): Promise<FileMetaData> {\n if (!this.metadata) {\n await this.readHeader();\n this.metadata = this.readFooter();\n }\n return this.metadata;\n }\n\n // LOW LEVEL METHODS\n\n /** Metadata is stored in the footer */\n async readHeader(): Promise<void> {\n const buffer = await this.file.read(0, PARQUET_MAGIC.length);\n const magic = buffer.toString();\n switch (magic) {\n case PARQUET_MAGIC:\n break;\n case PARQUET_MAGIC_ENCRYPTED:\n throw new Error('Encrypted parquet file not supported');\n default:\n throw new Error(`Invalid parquet file (magic=${magic})`);\n }\n }\n\n /** Metadata is stored in the footer */\n async readFooter(): Promise<FileMetaData> {\n const trailerLen = PARQUET_MAGIC.length + 4;\n const trailerBuf = await this.file.read(this.file.size - trailerLen, trailerLen);\n\n const magic = trailerBuf.slice(4).toString();\n if (magic !== PARQUET_MAGIC) {\n throw new Error(`Not a valid parquet file (magic=\"${magic})`);\n }\n\n const metadataSize = trailerBuf.readUInt32LE(0);\n const metadataOffset = this.file.size - metadataSize - trailerLen;\n if (metadataOffset < PARQUET_MAGIC.length) {\n throw new Error(`Invalid metadata size ${metadataOffset}`);\n }\n\n const metadataBuf = await this.file.read(metadataOffset, metadataSize);\n // let metadata = new parquet_thrift.FileMetaData();\n // parquet_util.decodeThrift(metadata, metadataBuf);\n const {metadata} = decodeFileMetadata(metadataBuf);\n return metadata;\n }\n\n /** Data is stored in row groups (similar to Apache Arrow record batches) */\n async readRowGroup(\n schema: ParquetSchema,\n rowGroup: RowGroup,\n columnList: string[][]\n ): Promise<ParquetBuffer> {\n const buffer: ParquetBuffer = {\n rowCount: Number(rowGroup.num_rows),\n columnData: {}\n };\n for (const colChunk of rowGroup.columns) {\n const colMetadata = 
colChunk.meta_data;\n const colKey = colMetadata?.path_in_schema;\n if (columnList.length > 0 && fieldIndexOf(columnList, colKey!) < 0) {\n continue; // eslint-disable-line no-continue\n }\n buffer.columnData[colKey!.join()] = await this.readColumnChunk(schema, colChunk);\n }\n return buffer;\n }\n\n /**\n * Each row group contains column chunks for all the columns.\n */\n async readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData> {\n if (colChunk.file_path !== undefined && colChunk.file_path !== null) {\n throw new Error('external references are not supported');\n }\n\n const field = schema.findField(colChunk.meta_data?.path_in_schema!);\n const type: PrimitiveType = getThriftEnum(Type, colChunk.meta_data?.type!) as any;\n\n if (type !== field.primitiveType) {\n throw new Error(`chunk type not matching schema: ${type}`);\n }\n\n const compression: ParquetCompression = getThriftEnum(\n CompressionCodec,\n colChunk.meta_data?.codec!\n ) as any;\n\n const pagesOffset = Number(colChunk.meta_data?.data_page_offset!);\n let pagesSize = Number(colChunk.meta_data?.total_compressed_size!);\n\n if (!colChunk.file_path) {\n pagesSize = Math.min(\n this.file.size - pagesOffset,\n Number(colChunk.meta_data?.total_compressed_size)\n );\n }\n\n const options: ParquetOptions = {\n type,\n rLevelMax: field.rLevelMax,\n dLevelMax: field.dLevelMax,\n compression,\n column: field,\n numValues: colChunk.meta_data?.num_values,\n dictionary: []\n };\n\n let dictionary;\n\n const dictionaryPageOffset = colChunk?.meta_data?.dictionary_page_offset;\n\n if (dictionaryPageOffset) {\n const dictionaryOffset = Number(dictionaryPageOffset);\n // Getting dictionary from column chunk to iterate all over indexes to get dataPage values.\n dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);\n }\n\n dictionary = options.dictionary?.length ? options.dictionary : dictionary;\n const pagesBuf = await this.file.read(pagesOffset, pagesSize);\n return await decodeDataPages(pagesBuf, {...options, dictionary});\n }\n\n /**\n * Getting dictionary for allows to flatten values by indices.\n * @param dictionaryPageOffset\n * @param options\n * @param pagesOffset\n * @returns\n */\n async getDictionary(\n dictionaryPageOffset: number,\n options: ParquetOptions,\n pagesOffset: number\n ): Promise<string[]> {\n if (dictionaryPageOffset === 0) {\n // dictionarySize = Math.min(this.fileSize - pagesOffset, this.defaultDictionarySize);\n // pagesBuf = await this.read(pagesOffset, dictionarySize);\n\n // In this case we are working with parquet-mr files format. 
Problem is described below:\n // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding\n // We need to get dictionary page from column chunk if it exists.\n // Now if we use code commented above we don't get DICTIONARY_PAGE we get DATA_PAGE instead.\n return [];\n }\n\n const dictionarySize = Math.min(\n this.file.size - dictionaryPageOffset,\n this.props.defaultDictionarySize\n );\n const pagesBuf = await this.file.read(dictionaryPageOffset, dictionarySize);\n\n const cursor = {buffer: pagesBuf, offset: 0, size: pagesBuf.length};\n const decodedPage = await decodePage(cursor, options);\n\n return decodedPage.dictionary!;\n }\n}\n"],"mappings":";;AAGA,SAAQA,aAAa,QAAO,kBAAkB;AAC9C,SAAQC,YAAY,QAAO,YAAY;AACvC,SAAQC,kBAAkB,QAAO,iBAAiB;AAElD,SAAQC,aAAa,EAAEC,uBAAuB,QAAO,iBAAiB;AACtE,SAAqBC,gBAAgB,EAA0BC,IAAI,QAAO,mBAAmB;AAQ7F,SAAQC,kBAAkB,EAAEC,aAAa,EAAEC,YAAY,QAAO,qBAAqB;AACnF,SAAQC,eAAe,EAAEC,UAAU,QAAO,YAAY;AAYtD,MAAMC,aAA2C,GAAG;EAClDC,qBAAqB,EAAE;AACzB,CAAC;;AAQD,OAAO,MAAMC,aAAa,CAAC;EAKzBC,WAAW,CAACC,IAAkB,EAAEC,KAA0B,EAAE;IAAA;IAAA;IAAA,kCAFnB,IAAI;IAG3C,IAAI,CAACD,IAAI,GAAGA,IAAI;IAChB,IAAI,CAACC,KAAK,GAAG;MAAC,GAAGL,aAAa;MAAE,GAAGK;IAAK,CAAC;EAC3C;EAEAC,KAAK,GAAS;IAEZ,IAAI,CAACF,IAAI,CAACE,KAAK,EAAE;EACnB;;EAKA,OAAOC,WAAW,CAACF,KAA6B,EAAE;IAChD,WAAW,MAAMG,IAAI,IAAI,IAAI,CAACC,gBAAgB,CAACJ,KAAK,CAAC,EAAE;MAErD,KAAK,MAAMK,GAAG,IAAIF,IAAI,EAAE;QACtB,MAAME,GAAG;MACX;IACF;EACF;;EAGA,OAAOD,gBAAgB,CAACJ,KAA6B,EAAE;IACrD,MAAMM,MAAM,GAAG,MAAM,IAAI,CAACC,SAAS,EAAE;IACrC,WAAW,MAAMC,QAAQ,IAAI,IAAI,CAACC,gBAAgB,CAACT,KAAK,CAAC,EAAE;MACzD,MAAMf,kBAAkB,CAACqB,MAAM,EAAEE,QAAQ,CAAC;IAC5C;EACF;;EAGA,OAAOC,gBAAgB,CAACT,KAA6B,EAAE;IAErD,MAAMU,UAAsB,GAAG,CAAC,CAAAV,KAAK,aAALA,KAAK,uBAALA,KAAK,CAAEU,UAAU,KAAI,EAAE,EAAEC,GAAG,CAAEC,CAAC,IAC7DC,KAAK,CAACC,OAAO,CAACF,CAAC,CAAC,GAAGA,CAAC,GAAG,CAACA,CAAC,CAAC,CAC3B;IAED,MAAMG,QAAQ,GAAG,MAAM,IAAI,CAACC,eAAe,EAAE;IAC7C,MAAMV,MAAM,GAAG,MAAM,IAAI,CAACC,SAAS,EAAE;IAErC,MAAMU,aAAa,GAAG,CAAAF,QAAQ,aAARA,QAAQ,uBAARA,QAAQ,CAAEG,UAAU,CAACC,MAAM,KAAI,CAAC;IAEtD,KAAK,IAAIC,aAAa,GAAG,CAAC,EAAEA,aAAa,GAAGH,aAAa,EAAEG,aAAa,EAAE,EAAE;MAC1E,MAAMZ,QAAQ,GAAG,MAAM,IAAI,CAACa,YAAY,CACtCf,MAAM,EACNS,QAAQ,CAACG,UAAU,CAACE,aAAa,CAAC,EAClCV,UAAU,CACX;MACD,MAAMF,QAAQ;IAChB;EACF;EAEA,MAAMc,WAAW,GAAoB;IACnC,MAAMP,QAAQ,GAAG,MAAM,IAAI,CAACC,eAAe,EAAE;IAC7C,OAAOO,MAAM,CAACR,QAAQ,CAACS,QAAQ,CAAC;EAClC;EAEA,MAAMjB,SAAS,GAA2B;IACxC,MAAMQ,QAAQ,GAAG,MAAM,IAAI,CAACC,eAAe,EAAE;IAC7C,MAAMS,IAAI,GAAGV,QAAQ,CAACT,MAAM,CAAC,CAAC,CAAC;IAC/B,MAAM;MAACA,MAAM,EAAEoB;IAAgB,CAAC,GAAG1C,YAAY,CAAC+B,QAAQ,CAACT,MAAM,EAAE,CAAC,EAAEmB,IAAI,CAACE,YAAY,CAAE;IACvF,MAAMrB,MAAM,GAAG,IAAIvB,aAAa,CAAC2C,gBAAgB,CAAC;IAClD,OAAOpB,MAAM;EACf;;EAMA,MAAMsB,iBAAiB,GAAoC;IACzD,MAAMb,QAAQ,GAAG,MAAM,IAAI,CAACC,eAAe,EAAE;IAC7C,MAAMa,EAA0B,GAAG,CAAC,CAAC;IACrC,KAAK,MAAMC,EAAE,IAAIf,QAAQ,CAACgB,kBAAkB,EAAG;MAC7CF,EAAE,CAACC,EAAE,CAACE,GAAG,CAAC,GAAGF,EAAE,CAACG,KAAM;IACxB;IACA,OAAOJ,EAAE;EACX;EAEA,MAAMb,eAAe,GAA0B;IAC7C,IAAI,CAAC,IAAI,CAACD,QAAQ,EAAE;MAClB,MAAM,IAAI,CAACmB,UAAU,EAAE;MACvB,IAAI,CAACnB,QAAQ,GAAG,IAAI,CAACoB,UAAU,EAAE;IACnC;IACA,OAAO,IAAI,CAACpB,QAAQ;EACtB;;EAKA,MAAMmB,UAAU,GAAkB;IAChC,MAAME,MAAM,GAAG,MAAM,IAAI,CAACrC,IAAI,CAACsC,IAAI,CAAC,CAAC,EAAEnD,aAAa,CAACiC,MAAM,CAAC;IAC5D,MAAMmB,KAAK,GAAGF,MAAM,CAACG,QAAQ,EAAE;IAC/B,QAAQD,KAAK;MACX,KAAKpD,aAAa;QAChB;MACF,KAAKC,uBAAuB;QAC1B,MAAM,IAAIqD,KAAK,CAAC,sCAAsC,CAAC;MACzD;QACE,MAAM,IAAIA,KAAK,uCAAgCF,KAAK,OAAI;IAAC;EAE/D;;EAGA,MAAMH,UAAU,GAA0B;IACxC,MAAMM,UAAU,GAAGvD,aAAa,CAACiC,MAAM,GAAG,CAAC;IAC3C,MAAMuB,UAAU,GAAG,MAAM,
IAAI,CAAC3C,IAAI,CAACsC,IAAI,CAAC,IAAI,CAACtC,IAAI,CAAC4C,IAAI,GAAGF,UAAU,EAAEA,UAAU,CAAC;IAEhF,MAAMH,KAAK,GAAGI,UAAU,CAACE,KAAK,CAAC,CAAC,CAAC,CAACL,QAAQ,EAAE;IAC5C,IAAID,KAAK,KAAKpD,aAAa,EAAE;MAC3B,MAAM,IAAIsD,KAAK,6CAAqCF,KAAK,OAAI;IAC/D;IAEA,MAAMO,YAAY,GAAGH,UAAU,CAACI,YAAY,CAAC,CAAC,CAAC;IAC/C,MAAMC,cAAc,GAAG,IAAI,CAAChD,IAAI,CAAC4C,IAAI,GAAGE,YAAY,GAAGJ,UAAU;IACjE,IAAIM,cAAc,GAAG7D,aAAa,CAACiC,MAAM,EAAE;MACzC,MAAM,IAAIqB,KAAK,iCAA0BO,cAAc,EAAG;IAC5D;IAEA,MAAMC,WAAW,GAAG,MAAM,IAAI,CAACjD,IAAI,CAACsC,IAAI,CAACU,cAAc,EAAEF,YAAY,CAAC;IAGtE,MAAM;MAAC9B;IAAQ,CAAC,GAAGzB,kBAAkB,CAAC0D,WAAW,CAAC;IAClD,OAAOjC,QAAQ;EACjB;;EAGA,MAAMM,YAAY,CAChBf,MAAqB,EACrBE,QAAkB,EAClBE,UAAsB,EACE;IACxB,MAAM0B,MAAqB,GAAG;MAC5Ba,QAAQ,EAAE1B,MAAM,CAACf,QAAQ,CAACgB,QAAQ,CAAC;MACnC0B,UAAU,EAAE,CAAC;IACf,CAAC;IACD,KAAK,MAAMC,QAAQ,IAAI3C,QAAQ,CAAC4C,OAAO,EAAE;MACvC,MAAMC,WAAW,GAAGF,QAAQ,CAACG,SAAS;MACtC,MAAMC,MAAM,GAAGF,WAAW,aAAXA,WAAW,uBAAXA,WAAW,CAAEG,cAAc;MAC1C,IAAI9C,UAAU,CAACS,MAAM,GAAG,CAAC,IAAI3B,YAAY,CAACkB,UAAU,EAAE6C,MAAM,CAAE,GAAG,CAAC,EAAE;QAClE;MACF;;MACAnB,MAAM,CAACc,UAAU,CAACK,MAAM,CAAEE,IAAI,EAAE,CAAC,GAAG,MAAM,IAAI,CAACC,eAAe,CAACpD,MAAM,EAAE6C,QAAQ,CAAC;IAClF;IACA,OAAOf,MAAM;EACf;;EAKA,MAAMsB,eAAe,CAACpD,MAAqB,EAAE6C,QAAqB,EAAwB;IAAA;IACxF,IAAIA,QAAQ,CAACQ,SAAS,KAAKC,SAAS,IAAIT,QAAQ,CAACQ,SAAS,KAAK,IAAI,EAAE;MACnE,MAAM,IAAInB,KAAK,CAAC,uCAAuC,CAAC;IAC1D;IAEA,MAAMqB,KAAK,GAAGvD,MAAM,CAACwD,SAAS,wBAACX,QAAQ,CAACG,SAAS,wDAAlB,oBAAoBE,cAAc,CAAE;IACnE,MAAMO,IAAmB,GAAGxE,aAAa,CAACF,IAAI,0BAAE8D,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBS,IAAI,CAAS;IAEjF,IAAIA,IAAI,KAAKF,KAAK,CAACG,aAAa,EAAE;MAChC,MAAM,IAAIxB,KAAK,2CAAoCuB,IAAI,EAAG;IAC5D;IAEA,MAAME,WAA+B,GAAG1E,aAAa,CACnDH,gBAAgB,0BAChB+D,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBY,KAAK,CACnB;IAER,MAAMC,WAAW,GAAG5C,MAAM,yBAAC4B,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBc,gBAAgB,CAAE;IACjE,IAAIC,SAAS,GAAG9C,MAAM,yBAAC4B,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBgB,qBAAqB,CAAE;IAElE,IAAI,CAACnB,QAAQ,CAACQ,SAAS,EAAE;MAAA;MACvBU,SAAS,GAAGE,IAAI,CAACC,GAAG,CAClB,IAAI,CAACzE,IAAI,CAAC4C,IAAI,GAAGwB,WAAW,EAC5B5C,MAAM,yBAAC4B,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBgB,qBAAqB,CAAC,CAClD;IACH;IAEA,MAAMG,OAAuB,GAAG;MAC9BV,IAAI;MACJW,SAAS,EAAEb,KAAK,CAACa,SAAS;MAC1BC,SAAS,EAAEd,KAAK,CAACc,SAAS;MAC1BV,WAAW;MACXW,MAAM,EAAEf,KAAK;MACbgB,SAAS,0BAAE1B,QAAQ,CAACG,SAAS,yDAAlB,qBAAoBwB,UAAU;MACzCC,UAAU,EAAE;IACd,CAAC;IAED,IAAIA,UAAU;IAEd,MAAMC,oBAAoB,GAAG7B,QAAQ,aAARA,QAAQ,+CAARA,QAAQ,CAAEG,SAAS,yDAAnB,qBAAqB2B,sBAAsB;IAExE,IAAID,oBAAoB,EAAE;MACxB,MAAME,gBAAgB,GAAG3D,MAAM,CAACyD,oBAAoB,CAAC;MAErDD,UAAU,GAAG,MAAM,IAAI,CAACI,aAAa,CAACD,gBAAgB,EAAET,OAAO,EAAEN,WAAW,CAAC;IAC/E;IAEAY,UAAU,GAAG,uBAAAN,OAAO,CAACM,UAAU,gDAAlB,oBAAoB5D,MAAM,GAAGsD,OAAO,CAACM,UAAU,GAAGA,UAAU;IACzE,MAAMK,QAAQ,GAAG,MAAM,IAAI,CAACrF,IAAI,CAACsC,IAAI,CAAC8B,WAAW,EAAEE,SAAS,CAAC;IAC7D,OAAO,MAAM5E,eAAe,CAAC2F,QAAQ,EAAE;MAAC,GAAGX,OAAO;MAAEM;IAAU,CAAC,CAAC;EAClE;;EASA,MAAMI,aAAa,CACjBH,oBAA4B,EAC5BP,OAAuB,EACvBN,WAAmB,EACA;IACnB,IAAIa,oBAAoB,KAAK,CAAC,EAAE;;MAQ9B,OAAO,EAAE;IACX;IAEA,MAAMK,cAAc,GAAGd,IAAI,CAACC,GAAG,CAC7B,IAAI,CAACzE,IAAI,CAAC4C,IAAI,GAAGqC,oBAAoB,EACrC,IAAI,CAAChF,KAAK,CAACJ,qBAAqB,CACjC;IACD,MAAMwF,QAAQ,GAAG,MAAM,IAAI,CAACrF,IAAI,CAACsC,IAAI,CAAC2C,oBAAoB,EAAEK,cAAc,CAAC;IAE3E,MAAMC,MAAM,GAAG;MAAClD,MAAM,EAAEgD,QAAQ;MAAEG,MAAM,EAAE,CAAC;MAAE5C,IAAI,EAAEyC,QAAQ,CAACjE;IAAM,CAAC;IACnE,MAAMqE,WAAW,GAAG,MAAM9F,UAAU,CAAC4F,MAAM,EAAEb,OAAO,CAAC;IAErD,OAAOe,WAAW,CAACT,UAAU;EAC/B;AACF"}
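The readFooter() shown in the new source above locates the thrift FileMetaData from the fixed trailer at the end of the file. A small sketch of that footer arithmetic under the same layout; locateFooterMetadata is an illustrative helper, not an exported function:

const PARQUET_MAGIC = 'PAR1';

// Parquet files end with: [FileMetaData (thrift)] [uint32 LE metadata size] ["PAR1"]
function locateFooterMetadata(fileSize: number, trailer: Buffer): {offset: number; size: number} {
  // trailer = the last 8 bytes of the file: 4-byte size followed by the 4-byte magic
  if (trailer.slice(4).toString() !== PARQUET_MAGIC) {
    throw new Error('Not a valid parquet file');
  }
  const metadataSize = trailer.readUInt32LE(0);
  const trailerLen = PARQUET_MAGIC.length + 4;
  return {offset: fileSize - metadataSize - trailerLen, size: metadataSize};
}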
@@ -1,6 +1,7 @@
 import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
 
 export class ParquetBuffer {
+
   constructor() {
     let rowCount = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
     let columnData = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
@@ -1 +1 @@
- {"version":3,"file":"declare.js","names":["ParquetBuffer","constructor","rowCount","columnData"],"sources":["../../../../src/parquetjs/schema/declare.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport Int64 from 'node-int64';\nimport type {PageHeader} from '../parquet-thrift';\n\nexport type ParquetCodec = 'PLAIN' | 'RLE' | 'PLAIN_DICTIONARY';\nexport type ParquetCompression =\n | 'UNCOMPRESSED'\n | 'GZIP'\n | 'SNAPPY'\n | 'LZO'\n | 'BROTLI'\n | 'LZ4'\n | 'LZ4_RAW'\n | 'ZSTD';\nexport type RepetitionType = 'REQUIRED' | 'OPTIONAL' | 'REPEATED';\nexport type ParquetType = PrimitiveType | OriginalType;\n\n/**\n * Physical type\n */\nexport type PrimitiveType =\n // Base Types\n | 'BOOLEAN' // 0\n | 'INT32' // 1\n | 'INT64' // 2\n | 'INT96' // 3\n | 'FLOAT' // 4\n | 'DOUBLE' // 5\n | 'BYTE_ARRAY' // 6,\n | 'FIXED_LEN_BYTE_ARRAY'; // 7\n\n/**\n * Logical type\n */\nexport type OriginalType =\n // Converted Types\n | 'UTF8' // 0\n // | 'MAP' // 1\n // | 'MAP_KEY_VALUE' // 2\n // | 'LIST' // 3\n // | 'ENUM' // 4\n // | 'DECIMAL' // 5\n | 'DECIMAL_INT32' // 5\n | 'DECIMAL_INT64' // 5\n | 'DECIMAL_BYTE_ARRAY' // 5\n | 'DECIMAL_FIXED_LEN_BYTE_ARRAY' // 5\n | 'DATE' // 6\n | 'TIME_MILLIS' // 7\n | 'TIME_MICROS' // 8\n | 'TIMESTAMP_MILLIS' // 9\n | 'TIMESTAMP_MICROS' // 10\n | 'UINT_8' // 11\n | 'UINT_16' // 12\n | 'UINT_32' // 13\n | 'UINT_64' // 14\n | 'INT_8' // 15\n | 'INT_16' // 16\n | 'INT_32' // 17\n | 'INT_64' // 18\n | 'JSON' // 19\n | 'BSON' // 20\n | 'INTERVAL'; // 21\n\nexport type ParquetDictionary = string[];\n\nexport interface SchemaDefinition {\n [string: string]: FieldDefinition;\n}\n\nexport interface FieldDefinition {\n type?: ParquetType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n optional?: boolean;\n repeated?: boolean;\n fields?: SchemaDefinition;\n}\n\nexport interface ParquetField {\n name: string;\n path: string[];\n key: string;\n primitiveType?: PrimitiveType;\n originalType?: OriginalType;\n repetitionType: RepetitionType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n rLevelMax: number;\n dLevelMax: number;\n isNested?: boolean;\n fieldCount?: number;\n fields?: Record<string, ParquetField>;\n}\n\nexport interface ParquetOptions {\n type: ParquetType;\n rLevelMax: number;\n dLevelMax: number;\n compression: ParquetCompression;\n column: ParquetField;\n numValues?: Int64;\n dictionary?: ParquetDictionary;\n}\n\nexport interface ParquetData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n pageHeaders: PageHeader[];\n}\n\nexport interface ParquetPageData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n dictionary?: ParquetDictionary;\n pageHeader: PageHeader;\n}\n\nexport interface ParquetRecord {\n [key: string]: any;\n}\n\nexport class ParquetBuffer {\n rowCount: number;\n columnData: Record<string, ParquetData>;\n constructor(rowCount: number = 0, columnData: Record<string, ParquetData> = {}) {\n this.rowCount = rowCount;\n this.columnData = columnData;\n }\n}\n"],"mappings":";;AAmIA,OAAO,MAAMA,aAAa,CAAC;EAGzBC,WAAW,GAAqE;IAAA,IAApEC,QAAgB,uEAAG,CAAC;IAAA,IAAEC,UAAuC,uEAAG,CAAC,CAAC;IAAA;IAAA;IAC5E,IAAI,CAACD,QAAQ,GAAGA,QAAQ;IACxB,IAAI,CAACC,UAAU,GAAGA,UAAU;EAC9B;AACF"}
+ {"version":3,"file":"declare.js","names":["ParquetBuffer","constructor","rowCount","columnData"],"sources":["../../../../src/parquetjs/schema/declare.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport Int64 from 'node-int64';\nimport type {PageHeader} from '../parquet-thrift';\n\nexport type ParquetCodec = 'PLAIN' | 'RLE' | 'PLAIN_DICTIONARY';\nexport type ParquetCompression =\n | 'UNCOMPRESSED'\n | 'GZIP'\n | 'SNAPPY'\n | 'LZO'\n | 'BROTLI'\n | 'LZ4'\n | 'LZ4_RAW'\n | 'ZSTD';\nexport type RepetitionType = 'REQUIRED' | 'OPTIONAL' | 'REPEATED';\nexport type ParquetType = PrimitiveType | OriginalType;\n\n/**\n * Physical type\n */\nexport type PrimitiveType =\n // Base Types\n | 'BOOLEAN' // 0\n | 'INT32' // 1\n | 'INT64' // 2\n | 'INT96' // 3\n | 'FLOAT' // 4\n | 'DOUBLE' // 5\n | 'BYTE_ARRAY' // 6,\n | 'FIXED_LEN_BYTE_ARRAY'; // 7\n\n/**\n * Logical type\n */\nexport type OriginalType =\n // Converted Types\n | 'UTF8' // 0\n // | 'MAP' // 1\n // | 'MAP_KEY_VALUE' // 2\n // | 'LIST' // 3\n // | 'ENUM' // 4\n // | 'DECIMAL' // 5\n | 'DECIMAL_INT32' // 5\n | 'DECIMAL_INT64' // 5\n | 'DECIMAL_BYTE_ARRAY' // 5\n | 'DECIMAL_FIXED_LEN_BYTE_ARRAY' // 5\n | 'DATE' // 6\n | 'TIME_MILLIS' // 7\n | 'TIME_MICROS' // 8\n | 'TIMESTAMP_MILLIS' // 9\n | 'TIMESTAMP_MICROS' // 10\n | 'UINT_8' // 11\n | 'UINT_16' // 12\n | 'UINT_32' // 13\n | 'UINT_64' // 14\n | 'INT_8' // 15\n | 'INT_16' // 16\n | 'INT_32' // 17\n | 'INT_64' // 18\n | 'JSON' // 19\n | 'BSON' // 20\n | 'INTERVAL'; // 21\n\nexport type ParquetDictionary = string[];\n\nexport interface SchemaDefinition {\n [string: string]: FieldDefinition;\n}\n\nexport interface FieldDefinition {\n type?: ParquetType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n optional?: boolean;\n repeated?: boolean;\n fields?: SchemaDefinition;\n}\n\nexport interface ParquetField {\n name: string;\n path: string[];\n key: string;\n primitiveType?: PrimitiveType;\n originalType?: OriginalType;\n repetitionType: RepetitionType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n rLevelMax: number;\n dLevelMax: number;\n isNested?: boolean;\n fieldCount?: number;\n fields?: Record<string, ParquetField>;\n}\n\n/** @todo better name, this is an internal type? 
*/\nexport interface ParquetOptions {\n type: ParquetType;\n rLevelMax: number;\n dLevelMax: number;\n compression: ParquetCompression;\n column: ParquetField;\n numValues?: Int64;\n dictionary?: ParquetDictionary;\n}\n\nexport interface ParquetPageData {\n dlevels: number[];\n rlevels: number[];\n /** Actual column chunks */\n values: any[]; // ArrayLike<any>;\n count: number;\n dictionary?: ParquetDictionary;\n /** The \"raw\" page header from the file */\n pageHeader: PageHeader;\n}\n\nexport interface ParquetRecord {\n [key: string]: any;\n}\n\n/** @\n * Holds data for one row group (column chunks) */\nexport class ParquetBuffer {\n /** Number of rows in this page */\n rowCount: number;\n\n columnData: Record<string, ParquetData>;\n constructor(rowCount: number = 0, columnData: Record<string, ParquetData> = {}) {\n this.rowCount = rowCount;\n this.columnData = columnData;\n }\n}\n\n/** Holds the data for one column chunk */\nexport interface ParquetData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n pageHeaders: PageHeader[];\n}\n"],"mappings":";;AAgIA,OAAO,MAAMA,aAAa,CAAC;;EAKzBC,WAAW,GAAqE;IAAA,IAApEC,QAAgB,uEAAG,CAAC;IAAA,IAAEC,UAAuC,uEAAG,CAAC,CAAC;IAAA;IAAA;IAC5E,IAAI,CAACD,QAAQ,GAAGA,QAAQ;IACxB,IAAI,CAACC,UAAU,GAAGA,UAAU;EAC9B;AACF"}
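The declare.ts source above keeps the SchemaDefinition/FieldDefinition shapes unchanged; the new version mostly adds doc comments to ParquetBuffer, ParquetData and ParquetOptions. For reference, an illustrative schema definition that matches the FieldDefinition interface (field names are made up):

// Nested groups are expressed with `fields` and no `type`, per FieldDefinition
const exampleSchema = {
  name: {type: 'UTF8'},
  price: {type: 'DOUBLE', optional: true},
  tags: {type: 'UTF8', repeated: true},
  location: {
    optional: true,
    fields: {
      lat: {type: 'DOUBLE'},
      lon: {type: 'DOUBLE'}
    }
  }
};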
@@ -84,62 +84,70 @@ function shredRecordFields(fields, record, data, rLevel, dLevel) {
 
 export function materializeRecords(schema, buffer) {
   const records = [];
-  for (let i = 0; i < buffer.rowCount; i++) records.push({});
+  for (let i = 0; i < buffer.rowCount; i++) {
+    records.push({});
+  }
   for (const key in buffer.columnData) {
-    materializeColumn(schema, buffer, key, records);
+    const columnData = buffer.columnData[key];
+    if (columnData.count) {
+      materializeColumn(schema, columnData, key, records);
+    }
   }
   return records;
 }
 
-function materializeColumn(schema, buffer, key, records) {
-  const data = buffer.columnData[key];
-  if (!data.count) return;
+function materializeColumn(schema, columnData, key, records) {
   const field = schema.findField(key);
   const branch = schema.findFieldBranch(key);
 
   const rLevels = new Array(field.rLevelMax + 1).fill(0);
   let vIndex = 0;
-  for (let i = 0; i < data.count; i++) {
-    const dLevel = data.dlevels[i];
-    const rLevel = data.rlevels[i];
+  for (let i = 0; i < columnData.count; i++) {
+    const dLevel = columnData.dlevels[i];
+    const rLevel = columnData.rlevels[i];
     rLevels[rLevel]++;
     rLevels.fill(0, rLevel + 1);
     let rIndex = 0;
     let record = records[rLevels[rIndex++] - 1];
 
     for (const step of branch) {
-      if (step === field) break;
-      if (dLevel < step.dLevelMax) break;
-      if (step.repetitionType === 'REPEATED') {
-        if (!(step.name in record)) {
-          record[step.name] = [];
-        }
-        const ix = rLevels[rIndex++];
-        while (record[step.name].length <= ix) {
-          record[step.name].push({});
-        }
-        record = record[step.name][ix];
-      } else {
-        record[step.name] = record[step.name] || {};
-        record = record[step.name];
+      if (step === field || dLevel < step.dLevelMax) {
+        break;
+      }
+      switch (step.repetitionType) {
+        case 'REPEATED':
+          if (!(step.name in record)) {
+            record[step.name] = [];
+          }
+          const ix = rLevels[rIndex++];
+          while (record[step.name].length <= ix) {
+            record[step.name].push({});
+          }
+          record = record[step.name][ix];
+          break;
+        default:
+          record[step.name] = record[step.name] || {};
+          record = record[step.name];
      }
    }
 
     if (dLevel === field.dLevelMax) {
       const value = Types.fromPrimitive(
-        field.originalType || field.primitiveType, data.values[vIndex], field);
+        field.originalType || field.primitiveType, columnData.values[vIndex], field);
       vIndex++;
-      if (field.repetitionType === 'REPEATED') {
-        if (!(field.name in record)) {
-          record[field.name] = [];
-        }
-        const ix = rLevels[rIndex];
-        while (record[field.name].length <= ix) {
-          record[field.name].push(null);
-        }
-        record[field.name][ix] = value;
-      } else {
-        record[field.name] = value;
+      switch (field.repetitionType) {
+        case 'REPEATED':
+          if (!(field.name in record)) {
+            record[field.name] = [];
+          }
+          const ix = rLevels[rIndex];
+          while (record[field.name].length <= ix) {
+            record[field.name].push(null);
+          }
+          record[field.name][ix] = value;
+          break;
+        default:
+          record[field.name] = value;
      }
    }
  }
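The hunk above passes the column's data object straight into materializeColumn instead of re-deriving it from the buffer, and hoists the empty-column check into materializeRecords. For context, a sketch of the shred → materialize round trip these functions implement (Dremel record shredding and assembly, per the doc comments in the shred.ts source embedded below); the import paths and the two-field schema are assumptions for illustration:

// Paths are assumptions; the sources live under src/parquetjs/schema/
import {ParquetSchema} from '@loaders.gl/parquet/dist/parquetjs/schema/schema';
import {shredBuffer, shredRecord, materializeRecords} from '@loaders.gl/parquet/dist/parquetjs/schema/shred';

// Illustrative schema: a string column and a repeated double column
const schema = new ParquetSchema({
  name: {type: 'UTF8'},
  scores: {type: 'DOUBLE', repeated: true}
});

// Shred rows into per-column <value, rlevel, dlevel> triplets...
const buffer = shredBuffer(schema);
shredRecord(schema, {name: 'a', scores: [1, 2]}, buffer);
shredRecord(schema, {name: 'b', scores: []}, buffer);

// ...then reassemble nested records; materializeRecords drives the
// refactored materializeColumn shown in the hunk above.
const records = materializeRecords(schema, buffer); // two reassembled rows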
@@ -1 +1 @@
- {"version":3,"file":"shred.js","names":["ParquetBuffer","Types","shredBuffer","schema","columnData","field","fieldList","key","dlevels","rlevels","values","pageHeaders","count","rowCount","shredRecord","record","buffer","data","shredRecordFields","fields","Array","prototype","push","apply","rLevel","dLevel","name","undefined","constructor","length","Boolean","repetitionType","Error","isNested","i","rlvl","rLevelMax","dLevelMax","toPrimitive","originalType","primitiveType","materializeRecords","records","materializeColumn","findField","branch","findFieldBranch","rLevels","fill","vIndex","rIndex","step","ix","value","fromPrimitive"],"sources":["../../../../src/parquetjs/schema/shred.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {ParquetBuffer, ParquetData, ParquetField, ParquetRecord} from './declare';\nimport {ParquetSchema} from './schema';\nimport * as Types from './types';\n\nexport {ParquetBuffer};\n\nexport function shredBuffer(schema: ParquetSchema): ParquetBuffer {\n const columnData: Record<string, ParquetData> = {};\n for (const field of schema.fieldList) {\n columnData[field.key] = {\n dlevels: [],\n rlevels: [],\n values: [],\n pageHeaders: [],\n count: 0\n };\n }\n return {rowCount: 0, columnData};\n}\n\n/**\n * 'Shred' a record into a list of <value, repetition_level, definition_level>\n * tuples per column using the Google Dremel Algorithm..\n *\n * The buffer argument must point to an object into which the shredded record\n * will be returned. You may re-use the buffer for repeated calls to this function\n * to append to an existing buffer, as long as the schema is unchanged.\n *\n * The format in which the shredded records will be stored in the buffer is as\n * follows:\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function shredRecord(schema: ParquetSchema, record: any, buffer: ParquetBuffer): void {\n /* shred the record, this may raise an exception */\n const data = shredBuffer(schema).columnData;\n\n shredRecordFields(schema.fields, record, data, 0, 0);\n\n /* if no error during shredding, add the shredded record to the buffer */\n if (buffer.rowCount === 0) {\n buffer.rowCount = 1;\n buffer.columnData = data;\n return;\n }\n buffer.rowCount += 1;\n for (const field of schema.fieldList) {\n Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);\n buffer.columnData[field.key].count += data[field.key].count;\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction shredRecordFields(\n fields: Record<string, ParquetField>,\n record: any,\n data: Record<string, ParquetData>,\n rLevel: number,\n dLevel: number\n) {\n for (const name in fields) {\n const field = fields[name];\n\n // fetch values\n let values: any[] = [];\n if (\n record &&\n field.name in record &&\n record[field.name] !== undefined &&\n record[field.name] !== null\n ) {\n if (record[field.name].constructor === Array) {\n values = record[field.name];\n } else {\n values.push(record[field.name]);\n }\n }\n // check values\n if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {\n throw new Error(`missing required field: ${field.name}`);\n }\n if (values.length > 1 && field.repetitionType !== 'REPEATED') {\n throw new Error(`too many values for field: ${field.name}`);\n }\n\n // push null\n if (values.length === 0) {\n if (field.isNested) {\n shredRecordFields(field.fields!, null, data, rLevel, dLevel);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rLevel);\n data[field.key].dlevels.push(dLevel);\n }\n continue; // eslint-disable-line no-continue\n }\n\n // push values\n for (let i = 0; i < values.length; i++) {\n const rlvl = i === 0 ? rLevel : field.rLevelMax;\n if (field.isNested) {\n shredRecordFields(field.fields!, values[i], data, rlvl, field.dLevelMax);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rlvl);\n data[field.key].dlevels.push(field.dLevelMax);\n data[field.key].values.push(\n Types.toPrimitive((field.originalType || field.primitiveType)!, values[i])\n );\n }\n }\n }\n}\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {\n const records: ParquetRecord[] = [];\n for (let i = 0; i < buffer.rowCount; i++) records.push({});\n for (const key in buffer.columnData) {\n materializeColumn(schema, buffer, key, records);\n }\n return records;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction materializeColumn(\n schema: ParquetSchema,\n buffer: ParquetBuffer,\n key: string,\n records: ParquetRecord[]\n) {\n const data = buffer.columnData[key];\n if (!data.count) return;\n\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n for (let i = 0; i < data.count; i++) {\n const dLevel = data.dlevels[i];\n const rLevel = data.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes\n for (const step of branch) {\n if (step === field) break;\n if (dLevel < step.dLevelMax) break;\n if (step.repetitionType === 'REPEATED') {\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n } else {\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n data.values[vIndex],\n field\n );\n vIndex++;\n if (field.repetitionType === 'REPEATED') {\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n } else {\n record[field.name] = value;\n }\n }\n 
}\n}\n"],"mappings":";;AAEA,SAAQA,aAAa,QAAiD,WAAW;AAEjF,OAAO,KAAKC,KAAK,MAAM,SAAS;AAEhC,SAAQD,aAAa;AAErB,OAAO,SAASE,WAAW,CAACC,MAAqB,EAAiB;EAChE,MAAMC,UAAuC,GAAG,CAAC,CAAC;EAClD,KAAK,MAAMC,KAAK,IAAIF,MAAM,CAACG,SAAS,EAAE;IACpCF,UAAU,CAACC,KAAK,CAACE,GAAG,CAAC,GAAG;MACtBC,OAAO,EAAE,EAAE;MACXC,OAAO,EAAE,EAAE;MACXC,MAAM,EAAE,EAAE;MACVC,WAAW,EAAE,EAAE;MACfC,KAAK,EAAE;IACT,CAAC;EACH;EACA,OAAO;IAACC,QAAQ,EAAE,CAAC;IAAET;EAAU,CAAC;AAClC;;AAwBA,OAAO,SAASU,WAAW,CAACX,MAAqB,EAAEY,MAAW,EAAEC,MAAqB,EAAQ;EAE3F,MAAMC,IAAI,GAAGf,WAAW,CAACC,MAAM,CAAC,CAACC,UAAU;EAE3Cc,iBAAiB,CAACf,MAAM,CAACgB,MAAM,EAAEJ,MAAM,EAAEE,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC;;EAGpD,IAAID,MAAM,CAACH,QAAQ,KAAK,CAAC,EAAE;IACzBG,MAAM,CAACH,QAAQ,GAAG,CAAC;IACnBG,MAAM,CAACZ,UAAU,GAAGa,IAAI;IACxB;EACF;EACAD,MAAM,CAACH,QAAQ,IAAI,CAAC;EACpB,KAAK,MAAMR,KAAK,IAAIF,MAAM,CAACG,SAAS,EAAE;IACpCc,KAAK,CAACC,SAAS,CAACC,IAAI,CAACC,KAAK,CAACP,MAAM,CAACZ,UAAU,CAACC,KAAK,CAACE,GAAG,CAAC,CAACE,OAAO,EAAEQ,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACE,OAAO,CAAC;IACzFW,KAAK,CAACC,SAAS,CAACC,IAAI,CAACC,KAAK,CAACP,MAAM,CAACZ,UAAU,CAACC,KAAK,CAACE,GAAG,CAAC,CAACC,OAAO,EAAES,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACC,OAAO,CAAC;IACzFY,KAAK,CAACC,SAAS,CAACC,IAAI,CAACC,KAAK,CAACP,MAAM,CAACZ,UAAU,CAACC,KAAK,CAACE,GAAG,CAAC,CAACG,MAAM,EAAEO,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACG,MAAM,CAAC;IACvFM,MAAM,CAACZ,UAAU,CAACC,KAAK,CAACE,GAAG,CAAC,CAACK,KAAK,IAAIK,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACK,KAAK;EAC7D;AACF;;AAGA,SAASM,iBAAiB,CACxBC,MAAoC,EACpCJ,MAAW,EACXE,IAAiC,EACjCO,MAAc,EACdC,MAAc,EACd;EACA,KAAK,MAAMC,IAAI,IAAIP,MAAM,EAAE;IACzB,MAAMd,KAAK,GAAGc,MAAM,CAACO,IAAI,CAAC;;IAG1B,IAAIhB,MAAa,GAAG,EAAE;IACtB,IACEK,MAAM,IACNV,KAAK,CAACqB,IAAI,IAAIX,MAAM,IACpBA,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,KAAKC,SAAS,IAChCZ,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,KAAK,IAAI,EAC3B;MACA,IAAIX,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,CAACE,WAAW,KAAKR,KAAK,EAAE;QAC5CV,MAAM,GAAGK,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC;MAC7B,CAAC,MAAM;QACLhB,MAAM,CAACY,IAAI,CAACP,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,CAAC;MACjC;IACF;IAEA,IAAIhB,MAAM,CAACmB,MAAM,KAAK,CAAC,IAAIC,OAAO,CAACf,MAAM,CAAC,IAAIV,KAAK,CAAC0B,cAAc,KAAK,UAAU,EAAE;MACjF,MAAM,IAAIC,KAAK,mCAA4B3B,KAAK,CAACqB,IAAI,EAAG;IAC1D;IACA,IAAIhB,MAAM,CAACmB,MAAM,GAAG,CAAC,IAAIxB,KAAK,CAAC0B,cAAc,KAAK,UAAU,EAAE;MAC5D,MAAM,IAAIC,KAAK,sCAA+B3B,KAAK,CAACqB,IAAI,EAAG;IAC7D;;IAGA,IAAIhB,MAAM,CAACmB,MAAM,KAAK,CAAC,EAAE;MACvB,IAAIxB,KAAK,CAAC4B,QAAQ,EAAE;QAClBf,iBAAiB,CAACb,KAAK,CAACc,MAAM,EAAG,IAAI,EAAEF,IAAI,EAAEO,MAAM,EAAEC,MAAM,CAAC;MAC9D,CAAC,MAAM;QACLR,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACK,KAAK,IAAI,CAAC;QAC1BK,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACE,OAAO,CAACa,IAAI,CAACE,MAAM,CAAC;QACpCP,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACC,OAAO,CAACc,IAAI,CAACG,MAAM,CAAC;MACtC;MACA;IACF;;IAGA,KAAK,IAAIS,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGxB,MAAM,CAACmB,MAAM,EAAEK,CAAC,EAAE,EAAE;MACtC,MAAMC,IAAI,GAAGD,CAAC,KAAK,CAAC,GAAGV,MAAM,GAAGnB,KAAK,CAAC+B,SAAS;MAC/C,IAAI/B,KAAK,CAAC4B,QAAQ,EAAE;QAClBf,iBAAiB,CAACb,KAAK,CAACc,MAAM,EAAGT,MAAM,CAACwB,CAAC,CAAC,EAAEjB,IAAI,EAAEkB,IAAI,EAAE9B,KAAK,CAACgC,SAAS,CAAC;MAC1E,CAAC,MAAM;QACLpB,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACK,KAAK,IAAI,CAAC;QAC1BK,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACE,OAAO,CAACa,IAAI,CAACa,IAAI,CAAC;QAClClB,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACC,OAAO,CAACc,IAAI,CAACjB,KAAK,CAACgC,SAAS,CAAC;QAC7CpB,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACG,MAAM,CAACY,IAAI,CACzBrB,KAAK,CAACqC,WAAW,CAAEjC,KAAK,CAACkC,YAAY,IAAIlC,KAAK,CAACmC,aAAa,EAAI9B,MAAM,CAACwB,CAAC,CAAC,CAAC,CAC3E;MACH;IACF;EACF;AACF;;AAqBA,OAAO,SAASO,kBAAkB,CAACtC,MAAqB,EAAEa,MAAqB,EAAmB;EAChG,MAAM0B,OAAwB,GAAG,EAAE;EACn
C,KAAK,IAAIR,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGlB,MAAM,CAACH,QAAQ,EAAEqB,CAAC,EAAE,EAAEQ,OAAO,CAACpB,IAAI,CAAC,CAAC,CAAC,CAAC;EAC1D,KAAK,MAAMf,GAAG,IAAIS,MAAM,CAACZ,UAAU,EAAE;IACnCuC,iBAAiB,CAACxC,MAAM,EAAEa,MAAM,EAAET,GAAG,EAAEmC,OAAO,CAAC;EACjD;EACA,OAAOA,OAAO;AAChB;;AAGA,SAASC,iBAAiB,CACxBxC,MAAqB,EACrBa,MAAqB,EACrBT,GAAW,EACXmC,OAAwB,EACxB;EACA,MAAMzB,IAAI,GAAGD,MAAM,CAACZ,UAAU,CAACG,GAAG,CAAC;EACnC,IAAI,CAACU,IAAI,CAACL,KAAK,EAAE;EAEjB,MAAMP,KAAK,GAAGF,MAAM,CAACyC,SAAS,CAACrC,GAAG,CAAC;EACnC,MAAMsC,MAAM,GAAG1C,MAAM,CAAC2C,eAAe,CAACvC,GAAG,CAAC;;EAG1C,MAAMwC,OAAiB,GAAG,IAAI3B,KAAK,CAACf,KAAK,CAAC+B,SAAS,GAAG,CAAC,CAAC,CAACY,IAAI,CAAC,CAAC,CAAC;EAChE,IAAIC,MAAM,GAAG,CAAC;EACd,KAAK,IAAIf,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGjB,IAAI,CAACL,KAAK,EAAEsB,CAAC,EAAE,EAAE;IACnC,MAAMT,MAAM,GAAGR,IAAI,CAACT,OAAO,CAAC0B,CAAC,CAAC;IAC9B,MAAMV,MAAM,GAAGP,IAAI,CAACR,OAAO,CAACyB,CAAC,CAAC;IAC9Ba,OAAO,CAACvB,MAAM,CAAC,EAAE;IACjBuB,OAAO,CAACC,IAAI,CAAC,CAAC,EAAExB,MAAM,GAAG,CAAC,CAAC;IAE3B,IAAI0B,MAAM,GAAG,CAAC;IACd,IAAInC,MAAM,GAAG2B,OAAO,CAACK,OAAO,CAACG,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC;;IAG3C,KAAK,MAAMC,IAAI,IAAIN,MAAM,EAAE;MACzB,IAAIM,IAAI,KAAK9C,KAAK,EAAE;MACpB,IAAIoB,MAAM,GAAG0B,IAAI,CAACd,SAAS,EAAE;MAC7B,IAAIc,IAAI,CAACpB,cAAc,KAAK,UAAU,EAAE;QACtC,IAAI,EAAEoB,IAAI,CAACzB,IAAI,IAAIX,MAAM,CAAC,EAAE;UAE1BA,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,GAAG,EAAE;QACxB;QACA,MAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAM,EAAE,CAAC;QAC5B,OAAOnC,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,CAACG,MAAM,IAAIuB,EAAE,EAAE;UAErCrC,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,CAACJ,IAAI,CAAC,CAAC,CAAC,CAAC;QAC5B;QACAP,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,CAAC0B,EAAE,CAAC;MAChC,CAAC,MAAM;QACLrC,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,GAAGX,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,IAAI,CAAC,CAAC;QAC3CX,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC;MAC5B;IACF;;IAGA,IAAID,MAAM,KAAKpB,KAAK,CAACgC,SAAS,EAAE;MAC9B,MAAMgB,KAAK,GAAGpD,KAAK,CAACqD,aAAa;MAE/BjD,KAAK,CAACkC,YAAY,IAAIlC,KAAK,CAACmC,aAAa,EACzCvB,IAAI,CAACP,MAAM,CAACuC,MAAM,CAAC,EACnB5C,KAAK,CACN;MACD4C,MAAM,EAAE;MACR,IAAI5C,KAAK,CAAC0B,cAAc,KAAK,UAAU,EAAE;QACvC,IAAI,EAAE1B,KAAK,CAACqB,IAAI,IAAIX,MAAM,CAAC,EAAE;UAE3BA,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,GAAG,EAAE;QACzB;QACA,MAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAM,CAAC;QAC1B,OAAOnC,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,CAACG,MAAM,IAAIuB,EAAE,EAAE;UAEtCrC,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,CAACJ,IAAI,CAAC,IAAI,CAAC;QAC/B;QACAP,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,CAAC0B,EAAE,CAAC,GAAGC,KAAK;MAChC,CAAC,MAAM;QACLtC,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,GAAG2B,KAAK;MAC5B;IACF;EACF;AACF"}
+ {"version":3,"file":"shred.js","names":["ParquetBuffer","Types","shredBuffer","schema","columnData","field","fieldList","key","dlevels","rlevels","values","pageHeaders","count","rowCount","shredRecord","record","buffer","data","shredRecordFields","fields","Array","prototype","push","apply","rLevel","dLevel","name","undefined","constructor","length","Boolean","repetitionType","Error","isNested","i","rlvl","rLevelMax","dLevelMax","toPrimitive","originalType","primitiveType","materializeRecords","records","materializeColumn","findField","branch","findFieldBranch","rLevels","fill","vIndex","rIndex","step","ix","value","fromPrimitive"],"sources":["../../../../src/parquetjs/schema/shred.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {ParquetBuffer, ParquetData, ParquetField, ParquetRecord} from './declare';\nimport {ParquetSchema} from './schema';\nimport * as Types from './types';\n\nexport {ParquetBuffer};\n\nexport function shredBuffer(schema: ParquetSchema): ParquetBuffer {\n const columnData: Record<string, ParquetData> = {};\n for (const field of schema.fieldList) {\n columnData[field.key] = {\n dlevels: [],\n rlevels: [],\n values: [],\n pageHeaders: [],\n count: 0\n };\n }\n return {rowCount: 0, columnData};\n}\n\n/**\n * 'Shred' a record into a list of <value, repetition_level, definition_level>\n * tuples per column using the Google Dremel Algorithm..\n *\n * The buffer argument must point to an object into which the shredded record\n * will be returned. You may re-use the buffer for repeated calls to this function\n * to append to an existing buffer, as long as the schema is unchanged.\n *\n * The format in which the shredded records will be stored in the buffer is as\n * follows:\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function shredRecord(schema: ParquetSchema, record: any, buffer: ParquetBuffer): void {\n /* shred the record, this may raise an exception */\n const data = shredBuffer(schema).columnData;\n\n shredRecordFields(schema.fields, record, data, 0, 0);\n\n /* if no error during shredding, add the shredded record to the buffer */\n if (buffer.rowCount === 0) {\n buffer.rowCount = 1;\n buffer.columnData = data;\n return;\n }\n buffer.rowCount += 1;\n for (const field of schema.fieldList) {\n Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);\n buffer.columnData[field.key].count += data[field.key].count;\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction shredRecordFields(\n fields: Record<string, ParquetField>,\n record: any,\n data: Record<string, ParquetData>,\n rLevel: number,\n dLevel: number\n) {\n for (const name in fields) {\n const field = fields[name];\n\n // fetch values\n let values: any[] = [];\n if (\n record &&\n field.name in record &&\n record[field.name] !== undefined &&\n record[field.name] !== null\n ) {\n if (record[field.name].constructor === Array) {\n values = record[field.name];\n } else {\n values.push(record[field.name]);\n }\n }\n // check values\n if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {\n throw new Error(`missing required field: ${field.name}`);\n }\n if (values.length > 1 && field.repetitionType !== 'REPEATED') {\n throw new Error(`too many values for field: ${field.name}`);\n }\n\n // push null\n if (values.length === 0) {\n if (field.isNested) {\n shredRecordFields(field.fields!, null, data, rLevel, dLevel);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rLevel);\n data[field.key].dlevels.push(dLevel);\n }\n continue; // eslint-disable-line no-continue\n }\n\n // push values\n for (let i = 0; i < values.length; i++) {\n const rlvl = i === 0 ? rLevel : field.rLevelMax;\n if (field.isNested) {\n shredRecordFields(field.fields!, values[i], data, rlvl, field.dLevelMax);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rlvl);\n data[field.key].dlevels.push(field.dLevelMax);\n data[field.key].values.push(\n Types.toPrimitive((field.originalType || field.primitiveType)!, values[i])\n );\n }\n }\n }\n}\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {\n const records: ParquetRecord[] = [];\n for (let i = 0; i < buffer.rowCount; i++) {\n records.push({});\n }\n for (const key in buffer.columnData) {\n const columnData = buffer.columnData[key];\n if (columnData.count) {\n materializeColumn(schema, columnData, key, records);\n }\n }\n return records;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction materializeColumn(\n schema: ParquetSchema,\n columnData: ParquetData,\n key: string,\n records: ParquetRecord[]\n): void {\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n for (let i = 0; i < columnData.count; i++) {\n const dLevel = columnData.dlevels[i];\n const rLevel = columnData.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes - Build a nested row object\n for (const step of branch) {\n if (step === field || dLevel < step.dLevelMax) {\n break;\n }\n\n switch (step.repetitionType) {\n case 'REPEATED':\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n break;\n\n default:\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node - Add the value\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n columnData.values[vIndex],\n field\n );\n vIndex++;\n\n switch (field.repetitionType) {\n case 'REPEATED':\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n break;\n\n default:\n record[field.name] = value;\n }\n }\n }\n}\n\n// Columnar export\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. 
vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n *\nexport function extractColumns(schema: ParquetSchema, buffer: ParquetBuffer): Record<string, unknown> {\n const columns: ParquetRecord = {};\n for (const key in buffer.columnData) {\n const columnData = buffer.columnData[key];\n if (columnData.count) {\n extractColumn(schema, columnData, key, columns);\n }\n }\n return columns;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction extractColumn(\n schema: ParquetSchema,\n columnData: ParquetData,\n key: string,\n columns: Record<string, unknown> \n) {\n if (columnData.count <= 0) {\n return;\n }\n\n const record = columns;\n\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n\n let i = 0;\n const dLevel = columnData.dlevels[i];\n const rLevel = columnData.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes\n for (const step of branch) {\n if (step === field || dLevel < step.dLevelMax) {\n break;\n }\n\n switch (step.repetitionType) {\n case 'REPEATED':\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n break;\n\n default:\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n columnData.values[vIndex],\n field\n );\n vIndex++;\n\n switch (field.repetitionType) {\n case 'REPEATED':\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n break;\n\n default:\n record[field.name] = value;\n }\n 
}\n}\n*/\n"],"mappings":";;AAEA,SAAQA,aAAa,QAAiD,WAAW;AAEjF,OAAO,KAAKC,KAAK,MAAM,SAAS;AAEhC,SAAQD,aAAa;AAErB,OAAO,SAASE,WAAW,CAACC,MAAqB,EAAiB;EAChE,MAAMC,UAAuC,GAAG,CAAC,CAAC;EAClD,KAAK,MAAMC,KAAK,IAAIF,MAAM,CAACG,SAAS,EAAE;IACpCF,UAAU,CAACC,KAAK,CAACE,GAAG,CAAC,GAAG;MACtBC,OAAO,EAAE,EAAE;MACXC,OAAO,EAAE,EAAE;MACXC,MAAM,EAAE,EAAE;MACVC,WAAW,EAAE,EAAE;MACfC,KAAK,EAAE;IACT,CAAC;EACH;EACA,OAAO;IAACC,QAAQ,EAAE,CAAC;IAAET;EAAU,CAAC;AAClC;;AAwBA,OAAO,SAASU,WAAW,CAACX,MAAqB,EAAEY,MAAW,EAAEC,MAAqB,EAAQ;EAE3F,MAAMC,IAAI,GAAGf,WAAW,CAACC,MAAM,CAAC,CAACC,UAAU;EAE3Cc,iBAAiB,CAACf,MAAM,CAACgB,MAAM,EAAEJ,MAAM,EAAEE,IAAI,EAAE,CAAC,EAAE,CAAC,CAAC;;EAGpD,IAAID,MAAM,CAACH,QAAQ,KAAK,CAAC,EAAE;IACzBG,MAAM,CAACH,QAAQ,GAAG,CAAC;IACnBG,MAAM,CAACZ,UAAU,GAAGa,IAAI;IACxB;EACF;EACAD,MAAM,CAACH,QAAQ,IAAI,CAAC;EACpB,KAAK,MAAMR,KAAK,IAAIF,MAAM,CAACG,SAAS,EAAE;IACpCc,KAAK,CAACC,SAAS,CAACC,IAAI,CAACC,KAAK,CAACP,MAAM,CAACZ,UAAU,CAACC,KAAK,CAACE,GAAG,CAAC,CAACE,OAAO,EAAEQ,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACE,OAAO,CAAC;IACzFW,KAAK,CAACC,SAAS,CAACC,IAAI,CAACC,KAAK,CAACP,MAAM,CAACZ,UAAU,CAACC,KAAK,CAACE,GAAG,CAAC,CAACC,OAAO,EAAES,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACC,OAAO,CAAC;IACzFY,KAAK,CAACC,SAAS,CAACC,IAAI,CAACC,KAAK,CAACP,MAAM,CAACZ,UAAU,CAACC,KAAK,CAACE,GAAG,CAAC,CAACG,MAAM,EAAEO,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACG,MAAM,CAAC;IACvFM,MAAM,CAACZ,UAAU,CAACC,KAAK,CAACE,GAAG,CAAC,CAACK,KAAK,IAAIK,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACK,KAAK;EAC7D;AACF;;AAGA,SAASM,iBAAiB,CACxBC,MAAoC,EACpCJ,MAAW,EACXE,IAAiC,EACjCO,MAAc,EACdC,MAAc,EACd;EACA,KAAK,MAAMC,IAAI,IAAIP,MAAM,EAAE;IACzB,MAAMd,KAAK,GAAGc,MAAM,CAACO,IAAI,CAAC;;IAG1B,IAAIhB,MAAa,GAAG,EAAE;IACtB,IACEK,MAAM,IACNV,KAAK,CAACqB,IAAI,IAAIX,MAAM,IACpBA,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,KAAKC,SAAS,IAChCZ,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,KAAK,IAAI,EAC3B;MACA,IAAIX,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,CAACE,WAAW,KAAKR,KAAK,EAAE;QAC5CV,MAAM,GAAGK,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC;MAC7B,CAAC,MAAM;QACLhB,MAAM,CAACY,IAAI,CAACP,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,CAAC;MACjC;IACF;IAEA,IAAIhB,MAAM,CAACmB,MAAM,KAAK,CAAC,IAAIC,OAAO,CAACf,MAAM,CAAC,IAAIV,KAAK,CAAC0B,cAAc,KAAK,UAAU,EAAE;MACjF,MAAM,IAAIC,KAAK,mCAA4B3B,KAAK,CAACqB,IAAI,EAAG;IAC1D;IACA,IAAIhB,MAAM,CAACmB,MAAM,GAAG,CAAC,IAAIxB,KAAK,CAAC0B,cAAc,KAAK,UAAU,EAAE;MAC5D,MAAM,IAAIC,KAAK,sCAA+B3B,KAAK,CAACqB,IAAI,EAAG;IAC7D;;IAGA,IAAIhB,MAAM,CAACmB,MAAM,KAAK,CAAC,EAAE;MACvB,IAAIxB,KAAK,CAAC4B,QAAQ,EAAE;QAClBf,iBAAiB,CAACb,KAAK,CAACc,MAAM,EAAG,IAAI,EAAEF,IAAI,EAAEO,MAAM,EAAEC,MAAM,CAAC;MAC9D,CAAC,MAAM;QACLR,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACK,KAAK,IAAI,CAAC;QAC1BK,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACE,OAAO,CAACa,IAAI,CAACE,MAAM,CAAC;QACpCP,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACC,OAAO,CAACc,IAAI,CAACG,MAAM,CAAC;MACtC;MACA;IACF;;IAGA,KAAK,IAAIS,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGxB,MAAM,CAACmB,MAAM,EAAEK,CAAC,EAAE,EAAE;MACtC,MAAMC,IAAI,GAAGD,CAAC,KAAK,CAAC,GAAGV,MAAM,GAAGnB,KAAK,CAAC+B,SAAS;MAC/C,IAAI/B,KAAK,CAAC4B,QAAQ,EAAE;QAClBf,iBAAiB,CAACb,KAAK,CAACc,MAAM,EAAGT,MAAM,CAACwB,CAAC,CAAC,EAAEjB,IAAI,EAAEkB,IAAI,EAAE9B,KAAK,CAACgC,SAAS,CAAC;MAC1E,CAAC,MAAM;QACLpB,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACK,KAAK,IAAI,CAAC;QAC1BK,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACE,OAAO,CAACa,IAAI,CAACa,IAAI,CAAC;QAClClB,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACC,OAAO,CAACc,IAAI,CAACjB,KAAK,CAACgC,SAAS,CAAC;QAC7CpB,IAAI,CAACZ,KAAK,CAACE,GAAG,CAAC,CAACG,MAAM,CAACY,IAAI,CACzBrB,KAAK,CAACqC,WAAW,CAAEjC,KAAK,CAACkC,YAAY,IAAIlC,KAAK,CAACmC,aAAa,EAAI9B,MAAM,CAACwB,CAAC,CAAC,CAAC,CAC3E;MACH;IACF;EACF;AACF;;AAqBA,OAAO,SAASO,kBAAkB,CAACtC,MAAqB,EAAEa,MAAqB,EAAmB;EAChG,MAAM0B,OAAwB,GAAG,EAAE;
EACnC,KAAK,IAAIR,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGlB,MAAM,CAACH,QAAQ,EAAEqB,CAAC,EAAE,EAAE;IACxCQ,OAAO,CAACpB,IAAI,CAAC,CAAC,CAAC,CAAC;EAClB;EACA,KAAK,MAAMf,GAAG,IAAIS,MAAM,CAACZ,UAAU,EAAE;IACnC,MAAMA,UAAU,GAAGY,MAAM,CAACZ,UAAU,CAACG,GAAG,CAAC;IACzC,IAAIH,UAAU,CAACQ,KAAK,EAAE;MACpB+B,iBAAiB,CAACxC,MAAM,EAAEC,UAAU,EAAEG,GAAG,EAAEmC,OAAO,CAAC;IACrD;EACF;EACA,OAAOA,OAAO;AAChB;;AAGA,SAASC,iBAAiB,CACxBxC,MAAqB,EACrBC,UAAuB,EACvBG,GAAW,EACXmC,OAAwB,EAClB;EACN,MAAMrC,KAAK,GAAGF,MAAM,CAACyC,SAAS,CAACrC,GAAG,CAAC;EACnC,MAAMsC,MAAM,GAAG1C,MAAM,CAAC2C,eAAe,CAACvC,GAAG,CAAC;;EAG1C,MAAMwC,OAAiB,GAAG,IAAI3B,KAAK,CAACf,KAAK,CAAC+B,SAAS,GAAG,CAAC,CAAC,CAACY,IAAI,CAAC,CAAC,CAAC;EAChE,IAAIC,MAAM,GAAG,CAAC;EACd,KAAK,IAAIf,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG9B,UAAU,CAACQ,KAAK,EAAEsB,CAAC,EAAE,EAAE;IACzC,MAAMT,MAAM,GAAGrB,UAAU,CAACI,OAAO,CAAC0B,CAAC,CAAC;IACpC,MAAMV,MAAM,GAAGpB,UAAU,CAACK,OAAO,CAACyB,CAAC,CAAC;IACpCa,OAAO,CAACvB,MAAM,CAAC,EAAE;IACjBuB,OAAO,CAACC,IAAI,CAAC,CAAC,EAAExB,MAAM,GAAG,CAAC,CAAC;IAE3B,IAAI0B,MAAM,GAAG,CAAC;IACd,IAAInC,MAAM,GAAG2B,OAAO,CAACK,OAAO,CAACG,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC;;IAG3C,KAAK,MAAMC,IAAI,IAAIN,MAAM,EAAE;MACzB,IAAIM,IAAI,KAAK9C,KAAK,IAAIoB,MAAM,GAAG0B,IAAI,CAACd,SAAS,EAAE;QAC7C;MACF;MAEA,QAAQc,IAAI,CAACpB,cAAc;QACzB,KAAK,UAAU;UACb,IAAI,EAAEoB,IAAI,CAACzB,IAAI,IAAIX,MAAM,CAAC,EAAE;YAE1BA,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,GAAG,EAAE;UACxB;UACA,MAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAM,EAAE,CAAC;UAC5B,OAAOnC,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,CAACG,MAAM,IAAIuB,EAAE,EAAE;YAErCrC,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,CAACJ,IAAI,CAAC,CAAC,CAAC,CAAC;UAC5B;UACAP,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,CAAC0B,EAAE,CAAC;UAC9B;QAEF;UACErC,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,GAAGX,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC,IAAI,CAAC,CAAC;UAC3CX,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAI,CAAC;MAAC;IAEjC;;IAGA,IAAID,MAAM,KAAKpB,KAAK,CAACgC,SAAS,EAAE;MAC9B,MAAMgB,KAAK,GAAGpD,KAAK,CAACqD,aAAa;MAE/BjD,KAAK,CAACkC,YAAY,IAAIlC,KAAK,CAACmC,aAAa,EACzCpC,UAAU,CAACM,MAAM,CAACuC,MAAM,CAAC,EACzB5C,KAAK,CACN;MACD4C,MAAM,EAAE;MAER,QAAQ5C,KAAK,CAAC0B,cAAc;QAC1B,KAAK,UAAU;UACb,IAAI,EAAE1B,KAAK,CAACqB,IAAI,IAAIX,MAAM,CAAC,EAAE;YAE3BA,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,GAAG,EAAE;UACzB;UACA,MAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAM,CAAC;UAC1B,OAAOnC,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,CAACG,MAAM,IAAIuB,EAAE,EAAE;YAEtCrC,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,CAACJ,IAAI,CAAC,IAAI,CAAC;UAC/B;UACAP,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,CAAC0B,EAAE,CAAC,GAAGC,KAAK;UAC9B;QAEF;UACEtC,MAAM,CAACV,KAAK,CAACqB,IAAI,CAAC,GAAG2B,KAAK;MAAC;IAEjC;EACF;AACF"}
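The shred.js source map above embeds the reworked shred.ts, which "shreds" nested records into per-column (value, repetition level, definition level) triples and materializes them back using the Dremel algorithm. Below is a minimal, self-contained sketch of the per-column shredding step for one top-level repeated leaf field; the names are illustrative only and are not the package's exported API.

```ts
// Minimal sketch of the Dremel shredding step that shred.js performs per column.
// Illustrative code only: a single top-level REPEATED leaf (rLevelMax = 1, dLevelMax = 1).

type ColumnData = {dlevels: number[]; rlevels: number[]; values: unknown[]; count: number};

function shredRepeatedLeaf(records: Array<{tag?: string[]}>): ColumnData {
  const column: ColumnData = {dlevels: [], rlevels: [], values: [], count: 0};
  for (const record of records) {
    const values = record.tag ?? [];
    if (values.length === 0) {
      // Missing field: only the levels are recorded, no value entry is stored.
      column.dlevels.push(0);
      column.rlevels.push(0);
      column.count += 1;
      continue;
    }
    values.forEach((value, i) => {
      // The first value of a record starts a new row (rlevel 0);
      // subsequent values repeat at the column's max repetition level.
      column.dlevels.push(1);
      column.rlevels.push(i === 0 ? 0 : 1);
      column.values.push(value);
      column.count += 1;
    });
  }
  return column;
}

// Example: three records -> values ['a','b','c'], dlevels [1,1,1,0], rlevels [0,1,0,0];
// the trailing 0/0 pair marks the record with no tags (no value is stored for it).
console.log(shredRepeatedLeaf([{tag: ['a', 'b']}, {tag: ['c']}, {}]));
```

That trailing level-only entry is exactly the bookkeeping the embedded shredRecord and materializeRecords perform for each column when a value is absent.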
@@ -1 +1 @@
- {"version":3,"file":"types.js","names":["BSON","PARQUET_LOGICAL_TYPES","BOOLEAN","primitiveType","toPrimitive","toPrimitive_BOOLEAN","fromPrimitive","fromPrimitive_BOOLEAN","INT32","toPrimitive_INT32","INT64","toPrimitive_INT64","INT96","toPrimitive_INT96","FLOAT","toPrimitive_FLOAT","DOUBLE","toPrimitive_DOUBLE","BYTE_ARRAY","toPrimitive_BYTE_ARRAY","FIXED_LEN_BYTE_ARRAY","UTF8","originalType","toPrimitive_UTF8","fromPrimitive_UTF8","TIME_MILLIS","toPrimitive_TIME_MILLIS","TIME_MICROS","toPrimitive_TIME_MICROS","DATE","toPrimitive_DATE","fromPrimitive_DATE","TIMESTAMP_MILLIS","toPrimitive_TIMESTAMP_MILLIS","fromPrimitive_TIMESTAMP_MILLIS","TIMESTAMP_MICROS","toPrimitive_TIMESTAMP_MICROS","fromPrimitive_TIMESTAMP_MICROS","UINT_8","toPrimitive_UINT8","UINT_16","toPrimitive_UINT16","UINT_32","toPrimitive_UINT32","UINT_64","toPrimitive_UINT64","INT_8","toPrimitive_INT8","INT_16","toPrimitive_INT16","INT_32","INT_64","JSON","toPrimitive_JSON","fromPrimitive_JSON","toPrimitive_BSON","fromPrimitive_BSON","INTERVAL","typeLength","toPrimitive_INTERVAL","fromPrimitive_INTERVAL","DECIMAL_INT32","decimalToPrimitive_INT32","decimalFromPrimitive_INT","DECIMAL_INT64","decimalToPrimitive_INT64","DECIMAL_BYTE_ARRAY","decimalToPrimitive_BYTE_ARRAY","decimalFromPrimitive_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","type","value","field","Error","Boolean","v","parseFloat","isNaN","parseInt","primitiveValue","scale","Math","round","presision","Buffer","from","toString","stringify","parse","serialize","deserialize","kMillisPerDay","Date","getTime","months","days","milliseconds","buf","alloc","writeUInt32LE","readUInt32LE","millis","presisionInt","number","length","i","component"],"sources":["../../../../src/parquetjs/schema/types.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\nimport BSON from 'bson';\nimport {OriginalType, ParquetField, ParquetType, PrimitiveType} from './declare';\n\nexport interface ParquetTypeKit {\n primitiveType: PrimitiveType;\n originalType?: OriginalType;\n typeLength?: number;\n toPrimitive: Function;\n fromPrimitive?: Function;\n}\n\nexport const PARQUET_LOGICAL_TYPES: Record<ParquetType, ParquetTypeKit> = {\n BOOLEAN: {\n primitiveType: 'BOOLEAN',\n toPrimitive: toPrimitive_BOOLEAN,\n fromPrimitive: fromPrimitive_BOOLEAN\n },\n INT32: {\n primitiveType: 'INT32',\n toPrimitive: toPrimitive_INT32\n },\n INT64: {\n primitiveType: 'INT64',\n toPrimitive: toPrimitive_INT64\n },\n INT96: {\n primitiveType: 'INT96',\n toPrimitive: toPrimitive_INT96\n },\n FLOAT: {\n primitiveType: 'FLOAT',\n toPrimitive: toPrimitive_FLOAT\n },\n DOUBLE: {\n primitiveType: 'DOUBLE',\n toPrimitive: toPrimitive_DOUBLE\n },\n BYTE_ARRAY: {\n primitiveType: 'BYTE_ARRAY',\n toPrimitive: toPrimitive_BYTE_ARRAY\n },\n FIXED_LEN_BYTE_ARRAY: {\n primitiveType: 'FIXED_LEN_BYTE_ARRAY',\n toPrimitive: toPrimitive_BYTE_ARRAY\n },\n UTF8: {\n primitiveType: 'BYTE_ARRAY',\n originalType: 'UTF8',\n toPrimitive: toPrimitive_UTF8,\n fromPrimitive: fromPrimitive_UTF8\n },\n TIME_MILLIS: {\n primitiveType: 'INT32',\n originalType: 'TIME_MILLIS',\n toPrimitive: toPrimitive_TIME_MILLIS\n },\n TIME_MICROS: {\n primitiveType: 'INT64',\n originalType: 'TIME_MICROS',\n toPrimitive: toPrimitive_TIME_MICROS\n },\n DATE: {\n primitiveType: 'INT32',\n originalType: 'DATE',\n toPrimitive: toPrimitive_DATE,\n fromPrimitive: fromPrimitive_DATE\n },\n TIMESTAMP_MILLIS: {\n primitiveType: 'INT64',\n originalType: 
'TIMESTAMP_MILLIS',\n toPrimitive: toPrimitive_TIMESTAMP_MILLIS,\n fromPrimitive: fromPrimitive_TIMESTAMP_MILLIS\n },\n TIMESTAMP_MICROS: {\n primitiveType: 'INT64',\n originalType: 'TIMESTAMP_MICROS',\n toPrimitive: toPrimitive_TIMESTAMP_MICROS,\n fromPrimitive: fromPrimitive_TIMESTAMP_MICROS\n },\n UINT_8: {\n primitiveType: 'INT32',\n originalType: 'UINT_8',\n toPrimitive: toPrimitive_UINT8\n },\n UINT_16: {\n primitiveType: 'INT32',\n originalType: 'UINT_16',\n toPrimitive: toPrimitive_UINT16\n },\n UINT_32: {\n primitiveType: 'INT32',\n originalType: 'UINT_32',\n toPrimitive: toPrimitive_UINT32\n },\n UINT_64: {\n primitiveType: 'INT64',\n originalType: 'UINT_64',\n toPrimitive: toPrimitive_UINT64\n },\n INT_8: {\n primitiveType: 'INT32',\n originalType: 'INT_8',\n toPrimitive: toPrimitive_INT8\n },\n INT_16: {\n primitiveType: 'INT32',\n originalType: 'INT_16',\n toPrimitive: toPrimitive_INT16\n },\n INT_32: {\n primitiveType: 'INT32',\n originalType: 'INT_32',\n toPrimitive: toPrimitive_INT32\n },\n INT_64: {\n primitiveType: 'INT64',\n originalType: 'INT_64',\n toPrimitive: toPrimitive_INT64\n },\n JSON: {\n primitiveType: 'BYTE_ARRAY',\n originalType: 'JSON',\n toPrimitive: toPrimitive_JSON,\n fromPrimitive: fromPrimitive_JSON\n },\n BSON: {\n primitiveType: 'BYTE_ARRAY',\n originalType: 'BSON',\n toPrimitive: toPrimitive_BSON,\n fromPrimitive: fromPrimitive_BSON\n },\n INTERVAL: {\n primitiveType: 'FIXED_LEN_BYTE_ARRAY',\n originalType: 'INTERVAL',\n typeLength: 12,\n toPrimitive: toPrimitive_INTERVAL,\n fromPrimitive: fromPrimitive_INTERVAL\n },\n DECIMAL_INT32: {\n primitiveType: 'INT32',\n originalType: 'DECIMAL_INT32',\n toPrimitive: decimalToPrimitive_INT32,\n fromPrimitive: decimalFromPrimitive_INT\n },\n DECIMAL_INT64: {\n primitiveType: 'INT64',\n originalType: 'DECIMAL_INT64',\n toPrimitive: decimalToPrimitive_INT64,\n fromPrimitive: decimalFromPrimitive_INT\n },\n DECIMAL_BYTE_ARRAY: {\n primitiveType: 'BYTE_ARRAY',\n originalType: 'DECIMAL_BYTE_ARRAY',\n toPrimitive: decimalToPrimitive_BYTE_ARRAY,\n fromPrimitive: decimalFromPrimitive_BYTE_ARRAY\n },\n DECIMAL_FIXED_LEN_BYTE_ARRAY: {\n primitiveType: 'FIXED_LEN_BYTE_ARRAY',\n originalType: 'DECIMAL_FIXED_LEN_BYTE_ARRAY',\n toPrimitive: decimalToPrimitive_BYTE_ARRAY,\n fromPrimitive: decimalFromPrimitive_BYTE_ARRAY\n }\n};\n\n/**\n * Convert a value from it's native representation to the internal/underlying\n * primitive type\n */\nexport function toPrimitive(type: ParquetType, value: any, field?: ParquetField) {\n if (!(type in PARQUET_LOGICAL_TYPES)) {\n throw new Error(`invalid type: ${type}`);\n }\n\n return PARQUET_LOGICAL_TYPES[type].toPrimitive(value, field);\n}\n\n/**\n * Convert a value from it's internal/underlying primitive representation to\n * the native representation\n */\nexport function fromPrimitive(type: ParquetType, value: any, field?: ParquetField) {\n if (!(type in PARQUET_LOGICAL_TYPES)) {\n throw new Error(`invalid type: ${type}`);\n }\n\n if ('fromPrimitive' in PARQUET_LOGICAL_TYPES[type]) {\n return PARQUET_LOGICAL_TYPES[type].fromPrimitive?.(value, field);\n // tslint:disable-next-line:no-else-after-return\n }\n return value;\n}\n\nfunction toPrimitive_BOOLEAN(value: any) {\n return Boolean(value);\n}\n\nfunction fromPrimitive_BOOLEAN(value: any) {\n return Boolean(value);\n}\n\nfunction toPrimitive_FLOAT(value: any) {\n const v = parseFloat(value);\n if (isNaN(v)) {\n throw new Error(`invalid value for FLOAT: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_DOUBLE(value: any) {\n 
const v = parseFloat(value);\n if (isNaN(v)) {\n throw new Error(`invalid value for DOUBLE: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_INT8(value: any) {\n const v = parseInt(value, 10);\n if (v < -0x80 || v > 0x7f || isNaN(v)) {\n throw new Error(`invalid value for INT8: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_UINT8(value: any) {\n const v = parseInt(value, 10);\n if (v < 0 || v > 0xff || isNaN(v)) {\n throw new Error(`invalid value for UINT8: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_INT16(value: any) {\n const v = parseInt(value, 10);\n if (v < -0x8000 || v > 0x7fff || isNaN(v)) {\n throw new Error(`invalid value for INT16: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_UINT16(value: any) {\n const v = parseInt(value, 10);\n if (v < 0 || v > 0xffff || isNaN(v)) {\n throw new Error(`invalid value for UINT16: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_INT32(value: any) {\n const v = parseInt(value, 10);\n if (v < -0x80000000 || v > 0x7fffffff || isNaN(v)) {\n throw new Error(`invalid value for INT32: ${value}`);\n }\n\n return v;\n}\n\nfunction decimalToPrimitive_INT32(value: number, field: ParquetField) {\n const primitiveValue = value * 10 ** (field.scale || 0);\n const v = Math.round(((primitiveValue * 10 ** -field.presision!) % 1) * 10 ** field.presision!);\n if (v < -0x80000000 || v > 0x7fffffff || isNaN(v)) {\n throw new Error(`invalid value for INT32: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_UINT32(value: any) {\n const v = parseInt(value, 10);\n if (v < 0 || v > 0xffffffffffff || isNaN(v)) {\n throw new Error(`invalid value for UINT32: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_INT64(value: any) {\n const v = parseInt(value, 10);\n if (isNaN(v)) {\n throw new Error(`invalid value for INT64: ${value}`);\n }\n\n return v;\n}\n\nfunction decimalToPrimitive_INT64(value: number, field: ParquetField) {\n const primitiveValue = value * 10 ** (field.scale || 0);\n const v = Math.round(((primitiveValue * 10 ** -field.presision!) 
% 1) * 10 ** field.presision!);\n if (isNaN(v)) {\n throw new Error(`invalid value for INT64: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_UINT64(value: any) {\n const v = parseInt(value, 10);\n if (v < 0 || isNaN(v)) {\n throw new Error(`invalid value for UINT64: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_INT96(value: any) {\n const v = parseInt(value, 10);\n if (isNaN(v)) {\n throw new Error(`invalid value for INT96: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_BYTE_ARRAY(value: any) {\n return Buffer.from(value);\n}\n\nfunction decimalToPrimitive_BYTE_ARRAY(value: any) {\n // TBD\n return Buffer.from(value);\n}\n\nfunction toPrimitive_UTF8(value: any) {\n return Buffer.from(value, 'utf8');\n}\n\nfunction fromPrimitive_UTF8(value: any) {\n return value.toString();\n}\n\nfunction toPrimitive_JSON(value: any) {\n return Buffer.from(JSON.stringify(value));\n}\n\nfunction fromPrimitive_JSON(value: any) {\n return JSON.parse(value);\n}\n\nfunction toPrimitive_BSON(value: any) {\n return Buffer.from(BSON.serialize(value));\n}\n\nfunction fromPrimitive_BSON(value: any) {\n return BSON.deserialize(value);\n}\n\nfunction toPrimitive_TIME_MILLIS(value: any) {\n const v = parseInt(value, 10);\n if (v < 0 || v > 0xffffffffffffffff || isNaN(v)) {\n throw new Error(`invalid value for TIME_MILLIS: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_TIME_MICROS(value: any) {\n const v = parseInt(value, 10);\n if (v < 0 || isNaN(v)) {\n throw new Error(`invalid value for TIME_MICROS: ${value}`);\n }\n\n return v;\n}\n\nconst kMillisPerDay = 86400000;\n\nfunction toPrimitive_DATE(value: any) {\n /* convert from date */\n if (value instanceof Date) {\n return value.getTime() / kMillisPerDay;\n }\n\n /* convert from integer */\n {\n const v = parseInt(value, 10);\n if (v < 0 || isNaN(v)) {\n throw new Error(`invalid value for DATE: ${value}`);\n }\n\n return v;\n }\n}\n\nfunction fromPrimitive_DATE(value: any) {\n return new Date(value * kMillisPerDay);\n}\n\nfunction toPrimitive_TIMESTAMP_MILLIS(value: any) {\n /* convert from date */\n if (value instanceof Date) {\n return value.getTime();\n }\n\n /* convert from integer */\n {\n const v = parseInt(value, 10);\n if (v < 0 || isNaN(v)) {\n throw new Error(`invalid value for TIMESTAMP_MILLIS: ${value}`);\n }\n\n return v;\n }\n}\n\nfunction fromPrimitive_TIMESTAMP_MILLIS(value: any) {\n return new Date(value);\n}\n\nfunction toPrimitive_TIMESTAMP_MICROS(value: any) {\n /* convert from date */\n if (value instanceof Date) {\n return value.getTime() * 1000;\n }\n\n /* convert from integer */\n {\n const v = parseInt(value, 10);\n if (v < 0 || isNaN(v)) {\n throw new Error(`invalid value for TIMESTAMP_MICROS: ${value}`);\n }\n\n return v;\n }\n}\n\nfunction fromPrimitive_TIMESTAMP_MICROS(value: any) {\n return new Date(value / 1000);\n}\n\nfunction toPrimitive_INTERVAL(value: any) {\n if (!value.months || !value.days || !value.milliseconds) {\n throw new Error(\n 'value for INTERVAL must be object { months: ..., days: ..., milliseconds: ... 
}'\n );\n }\n\n const buf = Buffer.alloc(12);\n\n buf.writeUInt32LE(value.months, 0);\n buf.writeUInt32LE(value.days, 4);\n buf.writeUInt32LE(value.milliseconds, 8);\n return buf;\n}\n\nfunction fromPrimitive_INTERVAL(value: any) {\n const buf = Buffer.from(value);\n const months = buf.readUInt32LE(0);\n const days = buf.readUInt32LE(4);\n const millis = buf.readUInt32LE(8);\n\n return {months, days, milliseconds: millis};\n}\n\nfunction decimalFromPrimitive_INT(value: any, field: ParquetField) {\n const presisionInt = Math.round(((value * 10 ** -field.presision!) % 1) * 10 ** field.presision!);\n return presisionInt * 10 ** -(field.scale || 0);\n}\n\nfunction decimalFromPrimitive_BYTE_ARRAY(value: any, field: ParquetField) {\n let number = 0;\n if (value.length <= 4) {\n // Bytewise operators faster. Use them if it is possible\n for (let i = 0; i < value.length; i++) {\n // `value.length - i - 1` bytes have reverse order (big-endian)\n const component = value[i] << (8 * (value.length - i - 1));\n number += component;\n }\n } else {\n for (let i = 0; i < value.length; i++) {\n // `value.length - i - 1` bytes have reverse order (big-endian)\n const component = value[i] * 2 ** (8 * (value.length - 1 - i));\n number += component;\n }\n }\n\n const presisionInt = Math.round(\n ((number * 10 ** -field.presision!) % 1) * 10 ** field.presision!\n );\n return presisionInt * 10 ** -(field.scale || 0);\n}\n"],"mappings":";AAEA,OAAOA,IAAI,MAAM,MAAM;AAWvB,OAAO,MAAMC,qBAA0D,GAAG;EACxEC,OAAO,EAAE;IACPC,aAAa,EAAE,SAAS;IACxBC,WAAW,EAAEC,mBAAmB;IAChCC,aAAa,EAAEC;EACjB,CAAC;EACDC,KAAK,EAAE;IACLL,aAAa,EAAE,OAAO;IACtBC,WAAW,EAAEK;EACf,CAAC;EACDC,KAAK,EAAE;IACLP,aAAa,EAAE,OAAO;IACtBC,WAAW,EAAEO;EACf,CAAC;EACDC,KAAK,EAAE;IACLT,aAAa,EAAE,OAAO;IACtBC,WAAW,EAAES;EACf,CAAC;EACDC,KAAK,EAAE;IACLX,aAAa,EAAE,OAAO;IACtBC,WAAW,EAAEW;EACf,CAAC;EACDC,MAAM,EAAE;IACNb,aAAa,EAAE,QAAQ;IACvBC,WAAW,EAAEa;EACf,CAAC;EACDC,UAAU,EAAE;IACVf,aAAa,EAAE,YAAY;IAC3BC,WAAW,EAAEe;EACf,CAAC;EACDC,oBAAoB,EAAE;IACpBjB,aAAa,EAAE,sBAAsB;IACrCC,WAAW,EAAEe;EACf,CAAC;EACDE,IAAI,EAAE;IACJlB,aAAa,EAAE,YAAY;IAC3BmB,YAAY,EAAE,MAAM;IACpBlB,WAAW,EAAEmB,gBAAgB;IAC7BjB,aAAa,EAAEkB;EACjB,CAAC;EACDC,WAAW,EAAE;IACXtB,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,aAAa;IAC3BlB,WAAW,EAAEsB;EACf,CAAC;EACDC,WAAW,EAAE;IACXxB,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,aAAa;IAC3BlB,WAAW,EAAEwB;EACf,CAAC;EACDC,IAAI,EAAE;IACJ1B,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,MAAM;IACpBlB,WAAW,EAAE0B,gBAAgB;IAC7BxB,aAAa,EAAEyB;EACjB,CAAC;EACDC,gBAAgB,EAAE;IAChB7B,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,kBAAkB;IAChClB,WAAW,EAAE6B,4BAA4B;IACzC3B,aAAa,EAAE4B;EACjB,CAAC;EACDC,gBAAgB,EAAE;IAChBhC,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,kBAAkB;IAChClB,WAAW,EAAEgC,4BAA4B;IACzC9B,aAAa,EAAE+B;EACjB,CAAC;EACDC,MAAM,EAAE;IACNnC,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,QAAQ;IACtBlB,WAAW,EAAEmC;EACf,CAAC;EACDC,OAAO,EAAE;IACPrC,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,SAAS;IACvBlB,WAAW,EAAEqC;EACf,CAAC;EACDC,OAAO,EAAE;IACPvC,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,SAAS;IACvBlB,WAAW,EAAEuC;EACf,CAAC;EACDC,OAAO,EAAE;IACPzC,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,SAAS;IACvBlB,WAAW,EAAEyC;EACf,CAAC;EACDC,KAAK,EAAE;IACL3C,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,OAAO;IACrBlB,WAAW,EAAE2C;EACf,CAAC;EACDC,MAAM,EAAE;IACN7C,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,QAAQ;IACtBlB,WAAW,EAAE6C;EACf,CAAC;EACDC,MAAM,EAAE;IACN/C,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,QAAQ;IACtBlB,WAAW,EAAEK;EACf,CAAC;EACD0C,MAAM,EAAE;IACNhD,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,QAAQ;IACtBlB,WAAW,EAAEO;EACf,CAAC;EACDyC,IAAI,EAAE;IACJjD,aAAa,EAAE,YAAY;IAC3BmB,YAAY,EAAE,MAAM;IACpBlB,WAAW,EAAEiD,gBAAgB;IAC7B/C,aAAa,EAAEgD;EACjB,C
AAC;EACDtD,IAAI,EAAE;IACJG,aAAa,EAAE,YAAY;IAC3BmB,YAAY,EAAE,MAAM;IACpBlB,WAAW,EAAEmD,gBAAgB;IAC7BjD,aAAa,EAAEkD;EACjB,CAAC;EACDC,QAAQ,EAAE;IACRtD,aAAa,EAAE,sBAAsB;IACrCmB,YAAY,EAAE,UAAU;IACxBoC,UAAU,EAAE,EAAE;IACdtD,WAAW,EAAEuD,oBAAoB;IACjCrD,aAAa,EAAEsD;EACjB,CAAC;EACDC,aAAa,EAAE;IACb1D,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,eAAe;IAC7BlB,WAAW,EAAE0D,wBAAwB;IACrCxD,aAAa,EAAEyD;EACjB,CAAC;EACDC,aAAa,EAAE;IACb7D,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,eAAe;IAC7BlB,WAAW,EAAE6D,wBAAwB;IACrC3D,aAAa,EAAEyD;EACjB,CAAC;EACDG,kBAAkB,EAAE;IAClB/D,aAAa,EAAE,YAAY;IAC3BmB,YAAY,EAAE,oBAAoB;IAClClB,WAAW,EAAE+D,6BAA6B;IAC1C7D,aAAa,EAAE8D;EACjB,CAAC;EACDC,4BAA4B,EAAE;IAC5BlE,aAAa,EAAE,sBAAsB;IACrCmB,YAAY,EAAE,8BAA8B;IAC5ClB,WAAW,EAAE+D,6BAA6B;IAC1C7D,aAAa,EAAE8D;EACjB;AACF,CAAC;;AAMD,OAAO,SAAShE,WAAW,CAACkE,IAAiB,EAAEC,KAAU,EAAEC,KAAoB,EAAE;EAC/E,IAAI,EAAEF,IAAI,IAAIrE,qBAAqB,CAAC,EAAE;IACpC,MAAM,IAAIwE,KAAK,yBAAkBH,IAAI,EAAG;EAC1C;EAEA,OAAOrE,qBAAqB,CAACqE,IAAI,CAAC,CAAClE,WAAW,CAACmE,KAAK,EAAEC,KAAK,CAAC;AAC9D;;AAMA,OAAO,SAASlE,aAAa,CAACgE,IAAiB,EAAEC,KAAU,EAAEC,KAAoB,EAAE;EACjF,IAAI,EAAEF,IAAI,IAAIrE,qBAAqB,CAAC,EAAE;IACpC,MAAM,IAAIwE,KAAK,yBAAkBH,IAAI,EAAG;EAC1C;EAEA,IAAI,eAAe,IAAIrE,qBAAqB,CAACqE,IAAI,CAAC,EAAE;IAAA;IAClD,gCAAO,0BAAArE,qBAAqB,CAACqE,IAAI,CAAC,EAAChE,aAAa,0DAAzC,mDAA4CiE,KAAK,EAAEC,KAAK,CAAC;EAElE;;EACA,OAAOD,KAAK;AACd;AAEA,SAASlE,mBAAmB,CAACkE,KAAU,EAAE;EACvC,OAAOG,OAAO,CAACH,KAAK,CAAC;AACvB;AAEA,SAAShE,qBAAqB,CAACgE,KAAU,EAAE;EACzC,OAAOG,OAAO,CAACH,KAAK,CAAC;AACvB;AAEA,SAASxD,iBAAiB,CAACwD,KAAU,EAAE;EACrC,MAAMI,CAAC,GAAGC,UAAU,CAACL,KAAK,CAAC;EAC3B,IAAIM,KAAK,CAACF,CAAC,CAAC,EAAE;IACZ,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAAS1D,kBAAkB,CAACsD,KAAU,EAAE;EACtC,MAAMI,CAAC,GAAGC,UAAU,CAACL,KAAK,CAAC;EAC3B,IAAIM,KAAK,CAACF,CAAC,CAAC,EAAE;IACZ,MAAM,IAAIF,KAAK,qCAA8BF,KAAK,EAAG;EACvD;EAEA,OAAOI,CAAC;AACV;AAEA,SAAS5B,gBAAgB,CAACwB,KAAU,EAAE;EACpC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAI,IAAIA,CAAC,GAAG,IAAI,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACrC,MAAM,IAAIF,KAAK,mCAA4BF,KAAK,EAAG;EACrD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASpC,iBAAiB,CAACgC,KAAU,EAAE;EACrC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAIA,CAAC,GAAG,IAAI,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACjC,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAAS1B,iBAAiB,CAACsB,KAAU,EAAE;EACrC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,MAAM,IAAIA,CAAC,GAAG,MAAM,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACzC,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASlC,kBAAkB,CAAC8B,KAAU,EAAE;EACtC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAIA,CAAC,GAAG,MAAM,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACnC,MAAM,IAAIF,KAAK,qCAA8BF,KAAK,EAAG;EACvD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASlE,iBAAiB,CAAC8D,KAAU,EAAE;EACrC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,UAAU,IAAIA,CAAC,GAAG,UAAU,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACjD,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASb,wBAAwB,CAACS,KAAa,EAAEC,KAAmB,EAAE;EACpE,MAAMO,cAAc,GAAGR,KAAK,GAAG,EAAE,KAAKC,KAAK,CAACQ,KAAK,IAAI,CAAC,CAAC;EACvD,MAAML,CAAC,GAAGM,IAAI,CAACC,KAAK,CAAGH,cAAc,GAAG,EAAE,IAAI,CAACP,KAAK,CAACW,SAAU,GAAI,CAAC,GAAI,EAAE,IAAIX,KAAK,CAACW,SAAU,CAAC;EAC/F,IAAIR,CAAC,GAAG,CAAC,UAAU,IAAIA,CAAC,GAAG,UAAU,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACjD,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAAShC,kBAAkB,CAAC4B,KAAU,EAAE;EACtC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IA
AII,CAAC,GAAG,CAAC,IAAIA,CAAC,GAAG,cAAc,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IAC3C,MAAM,IAAIF,KAAK,qCAA8BF,KAAK,EAAG;EACvD;EAEA,OAAOI,CAAC;AACV;AAEA,SAAShE,iBAAiB,CAAC4D,KAAU,EAAE;EACrC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAIM,KAAK,CAACF,CAAC,CAAC,EAAE;IACZ,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASV,wBAAwB,CAACM,KAAa,EAAEC,KAAmB,EAAE;EACpE,MAAMO,cAAc,GAAGR,KAAK,GAAG,EAAE,KAAKC,KAAK,CAACQ,KAAK,IAAI,CAAC,CAAC;EACvD,MAAML,CAAC,GAAGM,IAAI,CAACC,KAAK,CAAGH,cAAc,GAAG,EAAE,IAAI,CAACP,KAAK,CAACW,SAAU,GAAI,CAAC,GAAI,EAAE,IAAIX,KAAK,CAACW,SAAU,CAAC;EAC/F,IAAIN,KAAK,CAACF,CAAC,CAAC,EAAE;IACZ,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAAS9B,kBAAkB,CAAC0B,KAAU,EAAE;EACtC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACrB,MAAM,IAAIF,KAAK,qCAA8BF,KAAK,EAAG;EACvD;EAEA,OAAOI,CAAC;AACV;AAEA,SAAS9D,iBAAiB,CAAC0D,KAAU,EAAE;EACrC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAIM,KAAK,CAACF,CAAC,CAAC,EAAE;IACZ,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASxD,sBAAsB,CAACoD,KAAU,EAAE;EAC1C,OAAOa,MAAM,CAACC,IAAI,CAACd,KAAK,CAAC;AAC3B;AAEA,SAASJ,6BAA6B,CAACI,KAAU,EAAE;EAEjD,OAAOa,MAAM,CAACC,IAAI,CAACd,KAAK,CAAC;AAC3B;AAEA,SAAShD,gBAAgB,CAACgD,KAAU,EAAE;EACpC,OAAOa,MAAM,CAACC,IAAI,CAACd,KAAK,EAAE,MAAM,CAAC;AACnC;AAEA,SAAS/C,kBAAkB,CAAC+C,KAAU,EAAE;EACtC,OAAOA,KAAK,CAACe,QAAQ,EAAE;AACzB;AAEA,SAASjC,gBAAgB,CAACkB,KAAU,EAAE;EACpC,OAAOa,MAAM,CAACC,IAAI,CAACjC,IAAI,CAACmC,SAAS,CAAChB,KAAK,CAAC,CAAC;AAC3C;AAEA,SAASjB,kBAAkB,CAACiB,KAAU,EAAE;EACtC,OAAOnB,IAAI,CAACoC,KAAK,CAACjB,KAAK,CAAC;AAC1B;AAEA,SAAShB,gBAAgB,CAACgB,KAAU,EAAE;EACpC,OAAOa,MAAM,CAACC,IAAI,CAACrF,IAAI,CAACyF,SAAS,CAAClB,KAAK,CAAC,CAAC;AAC3C;AAEA,SAASf,kBAAkB,CAACe,KAAU,EAAE;EACtC,OAAOvE,IAAI,CAAC0F,WAAW,CAACnB,KAAK,CAAC;AAChC;AAEA,SAAS7C,uBAAuB,CAAC6C,KAAU,EAAE;EAC3C,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAIA,CAAC,GAAG,kBAAkB,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IAC/C,MAAM,IAAIF,KAAK,0CAAmCF,KAAK,EAAG;EAC5D;EAEA,OAAOI,CAAC;AACV;AAEA,SAAS/C,uBAAuB,CAAC2C,KAAU,EAAE;EAC3C,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACrB,MAAM,IAAIF,KAAK,0CAAmCF,KAAK,EAAG;EAC5D;EAEA,OAAOI,CAAC;AACV;AAEA,MAAMgB,aAAa,GAAG,QAAQ;AAE9B,SAAS7D,gBAAgB,CAACyC,KAAU,EAAE;EAEpC,IAAIA,KAAK,YAAYqB,IAAI,EAAE;IACzB,OAAOrB,KAAK,CAACsB,OAAO,EAAE,GAAGF,aAAa;EACxC;;EAGA;IACE,MAAMhB,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;IAC7B,IAAII,CAAC,GAAG,CAAC,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;MACrB,MAAM,IAAIF,KAAK,mCAA4BF,KAAK,EAAG;IACrD;IAEA,OAAOI,CAAC;EACV;AACF;AAEA,SAAS5C,kBAAkB,CAACwC,KAAU,EAAE;EACtC,OAAO,IAAIqB,IAAI,CAACrB,KAAK,GAAGoB,aAAa,CAAC;AACxC;AAEA,SAAS1D,4BAA4B,CAACsC,KAAU,EAAE;EAEhD,IAAIA,KAAK,YAAYqB,IAAI,EAAE;IACzB,OAAOrB,KAAK,CAACsB,OAAO,EAAE;EACxB;;EAGA;IACE,MAAMlB,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;IAC7B,IAAII,CAAC,GAAG,CAAC,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;MACrB,MAAM,IAAIF,KAAK,+CAAwCF,KAAK,EAAG;IACjE;IAEA,OAAOI,CAAC;EACV;AACF;AAEA,SAASzC,8BAA8B,CAACqC,KAAU,EAAE;EAClD,OAAO,IAAIqB,IAAI,CAACrB,KAAK,CAAC;AACxB;AAEA,SAASnC,4BAA4B,CAACmC,KAAU,EAAE;EAEhD,IAAIA,KAAK,YAAYqB,IAAI,EAAE;IACzB,OAAOrB,KAAK,CAACsB,OAAO,EAAE,GAAG,IAAI;EAC/B;;EAGA;IACE,MAAMlB,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;IAC7B,IAAII,CAAC,GAAG,CAAC,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;MACrB,MAAM,IAAIF,KAAK,+CAAwCF,KAAK,EAAG;IACjE;IAEA,OAAOI,CAAC;EACV;AACF;AAEA,SAAStC,8BAA8B,CAACkC,KAAU,EAAE;EAClD,OAAO,IAAIqB,IAAI,CAACrB,KAAK,GAAG,IAAI,CAAC;AAC/B;AAEA,SAASZ,oBAAoB,CAACY,KA
AU,EAAE;EACxC,IAAI,CAACA,KAAK,CAACuB,MAAM,IAAI,CAACvB,KAAK,CAACwB,IAAI,IAAI,CAACxB,KAAK,CAACyB,YAAY,EAAE;IACvD,MAAM,IAAIvB,KAAK,CACb,iFAAiF,CAClF;EACH;EAEA,MAAMwB,GAAG,GAAGb,MAAM,CAACc,KAAK,CAAC,EAAE,CAAC;EAE5BD,GAAG,CAACE,aAAa,CAAC5B,KAAK,CAACuB,MAAM,EAAE,CAAC,CAAC;EAClCG,GAAG,CAACE,aAAa,CAAC5B,KAAK,CAACwB,IAAI,EAAE,CAAC,CAAC;EAChCE,GAAG,CAACE,aAAa,CAAC5B,KAAK,CAACyB,YAAY,EAAE,CAAC,CAAC;EACxC,OAAOC,GAAG;AACZ;AAEA,SAASrC,sBAAsB,CAACW,KAAU,EAAE;EAC1C,MAAM0B,GAAG,GAAGb,MAAM,CAACC,IAAI,CAACd,KAAK,CAAC;EAC9B,MAAMuB,MAAM,GAAGG,GAAG,CAACG,YAAY,CAAC,CAAC,CAAC;EAClC,MAAML,IAAI,GAAGE,GAAG,CAACG,YAAY,CAAC,CAAC,CAAC;EAChC,MAAMC,MAAM,GAAGJ,GAAG,CAACG,YAAY,CAAC,CAAC,CAAC;EAElC,OAAO;IAACN,MAAM;IAAEC,IAAI;IAAEC,YAAY,EAAEK;EAAM,CAAC;AAC7C;AAEA,SAAStC,wBAAwB,CAACQ,KAAU,EAAEC,KAAmB,EAAE;EACjE,MAAM8B,YAAY,GAAGrB,IAAI,CAACC,KAAK,CAAGX,KAAK,GAAG,EAAE,IAAI,CAACC,KAAK,CAACW,SAAU,GAAI,CAAC,GAAI,EAAE,IAAIX,KAAK,CAACW,SAAU,CAAC;EACjG,OAAOmB,YAAY,GAAG,EAAE,IAAI,EAAE9B,KAAK,CAACQ,KAAK,IAAI,CAAC,CAAC;AACjD;AAEA,SAASZ,+BAA+B,CAACG,KAAU,EAAEC,KAAmB,EAAE;EACxE,IAAI+B,MAAM,GAAG,CAAC;EACd,IAAIhC,KAAK,CAACiC,MAAM,IAAI,CAAC,EAAE;IAErB,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGlC,KAAK,CAACiC,MAAM,EAAEC,CAAC,EAAE,EAAE;MAErC,MAAMC,SAAS,GAAGnC,KAAK,CAACkC,CAAC,CAAC,IAAK,CAAC,IAAIlC,KAAK,CAACiC,MAAM,GAAGC,CAAC,GAAG,CAAC,CAAE;MAC1DF,MAAM,IAAIG,SAAS;IACrB;EACF,CAAC,MAAM;IACL,KAAK,IAAID,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGlC,KAAK,CAACiC,MAAM,EAAEC,CAAC,EAAE,EAAE;MAErC,MAAMC,SAAS,GAAGnC,KAAK,CAACkC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,IAAIlC,KAAK,CAACiC,MAAM,GAAG,CAAC,GAAGC,CAAC,CAAC,CAAC;MAC9DF,MAAM,IAAIG,SAAS;IACrB;EACF;EAEA,MAAMJ,YAAY,GAAGrB,IAAI,CAACC,KAAK,CAC3BqB,MAAM,GAAG,EAAE,IAAI,CAAC/B,KAAK,CAACW,SAAU,GAAI,CAAC,GAAI,EAAE,IAAIX,KAAK,CAACW,SAAU,CAClE;EACD,OAAOmB,YAAY,GAAG,EAAE,IAAI,EAAE9B,KAAK,CAACQ,KAAK,IAAI,CAAC,CAAC;AACjD"}
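Both the removed and the added types.js embed the same PARQUET_LOGICAL_TYPES table; two of its less obvious encodings are DATE (an INT32 counting whole days since the Unix epoch) and INTERVAL (a 12-byte FIXED_LEN_BYTE_ARRAY holding months, days and milliseconds as little-endian uint32s, mirroring toPrimitive_INTERVAL / fromPrimitive_INTERVAL above). A small standalone round-trip sketch of those two encodings follows; these are illustrative helpers, not the package API.

```ts
// Standalone illustration of two Parquet logical-type encodings used in types.ts.

const MILLIS_PER_DAY = 86400000;

// DATE: stored as the number of whole days since the Unix epoch (INT32).
function dateToDays(value: Date): number {
  return value.getTime() / MILLIS_PER_DAY;
}
function daysToDate(days: number): Date {
  return new Date(days * MILLIS_PER_DAY);
}

// INTERVAL: a 12-byte FIXED_LEN_BYTE_ARRAY of three little-endian uint32s.
function intervalToBytes(months: number, days: number, milliseconds: number): Buffer {
  const buf = Buffer.alloc(12);
  buf.writeUInt32LE(months, 0);
  buf.writeUInt32LE(days, 4);
  buf.writeUInt32LE(milliseconds, 8);
  return buf;
}
function bytesToInterval(buf: Buffer): {months: number; days: number; milliseconds: number} {
  return {
    months: buf.readUInt32LE(0),
    days: buf.readUInt32LE(4),
    milliseconds: buf.readUInt32LE(8)
  };
}

// Round-trips: 2021-01-01 is exactly 18628 days after 1970-01-01.
console.log(dateToDays(new Date(Date.UTC(2021, 0, 1)))); // 18628
console.log(daysToDate(18628).toISOString()); // 2021-01-01T00:00:00.000Z
console.log(bytesToInterval(intervalToBytes(1, 2, 3000))); // { months: 1, days: 2, milliseconds: 3000 }
```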
+ {"version":3,"file":"types.js","names":["BSON","PARQUET_LOGICAL_TYPES","BOOLEAN","primitiveType","toPrimitive","toPrimitive_BOOLEAN","fromPrimitive","fromPrimitive_BOOLEAN","INT32","toPrimitive_INT32","INT64","toPrimitive_INT64","INT96","toPrimitive_INT96","FLOAT","toPrimitive_FLOAT","DOUBLE","toPrimitive_DOUBLE","BYTE_ARRAY","toPrimitive_BYTE_ARRAY","FIXED_LEN_BYTE_ARRAY","UTF8","originalType","toPrimitive_UTF8","fromPrimitive_UTF8","TIME_MILLIS","toPrimitive_TIME_MILLIS","TIME_MICROS","toPrimitive_TIME_MICROS","DATE","toPrimitive_DATE","fromPrimitive_DATE","TIMESTAMP_MILLIS","toPrimitive_TIMESTAMP_MILLIS","fromPrimitive_TIMESTAMP_MILLIS","TIMESTAMP_MICROS","toPrimitive_TIMESTAMP_MICROS","fromPrimitive_TIMESTAMP_MICROS","UINT_8","toPrimitive_UINT8","UINT_16","toPrimitive_UINT16","UINT_32","toPrimitive_UINT32","UINT_64","toPrimitive_UINT64","INT_8","toPrimitive_INT8","INT_16","toPrimitive_INT16","INT_32","INT_64","JSON","toPrimitive_JSON","fromPrimitive_JSON","toPrimitive_BSON","fromPrimitive_BSON","INTERVAL","typeLength","toPrimitive_INTERVAL","fromPrimitive_INTERVAL","DECIMAL_INT32","decimalToPrimitive_INT32","decimalFromPrimitive_INT","DECIMAL_INT64","decimalToPrimitive_INT64","DECIMAL_BYTE_ARRAY","decimalToPrimitive_BYTE_ARRAY","decimalFromPrimitive_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","type","value","field","Error","Boolean","v","parseFloat","isNaN","parseInt","primitiveValue","scale","Math","round","presision","Buffer","from","toString","stringify","parse","serialize","deserialize","kMillisPerDay","Date","getTime","months","days","milliseconds","buf","alloc","writeUInt32LE","readUInt32LE","millis","presisionInt","number","length","i","component"],"sources":["../../../../src/parquetjs/schema/types.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/* eslint-disable camelcase */\nimport BSON from 'bson';\nimport {OriginalType, ParquetField, ParquetType, PrimitiveType} from './declare';\n\nexport interface ParquetTypeKit {\n primitiveType: PrimitiveType;\n originalType?: OriginalType;\n typeLength?: number;\n toPrimitive: Function;\n fromPrimitive?: Function;\n}\n\nexport const PARQUET_LOGICAL_TYPES: Record<ParquetType, ParquetTypeKit> = {\n BOOLEAN: {\n primitiveType: 'BOOLEAN',\n toPrimitive: toPrimitive_BOOLEAN,\n fromPrimitive: fromPrimitive_BOOLEAN\n },\n INT32: {\n primitiveType: 'INT32',\n toPrimitive: toPrimitive_INT32\n },\n INT64: {\n primitiveType: 'INT64',\n toPrimitive: toPrimitive_INT64\n },\n INT96: {\n primitiveType: 'INT96',\n toPrimitive: toPrimitive_INT96\n },\n FLOAT: {\n primitiveType: 'FLOAT',\n toPrimitive: toPrimitive_FLOAT\n },\n DOUBLE: {\n primitiveType: 'DOUBLE',\n toPrimitive: toPrimitive_DOUBLE\n },\n BYTE_ARRAY: {\n primitiveType: 'BYTE_ARRAY',\n toPrimitive: toPrimitive_BYTE_ARRAY\n },\n FIXED_LEN_BYTE_ARRAY: {\n primitiveType: 'FIXED_LEN_BYTE_ARRAY',\n toPrimitive: toPrimitive_BYTE_ARRAY\n },\n UTF8: {\n primitiveType: 'BYTE_ARRAY',\n originalType: 'UTF8',\n toPrimitive: toPrimitive_UTF8,\n fromPrimitive: fromPrimitive_UTF8\n },\n TIME_MILLIS: {\n primitiveType: 'INT32',\n originalType: 'TIME_MILLIS',\n toPrimitive: toPrimitive_TIME_MILLIS\n },\n TIME_MICROS: {\n primitiveType: 'INT64',\n originalType: 'TIME_MICROS',\n toPrimitive: toPrimitive_TIME_MICROS\n },\n DATE: {\n primitiveType: 'INT32',\n originalType: 'DATE',\n toPrimitive: toPrimitive_DATE,\n fromPrimitive: fromPrimitive_DATE\n },\n TIMESTAMP_MILLIS: {\n primitiveType: 'INT64',\n originalType: 
'TIMESTAMP_MILLIS',\n toPrimitive: toPrimitive_TIMESTAMP_MILLIS,\n fromPrimitive: fromPrimitive_TIMESTAMP_MILLIS\n },\n TIMESTAMP_MICROS: {\n primitiveType: 'INT64',\n originalType: 'TIMESTAMP_MICROS',\n toPrimitive: toPrimitive_TIMESTAMP_MICROS,\n fromPrimitive: fromPrimitive_TIMESTAMP_MICROS\n },\n UINT_8: {\n primitiveType: 'INT32',\n originalType: 'UINT_8',\n toPrimitive: toPrimitive_UINT8\n },\n UINT_16: {\n primitiveType: 'INT32',\n originalType: 'UINT_16',\n toPrimitive: toPrimitive_UINT16\n },\n UINT_32: {\n primitiveType: 'INT32',\n originalType: 'UINT_32',\n toPrimitive: toPrimitive_UINT32\n },\n UINT_64: {\n primitiveType: 'INT64',\n originalType: 'UINT_64',\n toPrimitive: toPrimitive_UINT64\n },\n INT_8: {\n primitiveType: 'INT32',\n originalType: 'INT_8',\n toPrimitive: toPrimitive_INT8\n },\n INT_16: {\n primitiveType: 'INT32',\n originalType: 'INT_16',\n toPrimitive: toPrimitive_INT16\n },\n INT_32: {\n primitiveType: 'INT32',\n originalType: 'INT_32',\n toPrimitive: toPrimitive_INT32\n },\n INT_64: {\n primitiveType: 'INT64',\n originalType: 'INT_64',\n toPrimitive: toPrimitive_INT64\n },\n JSON: {\n primitiveType: 'BYTE_ARRAY',\n originalType: 'JSON',\n toPrimitive: toPrimitive_JSON,\n fromPrimitive: fromPrimitive_JSON\n },\n BSON: {\n primitiveType: 'BYTE_ARRAY',\n originalType: 'BSON',\n toPrimitive: toPrimitive_BSON,\n fromPrimitive: fromPrimitive_BSON\n },\n INTERVAL: {\n primitiveType: 'FIXED_LEN_BYTE_ARRAY',\n originalType: 'INTERVAL',\n typeLength: 12,\n toPrimitive: toPrimitive_INTERVAL,\n fromPrimitive: fromPrimitive_INTERVAL\n },\n DECIMAL_INT32: {\n primitiveType: 'INT32',\n originalType: 'DECIMAL_INT32',\n toPrimitive: decimalToPrimitive_INT32,\n fromPrimitive: decimalFromPrimitive_INT\n },\n DECIMAL_INT64: {\n primitiveType: 'INT64',\n originalType: 'DECIMAL_INT64',\n toPrimitive: decimalToPrimitive_INT64,\n fromPrimitive: decimalFromPrimitive_INT\n },\n DECIMAL_BYTE_ARRAY: {\n primitiveType: 'BYTE_ARRAY',\n originalType: 'DECIMAL_BYTE_ARRAY',\n toPrimitive: decimalToPrimitive_BYTE_ARRAY,\n fromPrimitive: decimalFromPrimitive_BYTE_ARRAY\n },\n DECIMAL_FIXED_LEN_BYTE_ARRAY: {\n primitiveType: 'FIXED_LEN_BYTE_ARRAY',\n originalType: 'DECIMAL_FIXED_LEN_BYTE_ARRAY',\n toPrimitive: decimalToPrimitive_BYTE_ARRAY,\n fromPrimitive: decimalFromPrimitive_BYTE_ARRAY\n }\n};\n\n/**\n * Convert a value from it's native representation to the internal/underlying\n * primitive type\n */\nexport function toPrimitive(type: ParquetType, value: unknown, field?: ParquetField): unknown {\n if (!(type in PARQUET_LOGICAL_TYPES)) {\n throw new Error(`invalid type: ${type}`);\n }\n\n return PARQUET_LOGICAL_TYPES[type].toPrimitive(value, field);\n}\n\n/**\n * Convert a value from it's internal/underlying primitive representation to\n * the native representation\n */\nexport function fromPrimitive(type: ParquetType, value: unknown, field?: ParquetField) {\n if (!(type in PARQUET_LOGICAL_TYPES)) {\n throw new Error(`invalid type: ${type}`);\n }\n\n if ('fromPrimitive' in PARQUET_LOGICAL_TYPES[type]) {\n return PARQUET_LOGICAL_TYPES[type].fromPrimitive?.(value, field);\n // tslint:disable-next-line:no-else-after-return\n }\n return value;\n}\n\nfunction toPrimitive_BOOLEAN(value: unknown): boolean {\n return Boolean(value);\n}\n\nfunction fromPrimitive_BOOLEAN(value: any): boolean {\n return Boolean(value);\n}\n\nfunction toPrimitive_FLOAT(value: any): number {\n const v = parseFloat(value);\n if (isNaN(v)) {\n throw new Error(`invalid value for FLOAT: ${value}`);\n }\n return 
v;\n}\n\nfunction toPrimitive_DOUBLE(value: any): number {\n const v = parseFloat(value);\n if (isNaN(v)) {\n throw new Error(`invalid value for DOUBLE: ${value}`);\n }\n return v;\n}\n\nfunction toPrimitive_INT8(value: any) {\n const v = parseInt(value, 10);\n if (v < -0x80 || v > 0x7f || isNaN(v)) {\n throw new Error(`invalid value for INT8: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_UINT8(value: any) {\n const v = parseInt(value, 10);\n if (v < 0 || v > 0xff || isNaN(v)) {\n throw new Error(`invalid value for UINT8: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_INT16(value: any) {\n const v = parseInt(value, 10);\n if (v < -0x8000 || v > 0x7fff || isNaN(v)) {\n throw new Error(`invalid value for INT16: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_UINT16(value: any) {\n const v = parseInt(value, 10);\n if (v < 0 || v > 0xffff || isNaN(v)) {\n throw new Error(`invalid value for UINT16: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_INT32(value: any) {\n const v = parseInt(value, 10);\n if (v < -0x80000000 || v > 0x7fffffff || isNaN(v)) {\n throw new Error(`invalid value for INT32: ${value}`);\n }\n\n return v;\n}\n\nfunction decimalToPrimitive_INT32(value: number, field: ParquetField): number {\n const primitiveValue = value * 10 ** (field.scale || 0);\n const v = Math.round(((primitiveValue * 10 ** -field.presision!) % 1) * 10 ** field.presision!);\n if (v < -0x80000000 || v > 0x7fffffff || isNaN(v)) {\n throw new Error(`invalid value for INT32: ${value}`);\n }\n return v;\n}\n\nfunction toPrimitive_UINT32(value: any): number {\n const v = parseInt(value, 10);\n if (v < 0 || v > 0xffffffffffff || isNaN(v)) {\n throw new Error(`invalid value for UINT32: ${value}`);\n }\n return v;\n}\n\nfunction toPrimitive_INT64(value: any): number {\n const v = parseInt(value, 10);\n if (isNaN(v)) {\n throw new Error(`invalid value for INT64: ${value}`);\n }\n return v;\n}\n\nfunction decimalToPrimitive_INT64(value: number, field: ParquetField) {\n const primitiveValue = value * 10 ** (field.scale || 0);\n const v = Math.round(((primitiveValue * 10 ** -field.presision!) 
% 1) * 10 ** field.presision!);\n if (isNaN(v)) {\n throw new Error(`invalid value for INT64: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_UINT64(value: any) {\n const v = parseInt(value, 10);\n if (v < 0 || isNaN(v)) {\n throw new Error(`invalid value for UINT64: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_INT96(value: any) {\n const v = parseInt(value, 10);\n if (isNaN(v)) {\n throw new Error(`invalid value for INT96: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_BYTE_ARRAY(value: any): Buffer {\n return Buffer.from(value);\n}\n\nfunction decimalToPrimitive_BYTE_ARRAY(value: any): Buffer {\n // TBD\n return Buffer.from(value);\n}\n\nfunction toPrimitive_UTF8(value: any): Buffer {\n return Buffer.from(value, 'utf8');\n}\n\nfunction fromPrimitive_UTF8(value: any): string {\n return value.toString();\n}\n\nfunction toPrimitive_JSON(value: any): Buffer {\n return Buffer.from(JSON.stringify(value));\n}\n\nfunction fromPrimitive_JSON(value: any): unknown {\n return JSON.parse(value);\n}\n\nfunction toPrimitive_BSON(value: any): Buffer {\n return Buffer.from(BSON.serialize(value));\n}\n\nfunction fromPrimitive_BSON(value: any) {\n return BSON.deserialize(value);\n}\n\nfunction toPrimitive_TIME_MILLIS(value: any) {\n const v = parseInt(value, 10);\n if (v < 0 || v > 0xffffffffffffffff || isNaN(v)) {\n throw new Error(`invalid value for TIME_MILLIS: ${value}`);\n }\n\n return v;\n}\n\nfunction toPrimitive_TIME_MICROS(value: any): number {\n const v = parseInt(value, 10);\n if (v < 0 || isNaN(v)) {\n throw new Error(`invalid value for TIME_MICROS: ${value}`);\n }\n return v;\n}\n\nconst kMillisPerDay = 86400000;\n\nfunction toPrimitive_DATE(value: any): number {\n /* convert from date */\n if (value instanceof Date) {\n return value.getTime() / kMillisPerDay;\n }\n\n /* convert from integer */\n {\n const v = parseInt(value, 10);\n if (v < 0 || isNaN(v)) {\n throw new Error(`invalid value for DATE: ${value}`);\n }\n\n return v;\n }\n}\n\nfunction fromPrimitive_DATE(value: any): Date {\n return new Date(value * kMillisPerDay);\n}\n\nfunction toPrimitive_TIMESTAMP_MILLIS(value: any): number {\n /* convert from date */\n if (value instanceof Date) {\n return value.getTime();\n }\n\n /* convert from integer */\n {\n const v = parseInt(value, 10);\n if (v < 0 || isNaN(v)) {\n throw new Error(`invalid value for TIMESTAMP_MILLIS: ${value}`);\n }\n\n return v;\n }\n}\n\nfunction fromPrimitive_TIMESTAMP_MILLIS(value: any): Date {\n return new Date(value);\n}\n\nfunction toPrimitive_TIMESTAMP_MICROS(value: any) {\n /* convert from date */\n if (value instanceof Date) {\n return value.getTime() * 1000;\n }\n\n /* convert from integer */\n {\n const v = parseInt(value, 10);\n if (v < 0 || isNaN(v)) {\n throw new Error(`invalid value for TIMESTAMP_MICROS: ${value}`);\n }\n\n return v;\n }\n}\n\nfunction fromPrimitive_TIMESTAMP_MICROS(value: any) {\n return new Date(value / 1000);\n}\n\nfunction toPrimitive_INTERVAL(value: any) {\n if (!value.months || !value.days || !value.milliseconds) {\n throw new Error(\n 'value for INTERVAL must be object { months: ..., days: ..., milliseconds: ... 
}'\n );\n }\n\n const buf = Buffer.alloc(12);\n\n buf.writeUInt32LE(value.months, 0);\n buf.writeUInt32LE(value.days, 4);\n buf.writeUInt32LE(value.milliseconds, 8);\n return buf;\n}\n\nfunction fromPrimitive_INTERVAL(value: any) {\n const buf = Buffer.from(value);\n const months = buf.readUInt32LE(0);\n const days = buf.readUInt32LE(4);\n const millis = buf.readUInt32LE(8);\n\n return {months, days, milliseconds: millis};\n}\n\nfunction decimalFromPrimitive_INT(value: any, field: ParquetField) {\n const presisionInt = Math.round(((value * 10 ** -field.presision!) % 1) * 10 ** field.presision!);\n return presisionInt * 10 ** -(field.scale || 0);\n}\n\nfunction decimalFromPrimitive_BYTE_ARRAY(value: any, field: ParquetField) {\n let number = 0;\n if (value.length <= 4) {\n // Bytewise operators faster. Use them if it is possible\n for (let i = 0; i < value.length; i++) {\n // `value.length - i - 1` bytes have reverse order (big-endian)\n const component = value[i] << (8 * (value.length - i - 1));\n number += component;\n }\n } else {\n for (let i = 0; i < value.length; i++) {\n // `value.length - i - 1` bytes have reverse order (big-endian)\n const component = value[i] * 2 ** (8 * (value.length - 1 - i));\n number += component;\n }\n }\n\n const presisionInt = Math.round(\n ((number * 10 ** -field.presision!) % 1) * 10 ** field.presision!\n );\n return presisionInt * 10 ** -(field.scale || 0);\n}\n"],"mappings":";AAEA,OAAOA,IAAI,MAAM,MAAM;AAWvB,OAAO,MAAMC,qBAA0D,GAAG;EACxEC,OAAO,EAAE;IACPC,aAAa,EAAE,SAAS;IACxBC,WAAW,EAAEC,mBAAmB;IAChCC,aAAa,EAAEC;EACjB,CAAC;EACDC,KAAK,EAAE;IACLL,aAAa,EAAE,OAAO;IACtBC,WAAW,EAAEK;EACf,CAAC;EACDC,KAAK,EAAE;IACLP,aAAa,EAAE,OAAO;IACtBC,WAAW,EAAEO;EACf,CAAC;EACDC,KAAK,EAAE;IACLT,aAAa,EAAE,OAAO;IACtBC,WAAW,EAAES;EACf,CAAC;EACDC,KAAK,EAAE;IACLX,aAAa,EAAE,OAAO;IACtBC,WAAW,EAAEW;EACf,CAAC;EACDC,MAAM,EAAE;IACNb,aAAa,EAAE,QAAQ;IACvBC,WAAW,EAAEa;EACf,CAAC;EACDC,UAAU,EAAE;IACVf,aAAa,EAAE,YAAY;IAC3BC,WAAW,EAAEe;EACf,CAAC;EACDC,oBAAoB,EAAE;IACpBjB,aAAa,EAAE,sBAAsB;IACrCC,WAAW,EAAEe;EACf,CAAC;EACDE,IAAI,EAAE;IACJlB,aAAa,EAAE,YAAY;IAC3BmB,YAAY,EAAE,MAAM;IACpBlB,WAAW,EAAEmB,gBAAgB;IAC7BjB,aAAa,EAAEkB;EACjB,CAAC;EACDC,WAAW,EAAE;IACXtB,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,aAAa;IAC3BlB,WAAW,EAAEsB;EACf,CAAC;EACDC,WAAW,EAAE;IACXxB,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,aAAa;IAC3BlB,WAAW,EAAEwB;EACf,CAAC;EACDC,IAAI,EAAE;IACJ1B,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,MAAM;IACpBlB,WAAW,EAAE0B,gBAAgB;IAC7BxB,aAAa,EAAEyB;EACjB,CAAC;EACDC,gBAAgB,EAAE;IAChB7B,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,kBAAkB;IAChClB,WAAW,EAAE6B,4BAA4B;IACzC3B,aAAa,EAAE4B;EACjB,CAAC;EACDC,gBAAgB,EAAE;IAChBhC,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,kBAAkB;IAChClB,WAAW,EAAEgC,4BAA4B;IACzC9B,aAAa,EAAE+B;EACjB,CAAC;EACDC,MAAM,EAAE;IACNnC,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,QAAQ;IACtBlB,WAAW,EAAEmC;EACf,CAAC;EACDC,OAAO,EAAE;IACPrC,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,SAAS;IACvBlB,WAAW,EAAEqC;EACf,CAAC;EACDC,OAAO,EAAE;IACPvC,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,SAAS;IACvBlB,WAAW,EAAEuC;EACf,CAAC;EACDC,OAAO,EAAE;IACPzC,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,SAAS;IACvBlB,WAAW,EAAEyC;EACf,CAAC;EACDC,KAAK,EAAE;IACL3C,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,OAAO;IACrBlB,WAAW,EAAE2C;EACf,CAAC;EACDC,MAAM,EAAE;IACN7C,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,QAAQ;IACtBlB,WAAW,EAAE6C;EACf,CAAC;EACDC,MAAM,EAAE;IACN/C,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,QAAQ;IACtBlB,WAAW,EAAEK;EACf,CAAC;EACD0C,MAAM,EAAE;IACNhD,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,QAAQ;IACtBlB,WAAW,EAAEO;EACf,CAAC;EACDyC,IAAI,EAAE;IACJjD,aAAa,EAAE,YAAY;IAC3BmB,YAAY,EAAE,MAAM;IACpBlB,WAAW,EAAEiD,gBAAgB;IAC7B/C,aAAa,EAAEgD;EACjB,C
AAC;EACDtD,IAAI,EAAE;IACJG,aAAa,EAAE,YAAY;IAC3BmB,YAAY,EAAE,MAAM;IACpBlB,WAAW,EAAEmD,gBAAgB;IAC7BjD,aAAa,EAAEkD;EACjB,CAAC;EACDC,QAAQ,EAAE;IACRtD,aAAa,EAAE,sBAAsB;IACrCmB,YAAY,EAAE,UAAU;IACxBoC,UAAU,EAAE,EAAE;IACdtD,WAAW,EAAEuD,oBAAoB;IACjCrD,aAAa,EAAEsD;EACjB,CAAC;EACDC,aAAa,EAAE;IACb1D,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,eAAe;IAC7BlB,WAAW,EAAE0D,wBAAwB;IACrCxD,aAAa,EAAEyD;EACjB,CAAC;EACDC,aAAa,EAAE;IACb7D,aAAa,EAAE,OAAO;IACtBmB,YAAY,EAAE,eAAe;IAC7BlB,WAAW,EAAE6D,wBAAwB;IACrC3D,aAAa,EAAEyD;EACjB,CAAC;EACDG,kBAAkB,EAAE;IAClB/D,aAAa,EAAE,YAAY;IAC3BmB,YAAY,EAAE,oBAAoB;IAClClB,WAAW,EAAE+D,6BAA6B;IAC1C7D,aAAa,EAAE8D;EACjB,CAAC;EACDC,4BAA4B,EAAE;IAC5BlE,aAAa,EAAE,sBAAsB;IACrCmB,YAAY,EAAE,8BAA8B;IAC5ClB,WAAW,EAAE+D,6BAA6B;IAC1C7D,aAAa,EAAE8D;EACjB;AACF,CAAC;;AAMD,OAAO,SAAShE,WAAW,CAACkE,IAAiB,EAAEC,KAAc,EAAEC,KAAoB,EAAW;EAC5F,IAAI,EAAEF,IAAI,IAAIrE,qBAAqB,CAAC,EAAE;IACpC,MAAM,IAAIwE,KAAK,yBAAkBH,IAAI,EAAG;EAC1C;EAEA,OAAOrE,qBAAqB,CAACqE,IAAI,CAAC,CAAClE,WAAW,CAACmE,KAAK,EAAEC,KAAK,CAAC;AAC9D;;AAMA,OAAO,SAASlE,aAAa,CAACgE,IAAiB,EAAEC,KAAc,EAAEC,KAAoB,EAAE;EACrF,IAAI,EAAEF,IAAI,IAAIrE,qBAAqB,CAAC,EAAE;IACpC,MAAM,IAAIwE,KAAK,yBAAkBH,IAAI,EAAG;EAC1C;EAEA,IAAI,eAAe,IAAIrE,qBAAqB,CAACqE,IAAI,CAAC,EAAE;IAAA;IAClD,gCAAO,0BAAArE,qBAAqB,CAACqE,IAAI,CAAC,EAAChE,aAAa,0DAAzC,mDAA4CiE,KAAK,EAAEC,KAAK,CAAC;EAElE;;EACA,OAAOD,KAAK;AACd;AAEA,SAASlE,mBAAmB,CAACkE,KAAc,EAAW;EACpD,OAAOG,OAAO,CAACH,KAAK,CAAC;AACvB;AAEA,SAAShE,qBAAqB,CAACgE,KAAU,EAAW;EAClD,OAAOG,OAAO,CAACH,KAAK,CAAC;AACvB;AAEA,SAASxD,iBAAiB,CAACwD,KAAU,EAAU;EAC7C,MAAMI,CAAC,GAAGC,UAAU,CAACL,KAAK,CAAC;EAC3B,IAAIM,KAAK,CAACF,CAAC,CAAC,EAAE;IACZ,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EACA,OAAOI,CAAC;AACV;AAEA,SAAS1D,kBAAkB,CAACsD,KAAU,EAAU;EAC9C,MAAMI,CAAC,GAAGC,UAAU,CAACL,KAAK,CAAC;EAC3B,IAAIM,KAAK,CAACF,CAAC,CAAC,EAAE;IACZ,MAAM,IAAIF,KAAK,qCAA8BF,KAAK,EAAG;EACvD;EACA,OAAOI,CAAC;AACV;AAEA,SAAS5B,gBAAgB,CAACwB,KAAU,EAAE;EACpC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAI,IAAIA,CAAC,GAAG,IAAI,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACrC,MAAM,IAAIF,KAAK,mCAA4BF,KAAK,EAAG;EACrD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASpC,iBAAiB,CAACgC,KAAU,EAAE;EACrC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAIA,CAAC,GAAG,IAAI,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACjC,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAAS1B,iBAAiB,CAACsB,KAAU,EAAE;EACrC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,MAAM,IAAIA,CAAC,GAAG,MAAM,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACzC,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASlC,kBAAkB,CAAC8B,KAAU,EAAE;EACtC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAIA,CAAC,GAAG,MAAM,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACnC,MAAM,IAAIF,KAAK,qCAA8BF,KAAK,EAAG;EACvD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASlE,iBAAiB,CAAC8D,KAAU,EAAE;EACrC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,UAAU,IAAIA,CAAC,GAAG,UAAU,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACjD,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASb,wBAAwB,CAACS,KAAa,EAAEC,KAAmB,EAAU;EAC5E,MAAMO,cAAc,GAAGR,KAAK,GAAG,EAAE,KAAKC,KAAK,CAACQ,KAAK,IAAI,CAAC,CAAC;EACvD,MAAML,CAAC,GAAGM,IAAI,CAACC,KAAK,CAAGH,cAAc,GAAG,EAAE,IAAI,CAACP,KAAK,CAACW,SAAU,GAAI,CAAC,GAAI,EAAE,IAAIX,KAAK,CAACW,SAAU,CAAC;EAC/F,IAAIR,CAAC,GAAG,CAAC,UAAU,IAAIA,CAAC,GAAG,UAAU,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACjD,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EACA,OAAOI,CAAC;AACV;AAEA,SAAShC,kBAAkB,CAAC4B,KAAU,EAAU;EAC9C,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IA
AII,CAAC,GAAG,CAAC,IAAIA,CAAC,GAAG,cAAc,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IAC3C,MAAM,IAAIF,KAAK,qCAA8BF,KAAK,EAAG;EACvD;EACA,OAAOI,CAAC;AACV;AAEA,SAAShE,iBAAiB,CAAC4D,KAAU,EAAU;EAC7C,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAIM,KAAK,CAACF,CAAC,CAAC,EAAE;IACZ,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EACA,OAAOI,CAAC;AACV;AAEA,SAASV,wBAAwB,CAACM,KAAa,EAAEC,KAAmB,EAAE;EACpE,MAAMO,cAAc,GAAGR,KAAK,GAAG,EAAE,KAAKC,KAAK,CAACQ,KAAK,IAAI,CAAC,CAAC;EACvD,MAAML,CAAC,GAAGM,IAAI,CAACC,KAAK,CAAGH,cAAc,GAAG,EAAE,IAAI,CAACP,KAAK,CAACW,SAAU,GAAI,CAAC,GAAI,EAAE,IAAIX,KAAK,CAACW,SAAU,CAAC;EAC/F,IAAIN,KAAK,CAACF,CAAC,CAAC,EAAE;IACZ,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAAS9B,kBAAkB,CAAC0B,KAAU,EAAE;EACtC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACrB,MAAM,IAAIF,KAAK,qCAA8BF,KAAK,EAAG;EACvD;EAEA,OAAOI,CAAC;AACV;AAEA,SAAS9D,iBAAiB,CAAC0D,KAAU,EAAE;EACrC,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAIM,KAAK,CAACF,CAAC,CAAC,EAAE;IACZ,MAAM,IAAIF,KAAK,oCAA6BF,KAAK,EAAG;EACtD;EAEA,OAAOI,CAAC;AACV;AAEA,SAASxD,sBAAsB,CAACoD,KAAU,EAAU;EAClD,OAAOa,MAAM,CAACC,IAAI,CAACd,KAAK,CAAC;AAC3B;AAEA,SAASJ,6BAA6B,CAACI,KAAU,EAAU;EAEzD,OAAOa,MAAM,CAACC,IAAI,CAACd,KAAK,CAAC;AAC3B;AAEA,SAAShD,gBAAgB,CAACgD,KAAU,EAAU;EAC5C,OAAOa,MAAM,CAACC,IAAI,CAACd,KAAK,EAAE,MAAM,CAAC;AACnC;AAEA,SAAS/C,kBAAkB,CAAC+C,KAAU,EAAU;EAC9C,OAAOA,KAAK,CAACe,QAAQ,EAAE;AACzB;AAEA,SAASjC,gBAAgB,CAACkB,KAAU,EAAU;EAC5C,OAAOa,MAAM,CAACC,IAAI,CAACjC,IAAI,CAACmC,SAAS,CAAChB,KAAK,CAAC,CAAC;AAC3C;AAEA,SAASjB,kBAAkB,CAACiB,KAAU,EAAW;EAC/C,OAAOnB,IAAI,CAACoC,KAAK,CAACjB,KAAK,CAAC;AAC1B;AAEA,SAAShB,gBAAgB,CAACgB,KAAU,EAAU;EAC5C,OAAOa,MAAM,CAACC,IAAI,CAACrF,IAAI,CAACyF,SAAS,CAAClB,KAAK,CAAC,CAAC;AAC3C;AAEA,SAASf,kBAAkB,CAACe,KAAU,EAAE;EACtC,OAAOvE,IAAI,CAAC0F,WAAW,CAACnB,KAAK,CAAC;AAChC;AAEA,SAAS7C,uBAAuB,CAAC6C,KAAU,EAAE;EAC3C,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAIA,CAAC,GAAG,kBAAkB,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IAC/C,MAAM,IAAIF,KAAK,0CAAmCF,KAAK,EAAG;EAC5D;EAEA,OAAOI,CAAC;AACV;AAEA,SAAS/C,uBAAuB,CAAC2C,KAAU,EAAU;EACnD,MAAMI,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;EAC7B,IAAII,CAAC,GAAG,CAAC,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;IACrB,MAAM,IAAIF,KAAK,0CAAmCF,KAAK,EAAG;EAC5D;EACA,OAAOI,CAAC;AACV;AAEA,MAAMgB,aAAa,GAAG,QAAQ;AAE9B,SAAS7D,gBAAgB,CAACyC,KAAU,EAAU;EAE5C,IAAIA,KAAK,YAAYqB,IAAI,EAAE;IACzB,OAAOrB,KAAK,CAACsB,OAAO,EAAE,GAAGF,aAAa;EACxC;;EAGA;IACE,MAAMhB,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;IAC7B,IAAII,CAAC,GAAG,CAAC,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;MACrB,MAAM,IAAIF,KAAK,mCAA4BF,KAAK,EAAG;IACrD;IAEA,OAAOI,CAAC;EACV;AACF;AAEA,SAAS5C,kBAAkB,CAACwC,KAAU,EAAQ;EAC5C,OAAO,IAAIqB,IAAI,CAACrB,KAAK,GAAGoB,aAAa,CAAC;AACxC;AAEA,SAAS1D,4BAA4B,CAACsC,KAAU,EAAU;EAExD,IAAIA,KAAK,YAAYqB,IAAI,EAAE;IACzB,OAAOrB,KAAK,CAACsB,OAAO,EAAE;EACxB;;EAGA;IACE,MAAMlB,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;IAC7B,IAAII,CAAC,GAAG,CAAC,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;MACrB,MAAM,IAAIF,KAAK,+CAAwCF,KAAK,EAAG;IACjE;IAEA,OAAOI,CAAC;EACV;AACF;AAEA,SAASzC,8BAA8B,CAACqC,KAAU,EAAQ;EACxD,OAAO,IAAIqB,IAAI,CAACrB,KAAK,CAAC;AACxB;AAEA,SAASnC,4BAA4B,CAACmC,KAAU,EAAE;EAEhD,IAAIA,KAAK,YAAYqB,IAAI,EAAE;IACzB,OAAOrB,KAAK,CAACsB,OAAO,EAAE,GAAG,IAAI;EAC/B;;EAGA;IACE,MAAMlB,CAAC,GAAGG,QAAQ,CAACP,KAAK,EAAE,EAAE,CAAC;IAC7B,IAAII,CAAC,GAAG,CAAC,IAAIE,KAAK,CAACF,CAAC,CAAC,EAAE;MACrB,MAAM,IAAIF,KAAK,+CAAwCF,KAAK,EAAG;IACjE;IAEA,OAAOI,CAAC;EACV;AACF;AAEA,SAAStC,8BAA8B,CAACkC,KAAU,EAAE;EAClD,OAAO,IAAIqB,IAAI,CAACrB,KAAK,GAAG,IAAI,CAAC;AAC/B;AAEA,SAASZ,oBAAoB,CAACY,KA
AU,EAAE;EACxC,IAAI,CAACA,KAAK,CAACuB,MAAM,IAAI,CAACvB,KAAK,CAACwB,IAAI,IAAI,CAACxB,KAAK,CAACyB,YAAY,EAAE;IACvD,MAAM,IAAIvB,KAAK,CACb,iFAAiF,CAClF;EACH;EAEA,MAAMwB,GAAG,GAAGb,MAAM,CAACc,KAAK,CAAC,EAAE,CAAC;EAE5BD,GAAG,CAACE,aAAa,CAAC5B,KAAK,CAACuB,MAAM,EAAE,CAAC,CAAC;EAClCG,GAAG,CAACE,aAAa,CAAC5B,KAAK,CAACwB,IAAI,EAAE,CAAC,CAAC;EAChCE,GAAG,CAACE,aAAa,CAAC5B,KAAK,CAACyB,YAAY,EAAE,CAAC,CAAC;EACxC,OAAOC,GAAG;AACZ;AAEA,SAASrC,sBAAsB,CAACW,KAAU,EAAE;EAC1C,MAAM0B,GAAG,GAAGb,MAAM,CAACC,IAAI,CAACd,KAAK,CAAC;EAC9B,MAAMuB,MAAM,GAAGG,GAAG,CAACG,YAAY,CAAC,CAAC,CAAC;EAClC,MAAML,IAAI,GAAGE,GAAG,CAACG,YAAY,CAAC,CAAC,CAAC;EAChC,MAAMC,MAAM,GAAGJ,GAAG,CAACG,YAAY,CAAC,CAAC,CAAC;EAElC,OAAO;IAACN,MAAM;IAAEC,IAAI;IAAEC,YAAY,EAAEK;EAAM,CAAC;AAC7C;AAEA,SAAStC,wBAAwB,CAACQ,KAAU,EAAEC,KAAmB,EAAE;EACjE,MAAM8B,YAAY,GAAGrB,IAAI,CAACC,KAAK,CAAGX,KAAK,GAAG,EAAE,IAAI,CAACC,KAAK,CAACW,SAAU,GAAI,CAAC,GAAI,EAAE,IAAIX,KAAK,CAACW,SAAU,CAAC;EACjG,OAAOmB,YAAY,GAAG,EAAE,IAAI,EAAE9B,KAAK,CAACQ,KAAK,IAAI,CAAC,CAAC;AACjD;AAEA,SAASZ,+BAA+B,CAACG,KAAU,EAAEC,KAAmB,EAAE;EACxE,IAAI+B,MAAM,GAAG,CAAC;EACd,IAAIhC,KAAK,CAACiC,MAAM,IAAI,CAAC,EAAE;IAErB,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGlC,KAAK,CAACiC,MAAM,EAAEC,CAAC,EAAE,EAAE;MAErC,MAAMC,SAAS,GAAGnC,KAAK,CAACkC,CAAC,CAAC,IAAK,CAAC,IAAIlC,KAAK,CAACiC,MAAM,GAAGC,CAAC,GAAG,CAAC,CAAE;MAC1DF,MAAM,IAAIG,SAAS;IACrB;EACF,CAAC,MAAM;IACL,KAAK,IAAID,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGlC,KAAK,CAACiC,MAAM,EAAEC,CAAC,EAAE,EAAE;MAErC,MAAMC,SAAS,GAAGnC,KAAK,CAACkC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,IAAIlC,KAAK,CAACiC,MAAM,GAAG,CAAC,GAAGC,CAAC,CAAC,CAAC;MAC9DF,MAAM,IAAIG,SAAS;IACrB;EACF;EAEA,MAAMJ,YAAY,GAAGrB,IAAI,CAACC,KAAK,CAC3BqB,MAAM,GAAG,EAAE,IAAI,CAAC/B,KAAK,CAACW,SAAU,GAAI,CAAC,GAAI,EAAE,IAAIX,KAAK,CAACW,SAAU,CAClE;EACD,OAAOmB,YAAY,GAAG,EAAE,IAAI,EAAE9B,KAAK,CAACQ,KAAK,IAAI,CAAC,CAAC;AACjD"}
@@ -1,5 +1,5 @@
 
- import fs from 'fs';
+ import { fs } from '@loaders.gl/loader-utils';
  export function load(name) {
    return (module || global).require(name);
  }
@@ -1 +1 @@
- {"version":3,"file":"file-utils.js","names":["fs","load","name","module","global","require","oswrite","os","buf","Promise","resolve","reject","write","err","osclose","close","osopen","path","opts","outputStream","createWriteStream","once","fd"],"sources":["../../../../src/parquetjs/utils/file-utils.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport fs from 'fs';\nimport {Writable} from 'stream';\n\nexport function load(name: string): any {\n return (module || (global as any)).require(name);\n}\nexport interface WriteStreamOptions {\n flags?: string;\n encoding?: string;\n fd?: number;\n mode?: number;\n autoClose?: boolean;\n start?: number;\n}\n\nexport function oswrite(os: Writable, buf: Buffer): Promise<void> {\n return new Promise((resolve, reject) => {\n os.write(buf, (err) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n });\n });\n}\n\nexport function osclose(os: Writable): Promise<void> {\n return new Promise((resolve, reject) => {\n (os as any).close((err: any) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n });\n });\n}\n\nexport function osopen(path: string, opts?: WriteStreamOptions): Promise<fs.WriteStream> {\n return new Promise((resolve, reject) => {\n const outputStream = fs.createWriteStream(path, opts as any);\n outputStream.once('open', (fd) => resolve(outputStream));\n outputStream.once('error', (err) => reject(err));\n });\n}\n"],"mappings":";AACA,OAAOA,EAAE,MAAM,IAAI;AAGnB,OAAO,SAASC,IAAI,CAACC,IAAY,EAAO;EACtC,OAAO,CAACC,MAAM,IAAKC,MAAc,EAAEC,OAAO,CAACH,IAAI,CAAC;AAClD;AAUA,OAAO,SAASI,OAAO,CAACC,EAAY,EAAEC,GAAW,EAAiB;EAChE,OAAO,IAAIC,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACtCJ,EAAE,CAACK,KAAK,CAACJ,GAAG,EAAGK,GAAG,IAAK;MACrB,IAAIA,GAAG,EAAE;QACPF,MAAM,CAACE,GAAG,CAAC;MACb,CAAC,MAAM;QACLH,OAAO,EAAE;MACX;IACF,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAEA,OAAO,SAASI,OAAO,CAACP,EAAY,EAAiB;EACnD,OAAO,IAAIE,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACrCJ,EAAE,CAASQ,KAAK,CAAEF,GAAQ,IAAK;MAC9B,IAAIA,GAAG,EAAE;QACPF,MAAM,CAACE,GAAG,CAAC;MACb,CAAC,MAAM;QACLH,OAAO,EAAE;MACX;IACF,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAEA,OAAO,SAASM,MAAM,CAACC,IAAY,EAAEC,IAAyB,EAA2B;EACvF,OAAO,IAAIT,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACtC,MAAMQ,YAAY,GAAGnB,EAAE,CAACoB,iBAAiB,CAACH,IAAI,EAAEC,IAAI,CAAQ;IAC5DC,YAAY,CAACE,IAAI,CAAC,MAAM,EAAGC,EAAE,IAAKZ,OAAO,CAACS,YAAY,CAAC,CAAC;IACxDA,YAAY,CAACE,IAAI,CAAC,OAAO,EAAGR,GAAG,IAAKF,MAAM,CAACE,GAAG,CAAC,CAAC;EAClD,CAAC,CAAC;AACJ"}
+ {"version":3,"file":"file-utils.js","names":["fs","load","name","module","global","require","oswrite","os","buf","Promise","resolve","reject","write","err","osclose","close","osopen","path","opts","outputStream","createWriteStream","once","fd"],"sources":["../../../../src/parquetjs/utils/file-utils.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {fs, stream} from '@loaders.gl/loader-utils';\n\nexport function load(name: string): any {\n return (module || (global as any)).require(name);\n}\nexport interface WriteStreamOptions {\n flags?: string;\n encoding?: string;\n fd?: number;\n mode?: number;\n autoClose?: boolean;\n start?: number;\n}\n\nexport function oswrite(os: stream.Writable, buf: Buffer): Promise<void> {\n return new Promise((resolve, reject) => {\n os.write(buf, (err) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n });\n });\n}\n\nexport function osclose(os: stream.Writable): Promise<void> {\n return new Promise((resolve, reject) => {\n (os as any).close((err: any) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n });\n });\n}\n\nexport function osopen(path: string, opts?: WriteStreamOptions): Promise<fs.WriteStream> {\n return new Promise((resolve, reject) => {\n const outputStream = fs.createWriteStream(path, opts as any);\n outputStream.once('open', (fd) => resolve(outputStream));\n outputStream.once('error', (err) => reject(err));\n });\n}\n"],"mappings":";AACA,SAAQA,EAAE,QAAe,0BAA0B;AAEnD,OAAO,SAASC,IAAI,CAACC,IAAY,EAAO;EACtC,OAAO,CAACC,MAAM,IAAKC,MAAc,EAAEC,OAAO,CAACH,IAAI,CAAC;AAClD;AAUA,OAAO,SAASI,OAAO,CAACC,EAAmB,EAAEC,GAAW,EAAiB;EACvE,OAAO,IAAIC,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACtCJ,EAAE,CAACK,KAAK,CAACJ,GAAG,EAAGK,GAAG,IAAK;MACrB,IAAIA,GAAG,EAAE;QACPF,MAAM,CAACE,GAAG,CAAC;MACb,CAAC,MAAM;QACLH,OAAO,EAAE;MACX;IACF,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAEA,OAAO,SAASI,OAAO,CAACP,EAAmB,EAAiB;EAC1D,OAAO,IAAIE,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACrCJ,EAAE,CAASQ,KAAK,CAAEF,GAAQ,IAAK;MAC9B,IAAIA,GAAG,EAAE;QACPF,MAAM,CAACE,GAAG,CAAC;MACb,CAAC,MAAM;QACLH,OAAO,EAAE;MACX;IACF,CAAC,CAAC;EACJ,CAAC,CAAC;AACJ;AAEA,OAAO,SAASM,MAAM,CAACC,IAAY,EAAEC,IAAyB,EAA2B;EACvF,OAAO,IAAIT,OAAO,CAAC,CAACC,OAAO,EAAEC,MAAM,KAAK;IACtC,MAAMQ,YAAY,GAAGnB,EAAE,CAACoB,iBAAiB,CAACH,IAAI,EAAEC,IAAI,CAAQ;IAC5DC,YAAY,CAACE,IAAI,CAAC,MAAM,EAAGC,EAAE,IAAKZ,OAAO,CAACS,YAAY,CAAC,CAAC;IACxDA,YAAY,CAACE,IAAI,CAAC,OAAO,EAAGR,GAAG,IAAKF,MAAM,CAACE,GAAG,CAAC,CAAC;EAClD,CAAC,CAAC;AACJ"}
package/dist/index.d.ts CHANGED
@@ -39,7 +39,7 @@ export { ParquetWasmWriter } from './parquet-wasm-writer';
  export { preloadCompressions } from './parquetjs/compression';
  export { ParquetSchema } from './parquetjs/schema/schema';
  export { ParquetReader } from './parquetjs/parser/parquet-reader';
- export { ParquetEnvelopeReader } from './parquetjs/parser/parquet-envelope-reader';
+ export { ParquetEncoder } from './parquetjs/encoder/parquet-encoder';
  export { convertParquetToArrowSchema } from './lib/convert-schema';
  export declare const _typecheckParquetLoader: LoaderWithParser;
  //# sourceMappingURL=index.d.ts.map
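Note: the public export surface changes here. ParquetEnvelopeReader is dropped and ParquetEncoder (which replaces the old parquetjs writer module) is exported instead, so imports written against 3.3.0-alpha.8 need updating. A sketch of the import change implied by this hunk; constructor arguments and encoder options are not shown in this diff, so only the imports are illustrated.

    // Before (3.3.0-alpha.8):
    // import {ParquetEnvelopeReader} from '@loaders.gl/parquet';
    // After (3.3.0): reader, schema and the new encoder are the exported entry points
    import {ParquetReader, ParquetSchema, ParquetEncoder} from '@loaders.gl/parquet';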
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,gBAAgB,EAAC,MAAM,0BAA0B,CAAC;AAI/D,OAAO,EAAC,iBAAiB,IAAI,uBAAuB,EAAC,MAAM,uBAAuB,CAAC;AACnF,OAAO,EAAC,aAAa,IAAI,mBAAmB,EAAC,MAAM,kBAAkB,CAAC;AACtE,OAAO,EAAC,YAAY,EAAE,yBAAyB,EAAC,MAAM,qBAAqB,CAAC;AAC5E,OAAO,EAAC,YAAY,IAAI,gBAAgB,EAAC,MAAM,+BAA+B,CAAC;AAE/E,OAAO,EAAC,mBAAmB,EAAE,uBAAuB,EAAC,CAAC;AAEtD,6BAA6B;AAC7B,eAAO,MAAM,aAAa;;;;;;;;;;;;;;CAIzB,CAAC;AAEF,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;CAG7B,CAAC;AAIF,OAAO,EAAC,aAAa,IAAI,cAAc,EAAC,MAAM,kBAAkB,CAAC;AACjE,OAAO,EAAC,iBAAiB,EAAC,MAAM,uBAAuB,CAAC;AAIxD,OAAO,EAAC,mBAAmB,EAAC,MAAM,yBAAyB,CAAC;AAE5D,OAAO,EAAC,aAAa,EAAC,MAAM,2BAA2B,CAAC;AACxD,OAAO,EAAC,aAAa,EAAC,MAAM,mCAAmC,CAAC;AAChE,OAAO,EAAC,qBAAqB,EAAC,MAAM,4CAA4C,CAAC;AAEjF,OAAO,EAAC,2BAA2B,EAAC,MAAM,sBAAsB,CAAC;AAGjE,eAAO,MAAM,uBAAuB,EAAE,gBAAgC,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,gBAAgB,EAAC,MAAM,0BAA0B,CAAC;AAI/D,OAAO,EAAC,iBAAiB,IAAI,uBAAuB,EAAC,MAAM,uBAAuB,CAAC;AACnF,OAAO,EAAC,aAAa,IAAI,mBAAmB,EAAC,MAAM,kBAAkB,CAAC;AACtE,OAAO,EAAC,YAAY,EAAE,yBAAyB,EAAC,MAAM,qBAAqB,CAAC;AAC5E,OAAO,EAAC,YAAY,IAAI,gBAAgB,EAAC,MAAM,+BAA+B,CAAC;AAE/E,OAAO,EAAC,mBAAmB,EAAE,uBAAuB,EAAC,CAAC;AAEtD,6BAA6B;AAC7B,eAAO,MAAM,aAAa;;;;;;;;;;;;;;CAIzB,CAAC;AAEF,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;CAG7B,CAAC;AAIF,OAAO,EAAC,aAAa,IAAI,cAAc,EAAC,MAAM,kBAAkB,CAAC;AACjE,OAAO,EAAC,iBAAiB,EAAC,MAAM,uBAAuB,CAAC;AAIxD,OAAO,EAAC,mBAAmB,EAAC,MAAM,yBAAyB,CAAC;AAE5D,OAAO,EAAC,aAAa,EAAC,MAAM,2BAA2B,CAAC;AACxD,OAAO,EAAC,aAAa,EAAC,MAAM,mCAAmC,CAAC;AAChE,OAAO,EAAC,cAAc,EAAC,MAAM,qCAAqC,CAAC;AAEnE,OAAO,EAAC,2BAA2B,EAAC,MAAM,sBAAsB,CAAC;AAGjE,eAAO,MAAM,uBAAuB,EAAE,gBAAgC,CAAC"}
package/dist/index.js CHANGED
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports._typecheckParquetLoader = exports.convertParquetToArrowSchema = exports.ParquetEnvelopeReader = exports.ParquetReader = exports.ParquetSchema = exports.preloadCompressions = exports.ParquetWasmWriter = exports._ParquetWriter = exports.ParquetWasmLoader = exports.ParquetLoader = exports.ParquetWasmWorkerLoader = exports.ParquetWorkerLoader = void 0;
+ exports._typecheckParquetLoader = exports.convertParquetToArrowSchema = exports.ParquetEncoder = exports.ParquetReader = exports.ParquetSchema = exports.preloadCompressions = exports.ParquetWasmWriter = exports._ParquetWriter = exports.ParquetWasmLoader = exports.ParquetLoader = exports.ParquetWasmWorkerLoader = exports.ParquetWorkerLoader = void 0;
  // ParquetLoader
  const parquet_wasm_loader_1 = require("./parquet-wasm-loader");
  Object.defineProperty(exports, "ParquetWasmWorkerLoader", { enumerable: true, get: function () { return parquet_wasm_loader_1.ParquetWasmLoader; } });
@@ -30,9 +30,8 @@ var schema_1 = require("./parquetjs/schema/schema");
  Object.defineProperty(exports, "ParquetSchema", { enumerable: true, get: function () { return schema_1.ParquetSchema; } });
  var parquet_reader_1 = require("./parquetjs/parser/parquet-reader");
  Object.defineProperty(exports, "ParquetReader", { enumerable: true, get: function () { return parquet_reader_1.ParquetReader; } });
- var parquet_envelope_reader_1 = require("./parquetjs/parser/parquet-envelope-reader");
- Object.defineProperty(exports, "ParquetEnvelopeReader", { enumerable: true, get: function () { return parquet_envelope_reader_1.ParquetEnvelopeReader; } });
- // export {ParquetWriter, ParquetEnvelopeWriter, ParquetTransformer} from './parquetjs/encoder/writer';
+ var parquet_encoder_1 = require("./parquetjs/encoder/parquet-encoder");
+ Object.defineProperty(exports, "ParquetEncoder", { enumerable: true, get: function () { return parquet_encoder_1.ParquetEncoder; } });
  var convert_schema_1 = require("./lib/convert-schema");
  Object.defineProperty(exports, "convertParquetToArrowSchema", { enumerable: true, get: function () { return convert_schema_1.convertParquetToArrowSchema; } });
  // TESTS
@@ -1,4 +1,4 @@
  import type { ParquetLoaderOptions } from '../parquet-loader';
- export declare function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions): Promise<any[][] | null>;
- export declare function parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions): AsyncGenerator<any[][], void, unknown>;
+ export declare function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions): Promise<import("../parquetjs/schema/declare").ParquetRecord[] | null>;
+ export declare function parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions): AsyncGenerator<import("../parquetjs/schema/declare").ParquetRecord[], void, unknown>;
  //# sourceMappingURL=parse-parquet.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"parse-parquet.d.ts","sourceRoot":"","sources":["../../src/lib/parse-parquet.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,oBAAoB,EAAC,MAAM,mBAAmB,CAAC;AAI5D,wBAAsB,YAAY,CAAC,WAAW,EAAE,WAAW,EAAE,OAAO,CAAC,EAAE,oBAAoB,2BAM1F;AAED,wBAAuB,yBAAyB,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,EAAE,oBAAoB,0CAa1F"}
+ {"version":3,"file":"parse-parquet.d.ts","sourceRoot":"","sources":["../../src/lib/parse-parquet.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAC,oBAAoB,EAAC,MAAM,mBAAmB,CAAC;AAG5D,wBAAsB,YAAY,CAAC,WAAW,EAAE,WAAW,EAAE,OAAO,CAAC,EAAE,oBAAoB,yEAM1F;AAED,wBAAuB,yBAAyB,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,CAAC,EAAE,oBAAoB,wFAO1F"}
@@ -1,6 +1,9 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.parseParquetFileInBatches = exports.parseParquet = void 0;
+ // import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';
+ // import {ColumnarTableBatch} from '@loaders.gl/schema';
+ const loader_utils_1 = require("@loaders.gl/loader-utils");
  const parquet_reader_1 = require("../parquetjs/parser/parquet-reader");
  async function parseParquet(arrayBuffer, options) {
    const blob = new Blob([arrayBuffer]);
@@ -11,18 +14,27 @@ async function parseParquet(arrayBuffer, options) {
  }
  exports.parseParquet = parseParquet;
  async function* parseParquetFileInBatches(blob, options) {
- const reader = await parquet_reader_1.ParquetReader.openBlob(blob);
- const rows = [];
- try {
- const cursor = reader.getCursor();
- let record;
- while ((record = await cursor.next())) {
- rows.push(record);
- }
+ const file = (0, loader_utils_1.makeReadableFile)(blob);
+ const reader = new parquet_reader_1.ParquetReader(file);
+ const rowBatches = reader.rowBatchIterator(options?.parquet);
+ for await (const rows of rowBatches) {
+ yield rows;
  }
- finally {
- await reader.close();
- }
- yield rows;
  }
  exports.parseParquetFileInBatches = parseParquetFileInBatches;
+ // export async function* parseParquetFileInColumnarBatches(blob: Blob, options?: {columnList?: string[][]}): AsyncIterable<ColumnarTableBatch> {
+ // const rowGroupReader = new ParquetRowGroupReader({data: blob, columnList: options?.columnList});
+ // try {
+ // for await (const rowGroup of rowGroupReader) {
+ // yield convertRowGroupToTableBatch(rowGroup);
+ // }
+ // } finally {
+ // await rowGroupReader.close();
+ // }
+ // }
+ // function convertRowGroupToTableBatch(rowGroup): ColumnarTableBatch {
+ // // @ts-expect-error
+ // return {
+ // data: rowGroup
+ // };
+ // }
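Note: the batched parser no longer reads every row into memory before yielding a single batch; it now wraps the Blob with makeReadableFile and streams batches from ParquetReader.rowBatchIterator. Below is a TypeScript rendering of the new control flow compiled above. The function name readParquetBatches is illustrative, and the batch granularity of rowBatchIterator (for example, one batch per row group) is not specified in this diff.

    // Equivalent of the compiled CommonJS above, written out for readability only.
    import {makeReadableFile} from '@loaders.gl/loader-utils';
    import {ParquetReader} from '@loaders.gl/parquet';

    async function* readParquetBatches(
      blob: Blob,
      parquetOptions?: {columnList?: string[] | string[][]}  // shape of options.parquet
    ) {
      const file = makeReadableFile(blob);     // random-access wrapper around the Blob
      const reader = new ParquetReader(file);  // the reader now takes a file, not a Blob
      for await (const rows of reader.rowBatchIterator(parquetOptions)) {
        yield rows;                            // each batch is an array of ParquetRecord rows
      }
    }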
@@ -3,6 +3,7 @@ export type ParquetLoaderOptions = LoaderOptions & {
    parquet?: {
      type?: 'object-row-table';
      url?: string;
+ columnList?: string[] | string[][];
    };
  };
  /** ParquetJS table loader */
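Note: ParquetLoaderOptions gains a columnList field (defaulting to [] in the final hunk below). A hedged usage sketch with the load function from @loaders.gl/core follows; the file name is a placeholder, and how columnList filters the returned rows is not specified in this diff.

    // Sketch only: 'data.parquet' is a placeholder, and the filtering
    // semantics of columnList are an assumption based on its name.
    import {load} from '@loaders.gl/core';
    import {ParquetLoader} from '@loaders.gl/parquet';

    const rows = await load('data.parquet', ParquetLoader, {
      parquet: {
        type: 'object-row-table',
        columnList: ['id', 'name']  // new in 3.3.0; default is []
      }
    });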
@@ -1 +1 @@
- {"version":3,"file":"parquet-loader.d.ts","sourceRoot":"","sources":["../src/parquet-loader.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,MAAM,EAAE,aAAa,EAAC,MAAM,0BAA0B,CAAC;AAMpE,MAAM,MAAM,oBAAoB,GAAG,aAAa,GAAG;IACjD,OAAO,CAAC,EAAE;QACR,IAAI,CAAC,EAAE,kBAAkB,CAAC;QAC1B,GAAG,CAAC,EAAE,MAAM,CAAC;KACd,CAAC;CACH,CAAC;AASF,6BAA6B;AAC7B,eAAO,MAAM,aAAa;;;;;;;;;;;;CAYzB,CAAC;AAEF,eAAO,MAAM,uBAAuB,EAAE,MAAsB,CAAC"}
+ {"version":3,"file":"parquet-loader.d.ts","sourceRoot":"","sources":["../src/parquet-loader.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,MAAM,EAAE,aAAa,EAAC,MAAM,0BAA0B,CAAC;AAMpE,MAAM,MAAM,oBAAoB,GAAG,aAAa,GAAG;IACjD,OAAO,CAAC,EAAE;QACR,IAAI,CAAC,EAAE,kBAAkB,CAAC;QAC1B,GAAG,CAAC,EAAE,MAAM,CAAC;QACb,UAAU,CAAC,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,EAAE,CAAC;KACpC,CAAC;CACH,CAAC;AAUF,6BAA6B;AAC7B,eAAO,MAAM,aAAa;;;;;;;;;;;;CAYzB,CAAC;AAEF,eAAO,MAAM,uBAAuB,EAAE,MAAsB,CAAC"}
@@ -7,7 +7,8 @@ const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
  const DEFAULT_PARQUET_LOADER_OPTIONS = {
    parquet: {
      type: 'object-row-table',
- url: undefined
+ url: undefined,
+ columnList: []
    }
  };
  /** ParquetJS table loader */