@loaders.gl/parquet 3.4.6 → 4.0.0-alpha.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (130)
  1. package/dist/dist.min.js +27 -34
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/index.js +6 -6
  4. package/dist/es5/index.js.map +1 -1
  5. package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -1
  6. package/dist/es5/lib/arrow/convert-schema-from-parquet.js +58 -42
  7. package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -1
  8. package/dist/es5/lib/arrow/convert-schema-to-parquet.js +33 -31
  9. package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -1
  10. package/dist/es5/lib/geo/decode-geo-metadata.js +12 -8
  11. package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -1
  12. package/dist/es5/lib/parsers/parse-parquet-to-columns.js +11 -7
  13. package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -1
  14. package/dist/es5/lib/parsers/parse-parquet-to-rows.js +51 -29
  15. package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -1
  16. package/dist/es5/lib/wasm/parse-parquet-wasm.js +6 -6
  17. package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
  18. package/dist/es5/parquet-loader.js +16 -4
  19. package/dist/es5/parquet-loader.js.map +1 -1
  20. package/dist/es5/parquet-wasm-loader.js +1 -1
  21. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  22. package/dist/es5/parquet-wasm-writer.js +1 -1
  23. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  24. package/dist/es5/parquet-writer.js +1 -1
  25. package/dist/es5/parquet-writer.js.map +1 -1
  26. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -1
  27. package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
  28. package/dist/es5/parquetjs/parser/parquet-reader.js +1 -1
  29. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  30. package/dist/es5/parquetjs/schema/declare.js +4 -4
  31. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  32. package/dist/es5/parquetjs/schema/schema.js +7 -7
  33. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  34. package/dist/es5/parquetjs/schema/shred.js +117 -22
  35. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  36. package/dist/esm/index.js +5 -5
  37. package/dist/esm/index.js.map +1 -1
  38. package/dist/esm/lib/arrow/convert-row-group-to-columns.js.map +1 -1
  39. package/dist/esm/lib/arrow/convert-schema-from-parquet.js +57 -41
  40. package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -1
  41. package/dist/esm/lib/arrow/convert-schema-to-parquet.js +33 -31
  42. package/dist/esm/lib/arrow/convert-schema-to-parquet.js.map +1 -1
  43. package/dist/esm/lib/geo/decode-geo-metadata.js +12 -8
  44. package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -1
  45. package/dist/esm/lib/parsers/parse-parquet-to-columns.js +12 -8
  46. package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -1
  47. package/dist/esm/lib/parsers/parse-parquet-to-rows.js +14 -3
  48. package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -1
  49. package/dist/esm/lib/wasm/parse-parquet-wasm.js +3 -3
  50. package/dist/esm/lib/wasm/parse-parquet-wasm.js.map +1 -1
  51. package/dist/esm/parquet-loader.js +14 -2
  52. package/dist/esm/parquet-loader.js.map +1 -1
  53. package/dist/esm/parquet-wasm-loader.js +1 -1
  54. package/dist/esm/parquet-wasm-loader.js.map +1 -1
  55. package/dist/esm/parquet-wasm-writer.js +1 -1
  56. package/dist/esm/parquet-wasm-writer.js.map +1 -1
  57. package/dist/esm/parquet-writer.js +1 -1
  58. package/dist/esm/parquet-writer.js.map +1 -1
  59. package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -1
  60. package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
  61. package/dist/esm/parquetjs/parser/parquet-reader.js +2 -2
  62. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  63. package/dist/esm/parquetjs/schema/declare.js +1 -1
  64. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  65. package/dist/esm/parquetjs/schema/schema.js +6 -6
  66. package/dist/esm/parquetjs/schema/schema.js.map +1 -1
  67. package/dist/esm/parquetjs/schema/shred.js +108 -21
  68. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  69. package/dist/index.d.ts +8 -49
  70. package/dist/index.d.ts.map +1 -1
  71. package/dist/index.js +8 -6
  72. package/dist/lib/arrow/convert-row-group-to-columns.d.ts +2 -2
  73. package/dist/lib/arrow/convert-row-group-to-columns.d.ts.map +1 -1
  74. package/dist/lib/arrow/convert-schema-from-parquet.d.ts +4 -4
  75. package/dist/lib/arrow/convert-schema-from-parquet.d.ts.map +1 -1
  76. package/dist/lib/arrow/convert-schema-from-parquet.js +48 -44
  77. package/dist/lib/arrow/convert-schema-to-parquet.d.ts +1 -1
  78. package/dist/lib/arrow/convert-schema-to-parquet.d.ts.map +1 -1
  79. package/dist/lib/arrow/convert-schema-to-parquet.js +30 -31
  80. package/dist/lib/geo/decode-geo-metadata.js +12 -8
  81. package/dist/lib/parsers/parse-parquet-to-columns.d.ts +2 -2
  82. package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -1
  83. package/dist/lib/parsers/parse-parquet-to-columns.js +13 -7
  84. package/dist/lib/parsers/parse-parquet-to-rows.d.ts +3 -2
  85. package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -1
  86. package/dist/lib/parsers/parse-parquet-to-rows.js +16 -19
  87. package/dist/lib/wasm/parse-parquet-wasm.d.ts +3 -3
  88. package/dist/lib/wasm/parse-parquet-wasm.d.ts.map +1 -1
  89. package/dist/lib/wasm/parse-parquet-wasm.js +3 -3
  90. package/dist/parquet-loader.d.ts +3 -14
  91. package/dist/parquet-loader.d.ts.map +1 -1
  92. package/dist/parquet-loader.js +14 -2
  93. package/dist/parquet-worker.js +31 -38
  94. package/dist/parquet-worker.js.map +3 -3
  95. package/dist/parquet-writer.d.ts +2 -1
  96. package/dist/parquet-writer.d.ts.map +1 -1
  97. package/dist/parquet-writer.js +1 -0
  98. package/dist/parquetjs/encoder/parquet-encoder.d.ts +4 -4
  99. package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -1
  100. package/dist/parquetjs/parser/decoders.d.ts +2 -2
  101. package/dist/parquetjs/parser/decoders.d.ts.map +1 -1
  102. package/dist/parquetjs/parser/parquet-reader.d.ts +6 -6
  103. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
  104. package/dist/parquetjs/parser/parquet-reader.js +1 -1
  105. package/dist/parquetjs/schema/declare.d.ts +6 -5
  106. package/dist/parquetjs/schema/declare.d.ts.map +1 -1
  107. package/dist/parquetjs/schema/declare.js +3 -3
  108. package/dist/parquetjs/schema/schema.d.ts +4 -4
  109. package/dist/parquetjs/schema/schema.d.ts.map +1 -1
  110. package/dist/parquetjs/schema/schema.js +5 -5
  111. package/dist/parquetjs/schema/shred.d.ts +17 -111
  112. package/dist/parquetjs/schema/shred.d.ts.map +1 -1
  113. package/dist/parquetjs/schema/shred.js +127 -119
  114. package/package.json +8 -8
  115. package/src/index.ts +32 -9
  116. package/src/lib/arrow/convert-row-group-to-columns.ts +2 -2
  117. package/src/lib/arrow/convert-schema-from-parquet.ts +56 -66
  118. package/src/lib/arrow/convert-schema-to-parquet.ts +32 -44
  119. package/src/lib/geo/decode-geo-metadata.ts +17 -8
  120. package/src/lib/parsers/parse-parquet-to-columns.ts +22 -11
  121. package/src/lib/parsers/parse-parquet-to-rows.ts +28 -23
  122. package/src/lib/wasm/parse-parquet-wasm.ts +7 -7
  123. package/src/parquet-loader.ts +25 -2
  124. package/src/parquet-writer.ts +4 -1
  125. package/src/parquetjs/encoder/parquet-encoder.ts +11 -10
  126. package/src/parquetjs/parser/decoders.ts +3 -3
  127. package/src/parquetjs/parser/parquet-reader.ts +7 -7
  128. package/src/parquetjs/schema/declare.ts +6 -5
  129. package/src/parquetjs/schema/schema.ts +8 -8
  130. package/src/parquetjs/schema/shred.ts +142 -103
package/dist/es5/index.js CHANGED
@@ -50,16 +50,16 @@ Object.defineProperty(exports, "_ParquetWriter", {
  }
  });
  exports._typecheckParquetLoader = void 0;
- Object.defineProperty(exports, "convertParquetToArrowSchema", {
+ Object.defineProperty(exports, "convertParquetSchema", {
  enumerable: true,
  get: function get() {
- return _convertSchemaFromParquet.convertSchemaFromParquet;
+ return _convertSchemaFromParquet.convertParquetSchema;
  }
  });
- Object.defineProperty(exports, "convertSchemaFromParquet", {
+ Object.defineProperty(exports, "convertParquetToArrowSchema", {
  enumerable: true,
  get: function get() {
- return _convertSchemaFromParquet.convertSchemaFromParquet;
+ return _convertSchemaFromParquet.convertParquetSchema;
  }
  });
  Object.defineProperty(exports, "geoJSONSchema", {
@@ -93,11 +93,11 @@ Object.defineProperty(exports, "unpackGeoMetadata", {
  }
  });
  var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
- var _parquetWasmLoader = require("./parquet-wasm-loader");
  var _parquetLoader = require("./parquet-loader");
  var _parseParquetToRows = require("./lib/parsers/parse-parquet-to-rows");
  var _parseParquetToColumns = require("./lib/parsers/parse-parquet-to-columns");
  var _parseParquetWasm = require("./lib/wasm/parse-parquet-wasm");
+ var _parquetWasmLoader = require("./parquet-wasm-loader");
  var _parquetWriter = require("./parquet-writer");
  var _parquetWasmWriter = require("./parquet-wasm-writer");
  var _compression = require("./parquetjs/compression");
@@ -120,7 +120,7 @@ var ParquetColumnarLoader = _objectSpread(_objectSpread({}, _parquetLoader.Parqu
  });
  exports.ParquetColumnarLoader = ParquetColumnarLoader;
  var ParquetWasmLoader = _objectSpread(_objectSpread({}, _parquetWasmLoader.ParquetWasmLoader), {}, {
- parse: _parseParquetWasm.parseParquet
+ parse: _parseParquetWasm.parseParquetWasm
  });
  exports.ParquetWasmLoader = ParquetWasmLoader;
  var _typecheckParquetLoader = ParquetLoader;
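
Note on the hunk above: 4.0.0-alpha renames the schema conversion export, so `convertSchemaFromParquet` becomes `convertParquetSchema` (the `convertParquetToArrowSchema` alias is kept but now resolves to the renamed function), and `ParquetWasmLoader.parse` is wired to the renamed `parseParquetWasm`. A minimal consumer-side sketch of the rename follows; the helper function is hypothetical, only the import names come from this diff.

```typescript
// 3.4.x:  import {convertSchemaFromParquet} from '@loaders.gl/parquet';
import {convertParquetSchema, ParquetSchema} from '@loaders.gl/parquet';

// Hypothetical helper: list column names from a parquetjs schema
// via the loaders.gl schema object returned by the renamed converter.
export function listColumnNames(parquetSchema: ParquetSchema): string[] {
  // Second argument is the parquet FileMetaData (null when not available).
  const schema = convertParquetSchema(parquetSchema, null);
  return schema.fields.map((field) => field.name);
}
```
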
package/dist/es5/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","names":["_parquetWasmLoader","require","_parquetLoader","_parseParquetToRows","_parseParquetToColumns","_parseParquetWasm","_parquetWriter","_parquetWasmWriter","_compression","_schema","_parquetReader","_parquetEncoder","_convertSchemaFromParquet","_geoparquetSchema","_interopRequireDefault","_decodeGeoMetadata","ownKeys","object","enumerableOnly","keys","Object","getOwnPropertySymbols","symbols","filter","sym","getOwnPropertyDescriptor","enumerable","push","apply","_objectSpread","target","i","arguments","length","source","forEach","key","_defineProperty2","default","getOwnPropertyDescriptors","defineProperties","defineProperty","ParquetLoader","ParquetWorkerLoader","parse","parseParquet","parseFileInBatches","parseParquetFileInBatches","exports","ParquetColumnarLoader","parseParquetInColumns","parseParquetFileInColumnarBatches","ParquetWasmLoader","ParquetWasmWorkerLoader","parseParquetWasm","_typecheckParquetLoader"],"sources":["../../src/index.ts"],"sourcesContent":["import type {LoaderWithParser} from '@loaders.gl/loader-utils';\n\n// ParquetLoader\n\nimport {ParquetWasmLoader as ParquetWasmWorkerLoader} from './parquet-wasm-loader';\nimport {ParquetLoader as ParquetWorkerLoader} from './parquet-loader';\nimport {parseParquet, parseParquetFileInBatches} from './lib/parsers/parse-parquet-to-rows';\nimport {\n parseParquetInColumns,\n parseParquetFileInColumnarBatches\n} from './lib/parsers/parse-parquet-to-columns';\nimport {parseParquet as parseParquetWasm} from './lib/wasm/parse-parquet-wasm';\n\nexport {ParquetWorkerLoader, ParquetWasmWorkerLoader};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n ...ParquetWorkerLoader,\n parse: parseParquet,\n parseFileInBatches: parseParquetFileInBatches\n};\n\n/** ParquetJS table loader */\nexport const ParquetColumnarLoader = {\n ...ParquetWorkerLoader,\n parse: parseParquetInColumns,\n parseFileInBatches: parseParquetFileInColumnarBatches\n};\n\nexport const ParquetWasmLoader = {\n ...ParquetWasmWorkerLoader,\n parse: parseParquetWasm\n};\n\n// ParquetWriter\n\nexport {ParquetWriter as _ParquetWriter} from './parquet-writer';\nexport {ParquetWasmWriter} from './parquet-wasm-writer';\n\n// EXPERIMENTAL - expose the internal parquetjs API\n\nexport {preloadCompressions} from './parquetjs/compression';\n\nexport {ParquetSchema} from './parquetjs/schema/schema';\nexport {ParquetReader} from './parquetjs/parser/parquet-reader';\nexport {ParquetEncoder} from './parquetjs/encoder/parquet-encoder';\n\nexport {\n convertSchemaFromParquet,\n convertSchemaFromParquet as convertParquetToArrowSchema\n} from './lib/arrow/convert-schema-from-parquet';\n\n// TESTS\nexport const _typecheckParquetLoader: LoaderWithParser = ParquetLoader;\n\n// Geo Metadata\nexport {default as geoJSONSchema} from './lib/geo/geoparquet-schema';\n\nexport type {GeoMetadata} from './lib/geo/decode-geo-metadata';\nexport {getGeoMetadata, setGeoMetadata, unpackGeoMetadata} from 
'./lib/geo/decode-geo-metadata';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA,IAAAA,kBAAA,GAAAC,OAAA;AACA,IAAAC,cAAA,GAAAD,OAAA;AACA,IAAAE,mBAAA,GAAAF,OAAA;AACA,IAAAG,sBAAA,GAAAH,OAAA;AAIA,IAAAI,iBAAA,GAAAJ,OAAA;AAyBA,IAAAK,cAAA,GAAAL,OAAA;AACA,IAAAM,kBAAA,GAAAN,OAAA;AAIA,IAAAO,YAAA,GAAAP,OAAA;AAEA,IAAAQ,OAAA,GAAAR,OAAA;AACA,IAAAS,cAAA,GAAAT,OAAA;AACA,IAAAU,eAAA,GAAAV,OAAA;AAEA,IAAAW,yBAAA,GAAAX,OAAA;AASA,IAAAY,iBAAA,GAAAC,sBAAA,CAAAb,OAAA;AAGA,IAAAc,kBAAA,GAAAd,OAAA;AAAgG,SAAAe,QAAAC,MAAA,EAAAC,cAAA,QAAAC,IAAA,GAAAC,MAAA,CAAAD,IAAA,CAAAF,MAAA,OAAAG,MAAA,CAAAC,qBAAA,QAAAC,OAAA,GAAAF,MAAA,CAAAC,qBAAA,CAAAJ,MAAA,GAAAC,cAAA,KAAAI,OAAA,GAAAA,OAAA,CAAAC,MAAA,WAAAC,GAAA,WAAAJ,MAAA,CAAAK,wBAAA,CAAAR,MAAA,EAAAO,GAAA,EAAAE,UAAA,OAAAP,IAAA,CAAAQ,IAAA,CAAAC,KAAA,CAAAT,IAAA,EAAAG,OAAA,YAAAH,IAAA;AAAA,SAAAU,cAAAC,MAAA,aAAAC,CAAA,MAAAA,CAAA,GAAAC,SAAA,CAAAC,MAAA,EAAAF,CAAA,UAAAG,MAAA,WAAAF,SAAA,CAAAD,CAAA,IAAAC,SAAA,CAAAD,CAAA,QAAAA,CAAA,OAAAf,OAAA,CAAAI,MAAA,CAAAc,MAAA,OAAAC,OAAA,WAAAC,GAAA,QAAAC,gBAAA,CAAAC,OAAA,EAAAR,MAAA,EAAAM,GAAA,EAAAF,MAAA,CAAAE,GAAA,SAAAhB,MAAA,CAAAmB,yBAAA,GAAAnB,MAAA,CAAAoB,gBAAA,CAAAV,MAAA,EAAAV,MAAA,CAAAmB,yBAAA,CAAAL,MAAA,KAAAlB,OAAA,CAAAI,MAAA,CAAAc,MAAA,GAAAC,OAAA,WAAAC,GAAA,IAAAhB,MAAA,CAAAqB,cAAA,CAAAX,MAAA,EAAAM,GAAA,EAAAhB,MAAA,CAAAK,wBAAA,CAAAS,MAAA,EAAAE,GAAA,iBAAAN,MAAA;AA3CzF,IAAMY,aAAa,GAAAb,aAAA,CAAAA,aAAA,KACrBc,4BAAmB;EACtBC,KAAK,EAAEC,gCAAY;EACnBC,kBAAkB,EAAEC;AAAyB,EAC9C;AAACC,OAAA,CAAAN,aAAA,GAAAA,aAAA;AAGK,IAAMO,qBAAqB,GAAApB,aAAA,CAAAA,aAAA,KAC7Bc,4BAAmB;EACtBC,KAAK,EAAEM,4CAAqB;EAC5BJ,kBAAkB,EAAEK;AAAiC,EACtD;AAACH,OAAA,CAAAC,qBAAA,GAAAA,qBAAA;AAEK,IAAMG,iBAAiB,GAAAvB,aAAA,CAAAA,aAAA,KACzBwB,oCAAuB;EAC1BT,KAAK,EAAEU;AAAgB,EACxB;AAACN,OAAA,CAAAI,iBAAA,GAAAA,iBAAA;AAqBK,IAAMG,uBAAyC,GAAGb,aAAa;AAACM,OAAA,CAAAO,uBAAA,GAAAA,uBAAA"}
+ {"version":3,"file":"index.js","names":["_parquetLoader","require","_parseParquetToRows","_parseParquetToColumns","_parseParquetWasm","_parquetWasmLoader","_parquetWriter","_parquetWasmWriter","_compression","_schema","_parquetReader","_parquetEncoder","_convertSchemaFromParquet","_geoparquetSchema","_interopRequireDefault","_decodeGeoMetadata","ownKeys","object","enumerableOnly","keys","Object","getOwnPropertySymbols","symbols","filter","sym","getOwnPropertyDescriptor","enumerable","push","apply","_objectSpread","target","i","arguments","length","source","forEach","key","_defineProperty2","default","getOwnPropertyDescriptors","defineProperties","defineProperty","ParquetLoader","ParquetWorkerLoader","parse","parseParquet","parseFileInBatches","parseParquetFileInBatches","exports","ParquetColumnarLoader","ParquetColumnarWorkerLoader","parseParquetInColumns","parseParquetFileInColumnarBatches","ParquetWasmLoader","ParquetWasmWorkerLoader","parseParquetWasm","_typecheckParquetLoader"],"sources":["../../src/index.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport type {LoaderWithParser} from '@loaders.gl/loader-utils';\nimport type {\n ObjectRowTable,\n ObjectRowTableBatch,\n ColumnarTable,\n ColumnarTableBatch\n} from '@loaders.gl/schema';\nimport type {Table as ArrowTable} from 'apache-arrow';\n\n// ParquetLoader\n\nimport {\n ParquetLoader as ParquetWorkerLoader,\n ParquetLoader as ParquetColumnarWorkerLoader,\n ParquetLoaderOptions\n} from './parquet-loader';\nimport {parseParquet, parseParquetFileInBatches} from './lib/parsers/parse-parquet-to-rows';\nimport {\n parseParquetInColumns,\n parseParquetFileInColumnarBatches\n} from './lib/parsers/parse-parquet-to-columns';\n\nimport {parseParquetWasm, ParquetWasmLoaderOptions} from './lib/wasm/parse-parquet-wasm';\nimport {ParquetWasmLoader as ParquetWasmWorkerLoader} from './parquet-wasm-loader';\n\nexport {ParquetWorkerLoader, ParquetWasmWorkerLoader};\n\n/** ParquetJS table loader */\nexport const ParquetLoader: LoaderWithParser<\n ObjectRowTable,\n ObjectRowTableBatch,\n ParquetLoaderOptions\n> = {\n ...ParquetWorkerLoader,\n parse: parseParquet,\n parseFileInBatches: parseParquetFileInBatches\n};\n\n/** ParquetJS table loader */\n// @ts-expect-error\nexport const ParquetColumnarLoader: LoaderWithParser<\n ColumnarTable,\n ColumnarTableBatch,\n ParquetLoaderOptions\n> = {\n ...ParquetColumnarWorkerLoader,\n parse: parseParquetInColumns,\n parseFileInBatches: parseParquetFileInColumnarBatches\n};\n\nexport const ParquetWasmLoader: LoaderWithParser<ArrowTable, never, ParquetWasmLoaderOptions> = {\n ...ParquetWasmWorkerLoader,\n parse: parseParquetWasm\n};\n\n// ParquetWriter\n\nexport {ParquetWriter as _ParquetWriter} from './parquet-writer';\nexport {ParquetWasmWriter} from './parquet-wasm-writer';\n\n// EXPERIMENTAL - expose the internal parquetjs API\n\nexport {preloadCompressions} from './parquetjs/compression';\n\nexport {ParquetSchema} from './parquetjs/schema/schema';\nexport {ParquetReader} from './parquetjs/parser/parquet-reader';\nexport {ParquetEncoder} from './parquetjs/encoder/parquet-encoder';\n\nexport {\n convertParquetSchema,\n convertParquetSchema as convertParquetToArrowSchema\n} from './lib/arrow/convert-schema-from-parquet';\n\n// TESTS\nexport const _typecheckParquetLoader: LoaderWithParser = ParquetLoader;\n\n// Geo Metadata\nexport {default as geoJSONSchema} from './lib/geo/geoparquet-schema';\n\nexport type {GeoMetadata} from './lib/geo/decode-geo-metadata';\nexport {getGeoMetadata, setGeoMetadata, 
unpackGeoMetadata} from './lib/geo/decode-geo-metadata';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAaA,IAAAA,cAAA,GAAAC,OAAA;AAKA,IAAAC,mBAAA,GAAAD,OAAA;AACA,IAAAE,sBAAA,GAAAF,OAAA;AAKA,IAAAG,iBAAA,GAAAH,OAAA;AACA,IAAAI,kBAAA,GAAAJ,OAAA;AAkCA,IAAAK,cAAA,GAAAL,OAAA;AACA,IAAAM,kBAAA,GAAAN,OAAA;AAIA,IAAAO,YAAA,GAAAP,OAAA;AAEA,IAAAQ,OAAA,GAAAR,OAAA;AACA,IAAAS,cAAA,GAAAT,OAAA;AACA,IAAAU,eAAA,GAAAV,OAAA;AAEA,IAAAW,yBAAA,GAAAX,OAAA;AASA,IAAAY,iBAAA,GAAAC,sBAAA,CAAAb,OAAA;AAGA,IAAAc,kBAAA,GAAAd,OAAA;AAAgG,SAAAe,QAAAC,MAAA,EAAAC,cAAA,QAAAC,IAAA,GAAAC,MAAA,CAAAD,IAAA,CAAAF,MAAA,OAAAG,MAAA,CAAAC,qBAAA,QAAAC,OAAA,GAAAF,MAAA,CAAAC,qBAAA,CAAAJ,MAAA,GAAAC,cAAA,KAAAI,OAAA,GAAAA,OAAA,CAAAC,MAAA,WAAAC,GAAA,WAAAJ,MAAA,CAAAK,wBAAA,CAAAR,MAAA,EAAAO,GAAA,EAAAE,UAAA,OAAAP,IAAA,CAAAQ,IAAA,CAAAC,KAAA,CAAAT,IAAA,EAAAG,OAAA,YAAAH,IAAA;AAAA,SAAAU,cAAAC,MAAA,aAAAC,CAAA,MAAAA,CAAA,GAAAC,SAAA,CAAAC,MAAA,EAAAF,CAAA,UAAAG,MAAA,WAAAF,SAAA,CAAAD,CAAA,IAAAC,SAAA,CAAAD,CAAA,QAAAA,CAAA,OAAAf,OAAA,CAAAI,MAAA,CAAAc,MAAA,OAAAC,OAAA,WAAAC,GAAA,QAAAC,gBAAA,CAAAC,OAAA,EAAAR,MAAA,EAAAM,GAAA,EAAAF,MAAA,CAAAE,GAAA,SAAAhB,MAAA,CAAAmB,yBAAA,GAAAnB,MAAA,CAAAoB,gBAAA,CAAAV,MAAA,EAAAV,MAAA,CAAAmB,yBAAA,CAAAL,MAAA,KAAAlB,OAAA,CAAAI,MAAA,CAAAc,MAAA,GAAAC,OAAA,WAAAC,GAAA,IAAAhB,MAAA,CAAAqB,cAAA,CAAAX,MAAA,EAAAM,GAAA,EAAAhB,MAAA,CAAAK,wBAAA,CAAAS,MAAA,EAAAE,GAAA,iBAAAN,MAAA;AApDzF,IAAMY,aAIZ,GAAAb,aAAA,CAAAA,aAAA,KACIc,4BAAmB;EACtBC,KAAK,EAAEC,gCAAY;EACnBC,kBAAkB,EAAEC;AAAyB,EAC9C;AAACC,OAAA,CAAAN,aAAA,GAAAA,aAAA;AAIK,IAAMO,qBAIZ,GAAApB,aAAA,CAAAA,aAAA,KACIqB,4BAA2B;EAC9BN,KAAK,EAAEO,4CAAqB;EAC5BL,kBAAkB,EAAEM;AAAiC,EACtD;AAACJ,OAAA,CAAAC,qBAAA,GAAAA,qBAAA;AAEK,IAAMI,iBAAgF,GAAAxB,aAAA,CAAAA,aAAA,KACxFyB,oCAAuB;EAC1BV,KAAK,EAAEW;AAAgB,EACxB;AAACP,OAAA,CAAAK,iBAAA,GAAAA,iBAAA;AAqBK,IAAMG,uBAAyC,GAAGd,aAAa;AAACM,OAAA,CAAAQ,uBAAA,GAAAA,uBAAA"}
package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"convert-row-group-to-columns.js","names":["convertParquetRowGroupToColumns","schema","rowGroup","columns","_i","_Object$entries","Object","entries","columnData","length","_Object$entries$_i","_slicedToArray2","default","columnName","data","values"],"sources":["../../../../src/lib/arrow/convert-row-group-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport {Schema} from '@loaders.gl/schema';\nimport {ParquetBuffer} from '@loaders.gl/parquet/parquetjs/schema/declare';\n\nexport function convertParquetRowGroupToColumns(\n schema: Schema,\n rowGroup: ParquetBuffer\n): Record<string, any[]> {\n const columns: Record<string, any[]> = {};\n for (const [columnName, data] of Object.entries(rowGroup.columnData)) {\n columns[columnName] = columns[columnName] || data.values;\n }\n return columns;\n}\n"],"mappings":";;;;;;;;AAKO,SAASA,+BAA+BA,CAC7CC,MAAc,EACdC,QAAuB,EACA;EACvB,IAAMC,OAA8B,GAAG,CAAC,CAAC;EACzC,SAAAC,EAAA,MAAAC,eAAA,GAAiCC,MAAM,CAACC,OAAO,CAACL,QAAQ,CAACM,UAAU,CAAC,EAAAJ,EAAA,GAAAC,eAAA,CAAAI,MAAA,EAAAL,EAAA,IAAE;IAAjE,IAAAM,kBAAA,OAAAC,eAAA,CAAAC,OAAA,EAAAP,eAAA,CAAAD,EAAA;MAAOS,UAAU,GAAAH,kBAAA;MAAEI,IAAI,GAAAJ,kBAAA;IAC1BP,OAAO,CAACU,UAAU,CAAC,GAAGV,OAAO,CAACU,UAAU,CAAC,IAAIC,IAAI,CAACC,MAAM;EAC1D;EACA,OAAOZ,OAAO;AAChB"}
+ {"version":3,"file":"convert-row-group-to-columns.js","names":["convertParquetRowGroupToColumns","schema","rowGroup","columns","_i","_Object$entries","Object","entries","columnData","length","_Object$entries$_i","_slicedToArray2","default","columnName","data","values"],"sources":["../../../../src/lib/arrow/convert-row-group-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport {Schema} from '@loaders.gl/schema';\nimport {ParquetRowGroup} from '@loaders.gl/parquet/parquetjs/schema/declare';\n\nexport function convertParquetRowGroupToColumns(\n schema: Schema,\n rowGroup: ParquetRowGroup\n): Record<string, any[]> {\n const columns: Record<string, any[]> = {};\n for (const [columnName, data] of Object.entries(rowGroup.columnData)) {\n columns[columnName] = columns[columnName] || data.values;\n }\n return columns;\n}\n"],"mappings":";;;;;;;;AAKO,SAASA,+BAA+BA,CAC7CC,MAAc,EACdC,QAAyB,EACF;EACvB,IAAMC,OAA8B,GAAG,CAAC,CAAC;EACzC,SAAAC,EAAA,MAAAC,eAAA,GAAiCC,MAAM,CAACC,OAAO,CAACL,QAAQ,CAACM,UAAU,CAAC,EAAAJ,EAAA,GAAAC,eAAA,CAAAI,MAAA,EAAAL,EAAA,IAAE;IAAjE,IAAAM,kBAAA,OAAAC,eAAA,CAAAC,OAAA,EAAAP,eAAA,CAAAD,EAAA;MAAOS,UAAU,GAAAH,kBAAA;MAAEI,IAAI,GAAAJ,kBAAA;IAC1BP,OAAO,CAACU,UAAU,CAAC,GAAGV,OAAO,CAACU,UAAU,CAAC,IAAIC,IAAI,CAACC,MAAM;EAC1D;EACA,OAAOZ,OAAO;AAChB"}
package/dist/es5/lib/arrow/convert-schema-from-parquet.js CHANGED
@@ -4,78 +4,93 @@ Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.PARQUET_TYPE_MAPPING = void 0;
- exports.convertSchemaFromParquet = convertSchemaFromParquet;
- var _schema = require("@loaders.gl/schema");
+ exports.convertParquetSchema = convertParquetSchema;
  function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
  function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
  function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
  var PARQUET_TYPE_MAPPING = {
- BOOLEAN: _schema.Bool,
- INT32: _schema.Int32,
- INT64: _schema.Float64,
- INT96: _schema.Float64,
- FLOAT: _schema.Float32,
- DOUBLE: _schema.Float64,
- BYTE_ARRAY: _schema.Binary,
- FIXED_LEN_BYTE_ARRAY: _schema.Binary,
- UTF8: _schema.Utf8,
- DATE: _schema.Int32,
- TIME_MILLIS: _schema.Int64,
- TIME_MICROS: _schema.Int64,
- TIMESTAMP_MILLIS: _schema.Int64,
- TIMESTAMP_MICROS: _schema.Int64,
- UINT_8: _schema.Int32,
- UINT_16: _schema.Uint16,
- UINT_32: _schema.Uint32,
- UINT_64: _schema.Uint64,
- INT_8: _schema.Int8,
- INT_16: _schema.Int16,
- INT_32: _schema.Int32,
- INT_64: _schema.Int64,
- JSON: _schema.Binary,
- BSON: _schema.Binary,
- INTERVAL: _schema.Binary,
- DECIMAL_INT32: _schema.Float32,
- DECIMAL_INT64: _schema.Float64,
- DECIMAL_BYTE_ARRAY: _schema.Float64,
- DECIMAL_FIXED_LEN_BYTE_ARRAY: _schema.Float64
+ BOOLEAN: 'bool',
+ INT32: 'int32',
+ INT64: 'float64',
+ INT96: 'float64',
+ FLOAT: 'float32',
+ DOUBLE: 'float64',
+ BYTE_ARRAY: 'binary',
+ FIXED_LEN_BYTE_ARRAY: 'binary',
+ UTF8: 'utf8',
+ DATE: 'int32',
+ TIME_MILLIS: 'int64',
+ TIME_MICROS: 'int64',
+ TIMESTAMP_MILLIS: 'int64',
+ TIMESTAMP_MICROS: 'int64',
+ UINT_8: 'int32',
+ UINT_16: 'uint16',
+ UINT_32: 'uint32',
+ UINT_64: 'uint64',
+ INT_8: 'int8',
+ INT_16: 'int16',
+ INT_32: 'int32',
+ INT_64: 'int64',
+ JSON: 'binary',
+ BSON: 'binary',
+ INTERVAL: 'binary',
+ DECIMAL_INT32: 'float32',
+ DECIMAL_INT64: 'float64',
+ DECIMAL_BYTE_ARRAY: 'float64',
+ DECIMAL_FIXED_LEN_BYTE_ARRAY: 'float64'
  };
  exports.PARQUET_TYPE_MAPPING = PARQUET_TYPE_MAPPING;
- function convertSchemaFromParquet(parquetSchema, parquetMetadata) {
+ function convertParquetSchema(parquetSchema, parquetMetadata) {
  var fields = getFields(parquetSchema.schema);
  var metadata = parquetMetadata && getSchemaMetadata(parquetMetadata);
- return new _schema.Schema(fields, metadata);
+ var schema = {
+ fields: fields,
+ metadata: metadata || {}
+ };
+ return schema;
  }
  function getFields(schema) {
  var fields = [];
  for (var name in schema) {
  var field = schema[name];
  if (field.fields) {
- var childFields = getFields(field.fields);
- var nestedField = new _schema.Field(name, new _schema.Struct(childFields), field.optional);
- fields.push(nestedField);
+ var children = getFields(field.fields);
+ fields.push({
+ name: name,
+ type: {
+ type: 'struct',
+ children: children
+ },
+ nullable: field.optional
+ });
  } else {
- var FieldType = PARQUET_TYPE_MAPPING[field.type];
+ var type = PARQUET_TYPE_MAPPING[field.type];
  var metadata = getFieldMetadata(field);
- var arrowField = new _schema.Field(name, new FieldType(), field.optional, metadata);
+ var arrowField = {
+ name: name,
+ type: type,
+ nullable: field.optional,
+ metadata: metadata
+ };
  fields.push(arrowField);
  }
  }
  return fields;
  }
  function getFieldMetadata(field) {
- var metadata = new Map();
+ var metadata;
  for (var key in field) {
  if (key !== 'name') {
  var value = field[key] || '';
  value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];
- metadata.set(key, value);
+ metadata = metadata || {};
+ metadata[key] = value;
  }
  }
  return metadata;
  }
  function getSchemaMetadata(parquetMetadata) {
- var metadata = new Map();
+ var metadata;
  var keyValueList = parquetMetadata.key_value_metadata || [];
  var _iterator = _createForOfIteratorHelper(keyValueList),
  _step;
@@ -85,7 +100,8 @@ function getSchemaMetadata(parquetMetadata) {
  key = _step$value.key,
  value = _step$value.value;
  if (typeof value === 'string') {
- metadata.set(key, value);
+ metadata = metadata || {};
+ metadata[key] = value;
  }
  }
  } catch (err) {
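
Note on the hunk above: the converter no longer builds Arrow `Schema`/`Field` class instances. In 4.0.0-alpha it returns a plain serializable schema object whose field types are string `DataType` names (or `{type: 'struct', children}` objects for nested fields), and whose metadata is a plain `Record<string, string>` instead of a `Map`. A sketch of consuming that shape, assuming only what the new source above shows:

```typescript
import type {Schema} from '@loaders.gl/schema';

// Hypothetical helper that walks the plain-object schema returned by convertParquetSchema.
export function printSchema(schema: Schema): void {
  // 4.0 metadata is a Record<string, string>, not a Map as in 3.4.x.
  console.log('schema metadata keys:', Object.keys(schema.metadata));
  for (const field of schema.fields) {
    // field.type is a string such as 'int32', or {type: 'struct', children: [...]} for nested fields.
    console.log(field.name, field.type, field.nullable ? '(nullable)' : '');
  }
}
```
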
package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"convert-schema-from-parquet.js","names":["_schema","require","_createForOfIteratorHelper","o","allowArrayLike","it","Symbol","iterator","Array","isArray","_unsupportedIterableToArray","length","i","F","s","n","done","value","e","_e","f","TypeError","normalCompletion","didErr","err","call","step","next","_e2","return","minLen","_arrayLikeToArray","Object","prototype","toString","slice","constructor","name","from","test","arr","len","arr2","PARQUET_TYPE_MAPPING","BOOLEAN","Bool","INT32","Int32","INT64","Float64","INT96","FLOAT","Float32","DOUBLE","BYTE_ARRAY","Binary","FIXED_LEN_BYTE_ARRAY","UTF8","Utf8","DATE","TIME_MILLIS","Int64","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","Uint16","UINT_32","Uint32","UINT_64","Uint64","INT_8","Int8","INT_16","Int16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","exports","convertSchemaFromParquet","parquetSchema","parquetMetadata","fields","getFields","schema","metadata","getSchemaMetadata","Schema","field","childFields","nestedField","Field","Struct","optional","push","FieldType","type","getFieldMetadata","arrowField","Map","key","stringify","set","keyValueList","key_value_metadata","_iterator","_step","_step$value"],"sources":["../../../../src/lib/arrow/convert-schema-from-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {FieldDefinition, ParquetField, ParquetType} from '../../parquetjs/schema/declare';\nimport {FileMetaData} from '@loaders.gl/parquet/parquetjs/parquet-thrift';\n\nimport {\n Schema,\n Struct,\n Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interval type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertSchemaFromParquet(\n parquetSchema: ParquetSchema,\n parquetMetadata?: FileMetaData\n): Schema {\n const fields = getFields(parquetSchema.schema);\n const metadata = parquetMetadata && getSchemaMetadata(parquetMetadata);\n return new Schema(fields, metadata);\n}\n\nfunction getFields(schema: FieldDefinition): Field[] {\n const fields: Field[] = [];\n\n for (const name in schema) {\n const field = schema[name];\n\n if (field.fields) {\n const childFields = getFields(field.fields);\n const nestedField = new Field(name, new Struct(childFields), field.optional);\n fields.push(nestedField);\n } else {\n const FieldType = PARQUET_TYPE_MAPPING[field.type];\n const metadata = getFieldMetadata(field);\n const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n fields.push(arrowField);\n }\n }\n\n return fields;\n}\n\nfunction getFieldMetadata(field: ParquetField): Map<string, string> {\n const metadata = new Map();\n\n for 
(const key in field) {\n if (key !== 'name') {\n let value = field[key] || '';\n value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];\n metadata.set(key, value);\n }\n }\n\n return metadata;\n}\n\nfunction getSchemaMetadata(parquetMetadata: FileMetaData): Map<string, string> {\n const metadata = new Map();\n\n const keyValueList = parquetMetadata.key_value_metadata || [];\n for (const {key, value} of keyValueList) {\n if (typeof value === 'string') {\n metadata.set(key, value);\n }\n }\n\n return metadata;\n}\n"],"mappings":";;;;;;;AAMA,IAAAA,OAAA,GAAAC,OAAA;AAiB4B,SAAAC,2BAAAC,CAAA,EAAAC,cAAA,QAAAC,EAAA,UAAAC,MAAA,oBAAAH,CAAA,CAAAG,MAAA,CAAAC,QAAA,KAAAJ,CAAA,qBAAAE,EAAA,QAAAG,KAAA,CAAAC,OAAA,CAAAN,CAAA,MAAAE,EAAA,GAAAK,2BAAA,CAAAP,CAAA,MAAAC,cAAA,IAAAD,CAAA,WAAAA,CAAA,CAAAQ,MAAA,qBAAAN,EAAA,EAAAF,CAAA,GAAAE,EAAA,MAAAO,CAAA,UAAAC,CAAA,YAAAA,EAAA,eAAAC,CAAA,EAAAD,CAAA,EAAAE,CAAA,WAAAA,EAAA,QAAAH,CAAA,IAAAT,CAAA,CAAAQ,MAAA,WAAAK,IAAA,mBAAAA,IAAA,SAAAC,KAAA,EAAAd,CAAA,CAAAS,CAAA,UAAAM,CAAA,WAAAA,EAAAC,EAAA,UAAAA,EAAA,KAAAC,CAAA,EAAAP,CAAA,gBAAAQ,SAAA,iJAAAC,gBAAA,SAAAC,MAAA,UAAAC,GAAA,WAAAV,CAAA,WAAAA,EAAA,IAAAT,EAAA,GAAAA,EAAA,CAAAoB,IAAA,CAAAtB,CAAA,MAAAY,CAAA,WAAAA,EAAA,QAAAW,IAAA,GAAArB,EAAA,CAAAsB,IAAA,IAAAL,gBAAA,GAAAI,IAAA,CAAAV,IAAA,SAAAU,IAAA,KAAAR,CAAA,WAAAA,EAAAU,GAAA,IAAAL,MAAA,SAAAC,GAAA,GAAAI,GAAA,KAAAR,CAAA,WAAAA,EAAA,eAAAE,gBAAA,IAAAjB,EAAA,CAAAwB,MAAA,UAAAxB,EAAA,CAAAwB,MAAA,oBAAAN,MAAA,QAAAC,GAAA;AAAA,SAAAd,4BAAAP,CAAA,EAAA2B,MAAA,SAAA3B,CAAA,qBAAAA,CAAA,sBAAA4B,iBAAA,CAAA5B,CAAA,EAAA2B,MAAA,OAAAf,CAAA,GAAAiB,MAAA,CAAAC,SAAA,CAAAC,QAAA,CAAAT,IAAA,CAAAtB,CAAA,EAAAgC,KAAA,aAAApB,CAAA,iBAAAZ,CAAA,CAAAiC,WAAA,EAAArB,CAAA,GAAAZ,CAAA,CAAAiC,WAAA,CAAAC,IAAA,MAAAtB,CAAA,cAAAA,CAAA,mBAAAP,KAAA,CAAA8B,IAAA,CAAAnC,CAAA,OAAAY,CAAA,+DAAAwB,IAAA,CAAAxB,CAAA,UAAAgB,iBAAA,CAAA5B,CAAA,EAAA2B,MAAA;AAAA,SAAAC,kBAAAS,GAAA,EAAAC,GAAA,QAAAA,GAAA,YAAAA,GAAA,GAAAD,GAAA,CAAA7B,MAAA,EAAA8B,GAAA,GAAAD,GAAA,CAAA7B,MAAA,WAAAC,CAAA,MAAA8B,IAAA,OAAAlC,KAAA,CAAAiC,GAAA,GAAA7B,CAAA,GAAA6B,GAAA,EAAA7B,CAAA,IAAA8B,IAAA,CAAA9B,CAAA,IAAA4B,GAAA,CAAA5B,CAAA,UAAA8B,IAAA;AAErB,IAAMC,oBAA8D,GAAG;EAC5EC,OAAO,EAAEC,YAAI;EACbC,KAAK,EAAEC,aAAK;EACZC,KAAK,EAAEC,eAAO;EACdC,KAAK,EAAED,eAAO;EACdE,KAAK,EAAEC,eAAO;EACdC,MAAM,EAAEJ,eAAO;EACfK,UAAU,EAAEC,cAAM;EAClBC,oBAAoB,EAAED,cAAM;EAC5BE,IAAI,EAAEC,YAAI;EACVC,IAAI,EAAEZ,aAAK;EACXa,WAAW,EAAEC,aAAK;EAClBC,WAAW,EAAED,aAAK;EAClBE,gBAAgB,EAAEF,aAAK;EACvBG,gBAAgB,EAAEH,aAAK;EACvBI,MAAM,EAAElB,aAAK;EACbmB,OAAO,EAAEC,cAAM;EACfC,OAAO,EAAEC,cAAM;EACfC,OAAO,EAAEC,cAAM;EACfC,KAAK,EAAEC,YAAI;EACXC,MAAM,EAAEC,aAAK;EACbC,MAAM,EAAE7B,aAAK;EACb8B,MAAM,EAAEhB,aAAK;EACbiB,IAAI,EAAEvB,cAAM;EACZwB,IAAI,EAAExB,cAAM;EAEZyB,QAAQ,EAAEzB,cAAM;EAChB0B,aAAa,EAAE7B,eAAO;EACtB8B,aAAa,EAAEjC,eAAO;EACtBkC,kBAAkB,EAAElC,eAAO;EAC3BmC,4BAA4B,EAAEnC;AAChC,CAAC;AAACoC,OAAA,CAAA1C,oBAAA,GAAAA,oBAAA;AAEK,SAAS2C,wBAAwBA,CACtCC,aAA4B,EAC5BC,eAA8B,EACtB;EACR,IAAMC,MAAM,GAAGC,SAAS,CAACH,aAAa,CAACI,MAAM,CAAC;EAC9C,IAAMC,QAAQ,GAAGJ,eAAe,IAAIK,iBAAiB,CAACL,eAAe,CAAC;EACtE,OAAO,IAAIM,cAAM,CAACL,MAAM,EAAEG,QAAQ,CAAC;AACrC;AAEA,SAASF,SAASA,CAACC,MAAuB,EAAW;EACnD,IAAMF,MAAe,GAAG,EAAE;EAE1B,KAAK,IAAMpD,IAAI,IAAIsD,MAAM,EAAE;IACzB,IAAMI,KAAK,GAAGJ,MAAM,CAACtD,IAAI,CAAC;IAE1B,IAAI0D,KAAK,CAACN,MAAM,EAAE;MAChB,IAAMO,WAAW,GAAGN,SAAS,CAACK,KAAK,CAACN,MAAM,CAAC;MAC3C,IAAMQ,WAAW,GAAG,IAAIC,aAAK,CAAC7D,IAAI,EAAE,IAAI8D,cAAM,CAACH,WAAW,CAAC,EAAED,KAAK,CAACK,QAAQ,CAAC;MAC5EX,MAAM,CAACY,IAAI,CAACJ,WAAW,CAAC;IAC1B,CAAC,MAAM;MACL,IAAMK,SAAS,GAAG3D,oBAAoB,CAACoD,KAAK,CAACQ,IAAI,CAAC;MAClD,IAAMX,QAAQ,GAAGY,gBAA
gB,CAACT,KAAK,CAAC;MACxC,IAAMU,UAAU,GAAG,IAAIP,aAAK,CAAC7D,IAAI,EAAE,IAAIiE,SAAS,CAAC,CAAC,EAAEP,KAAK,CAACK,QAAQ,EAAER,QAAQ,CAAC;MAC7EH,MAAM,CAACY,IAAI,CAACI,UAAU,CAAC;IACzB;EACF;EAEA,OAAOhB,MAAM;AACf;AAEA,SAASe,gBAAgBA,CAACT,KAAmB,EAAuB;EAClE,IAAMH,QAAQ,GAAG,IAAIc,GAAG,CAAC,CAAC;EAE1B,KAAK,IAAMC,GAAG,IAAIZ,KAAK,EAAE;IACvB,IAAIY,GAAG,KAAK,MAAM,EAAE;MAClB,IAAI1F,KAAK,GAAG8E,KAAK,CAACY,GAAG,CAAC,IAAI,EAAE;MAC5B1F,KAAK,GAAG,OAAO8E,KAAK,CAACY,GAAG,CAAC,KAAK,QAAQ,GAAG7B,IAAI,CAAC8B,SAAS,CAACb,KAAK,CAACY,GAAG,CAAC,CAAC,GAAGZ,KAAK,CAACY,GAAG,CAAC;MAChFf,QAAQ,CAACiB,GAAG,CAACF,GAAG,EAAE1F,KAAK,CAAC;IAC1B;EACF;EAEA,OAAO2E,QAAQ;AACjB;AAEA,SAASC,iBAAiBA,CAACL,eAA6B,EAAuB;EAC7E,IAAMI,QAAQ,GAAG,IAAIc,GAAG,CAAC,CAAC;EAE1B,IAAMI,YAAY,GAAGtB,eAAe,CAACuB,kBAAkB,IAAI,EAAE;EAAC,IAAAC,SAAA,GAAA9G,0BAAA,CACnC4G,YAAY;IAAAG,KAAA;EAAA;IAAvC,KAAAD,SAAA,CAAAlG,CAAA,MAAAmG,KAAA,GAAAD,SAAA,CAAAjG,CAAA,IAAAC,IAAA,GAAyC;MAAA,IAAAkG,WAAA,GAAAD,KAAA,CAAAhG,KAAA;QAA7B0F,GAAG,GAAAO,WAAA,CAAHP,GAAG;QAAE1F,KAAK,GAAAiG,WAAA,CAALjG,KAAK;MACpB,IAAI,OAAOA,KAAK,KAAK,QAAQ,EAAE;QAC7B2E,QAAQ,CAACiB,GAAG,CAACF,GAAG,EAAE1F,KAAK,CAAC;MAC1B;IACF;EAAC,SAAAO,GAAA;IAAAwF,SAAA,CAAA9F,CAAA,CAAAM,GAAA;EAAA;IAAAwF,SAAA,CAAA5F,CAAA;EAAA;EAED,OAAOwE,QAAQ;AACjB"}
+ {"version":3,"file":"convert-schema-from-parquet.js","names":["PARQUET_TYPE_MAPPING","BOOLEAN","INT32","INT64","INT96","FLOAT","DOUBLE","BYTE_ARRAY","FIXED_LEN_BYTE_ARRAY","UTF8","DATE","TIME_MILLIS","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","UINT_32","UINT_64","INT_8","INT_16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","exports","convertParquetSchema","parquetSchema","parquetMetadata","fields","getFields","schema","metadata","getSchemaMetadata","name","field","children","push","type","nullable","optional","getFieldMetadata","arrowField","key","value","stringify","keyValueList","key_value_metadata","_iterator","_createForOfIteratorHelper","_step","s","n","done","_step$value","err","e","f"],"sources":["../../../../src/lib/arrow/convert-schema-from-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport {Schema, Field, DataType} from '@loaders.gl/schema';\n\nimport type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {FieldDefinition, ParquetField, ParquetType} from '../../parquetjs/schema/declare';\nimport {FileMetaData} from '../../parquetjs/parquet-thrift';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: DataType} = {\n BOOLEAN: 'bool',\n INT32: 'int32',\n INT64: 'float64',\n INT96: 'float64',\n FLOAT: 'float32',\n DOUBLE: 'float64',\n BYTE_ARRAY: 'binary',\n FIXED_LEN_BYTE_ARRAY: 'binary',\n UTF8: 'utf8',\n DATE: 'int32',\n TIME_MILLIS: 'int64',\n TIME_MICROS: 'int64',\n TIMESTAMP_MILLIS: 'int64',\n TIMESTAMP_MICROS: 'int64',\n UINT_8: 'int32',\n UINT_16: 'uint16',\n UINT_32: 'uint32',\n UINT_64: 'uint64',\n INT_8: 'int8',\n INT_16: 'int16',\n INT_32: 'int32',\n INT_64: 'int64',\n JSON: 'binary',\n BSON: 'binary',\n // TODO check interal type\n INTERVAL: 'binary',\n DECIMAL_INT32: 'float32',\n DECIMAL_INT64: 'float64',\n DECIMAL_BYTE_ARRAY: 'float64',\n DECIMAL_FIXED_LEN_BYTE_ARRAY: 'float64'\n};\n\nexport function convertParquetSchema(\n parquetSchema: ParquetSchema,\n parquetMetadata: FileMetaData | null\n): Schema {\n const fields = getFields(parquetSchema.schema);\n const metadata = parquetMetadata && getSchemaMetadata(parquetMetadata);\n\n const schema: Schema = {\n fields,\n metadata: metadata || {}\n };\n\n return schema;\n}\n\nfunction getFields(schema: FieldDefinition): Field[] {\n const fields: Field[] = [];\n\n for (const name in schema) {\n const field = schema[name];\n\n if (field.fields) {\n const children = getFields(field.fields);\n fields.push({name, type: {type: 'struct', children}, nullable: field.optional});\n } else {\n const type = PARQUET_TYPE_MAPPING[field.type];\n const metadata = getFieldMetadata(field);\n const arrowField = {name, type, nullable: field.optional, metadata};\n fields.push(arrowField);\n }\n }\n\n return fields;\n}\n\nfunction getFieldMetadata(field: ParquetField): Record<string, string> | undefined {\n let metadata: Record<string, string> | undefined;\n\n for (const key in field) {\n if (key !== 'name') {\n let value = field[key] || '';\n value = typeof field[key] !== 'string' ? 
JSON.stringify(field[key]) : field[key];\n metadata = metadata || {};\n metadata[key] = value;\n }\n }\n\n return metadata;\n}\n\nfunction getSchemaMetadata(parquetMetadata: FileMetaData): Record<string, string> | undefined {\n let metadata: Record<string, string> | undefined;\n\n const keyValueList = parquetMetadata.key_value_metadata || [];\n for (const {key, value} of keyValueList) {\n if (typeof value === 'string') {\n metadata = metadata || {};\n metadata[key] = value;\n }\n }\n\n return metadata;\n}\n"],"mappings":";;;;;;;;;;AAQO,IAAMA,oBAAuD,GAAG;EACrEC,OAAO,EAAE,MAAM;EACfC,KAAK,EAAE,OAAO;EACdC,KAAK,EAAE,SAAS;EAChBC,KAAK,EAAE,SAAS;EAChBC,KAAK,EAAE,SAAS;EAChBC,MAAM,EAAE,SAAS;EACjBC,UAAU,EAAE,QAAQ;EACpBC,oBAAoB,EAAE,QAAQ;EAC9BC,IAAI,EAAE,MAAM;EACZC,IAAI,EAAE,OAAO;EACbC,WAAW,EAAE,OAAO;EACpBC,WAAW,EAAE,OAAO;EACpBC,gBAAgB,EAAE,OAAO;EACzBC,gBAAgB,EAAE,OAAO;EACzBC,MAAM,EAAE,OAAO;EACfC,OAAO,EAAE,QAAQ;EACjBC,OAAO,EAAE,QAAQ;EACjBC,OAAO,EAAE,QAAQ;EACjBC,KAAK,EAAE,MAAM;EACbC,MAAM,EAAE,OAAO;EACfC,MAAM,EAAE,OAAO;EACfC,MAAM,EAAE,OAAO;EACfC,IAAI,EAAE,QAAQ;EACdC,IAAI,EAAE,QAAQ;EAEdC,QAAQ,EAAE,QAAQ;EAClBC,aAAa,EAAE,SAAS;EACxBC,aAAa,EAAE,SAAS;EACxBC,kBAAkB,EAAE,SAAS;EAC7BC,4BAA4B,EAAE;AAChC,CAAC;AAACC,OAAA,CAAA9B,oBAAA,GAAAA,oBAAA;AAEK,SAAS+B,oBAAoBA,CAClCC,aAA4B,EAC5BC,eAAoC,EAC5B;EACR,IAAMC,MAAM,GAAGC,SAAS,CAACH,aAAa,CAACI,MAAM,CAAC;EAC9C,IAAMC,QAAQ,GAAGJ,eAAe,IAAIK,iBAAiB,CAACL,eAAe,CAAC;EAEtE,IAAMG,MAAc,GAAG;IACrBF,MAAM,EAANA,MAAM;IACNG,QAAQ,EAAEA,QAAQ,IAAI,CAAC;EACzB,CAAC;EAED,OAAOD,MAAM;AACf;AAEA,SAASD,SAASA,CAACC,MAAuB,EAAW;EACnD,IAAMF,MAAe,GAAG,EAAE;EAE1B,KAAK,IAAMK,IAAI,IAAIH,MAAM,EAAE;IACzB,IAAMI,KAAK,GAAGJ,MAAM,CAACG,IAAI,CAAC;IAE1B,IAAIC,KAAK,CAACN,MAAM,EAAE;MAChB,IAAMO,QAAQ,GAAGN,SAAS,CAACK,KAAK,CAACN,MAAM,CAAC;MACxCA,MAAM,CAACQ,IAAI,CAAC;QAACH,IAAI,EAAJA,IAAI;QAAEI,IAAI,EAAE;UAACA,IAAI,EAAE,QAAQ;UAAEF,QAAQ,EAARA;QAAQ,CAAC;QAAEG,QAAQ,EAAEJ,KAAK,CAACK;MAAQ,CAAC,CAAC;IACjF,CAAC,MAAM;MACL,IAAMF,IAAI,GAAG3C,oBAAoB,CAACwC,KAAK,CAACG,IAAI,CAAC;MAC7C,IAAMN,QAAQ,GAAGS,gBAAgB,CAACN,KAAK,CAAC;MACxC,IAAMO,UAAU,GAAG;QAACR,IAAI,EAAJA,IAAI;QAAEI,IAAI,EAAJA,IAAI;QAAEC,QAAQ,EAAEJ,KAAK,CAACK,QAAQ;QAAER,QAAQ,EAARA;MAAQ,CAAC;MACnEH,MAAM,CAACQ,IAAI,CAACK,UAAU,CAAC;IACzB;EACF;EAEA,OAAOb,MAAM;AACf;AAEA,SAASY,gBAAgBA,CAACN,KAAmB,EAAsC;EACjF,IAAIH,QAA4C;EAEhD,KAAK,IAAMW,GAAG,IAAIR,KAAK,EAAE;IACvB,IAAIQ,GAAG,KAAK,MAAM,EAAE;MAClB,IAAIC,KAAK,GAAGT,KAAK,CAACQ,GAAG,CAAC,IAAI,EAAE;MAC5BC,KAAK,GAAG,OAAOT,KAAK,CAACQ,GAAG,CAAC,KAAK,QAAQ,GAAGzB,IAAI,CAAC2B,SAAS,CAACV,KAAK,CAACQ,GAAG,CAAC,CAAC,GAAGR,KAAK,CAACQ,GAAG,CAAC;MAChFX,QAAQ,GAAGA,QAAQ,IAAI,CAAC,CAAC;MACzBA,QAAQ,CAACW,GAAG,CAAC,GAAGC,KAAK;IACvB;EACF;EAEA,OAAOZ,QAAQ;AACjB;AAEA,SAASC,iBAAiBA,CAACL,eAA6B,EAAsC;EAC5F,IAAII,QAA4C;EAEhD,IAAMc,YAAY,GAAGlB,eAAe,CAACmB,kBAAkB,IAAI,EAAE;EAAC,IAAAC,SAAA,GAAAC,0BAAA,CACnCH,YAAY;IAAAI,KAAA;EAAA;IAAvC,KAAAF,SAAA,CAAAG,CAAA,MAAAD,KAAA,GAAAF,SAAA,CAAAI,CAAA,IAAAC,IAAA,GAAyC;MAAA,IAAAC,WAAA,GAAAJ,KAAA,CAAAN,KAAA;QAA7BD,GAAG,GAAAW,WAAA,CAAHX,GAAG;QAAEC,KAAK,GAAAU,WAAA,CAALV,KAAK;MACpB,IAAI,OAAOA,KAAK,KAAK,QAAQ,EAAE;QAC7BZ,QAAQ,GAAGA,QAAQ,IAAI,CAAC,CAAC;QACzBA,QAAQ,CAACW,GAAG,CAAC,GAAGC,KAAK;MACvB;IACF;EAAC,SAAAW,GAAA;IAAAP,SAAA,CAAAQ,CAAA,CAAAD,GAAA;EAAA;IAAAP,SAAA,CAAAS,CAAA;EAAA;EAED,OAAOzB,QAAQ;AACjB"}
package/dist/es5/lib/arrow/convert-schema-to-parquet.js CHANGED
@@ -5,41 +5,43 @@ Object.defineProperty(exports, "__esModule", {
  });
  exports.PARQUET_TYPE_MAPPING = void 0;
  exports.convertToParquetSchema = convertToParquetSchema;
- var _schema = require("@loaders.gl/schema");
  var PARQUET_TYPE_MAPPING = {
- BOOLEAN: _schema.Bool,
- INT32: _schema.Int32,
- INT64: _schema.Float64,
- INT96: _schema.Float64,
- FLOAT: _schema.Float32,
- DOUBLE: _schema.Float64,
- BYTE_ARRAY: _schema.Binary,
- FIXED_LEN_BYTE_ARRAY: _schema.Binary,
- UTF8: _schema.Utf8,
- DATE: _schema.Int32,
- TIME_MILLIS: _schema.Int64,
- TIME_MICROS: _schema.Int64,
- TIMESTAMP_MILLIS: _schema.Int64,
- TIMESTAMP_MICROS: _schema.Int64,
- UINT_8: _schema.Int32,
- UINT_16: _schema.Uint16,
- UINT_32: _schema.Uint32,
- UINT_64: _schema.Uint64,
- INT_8: _schema.Int8,
- INT_16: _schema.Int16,
- INT_32: _schema.Int32,
- INT_64: _schema.Int64,
- JSON: _schema.Binary,
- BSON: _schema.Binary,
- INTERVAL: _schema.Binary,
- DECIMAL_INT32: _schema.Float32,
- DECIMAL_INT64: _schema.Float64,
- DECIMAL_BYTE_ARRAY: _schema.Float64,
- DECIMAL_FIXED_LEN_BYTE_ARRAY: _schema.Float64
+ BOOLEAN: 'bool',
+ INT32: 'int32',
+ INT64: 'float64',
+ INT96: 'float64',
+ FLOAT: 'float32',
+ DOUBLE: 'float64',
+ BYTE_ARRAY: 'binary',
+ FIXED_LEN_BYTE_ARRAY: 'binary',
+ UTF8: 'utf8',
+ DATE: 'int32',
+ TIME_MILLIS: 'int64',
+ TIME_MICROS: 'int64',
+ TIMESTAMP_MILLIS: 'int64',
+ TIMESTAMP_MICROS: 'int64',
+ UINT_8: 'int32',
+ UINT_16: 'uint16',
+ UINT_32: 'uint32',
+ UINT_64: 'uint64',
+ INT_8: 'int8',
+ INT_16: 'int16',
+ INT_32: 'int32',
+ INT_64: 'int64',
+ JSON: 'binary',
+ BSON: 'binary',
+ INTERVAL: 'binary',
+ DECIMAL_INT32: 'float32',
+ DECIMAL_INT64: 'float64',
+ DECIMAL_BYTE_ARRAY: 'float64',
+ DECIMAL_FIXED_LEN_BYTE_ARRAY: 'float64'
  };
  exports.PARQUET_TYPE_MAPPING = PARQUET_TYPE_MAPPING;
  function convertToParquetSchema(schema) {
  var fields = [];
- return new _schema.Schema(fields);
+ return {
+ fields: fields,
+ metadata: {}
+ };
  }
  //# sourceMappingURL=convert-schema-to-parquet.js.map
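
Note on the hunk above: as in the previous file, the Arrow type classes are replaced by string `DataType` names, and the stubbed `convertToParquetSchema` now returns a plain `{fields, metadata}` object instead of `new Schema(fields)`. For orientation, a hand-written example (not taken from the package) of a schema literal in the new representation:

```typescript
import type {Schema} from '@loaders.gl/schema';

// Hand-written illustration of the plain-object Schema shape used throughout 4.0.
export const exampleSchema: Schema = {
  fields: [
    {name: 'id', type: 'int32', nullable: false},
    {name: 'value', type: 'float64', nullable: true},
    {name: 'label', type: 'utf8', nullable: true, metadata: {origin: 'example'}}
  ],
  metadata: {}
};
```
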
package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"convert-schema-to-parquet.js","names":["_schema","require","PARQUET_TYPE_MAPPING","BOOLEAN","Bool","INT32","Int32","INT64","Float64","INT96","FLOAT","Float32","DOUBLE","BYTE_ARRAY","Binary","FIXED_LEN_BYTE_ARRAY","UTF8","Utf8","DATE","TIME_MILLIS","Int64","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","Uint16","UINT_32","Uint32","UINT_64","Uint64","INT_8","Int8","INT_16","Int16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","exports","convertToParquetSchema","schema","fields","Schema"],"sources":["../../../../src/lib/arrow/convert-schema-to-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {\n // FieldDefinition, ParquetField,\n ParquetType\n} from '../../parquetjs/schema/declare';\n\nimport {\n Schema,\n // Struct,\n // Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interval type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertToParquetSchema(schema: Schema): Schema {\n const fields = []; // getFields(schema.fields);\n\n // TODO add metadata if needed.\n return new Schema(fields);\n}\n\n// function getFields(schema: Field[]): Definition[] {\n// const fields: Field[] = [];\n\n// for (const name in schema) {\n// const field = schema[name];\n\n// // @ts-ignore\n// const children = field.children as DataType[];\n// if (children) {\n// const childField = getFields(field.fields);\n// const nestedField = new Field(name, new Struct(childField), field.optional);\n// fields.push(nestedField);\n// } else {\n// const FieldType = PARQUET_TYPE_MAPPING[field.type];\n// const metadata = getFieldMetadata(field);\n// const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n// fields.push(arrowField);\n// }\n// }\n\n// return fields;\n// }\n\n// function getFieldMetadata(field: ParquetField): Map<string, string> {\n// const metadata = new Map();\n\n// for (const key in field) {\n// if (key !== 'name') {\n// const value = typeof field[key] !== 'string' ? 
JSON.stringify(field[key]) : field[key];\n// metadata.set(key, value);\n// }\n// }\n\n// return metadata;\n// }\n"],"mappings":";;;;;;;AAQA,IAAAA,OAAA,GAAAC,OAAA;AAmBO,IAAMC,oBAA8D,GAAG;EAC5EC,OAAO,EAAEC,YAAI;EACbC,KAAK,EAAEC,aAAK;EACZC,KAAK,EAAEC,eAAO;EACdC,KAAK,EAAED,eAAO;EACdE,KAAK,EAAEC,eAAO;EACdC,MAAM,EAAEJ,eAAO;EACfK,UAAU,EAAEC,cAAM;EAClBC,oBAAoB,EAAED,cAAM;EAC5BE,IAAI,EAAEC,YAAI;EACVC,IAAI,EAAEZ,aAAK;EACXa,WAAW,EAAEC,aAAK;EAClBC,WAAW,EAAED,aAAK;EAClBE,gBAAgB,EAAEF,aAAK;EACvBG,gBAAgB,EAAEH,aAAK;EACvBI,MAAM,EAAElB,aAAK;EACbmB,OAAO,EAAEC,cAAM;EACfC,OAAO,EAAEC,cAAM;EACfC,OAAO,EAAEC,cAAM;EACfC,KAAK,EAAEC,YAAI;EACXC,MAAM,EAAEC,aAAK;EACbC,MAAM,EAAE7B,aAAK;EACb8B,MAAM,EAAEhB,aAAK;EACbiB,IAAI,EAAEvB,cAAM;EACZwB,IAAI,EAAExB,cAAM;EAEZyB,QAAQ,EAAEzB,cAAM;EAChB0B,aAAa,EAAE7B,eAAO;EACtB8B,aAAa,EAAEjC,eAAO;EACtBkC,kBAAkB,EAAElC,eAAO;EAC3BmC,4BAA4B,EAAEnC;AAChC,CAAC;AAACoC,OAAA,CAAA1C,oBAAA,GAAAA,oBAAA;AAEK,SAAS2C,sBAAsBA,CAACC,MAAc,EAAU;EAC7D,IAAMC,MAAM,GAAG,EAAE;EAGjB,OAAO,IAAIC,cAAM,CAACD,MAAM,CAAC;AAC3B"}
+ {"version":3,"file":"convert-schema-to-parquet.js","names":["PARQUET_TYPE_MAPPING","BOOLEAN","INT32","INT64","INT96","FLOAT","DOUBLE","BYTE_ARRAY","FIXED_LEN_BYTE_ARRAY","UTF8","DATE","TIME_MILLIS","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","UINT_32","UINT_64","INT_8","INT_16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","exports","convertToParquetSchema","schema","fields","metadata"],"sources":["../../../../src/lib/arrow/convert-schema-to-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {\n // FieldDefinition, ParquetField,\n ParquetType\n} from '../../parquetjs/schema/declare';\n\nimport {\n Schema,\n // Struct,\n // Field,\n DataType\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: DataType} = {\n BOOLEAN: 'bool',\n INT32: 'int32',\n INT64: 'float64',\n INT96: 'float64',\n FLOAT: 'float32',\n DOUBLE: 'float64',\n BYTE_ARRAY: 'binary',\n FIXED_LEN_BYTE_ARRAY: 'binary',\n UTF8: 'utf8',\n DATE: 'int32',\n TIME_MILLIS: 'int64',\n TIME_MICROS: 'int64',\n TIMESTAMP_MILLIS: 'int64',\n TIMESTAMP_MICROS: 'int64',\n UINT_8: 'int32',\n UINT_16: 'uint16',\n UINT_32: 'uint32',\n UINT_64: 'uint64',\n INT_8: 'int8',\n INT_16: 'int16',\n INT_32: 'int32',\n INT_64: 'int64',\n JSON: 'binary',\n BSON: 'binary',\n // TODO check interval type\n INTERVAL: 'binary',\n DECIMAL_INT32: 'float32',\n DECIMAL_INT64: 'float64',\n DECIMAL_BYTE_ARRAY: 'float64',\n DECIMAL_FIXED_LEN_BYTE_ARRAY: 'float64'\n};\n\nexport function convertToParquetSchema(schema: Schema): Schema {\n const fields = []; // getFields(schema.fields);\n\n // TODO add metadata if needed.\n return {fields, metadata: {}};\n}\n\n// function getFields(schema: Field[]): Definition[] {\n// const fields: Field[] = [];\n\n// for (const name in schema) {\n// const field = schema[name];\n\n// // @ts-ignore\n// const children = field.children as DataType[];\n// if (children) {\n// const childField = getFields(field.fields);\n// const nestedField = new Field(name, new Struct(childField), field.optional);\n// fields.push(nestedField);\n// } else {\n// const FieldType = PARQUET_TYPE_MAPPING[field.type];\n// const metadata = getFieldMetadata(field);\n// const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n// fields.push(arrowField);\n// }\n// }\n\n// return fields;\n// }\n\n// function getFieldMetadata(field: ParquetField): Map<string, string> {\n// const metadata = new Map();\n\n// for (const key in field) {\n// if (key !== 'name') {\n// const value = typeof field[key] !== 'string' ? 
JSON.stringify(field[key]) : field[key];\n// metadata.set(key, value);\n// }\n// }\n\n// return metadata;\n// }\n"],"mappings":";;;;;;;AAeO,IAAMA,oBAAuD,GAAG;EACrEC,OAAO,EAAE,MAAM;EACfC,KAAK,EAAE,OAAO;EACdC,KAAK,EAAE,SAAS;EAChBC,KAAK,EAAE,SAAS;EAChBC,KAAK,EAAE,SAAS;EAChBC,MAAM,EAAE,SAAS;EACjBC,UAAU,EAAE,QAAQ;EACpBC,oBAAoB,EAAE,QAAQ;EAC9BC,IAAI,EAAE,MAAM;EACZC,IAAI,EAAE,OAAO;EACbC,WAAW,EAAE,OAAO;EACpBC,WAAW,EAAE,OAAO;EACpBC,gBAAgB,EAAE,OAAO;EACzBC,gBAAgB,EAAE,OAAO;EACzBC,MAAM,EAAE,OAAO;EACfC,OAAO,EAAE,QAAQ;EACjBC,OAAO,EAAE,QAAQ;EACjBC,OAAO,EAAE,QAAQ;EACjBC,KAAK,EAAE,MAAM;EACbC,MAAM,EAAE,OAAO;EACfC,MAAM,EAAE,OAAO;EACfC,MAAM,EAAE,OAAO;EACfC,IAAI,EAAE,QAAQ;EACdC,IAAI,EAAE,QAAQ;EAEdC,QAAQ,EAAE,QAAQ;EAClBC,aAAa,EAAE,SAAS;EACxBC,aAAa,EAAE,SAAS;EACxBC,kBAAkB,EAAE,SAAS;EAC7BC,4BAA4B,EAAE;AAChC,CAAC;AAACC,OAAA,CAAA9B,oBAAA,GAAAA,oBAAA;AAEK,SAAS+B,sBAAsBA,CAACC,MAAc,EAAU;EAC7D,IAAMC,MAAM,GAAG,EAAE;EAGjB,OAAO;IAACA,MAAM,EAANA,MAAM;IAAEC,QAAQ,EAAE,CAAC;EAAC,CAAC;AAC/B"}
package/dist/es5/lib/geo/decode-geo-metadata.js CHANGED
@@ -9,7 +9,7 @@ exports.setGeoMetadata = setGeoMetadata;
  exports.unpackGeoMetadata = unpackGeoMetadata;
  var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
  function getGeoMetadata(schema) {
- var stringifiedGeoMetadata = schema.metadata.get('geo');
+ var stringifiedGeoMetadata = schema.metadata.geo;
  if (!stringifiedGeoMetadata) {
  return null;
  }
@@ -22,7 +22,7 @@ function getGeoMetadata(schema) {
  }
  function setGeoMetadata(schema, geoMetadata) {
  var stringifiedGeoMetadata = JSON.stringify(geoMetadata);
- schema.metadata.set('geo', stringifiedGeoMetadata);
+ schema.metadata.geo = stringifiedGeoMetadata;
  }
  function unpackGeoMetadata(schema) {
  var geoMetadata = getGeoMetadata(schema);
@@ -33,12 +33,12 @@ function unpackGeoMetadata(schema) {
  primary_column = geoMetadata.primary_column,
  columns = geoMetadata.columns;
  if (version) {
- schema.metadata.set('geo.version', version);
+ schema.metadata['geo.version'] = version;
  }
  if (primary_column) {
- schema.metadata.set('geo.primary_column', primary_column);
+ schema.metadata['geo.primary_column'] = primary_column;
  }
- schema.metadata.set('geo.columns', Object.keys(columns || {}).join(''));
+ schema.metadata['geo.columns'] = Object.keys(columns || {}).join('');
  var _loop = function _loop() {
  var _Object$entries$_i = (0, _slicedToArray2.default)(_Object$entries[_i], 2),
  columnName = _Object$entries$_i[0],
@@ -48,7 +48,7 @@ function unpackGeoMetadata(schema) {
  });
  if (field) {
  if (field.name === primary_column) {
- field.metadata.set('geo.primary_field', 'true');
+ setFieldMetadata(field, 'geo.primary_field', 'true');
  }
  unpackGeoFieldMetadata(field, columnMetadata);
  }
@@ -64,14 +64,18 @@ function unpackGeoFieldMetadata(field, columnMetadata) {
  value = _Object$entries2$_i[1];
  switch (_key) {
  case 'geometry_type':
- field.metadata.set("geo.".concat(_key), value.join(','));
+ setFieldMetadata(field, "geo.".concat(_key), value.join(','));
  break;
  case 'bbox':
  case 'crs':
  case 'edges':
  default:
- field.metadata.set("geo.".concat(_key), typeof value === 'string' ? value : JSON.stringify(value));
+ setFieldMetadata(field, "geo.".concat(_key), typeof value === 'string' ? value : JSON.stringify(value));
  }
  }
  }
+ function setFieldMetadata(field, key, value) {
+ field.metadata = field.metadata || {};
+ field.metadata[key] = value;
+ }
  //# sourceMappingURL=decode-geo-metadata.js.map
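
Note on the hunk above: geo metadata now lives on the plain metadata record, so `schema.metadata.get('geo')` becomes `schema.metadata.geo`, writes become property assignments, and field-level keys are set through the new lazily-initializing `setFieldMetadata` helper. A sketch of reading GeoParquet metadata through the exported helpers (the wrapper function itself is hypothetical):

```typescript
import {getGeoMetadata, unpackGeoMetadata} from '@loaders.gl/parquet';
import type {Schema} from '@loaders.gl/schema';

// Hypothetical helper: list geometry columns declared in a GeoParquet schema.
export function listGeoColumns(schema: Schema): string[] {
  // Parses the stringified JSON stored under schema.metadata.geo (a plain property in 4.0).
  const geoMetadata = getGeoMetadata(schema);
  if (!geoMetadata) {
    return [];
  }
  // Optionally flatten into 'geo.*' keys on schema.metadata and field.metadata.
  unpackGeoMetadata(schema);
  return Object.keys(geoMetadata.columns || {});
}
```
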
package/dist/es5/lib/geo/decode-geo-metadata.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"decode-geo-metadata.js","names":["getGeoMetadata","schema","stringifiedGeoMetadata","metadata","get","geoMetadata","JSON","parse","_unused","setGeoMetadata","stringify","set","unpackGeoMetadata","version","primary_column","columns","Object","keys","join","_loop","_Object$entries$_i","_slicedToArray2","default","_Object$entries","_i","columnName","columnMetadata","field","fields","find","name","unpackGeoFieldMetadata","entries","length","_i2","_Object$entries2","_Object$entries2$_i","key","value","concat"],"sources":["../../../../src/lib/geo/decode-geo-metadata.ts"],"sourcesContent":["// loaders.gl, MIT license\nimport {Schema, Field} from '@loaders.gl/schema';\n\n/* eslint-disable camelcase */\n\n/** A geoarrow / geoparquet geo metadata object (stored in stringified form in the top level metadata 'geo' key) */\nexport type GeoMetadata = {\n version?: string;\n primary_column?: string;\n columns: Record<string, GeoColumnMetadata>;\n [key: string]: unknown;\n};\n\n/** A geoarrow / geoparquet geo metadata for one geometry column */\nexport type GeoColumnMetadata = {\n bounding_box?:\n | [number, number, number, number]\n | [number, number, number, number, number, number];\n crs?: string;\n geometry_type?: string[];\n edges?: string;\n [key: string]: unknown;\n};\n\n/**\n * Reads the GeoMetadata object from the metadata\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata */\nexport function getGeoMetadata(schema: Schema): GeoMetadata | null {\n const stringifiedGeoMetadata = schema.metadata.get('geo');\n if (!stringifiedGeoMetadata) {\n return null;\n }\n\n try {\n const geoMetadata = JSON.parse(stringifiedGeoMetadata) as GeoMetadata;\n return geoMetadata;\n } catch {\n return null;\n }\n}\n\n/**\n * Stores a geoarrow / geoparquet geo metadata object in the schema\n * @note geoarrow / geoparquet geo metadata is a single stringified JSON field\n */\nexport function setGeoMetadata(schema: Schema, geoMetadata: GeoMetadata): void {\n const stringifiedGeoMetadata = JSON.stringify(geoMetadata);\n schema.metadata.set('geo', stringifiedGeoMetadata);\n}\n\n/**\n * Unpacks geo metadata into separate metadata fields (parses the long JSON string)\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata\n */\nexport function unpackGeoMetadata(schema: Schema): void {\n const geoMetadata = getGeoMetadata(schema);\n if (!geoMetadata) {\n return;\n }\n\n // Store Parquet Schema Level Metadata\n\n const {version, primary_column, columns} = geoMetadata;\n if (version) {\n schema.metadata.set('geo.version', version);\n }\n\n if (primary_column) {\n schema.metadata.set('geo.primary_column', primary_column);\n }\n\n // store column names as comma separated list\n schema.metadata.set('geo.columns', Object.keys(columns || {}).join(''));\n\n for (const [columnName, columnMetadata] of Object.entries(columns || {})) {\n const field = schema.fields.find((field) => field.name === columnName);\n if (field) {\n if (field.name === primary_column) {\n field.metadata.set('geo.primary_field', 'true');\n }\n unpackGeoFieldMetadata(field, columnMetadata);\n }\n }\n}\n\nfunction unpackGeoFieldMetadata(field: Field, columnMetadata): void {\n for (const [key, value] of Object.entries(columnMetadata || {})) {\n switch (key) {\n case 'geometry_type':\n field.metadata.set(`geo.${key}`, (value as string[]).join(','));\n break;\n case 'bbox':\n case 'crs':\n case 'edges':\n default:\n field.metadata.set(`geo.${key}`, 
typeof value === 'string' ? value : JSON.stringify(value));\n }\n }\n}\n"],"mappings":";;;;;;;;;;AA2BO,SAASA,cAAcA,CAACC,MAAc,EAAsB;EACjE,IAAMC,sBAAsB,GAAGD,MAAM,CAACE,QAAQ,CAACC,GAAG,CAAC,KAAK,CAAC;EACzD,IAAI,CAACF,sBAAsB,EAAE;IAC3B,OAAO,IAAI;EACb;EAEA,IAAI;IACF,IAAMG,WAAW,GAAGC,IAAI,CAACC,KAAK,CAACL,sBAAsB,CAAgB;IACrE,OAAOG,WAAW;EACpB,CAAC,CAAC,OAAAG,OAAA,EAAM;IACN,OAAO,IAAI;EACb;AACF;AAMO,SAASC,cAAcA,CAACR,MAAc,EAAEI,WAAwB,EAAQ;EAC7E,IAAMH,sBAAsB,GAAGI,IAAI,CAACI,SAAS,CAACL,WAAW,CAAC;EAC1DJ,MAAM,CAACE,QAAQ,CAACQ,GAAG,CAAC,KAAK,EAAET,sBAAsB,CAAC;AACpD;AAMO,SAASU,iBAAiBA,CAACX,MAAc,EAAQ;EACtD,IAAMI,WAAW,GAAGL,cAAc,CAACC,MAAM,CAAC;EAC1C,IAAI,CAACI,WAAW,EAAE;IAChB;EACF;EAIA,IAAOQ,OAAO,GAA6BR,WAAW,CAA/CQ,OAAO;IAAEC,cAAc,GAAaT,WAAW,CAAtCS,cAAc;IAAEC,OAAO,GAAIV,WAAW,CAAtBU,OAAO;EACvC,IAAIF,OAAO,EAAE;IACXZ,MAAM,CAACE,QAAQ,CAACQ,GAAG,CAAC,aAAa,EAAEE,OAAO,CAAC;EAC7C;EAEA,IAAIC,cAAc,EAAE;IAClBb,MAAM,CAACE,QAAQ,CAACQ,GAAG,CAAC,oBAAoB,EAAEG,cAAc,CAAC;EAC3D;EAGAb,MAAM,CAACE,QAAQ,CAACQ,GAAG,CAAC,aAAa,EAAEK,MAAM,CAACC,IAAI,CAACF,OAAO,IAAI,CAAC,CAAC,CAAC,CAACG,IAAI,CAAC,EAAE,CAAC,CAAC;EAAC,IAAAC,KAAA,YAAAA,MAAA,EAEE;IAArE,IAAAC,kBAAA,OAAAC,eAAA,CAAAC,OAAA,EAAAC,eAAA,CAAAC,EAAA;MAAOC,UAAU,GAAAL,kBAAA;MAAEM,cAAc,GAAAN,kBAAA;IACpC,IAAMO,KAAK,GAAG1B,MAAM,CAAC2B,MAAM,CAACC,IAAI,CAAC,UAACF,KAAK;MAAA,OAAKA,KAAK,CAACG,IAAI,KAAKL,UAAU;IAAA,EAAC;IACtE,IAAIE,KAAK,EAAE;MACT,IAAIA,KAAK,CAACG,IAAI,KAAKhB,cAAc,EAAE;QACjCa,KAAK,CAACxB,QAAQ,CAACQ,GAAG,CAAC,mBAAmB,EAAE,MAAM,CAAC;MACjD;MACAoB,sBAAsB,CAACJ,KAAK,EAAED,cAAc,CAAC;IAC/C;EACF,CAAC;EARD,SAAAF,EAAA,MAAAD,eAAA,GAA2CP,MAAM,CAACgB,OAAO,CAACjB,OAAO,IAAI,CAAC,CAAC,CAAC,EAAAS,EAAA,GAAAD,eAAA,CAAAU,MAAA,EAAAT,EAAA;IAAAL,KAAA;EAAA;AAS1E;AAEA,SAASY,sBAAsBA,CAACJ,KAAY,EAAED,cAAc,EAAQ;EAClE,SAAAQ,GAAA,MAAAC,gBAAA,GAA2BnB,MAAM,CAACgB,OAAO,CAACN,cAAc,IAAI,CAAC,CAAC,CAAC,EAAAQ,GAAA,GAAAC,gBAAA,CAAAF,MAAA,EAAAC,GAAA,IAAE;IAA5D,IAAAE,mBAAA,OAAAf,eAAA,CAAAC,OAAA,EAAAa,gBAAA,CAAAD,GAAA;MAAOG,IAAG,GAAAD,mBAAA;MAAEE,KAAK,GAAAF,mBAAA;IACpB,QAAQC,IAAG;MACT,KAAK,eAAe;QAClBV,KAAK,CAACxB,QAAQ,CAACQ,GAAG,QAAA4B,MAAA,CAAQF,IAAG,GAAKC,KAAK,CAAcpB,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/D;MACF,KAAK,MAAM;MACX,KAAK,KAAK;MACV,KAAK,OAAO;MACZ;QACES,KAAK,CAACxB,QAAQ,CAACQ,GAAG,QAAA4B,MAAA,CAAQF,IAAG,GAAI,OAAOC,KAAK,KAAK,QAAQ,GAAGA,KAAK,GAAGhC,IAAI,CAACI,SAAS,CAAC4B,KAAK,CAAC,CAAC;IAC/F;EACF;AACF"}
+ {"version":3,"file":"decode-geo-metadata.js","names":["getGeoMetadata","schema","stringifiedGeoMetadata","metadata","geo","geoMetadata","JSON","parse","_unused","setGeoMetadata","stringify","unpackGeoMetadata","version","primary_column","columns","Object","keys","join","_loop","_Object$entries$_i","_slicedToArray2","default","_Object$entries","_i","columnName","columnMetadata","field","fields","find","name","setFieldMetadata","unpackGeoFieldMetadata","entries","length","_i2","_Object$entries2","_Object$entries2$_i","key","value","concat"],"sources":["../../../../src/lib/geo/decode-geo-metadata.ts"],"sourcesContent":["// loaders.gl, MIT license\nimport {Schema, Field} from '@loaders.gl/schema';\n\n/* eslint-disable camelcase */\n\n/** A geoarrow / geoparquet geo metadata object (stored in stringified form in the top level metadata 'geo' key) */\nexport type GeoMetadata = {\n version?: string;\n primary_column?: string;\n columns: Record<string, GeoColumnMetadata>;\n [key: string]: unknown;\n};\n\n/** A geoarrow / geoparquet geo metadata for one geometry column */\nexport type GeoColumnMetadata = {\n bounding_box?:\n | [number, number, number, number]\n | [number, number, number, number, number, number];\n crs?: string;\n geometry_type?: string[];\n edges?: string;\n [key: string]: unknown;\n};\n\n/**\n * Reads the GeoMetadata object from the metadata\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata */\nexport function getGeoMetadata(schema: Schema): GeoMetadata | null {\n const stringifiedGeoMetadata = schema.metadata.geo;\n if (!stringifiedGeoMetadata) {\n return null;\n }\n\n try {\n const geoMetadata = JSON.parse(stringifiedGeoMetadata) as GeoMetadata;\n return geoMetadata;\n } catch {\n return null;\n }\n}\n\n/**\n * Stores a geoarrow / geoparquet geo metadata object in the schema\n * @note geoarrow / geoparquet geo metadata is a single stringified JSON field\n */\nexport function setGeoMetadata(schema: Schema, geoMetadata: GeoMetadata): void {\n const stringifiedGeoMetadata = JSON.stringify(geoMetadata);\n schema.metadata.geo = stringifiedGeoMetadata;\n}\n\n/**\n * Unpacks geo metadata into separate metadata fields (parses the long JSON string)\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata\n */\nexport function unpackGeoMetadata(schema: Schema): void {\n const geoMetadata = getGeoMetadata(schema);\n if (!geoMetadata) {\n return;\n }\n\n // Store Parquet Schema Level Metadata\n\n const {version, primary_column, columns} = geoMetadata;\n if (version) {\n schema.metadata['geo.version'] = version;\n }\n\n if (primary_column) {\n schema.metadata['geo.primary_column'] = primary_column;\n }\n\n // store column names as comma separated list\n schema.metadata['geo.columns'] = Object.keys(columns || {}).join('');\n\n for (const [columnName, columnMetadata] of Object.entries(columns || {})) {\n const field = schema.fields.find((field) => field.name === columnName);\n if (field) {\n if (field.name === primary_column) {\n setFieldMetadata(field, 'geo.primary_field', 'true');\n }\n unpackGeoFieldMetadata(field, columnMetadata);\n }\n }\n}\n\nfunction unpackGeoFieldMetadata(field: Field, columnMetadata): void {\n for (const [key, value] of Object.entries(columnMetadata || {})) {\n switch (key) {\n case 'geometry_type':\n setFieldMetadata(field, `geo.${key}`, (value as string[]).join(','));\n break;\n case 'bbox':\n case 'crs':\n case 'edges':\n default:\n setFieldMetadata(\n field,\n 
`geo.${key}`,\n typeof value === 'string' ? value : JSON.stringify(value)\n );\n }\n }\n}\n\nfunction setFieldMetadata(field: Field, key: string, value: string): void {\n field.metadata = field.metadata || {};\n field.metadata[key] = value;\n}\n"],"mappings":";;;;;;;;;;AA2BO,SAASA,cAAcA,CAACC,MAAc,EAAsB;EACjE,IAAMC,sBAAsB,GAAGD,MAAM,CAACE,QAAQ,CAACC,GAAG;EAClD,IAAI,CAACF,sBAAsB,EAAE;IAC3B,OAAO,IAAI;EACb;EAEA,IAAI;IACF,IAAMG,WAAW,GAAGC,IAAI,CAACC,KAAK,CAACL,sBAAsB,CAAgB;IACrE,OAAOG,WAAW;EACpB,CAAC,CAAC,OAAAG,OAAA,EAAM;IACN,OAAO,IAAI;EACb;AACF;AAMO,SAASC,cAAcA,CAACR,MAAc,EAAEI,WAAwB,EAAQ;EAC7E,IAAMH,sBAAsB,GAAGI,IAAI,CAACI,SAAS,CAACL,WAAW,CAAC;EAC1DJ,MAAM,CAACE,QAAQ,CAACC,GAAG,GAAGF,sBAAsB;AAC9C;AAMO,SAASS,iBAAiBA,CAACV,MAAc,EAAQ;EACtD,IAAMI,WAAW,GAAGL,cAAc,CAACC,MAAM,CAAC;EAC1C,IAAI,CAACI,WAAW,EAAE;IAChB;EACF;EAIA,IAAOO,OAAO,GAA6BP,WAAW,CAA/CO,OAAO;IAAEC,cAAc,GAAaR,WAAW,CAAtCQ,cAAc;IAAEC,OAAO,GAAIT,WAAW,CAAtBS,OAAO;EACvC,IAAIF,OAAO,EAAE;IACXX,MAAM,CAACE,QAAQ,CAAC,aAAa,CAAC,GAAGS,OAAO;EAC1C;EAEA,IAAIC,cAAc,EAAE;IAClBZ,MAAM,CAACE,QAAQ,CAAC,oBAAoB,CAAC,GAAGU,cAAc;EACxD;EAGAZ,MAAM,CAACE,QAAQ,CAAC,aAAa,CAAC,GAAGY,MAAM,CAACC,IAAI,CAACF,OAAO,IAAI,CAAC,CAAC,CAAC,CAACG,IAAI,CAAC,EAAE,CAAC;EAAC,IAAAC,KAAA,YAAAA,MAAA,EAEK;IAArE,IAAAC,kBAAA,OAAAC,eAAA,CAAAC,OAAA,EAAAC,eAAA,CAAAC,EAAA;MAAOC,UAAU,GAAAL,kBAAA;MAAEM,cAAc,GAAAN,kBAAA;IACpC,IAAMO,KAAK,GAAGzB,MAAM,CAAC0B,MAAM,CAACC,IAAI,CAAC,UAACF,KAAK;MAAA,OAAKA,KAAK,CAACG,IAAI,KAAKL,UAAU;IAAA,EAAC;IACtE,IAAIE,KAAK,EAAE;MACT,IAAIA,KAAK,CAACG,IAAI,KAAKhB,cAAc,EAAE;QACjCiB,gBAAgB,CAACJ,KAAK,EAAE,mBAAmB,EAAE,MAAM,CAAC;MACtD;MACAK,sBAAsB,CAACL,KAAK,EAAED,cAAc,CAAC;IAC/C;EACF,CAAC;EARD,SAAAF,EAAA,MAAAD,eAAA,GAA2CP,MAAM,CAACiB,OAAO,CAAClB,OAAO,IAAI,CAAC,CAAC,CAAC,EAAAS,EAAA,GAAAD,eAAA,CAAAW,MAAA,EAAAV,EAAA;IAAAL,KAAA;EAAA;AAS1E;AAEA,SAASa,sBAAsBA,CAACL,KAAY,EAAED,cAAc,EAAQ;EAClE,SAAAS,GAAA,MAAAC,gBAAA,GAA2BpB,MAAM,CAACiB,OAAO,CAACP,cAAc,IAAI,CAAC,CAAC,CAAC,EAAAS,GAAA,GAAAC,gBAAA,CAAAF,MAAA,EAAAC,GAAA,IAAE;IAA5D,IAAAE,mBAAA,OAAAhB,eAAA,CAAAC,OAAA,EAAAc,gBAAA,CAAAD,GAAA;MAAOG,IAAG,GAAAD,mBAAA;MAAEE,KAAK,GAAAF,mBAAA;IACpB,QAAQC,IAAG;MACT,KAAK,eAAe;QAClBP,gBAAgB,CAACJ,KAAK,SAAAa,MAAA,CAASF,IAAG,GAAKC,KAAK,CAAcrB,IAAI,CAAC,GAAG,CAAC,CAAC;QACpE;MACF,KAAK,MAAM;MACX,KAAK,KAAK;MACV,KAAK,OAAO;MACZ;QACEa,gBAAgB,CACdJ,KAAK,SAAAa,MAAA,CACEF,IAAG,GACV,OAAOC,KAAK,KAAK,QAAQ,GAAGA,KAAK,GAAGhC,IAAI,CAACI,SAAS,CAAC4B,KAAK,CAC1D,CAAC;IACL;EACF;AACF;AAEA,SAASR,gBAAgBA,CAACJ,KAAY,EAAEW,GAAW,EAAEC,KAAa,EAAQ;EACxEZ,KAAK,CAACvB,QAAQ,GAAGuB,KAAK,CAACvB,QAAQ,IAAI,CAAC,CAAC;EACrCuB,KAAK,CAACvB,QAAQ,CAACkC,GAAG,CAAC,GAAGC,KAAK;AAC7B"}
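Note on the change above: in 3.4.6 `schema.metadata` and `field.metadata` were ES Maps, so the geo helpers used `.get('geo')` / `.set('geo.version', …)`; in 4.0.0-alpha.10 they are plain string-keyed objects written by direct assignment (plus a new `setFieldMetadata` helper for fields). A minimal consumer-side sketch of reading the unpacked keys under that assumption; the helper names below are illustrative and not part of the package:

    import type {Schema} from '@loaders.gl/schema';

    // 3.4.6:          schema.metadata.get('geo.primary_column')
    // 4.0.0-alpha.10: schema.metadata['geo.primary_column']
    function getPrimaryGeometryColumn(schema: Schema): string | undefined {
      return schema.metadata['geo.primary_column'];
    }

    // unpackGeoFieldMetadata() joins geometry_type arrays with ',' before storing them,
    // so the reader splits on the same separator.
    function getGeometryTypes(schema: Schema, columnName: string): string[] {
      const field = schema.fields.find((f) => f.name === columnName);
      const geometryType = field?.metadata?.['geo.geometry_type'];
      return geometryType ? geometryType.split(',') : [];
    }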
@@ -13,7 +13,7 @@ var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helper
  var _loaderUtils = require("@loaders.gl/loader-utils");
  var _parquetReader = require("../../parquetjs/parser/parquet-reader");
  var _convertSchemaFromParquet = require("../arrow/convert-schema-from-parquet");
- var _convertRowGroupToColumns = require("../arrow/convert-row-group-to-columns");
+ var _shred = require("../../parquetjs/schema/shred");
  var _decodeGeoMetadata = require("../geo/decode-geo-metadata");
  function _asyncIterator(iterable) { var method, async, sync, retry = 2; for ("undefined" != typeof Symbol && (async = Symbol.asyncIterator, sync = Symbol.iterator); retry--;) { if (async && null != (method = iterable[async])) return method.call(iterable); if (sync && null != (method = iterable[sync])) return new AsyncFromSyncIterator(method.call(iterable)); async = "@@asyncIterator", sync = "@@iterator"; } throw new TypeError("Object is not async iterable"); }
  function AsyncFromSyncIterator(s) { function AsyncFromSyncIteratorContinuation(r) { if (Object(r) !== r) return Promise.reject(new TypeError(r + " is not an object.")); var done = r.done; return Promise.resolve(r.value).then(function (value) { return { value: value, done: done }; }); } return AsyncFromSyncIterator = function AsyncFromSyncIterator(s) { this.s = s, this.n = s.next; }, AsyncFromSyncIterator.prototype = { s: null, n: null, next: function next() { return AsyncFromSyncIteratorContinuation(this.n.apply(this.s, arguments)); }, return: function _return(value) { var ret = this.s.return; return void 0 === ret ? Promise.resolve({ value: value, done: !0 }) : AsyncFromSyncIteratorContinuation(ret.apply(this.s, arguments)); }, throw: function _throw(value) { var thr = this.s.return; return void 0 === thr ? Promise.reject(value) : AsyncFromSyncIteratorContinuation(thr.apply(this.s, arguments)); } }, new AsyncFromSyncIterator(s); }
@@ -40,7 +40,11 @@ function _parseParquetInColumns() {
  break;
  }
  batch = _step.value;
- return _context2.abrupt("return", batch);
+ return _context2.abrupt("return", {
+ shape: 'columnar-table',
+ schema: batch.schema,
+ data: batch.data
+ });
  case 10:
  _iteratorAbruptCompletion = false;
  _context2.next = 5;
@@ -74,7 +78,7 @@ function _parseParquetInColumns() {
  case 28:
  return _context2.finish(19);
  case 29:
- return _context2.abrupt("return", null);
+ throw new Error('empty table');
  case 30:
  case "end":
  return _context2.stop();
@@ -102,7 +106,7 @@ function _parseParquetFileInColumnarBatches() {
  return (0, _awaitAsyncGenerator2.default)(reader.getFileMetadata());
  case 7:
  parquetMetadata = _context.sent;
- schema = (0, _convertSchemaFromParquet.convertSchemaFromParquet)(parquetSchema, parquetMetadata);
+ schema = (0, _convertSchemaFromParquet.convertParquetSchema)(parquetSchema, parquetMetadata);
  (0, _decodeGeoMetadata.unpackGeoMetadata)(schema);
  rowGroups = reader.rowGroupIterator(options === null || options === void 0 ? void 0 : options.parquet);
  _iteratorAbruptCompletion2 = false;
@@ -119,7 +123,7 @@ function _parseParquetFileInColumnarBatches() {
  }
  rowGroup = _step2.value;
  _context.next = 21;
- return convertRowGroupToTableBatch(schema, rowGroup);
+ return convertRowGroupToTableBatch(parquetSchema, rowGroup, schema);
  case 21:
  _iteratorAbruptCompletion2 = false;
  _context.next = 15;
@@ -160,8 +164,8 @@ function _parseParquetFileInColumnarBatches() {
  }));
  return _parseParquetFileInColumnarBatches.apply(this, arguments);
  }
- function convertRowGroupToTableBatch(schema, rowGroup) {
- var data = (0, _convertRowGroupToColumns.convertParquetRowGroupToColumns)(schema, rowGroup);
+ function convertRowGroupToTableBatch(parquetSchema, rowGroup, schema) {
+ var data = (0, _shred.materializeColumns)(parquetSchema, rowGroup);
  return {
  shape: 'columnar-table',
  batchType: 'data',
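The reworked `convertRowGroupToTableBatch` above now takes the raw Parquet schema and materializes each row group with `materializeColumns()` from the shredder, while the Arrow-style schema produced by `convertParquetSchema` rides along on the batch. A small consumer sketch for the batches yielded per row group; treating each column value as array-like is an assumption about the shape `materializeColumns()` returns:

    import type {ColumnarTableBatch} from '@loaders.gl/schema';

    // Summarize one batch produced per Parquet row group.
    function describeBatch(batch: ColumnarTableBatch): string[] {
      const lines: string[] = [`row group: ${batch.length} rows`];
      for (const [name, column] of Object.entries(batch.data)) {
        lines.push(`${name}: ${(column as ArrayLike<unknown>).length} values`);
      }
      return lines;
    }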
@@ -1 +1 @@
- {"version":3,"file":"parse-parquet-to-columns.js","names":["_loaderUtils","require","_parquetReader","_convertSchemaFromParquet","_convertRowGroupToColumns","_decodeGeoMetadata","_asyncIterator","iterable","method","async","sync","retry","Symbol","asyncIterator","iterator","call","AsyncFromSyncIterator","TypeError","s","AsyncFromSyncIteratorContinuation","r","Object","Promise","reject","done","resolve","value","then","n","next","prototype","apply","arguments","return","_return","ret","throw","_throw","thr","parseParquetInColumns","_x3","_x4","_parseParquetInColumns","_asyncToGenerator2","default","_regenerator","mark","_callee2","arrayBuffer","options","blob","_iteratorAbruptCompletion","_didIteratorError","_iteratorError","_iterator","_step","batch","wrap","_callee2$","_context2","prev","Blob","parseParquetFileInColumnarBatches","sent","abrupt","t0","finish","stop","_x","_x2","_parseParquetFileInColumnarBatches","_wrapAsyncGenerator2","_callee","file","reader","parquetSchema","parquetMetadata","schema","rowGroups","_iteratorAbruptCompletion2","_didIteratorError2","_iteratorError2","_iterator2","_step2","rowGroup","_callee$","_context","makeReadableFile","ParquetReader","_awaitAsyncGenerator2","getSchema","getFileMetadata","convertSchemaFromParquet","unpackGeoMetadata","rowGroupIterator","parquet","convertRowGroupToTableBatch","data","convertParquetRowGroupToColumns","shape","batchType","length","rowCount"],"sources":["../../../../src/lib/parsers/parse-parquet-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {ColumnarTableBatch, Schema} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {ParquetBuffer} from '../../parquetjs/schema/declare';\nimport {convertSchemaFromParquet} from '../arrow/convert-schema-from-parquet';\nimport {convertParquetRowGroupToColumns} from '../arrow/convert-row-group-to-columns';\nimport {unpackGeoMetadata} from '../geo/decode-geo-metadata';\n\nexport async function parseParquetInColumns(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInColumnarBatches(\n blob: Blob,\n options?: ParquetLoaderOptions\n): AsyncIterable<ColumnarTableBatch> {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const parquetSchema = await reader.getSchema();\n const parquetMetadata = await reader.getFileMetadata();\n const schema = convertSchemaFromParquet(parquetSchema, parquetMetadata);\n unpackGeoMetadata(schema);\n const rowGroups = reader.rowGroupIterator(options?.parquet);\n for await (const rowGroup of rowGroups) {\n yield convertRowGroupToTableBatch(schema, rowGroup);\n }\n}\n\nfunction convertRowGroupToTableBatch(schema: Schema, rowGroup: ParquetBuffer): ColumnarTableBatch {\n const data = convertParquetRowGroupToColumns(schema, rowGroup);\n return {\n shape: 'columnar-table',\n batchType: 'data',\n schema,\n data,\n length: rowGroup.rowCount\n 
};\n}\n"],"mappings":";;;;;;;;;;;;AAIA,IAAAA,YAAA,GAAAC,OAAA;AAEA,IAAAC,cAAA,GAAAD,OAAA;AAEA,IAAAE,yBAAA,GAAAF,OAAA;AACA,IAAAG,yBAAA,GAAAH,OAAA;AACA,IAAAI,kBAAA,GAAAJ,OAAA;AAA6D,SAAAK,eAAAC,QAAA,QAAAC,MAAA,EAAAC,KAAA,EAAAC,IAAA,EAAAC,KAAA,iCAAAC,MAAA,KAAAH,KAAA,GAAAG,MAAA,CAAAC,aAAA,EAAAH,IAAA,GAAAE,MAAA,CAAAE,QAAA,GAAAH,KAAA,WAAAF,KAAA,aAAAD,MAAA,GAAAD,QAAA,CAAAE,KAAA,WAAAD,MAAA,CAAAO,IAAA,CAAAR,QAAA,OAAAG,IAAA,aAAAF,MAAA,GAAAD,QAAA,CAAAG,IAAA,eAAAM,qBAAA,CAAAR,MAAA,CAAAO,IAAA,CAAAR,QAAA,IAAAE,KAAA,sBAAAC,IAAA,6BAAAO,SAAA;AAAA,SAAAD,sBAAAE,CAAA,aAAAC,kCAAAC,CAAA,QAAAC,MAAA,CAAAD,CAAA,MAAAA,CAAA,SAAAE,OAAA,CAAAC,MAAA,KAAAN,SAAA,CAAAG,CAAA,+BAAAI,IAAA,GAAAJ,CAAA,CAAAI,IAAA,SAAAF,OAAA,CAAAG,OAAA,CAAAL,CAAA,CAAAM,KAAA,EAAAC,IAAA,WAAAD,KAAA,aAAAA,KAAA,EAAAA,KAAA,EAAAF,IAAA,EAAAA,IAAA,iBAAAR,qBAAA,YAAAA,sBAAAE,CAAA,SAAAA,CAAA,GAAAA,CAAA,OAAAU,CAAA,GAAAV,CAAA,CAAAW,IAAA,KAAAb,qBAAA,CAAAc,SAAA,KAAAZ,CAAA,QAAAU,CAAA,QAAAC,IAAA,WAAAA,KAAA,WAAAV,iCAAA,MAAAS,CAAA,CAAAG,KAAA,MAAAb,CAAA,EAAAc,SAAA,OAAAC,MAAA,WAAAC,QAAAR,KAAA,QAAAS,GAAA,QAAAjB,CAAA,CAAAe,MAAA,oBAAAE,GAAA,GAAAb,OAAA,CAAAG,OAAA,GAAAC,KAAA,EAAAA,KAAA,EAAAF,IAAA,UAAAL,iCAAA,CAAAgB,GAAA,CAAAJ,KAAA,MAAAb,CAAA,EAAAc,SAAA,OAAAI,KAAA,WAAAC,OAAAX,KAAA,QAAAY,GAAA,QAAApB,CAAA,CAAAe,MAAA,oBAAAK,GAAA,GAAAhB,OAAA,CAAAC,MAAA,CAAAG,KAAA,IAAAP,iCAAA,CAAAmB,GAAA,CAAAP,KAAA,MAAAb,CAAA,EAAAc,SAAA,aAAAhB,qBAAA,CAAAE,CAAA;AAAA,SAEvCqB,qBAAqBA,CAAAC,GAAA,EAAAC,GAAA;EAAA,OAAAC,sBAAA,CAAAX,KAAA,OAAAC,SAAA;AAAA;AAAA,SAAAU,uBAAA;EAAAA,sBAAA,OAAAC,kBAAA,CAAAC,OAAA,EAAAC,YAAA,CAAAD,OAAA,CAAAE,IAAA,CAApC,SAAAC,SACLC,WAAwB,EACxBC,OAA8B;IAAA,IAAAC,IAAA,EAAAC,yBAAA,EAAAC,iBAAA,EAAAC,cAAA,EAAAC,SAAA,EAAAC,KAAA,EAAAC,KAAA;IAAA,OAAAX,YAAA,CAAAD,OAAA,CAAAa,IAAA,UAAAC,UAAAC,SAAA;MAAA,kBAAAA,SAAA,CAAAC,IAAA,GAAAD,SAAA,CAAA9B,IAAA;QAAA;UAExBqB,IAAI,GAAG,IAAIW,IAAI,CAAC,CAACb,WAAW,CAAC,CAAC;UAAAG,yBAAA;UAAAC,iBAAA;UAAAO,SAAA,CAAAC,IAAA;UAAAN,SAAA,GAAAhD,cAAA,CACVwD,iCAAiC,CAACZ,IAAI,EAAED,OAAO,CAAC;QAAA;UAAAU,SAAA,CAAA9B,IAAA;UAAA,OAAAyB,SAAA,CAAAzB,IAAA;QAAA;UAAA,MAAAsB,yBAAA,KAAAI,KAAA,GAAAI,SAAA,CAAAI,IAAA,EAAAvC,IAAA;YAAAmC,SAAA,CAAA9B,IAAA;YAAA;UAAA;UAAzD2B,KAAK,GAAAD,KAAA,CAAA7B,KAAA;UAAA,OAAAiC,SAAA,CAAAK,MAAA,WACbR,KAAK;QAAA;UAAAL,yBAAA;UAAAQ,SAAA,CAAA9B,IAAA;UAAA;QAAA;UAAA8B,SAAA,CAAA9B,IAAA;UAAA;QAAA;UAAA8B,SAAA,CAAAC,IAAA;UAAAD,SAAA,CAAAM,EAAA,GAAAN,SAAA;UAAAP,iBAAA;UAAAC,cAAA,GAAAM,SAAA,CAAAM,EAAA;QAAA;UAAAN,SAAA,CAAAC,IAAA;UAAAD,SAAA,CAAAC,IAAA;UAAA,MAAAT,yBAAA,IAAAG,SAAA,CAAArB,MAAA;YAAA0B,SAAA,CAAA9B,IAAA;YAAA;UAAA;UAAA8B,SAAA,CAAA9B,IAAA;UAAA,OAAAyB,SAAA,CAAArB,MAAA;QAAA;UAAA0B,SAAA,CAAAC,IAAA;UAAA,KAAAR,iBAAA;YAAAO,SAAA,CAAA9B,IAAA;YAAA;UAAA;UAAA,MAAAwB,cAAA;QAAA;UAAA,OAAAM,SAAA,CAAAO,MAAA;QAAA;UAAA,OAAAP,SAAA,CAAAO,MAAA;QAAA;UAAA,OAAAP,SAAA,CAAAK,MAAA,WAEP,IAAI;QAAA;QAAA;UAAA,OAAAL,SAAA,CAAAQ,IAAA;MAAA;IAAA,GAAApB,QAAA;EAAA,CACZ;EAAA,OAAAL,sBAAA,CAAAX,KAAA,OAAAC,SAAA;AAAA;AAAA,SAEsB8B,iCAAiCA,CAAAM,EAAA,EAAAC,GAAA;EAAA,OAAAC,kCAAA,CAAAvC,KAAA,OAAAC,SAAA;AAAA;AAAA,SAAAsC,mCAAA;EAAAA,kCAAA,OAAAC,oBAAA,CAAA3B,OAAA,EAAAC,YAAA,CAAAD,OAAA,CAAAE,IAAA,CAAjD,SAAA0B,QACLtB,IAAU,EACVD,OAA8B;IAAA,IAAAwB,IAAA,EAAAC,MAAA,EAAAC,aAAA,EAAAC,eAAA,EAAAC,MAAA,EAAAC,SAAA,EAAAC,0BAAA,EAAAC,kBAAA,EAAAC,eAAA,EAAAC,UAAA,EAAAC,MAAA,EAAAC,QAAA;IAAA,OAAAvC,YAAA,CAAAD,OAAA,CAAAa,IAAA,UAAA4B,SAAAC,QAAA;MAAA,kBAAAA,QAAA,CAAA1B,IAAA,GAAA0B,QAAA,CAAAzD,IAAA;QAAA;UAExB4C,IAAI,GAAG,IAAAc,6BAAgB,EAACrC,IAAI,CAAC;UAC7BwB,MAAM,GAAG,IAAIc,4BAAa,CAACf,IAAI,CAAC;UAAAa,QAAA,CAAAzD,IAAA;UAAA,WAAA4D,qBAAA,CAAA7C,OAAA,EACV8B,MAAM,CAACgB,SAAS,CAAC,CAAC;QAAA;UAAxCf,aAAa,GAAAW,QAAA,CAAAvB,IAAA;UAAAuB,QAAA,CA
AAzD,IAAA;UAAA,WAAA4D,qBAAA,CAAA7C,OAAA,EACW8B,MAAM,CAACiB,eAAe,CAAC,CAAC;QAAA;UAAhDf,eAAe,GAAAU,QAAA,CAAAvB,IAAA;UACfc,MAAM,GAAG,IAAAe,kDAAwB,EAACjB,aAAa,EAAEC,eAAe,CAAC;UACvE,IAAAiB,oCAAiB,EAAChB,MAAM,CAAC;UACnBC,SAAS,GAAGJ,MAAM,CAACoB,gBAAgB,CAAC7C,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAE8C,OAAO,CAAC;UAAAhB,0BAAA;UAAAC,kBAAA;UAAAM,QAAA,CAAA1B,IAAA;UAAAsB,UAAA,GAAA5E,cAAA,CAC9BwE,SAAS;QAAA;UAAAQ,QAAA,CAAAzD,IAAA;UAAA,WAAA4D,qBAAA,CAAA7C,OAAA,EAAAsC,UAAA,CAAArD,IAAA;QAAA;UAAA,MAAAkD,0BAAA,KAAAI,MAAA,GAAAG,QAAA,CAAAvB,IAAA,EAAAvC,IAAA;YAAA8D,QAAA,CAAAzD,IAAA;YAAA;UAAA;UAArBuD,QAAQ,GAAAD,MAAA,CAAAzD,KAAA;UAAA4D,QAAA,CAAAzD,IAAA;UACvB,OAAMmE,2BAA2B,CAACnB,MAAM,EAAEO,QAAQ,CAAC;QAAA;UAAAL,0BAAA;UAAAO,QAAA,CAAAzD,IAAA;UAAA;QAAA;UAAAyD,QAAA,CAAAzD,IAAA;UAAA;QAAA;UAAAyD,QAAA,CAAA1B,IAAA;UAAA0B,QAAA,CAAArB,EAAA,GAAAqB,QAAA;UAAAN,kBAAA;UAAAC,eAAA,GAAAK,QAAA,CAAArB,EAAA;QAAA;UAAAqB,QAAA,CAAA1B,IAAA;UAAA0B,QAAA,CAAA1B,IAAA;UAAA,MAAAmB,0BAAA,IAAAG,UAAA,CAAAjD,MAAA;YAAAqD,QAAA,CAAAzD,IAAA;YAAA;UAAA;UAAAyD,QAAA,CAAAzD,IAAA;UAAA,WAAA4D,qBAAA,CAAA7C,OAAA,EAAAsC,UAAA,CAAAjD,MAAA;QAAA;UAAAqD,QAAA,CAAA1B,IAAA;UAAA,KAAAoB,kBAAA;YAAAM,QAAA,CAAAzD,IAAA;YAAA;UAAA;UAAA,MAAAoD,eAAA;QAAA;UAAA,OAAAK,QAAA,CAAApB,MAAA;QAAA;UAAA,OAAAoB,QAAA,CAAApB,MAAA;QAAA;QAAA;UAAA,OAAAoB,QAAA,CAAAnB,IAAA;MAAA;IAAA,GAAAK,OAAA;EAAA,CAEtD;EAAA,OAAAF,kCAAA,CAAAvC,KAAA,OAAAC,SAAA;AAAA;AAED,SAASgE,2BAA2BA,CAACnB,MAAc,EAAEO,QAAuB,EAAsB;EAChG,IAAMa,IAAI,GAAG,IAAAC,yDAA+B,EAACrB,MAAM,EAAEO,QAAQ,CAAC;EAC9D,OAAO;IACLe,KAAK,EAAE,gBAAgB;IACvBC,SAAS,EAAE,MAAM;IACjBvB,MAAM,EAANA,MAAM;IACNoB,IAAI,EAAJA,IAAI;IACJI,MAAM,EAAEjB,QAAQ,CAACkB;EACnB,CAAC;AACH"}
+ {"version":3,"file":"parse-parquet-to-columns.js","names":["_loaderUtils","require","_parquetReader","_convertSchemaFromParquet","_shred","_decodeGeoMetadata","_asyncIterator","iterable","method","async","sync","retry","Symbol","asyncIterator","iterator","call","AsyncFromSyncIterator","TypeError","s","AsyncFromSyncIteratorContinuation","r","Object","Promise","reject","done","resolve","value","then","n","next","prototype","apply","arguments","return","_return","ret","throw","_throw","thr","parseParquetInColumns","_x3","_x4","_parseParquetInColumns","_asyncToGenerator2","default","_regenerator","mark","_callee2","arrayBuffer","options","blob","_iteratorAbruptCompletion","_didIteratorError","_iteratorError","_iterator","_step","batch","wrap","_callee2$","_context2","prev","Blob","parseParquetFileInColumnarBatches","sent","abrupt","shape","schema","data","t0","finish","Error","stop","_x","_x2","_parseParquetFileInColumnarBatches","_wrapAsyncGenerator2","_callee","file","reader","parquetSchema","parquetMetadata","rowGroups","_iteratorAbruptCompletion2","_didIteratorError2","_iteratorError2","_iterator2","_step2","rowGroup","_callee$","_context","makeReadableFile","ParquetReader","_awaitAsyncGenerator2","getSchema","getFileMetadata","convertParquetSchema","unpackGeoMetadata","rowGroupIterator","parquet","convertRowGroupToTableBatch","materializeColumns","batchType","length","rowCount"],"sources":["../../../../src/lib/parsers/parse-parquet-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {ColumnarTable, ColumnarTableBatch, Schema} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {ParquetRowGroup} from '../../parquetjs/schema/declare';\nimport {ParquetSchema} from '../../parquetjs/schema/schema';\nimport {convertParquetSchema} from '../arrow/convert-schema-from-parquet';\nimport {materializeColumns} from '../../parquetjs/schema/shred';\n// import {convertParquetRowGroupToColumns} from '../arrow/convert-row-group-to-columns';\nimport {unpackGeoMetadata} from '../geo/decode-geo-metadata';\n\nexport async function parseParquetInColumns(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n): Promise<ColumnarTable> {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {\n return {\n shape: 'columnar-table',\n schema: batch.schema,\n data: batch.data\n };\n }\n throw new Error('empty table');\n}\n\nexport async function* parseParquetFileInColumnarBatches(\n blob: Blob,\n options?: ParquetLoaderOptions\n): AsyncIterable<ColumnarTableBatch> {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const parquetSchema = await reader.getSchema();\n const parquetMetadata = await reader.getFileMetadata();\n const schema = convertParquetSchema(parquetSchema, parquetMetadata);\n unpackGeoMetadata(schema);\n const rowGroups = reader.rowGroupIterator(options?.parquet);\n for await (const rowGroup of rowGroups) {\n yield convertRowGroupToTableBatch(parquetSchema, rowGroup, schema);\n }\n}\n\nfunction convertRowGroupToTableBatch(\n parquetSchema: ParquetSchema,\n rowGroup: ParquetRowGroup,\n schema: Schema\n): ColumnarTableBatch {\n // const data = convertParquetRowGroupToColumns(schema, rowGroup);\n const data = 
materializeColumns(parquetSchema, rowGroup);\n return {\n shape: 'columnar-table',\n batchType: 'data',\n schema,\n data,\n length: rowGroup.rowCount\n };\n}\n"],"mappings":";;;;;;;;;;;;AAIA,IAAAA,YAAA,GAAAC,OAAA;AAEA,IAAAC,cAAA,GAAAD,OAAA;AAGA,IAAAE,yBAAA,GAAAF,OAAA;AACA,IAAAG,MAAA,GAAAH,OAAA;AAEA,IAAAI,kBAAA,GAAAJ,OAAA;AAA6D,SAAAK,eAAAC,QAAA,QAAAC,MAAA,EAAAC,KAAA,EAAAC,IAAA,EAAAC,KAAA,iCAAAC,MAAA,KAAAH,KAAA,GAAAG,MAAA,CAAAC,aAAA,EAAAH,IAAA,GAAAE,MAAA,CAAAE,QAAA,GAAAH,KAAA,WAAAF,KAAA,aAAAD,MAAA,GAAAD,QAAA,CAAAE,KAAA,WAAAD,MAAA,CAAAO,IAAA,CAAAR,QAAA,OAAAG,IAAA,aAAAF,MAAA,GAAAD,QAAA,CAAAG,IAAA,eAAAM,qBAAA,CAAAR,MAAA,CAAAO,IAAA,CAAAR,QAAA,IAAAE,KAAA,sBAAAC,IAAA,6BAAAO,SAAA;AAAA,SAAAD,sBAAAE,CAAA,aAAAC,kCAAAC,CAAA,QAAAC,MAAA,CAAAD,CAAA,MAAAA,CAAA,SAAAE,OAAA,CAAAC,MAAA,KAAAN,SAAA,CAAAG,CAAA,+BAAAI,IAAA,GAAAJ,CAAA,CAAAI,IAAA,SAAAF,OAAA,CAAAG,OAAA,CAAAL,CAAA,CAAAM,KAAA,EAAAC,IAAA,WAAAD,KAAA,aAAAA,KAAA,EAAAA,KAAA,EAAAF,IAAA,EAAAA,IAAA,iBAAAR,qBAAA,YAAAA,sBAAAE,CAAA,SAAAA,CAAA,GAAAA,CAAA,OAAAU,CAAA,GAAAV,CAAA,CAAAW,IAAA,KAAAb,qBAAA,CAAAc,SAAA,KAAAZ,CAAA,QAAAU,CAAA,QAAAC,IAAA,WAAAA,KAAA,WAAAV,iCAAA,MAAAS,CAAA,CAAAG,KAAA,MAAAb,CAAA,EAAAc,SAAA,OAAAC,MAAA,WAAAC,QAAAR,KAAA,QAAAS,GAAA,QAAAjB,CAAA,CAAAe,MAAA,oBAAAE,GAAA,GAAAb,OAAA,CAAAG,OAAA,GAAAC,KAAA,EAAAA,KAAA,EAAAF,IAAA,UAAAL,iCAAA,CAAAgB,GAAA,CAAAJ,KAAA,MAAAb,CAAA,EAAAc,SAAA,OAAAI,KAAA,WAAAC,OAAAX,KAAA,QAAAY,GAAA,QAAApB,CAAA,CAAAe,MAAA,oBAAAK,GAAA,GAAAhB,OAAA,CAAAC,MAAA,CAAAG,KAAA,IAAAP,iCAAA,CAAAmB,GAAA,CAAAP,KAAA,MAAAb,CAAA,EAAAc,SAAA,aAAAhB,qBAAA,CAAAE,CAAA;AAAA,SAEvCqB,qBAAqBA,CAAAC,GAAA,EAAAC,GAAA;EAAA,OAAAC,sBAAA,CAAAX,KAAA,OAAAC,SAAA;AAAA;AAAA,SAAAU,uBAAA;EAAAA,sBAAA,OAAAC,kBAAA,CAAAC,OAAA,EAAAC,YAAA,CAAAD,OAAA,CAAAE,IAAA,CAApC,SAAAC,SACLC,WAAwB,EACxBC,OAA8B;IAAA,IAAAC,IAAA,EAAAC,yBAAA,EAAAC,iBAAA,EAAAC,cAAA,EAAAC,SAAA,EAAAC,KAAA,EAAAC,KAAA;IAAA,OAAAX,YAAA,CAAAD,OAAA,CAAAa,IAAA,UAAAC,UAAAC,SAAA;MAAA,kBAAAA,SAAA,CAAAC,IAAA,GAAAD,SAAA,CAAA9B,IAAA;QAAA;UAExBqB,IAAI,GAAG,IAAIW,IAAI,CAAC,CAACb,WAAW,CAAC,CAAC;UAAAG,yBAAA;UAAAC,iBAAA;UAAAO,SAAA,CAAAC,IAAA;UAAAN,SAAA,GAAAhD,cAAA,CACVwD,iCAAiC,CAACZ,IAAI,EAAED,OAAO,CAAC;QAAA;UAAAU,SAAA,CAAA9B,IAAA;UAAA,OAAAyB,SAAA,CAAAzB,IAAA;QAAA;UAAA,MAAAsB,yBAAA,KAAAI,KAAA,GAAAI,SAAA,CAAAI,IAAA,EAAAvC,IAAA;YAAAmC,SAAA,CAAA9B,IAAA;YAAA;UAAA;UAAzD2B,KAAK,GAAAD,KAAA,CAAA7B,KAAA;UAAA,OAAAiC,SAAA,CAAAK,MAAA,WACb;YACLC,KAAK,EAAE,gBAAgB;YACvBC,MAAM,EAAEV,KAAK,CAACU,MAAM;YACpBC,IAAI,EAAEX,KAAK,CAACW;UACd,CAAC;QAAA;UAAAhB,yBAAA;UAAAQ,SAAA,CAAA9B,IAAA;UAAA;QAAA;UAAA8B,SAAA,CAAA9B,IAAA;UAAA;QAAA;UAAA8B,SAAA,CAAAC,IAAA;UAAAD,SAAA,CAAAS,EAAA,GAAAT,SAAA;UAAAP,iBAAA;UAAAC,cAAA,GAAAM,SAAA,CAAAS,EAAA;QAAA;UAAAT,SAAA,CAAAC,IAAA;UAAAD,SAAA,CAAAC,IAAA;UAAA,MAAAT,yBAAA,IAAAG,SAAA,CAAArB,MAAA;YAAA0B,SAAA,CAAA9B,IAAA;YAAA;UAAA;UAAA8B,SAAA,CAAA9B,IAAA;UAAA,OAAAyB,SAAA,CAAArB,MAAA;QAAA;UAAA0B,SAAA,CAAAC,IAAA;UAAA,KAAAR,iBAAA;YAAAO,SAAA,CAAA9B,IAAA;YAAA;UAAA;UAAA,MAAAwB,cAAA;QAAA;UAAA,OAAAM,SAAA,CAAAU,MAAA;QAAA;UAAA,OAAAV,SAAA,CAAAU,MAAA;QAAA;UAAA,MAEG,IAAIC,KAAK,CAAC,aAAa,CAAC;QAAA;QAAA;UAAA,OAAAX,SAAA,CAAAY,IAAA;MAAA;IAAA,GAAAxB,QAAA;EAAA,CAC/B;EAAA,OAAAL,sBAAA,CAAAX,KAAA,OAAAC,SAAA;AAAA;AAAA,SAEsB8B,iCAAiCA,CAAAU,EAAA,EAAAC,GAAA;EAAA,OAAAC,kCAAA,CAAA3C,KAAA,OAAAC,SAAA;AAAA;AAAA,SAAA0C,mCAAA;EAAAA,kCAAA,OAAAC,oBAAA,CAAA/B,OAAA,EAAAC,YAAA,CAAAD,OAAA,CAAAE,IAAA,CAAjD,SAAA8B,QACL1B,IAAU,EACVD,OAA8B;IAAA,IAAA4B,IAAA,EAAAC,MAAA,EAAAC,aAAA,EAAAC,eAAA,EAAAd,MAAA,EAAAe,SAAA,EAAAC,0BAAA,EAAAC,kBAAA,EAAAC,eAAA,EAAAC,UAAA,EAAAC,MAAA,EAAAC,QAAA;IAAA,OAAA1C,YAAA,CAAAD,OAAA,CAAAa,IAAA,UAAA+B,SAAAC,QAAA;MAAA,kBAAAA,QAAA,CAAA7B,IAAA,GAAA6B,QAAA,CAAA5D,IAA
A;QAAA;UAExBgD,IAAI,GAAG,IAAAa,6BAAgB,EAACxC,IAAI,CAAC;UAC7B4B,MAAM,GAAG,IAAIa,4BAAa,CAACd,IAAI,CAAC;UAAAY,QAAA,CAAA5D,IAAA;UAAA,WAAA+D,qBAAA,CAAAhD,OAAA,EACVkC,MAAM,CAACe,SAAS,CAAC,CAAC;QAAA;UAAxCd,aAAa,GAAAU,QAAA,CAAA1B,IAAA;UAAA0B,QAAA,CAAA5D,IAAA;UAAA,WAAA+D,qBAAA,CAAAhD,OAAA,EACWkC,MAAM,CAACgB,eAAe,CAAC,CAAC;QAAA;UAAhDd,eAAe,GAAAS,QAAA,CAAA1B,IAAA;UACfG,MAAM,GAAG,IAAA6B,8CAAoB,EAAChB,aAAa,EAAEC,eAAe,CAAC;UACnE,IAAAgB,oCAAiB,EAAC9B,MAAM,CAAC;UACnBe,SAAS,GAAGH,MAAM,CAACmB,gBAAgB,CAAChD,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEiD,OAAO,CAAC;UAAAhB,0BAAA;UAAAC,kBAAA;UAAAM,QAAA,CAAA7B,IAAA;UAAAyB,UAAA,GAAA/E,cAAA,CAC9B2E,SAAS;QAAA;UAAAQ,QAAA,CAAA5D,IAAA;UAAA,WAAA+D,qBAAA,CAAAhD,OAAA,EAAAyC,UAAA,CAAAxD,IAAA;QAAA;UAAA,MAAAqD,0BAAA,KAAAI,MAAA,GAAAG,QAAA,CAAA1B,IAAA,EAAAvC,IAAA;YAAAiE,QAAA,CAAA5D,IAAA;YAAA;UAAA;UAArB0D,QAAQ,GAAAD,MAAA,CAAA5D,KAAA;UAAA+D,QAAA,CAAA5D,IAAA;UACvB,OAAMsE,2BAA2B,CAACpB,aAAa,EAAEQ,QAAQ,EAAErB,MAAM,CAAC;QAAA;UAAAgB,0BAAA;UAAAO,QAAA,CAAA5D,IAAA;UAAA;QAAA;UAAA4D,QAAA,CAAA5D,IAAA;UAAA;QAAA;UAAA4D,QAAA,CAAA7B,IAAA;UAAA6B,QAAA,CAAArB,EAAA,GAAAqB,QAAA;UAAAN,kBAAA;UAAAC,eAAA,GAAAK,QAAA,CAAArB,EAAA;QAAA;UAAAqB,QAAA,CAAA7B,IAAA;UAAA6B,QAAA,CAAA7B,IAAA;UAAA,MAAAsB,0BAAA,IAAAG,UAAA,CAAApD,MAAA;YAAAwD,QAAA,CAAA5D,IAAA;YAAA;UAAA;UAAA4D,QAAA,CAAA5D,IAAA;UAAA,WAAA+D,qBAAA,CAAAhD,OAAA,EAAAyC,UAAA,CAAApD,MAAA;QAAA;UAAAwD,QAAA,CAAA7B,IAAA;UAAA,KAAAuB,kBAAA;YAAAM,QAAA,CAAA5D,IAAA;YAAA;UAAA;UAAA,MAAAuD,eAAA;QAAA;UAAA,OAAAK,QAAA,CAAApB,MAAA;QAAA;UAAA,OAAAoB,QAAA,CAAApB,MAAA;QAAA;QAAA;UAAA,OAAAoB,QAAA,CAAAlB,IAAA;MAAA;IAAA,GAAAK,OAAA;EAAA,CAErE;EAAA,OAAAF,kCAAA,CAAA3C,KAAA,OAAAC,SAAA;AAAA;AAED,SAASmE,2BAA2BA,CAClCpB,aAA4B,EAC5BQ,QAAyB,EACzBrB,MAAc,EACM;EAEpB,IAAMC,IAAI,GAAG,IAAAiC,yBAAkB,EAACrB,aAAa,EAAEQ,QAAQ,CAAC;EACxD,OAAO;IACLtB,KAAK,EAAE,gBAAgB;IACvBoC,SAAS,EAAE,MAAM;IACjBnC,MAAM,EAANA,MAAM;IACNC,IAAI,EAAJA,IAAI;IACJmC,MAAM,EAAEf,QAAQ,CAACgB;EACnB,CAAC;AACH"}
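One behavioral change worth calling out from the new source above: `parseParquetInColumns` now resolves to a `ColumnarTable` (`{shape, schema, data}`) and throws `Error('empty table')` where 3.4.6 resolved to `null`. A caller-side adaptation sketch; the deep import path is illustrative only and may not match the package's public exports:

    import type {ColumnarTable} from '@loaders.gl/schema';
    // Hypothetical import path for the internal parser shown in this diff.
    import {parseParquetInColumns} from '@loaders.gl/parquet/lib/parsers/parse-parquet-to-columns';

    // 3.4.6 returned null for an empty file; 4.0.0-alpha.10 throws instead.
    async function tryParseColumns(arrayBuffer: ArrayBuffer): Promise<ColumnarTable | null> {
      try {
        return await parseParquetInColumns(arrayBuffer);
      } catch {
        return null; // e.g. Error('empty table')
      }
    }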