@loaders.gl/parquet 3.4.6 → 4.0.0-alpha.10

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (130)
  1. package/dist/dist.min.js +27 -34
  2. package/dist/dist.min.js.map +3 -3
  3. package/dist/es5/index.js +6 -6
  4. package/dist/es5/index.js.map +1 -1
  5. package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -1
  6. package/dist/es5/lib/arrow/convert-schema-from-parquet.js +58 -42
  7. package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -1
  8. package/dist/es5/lib/arrow/convert-schema-to-parquet.js +33 -31
  9. package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -1
  10. package/dist/es5/lib/geo/decode-geo-metadata.js +12 -8
  11. package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -1
  12. package/dist/es5/lib/parsers/parse-parquet-to-columns.js +11 -7
  13. package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -1
  14. package/dist/es5/lib/parsers/parse-parquet-to-rows.js +51 -29
  15. package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -1
  16. package/dist/es5/lib/wasm/parse-parquet-wasm.js +6 -6
  17. package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
  18. package/dist/es5/parquet-loader.js +16 -4
  19. package/dist/es5/parquet-loader.js.map +1 -1
  20. package/dist/es5/parquet-wasm-loader.js +1 -1
  21. package/dist/es5/parquet-wasm-loader.js.map +1 -1
  22. package/dist/es5/parquet-wasm-writer.js +1 -1
  23. package/dist/es5/parquet-wasm-writer.js.map +1 -1
  24. package/dist/es5/parquet-writer.js +1 -1
  25. package/dist/es5/parquet-writer.js.map +1 -1
  26. package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -1
  27. package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
  28. package/dist/es5/parquetjs/parser/parquet-reader.js +1 -1
  29. package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
  30. package/dist/es5/parquetjs/schema/declare.js +4 -4
  31. package/dist/es5/parquetjs/schema/declare.js.map +1 -1
  32. package/dist/es5/parquetjs/schema/schema.js +7 -7
  33. package/dist/es5/parquetjs/schema/schema.js.map +1 -1
  34. package/dist/es5/parquetjs/schema/shred.js +117 -22
  35. package/dist/es5/parquetjs/schema/shred.js.map +1 -1
  36. package/dist/esm/index.js +5 -5
  37. package/dist/esm/index.js.map +1 -1
  38. package/dist/esm/lib/arrow/convert-row-group-to-columns.js.map +1 -1
  39. package/dist/esm/lib/arrow/convert-schema-from-parquet.js +57 -41
  40. package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -1
  41. package/dist/esm/lib/arrow/convert-schema-to-parquet.js +33 -31
  42. package/dist/esm/lib/arrow/convert-schema-to-parquet.js.map +1 -1
  43. package/dist/esm/lib/geo/decode-geo-metadata.js +12 -8
  44. package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -1
  45. package/dist/esm/lib/parsers/parse-parquet-to-columns.js +12 -8
  46. package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -1
  47. package/dist/esm/lib/parsers/parse-parquet-to-rows.js +14 -3
  48. package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -1
  49. package/dist/esm/lib/wasm/parse-parquet-wasm.js +3 -3
  50. package/dist/esm/lib/wasm/parse-parquet-wasm.js.map +1 -1
  51. package/dist/esm/parquet-loader.js +14 -2
  52. package/dist/esm/parquet-loader.js.map +1 -1
  53. package/dist/esm/parquet-wasm-loader.js +1 -1
  54. package/dist/esm/parquet-wasm-loader.js.map +1 -1
  55. package/dist/esm/parquet-wasm-writer.js +1 -1
  56. package/dist/esm/parquet-wasm-writer.js.map +1 -1
  57. package/dist/esm/parquet-writer.js +1 -1
  58. package/dist/esm/parquet-writer.js.map +1 -1
  59. package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -1
  60. package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
  61. package/dist/esm/parquetjs/parser/parquet-reader.js +2 -2
  62. package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
  63. package/dist/esm/parquetjs/schema/declare.js +1 -1
  64. package/dist/esm/parquetjs/schema/declare.js.map +1 -1
  65. package/dist/esm/parquetjs/schema/schema.js +6 -6
  66. package/dist/esm/parquetjs/schema/schema.js.map +1 -1
  67. package/dist/esm/parquetjs/schema/shred.js +108 -21
  68. package/dist/esm/parquetjs/schema/shred.js.map +1 -1
  69. package/dist/index.d.ts +8 -49
  70. package/dist/index.d.ts.map +1 -1
  71. package/dist/index.js +8 -6
  72. package/dist/lib/arrow/convert-row-group-to-columns.d.ts +2 -2
  73. package/dist/lib/arrow/convert-row-group-to-columns.d.ts.map +1 -1
  74. package/dist/lib/arrow/convert-schema-from-parquet.d.ts +4 -4
  75. package/dist/lib/arrow/convert-schema-from-parquet.d.ts.map +1 -1
  76. package/dist/lib/arrow/convert-schema-from-parquet.js +48 -44
  77. package/dist/lib/arrow/convert-schema-to-parquet.d.ts +1 -1
  78. package/dist/lib/arrow/convert-schema-to-parquet.d.ts.map +1 -1
  79. package/dist/lib/arrow/convert-schema-to-parquet.js +30 -31
  80. package/dist/lib/geo/decode-geo-metadata.js +12 -8
  81. package/dist/lib/parsers/parse-parquet-to-columns.d.ts +2 -2
  82. package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -1
  83. package/dist/lib/parsers/parse-parquet-to-columns.js +13 -7
  84. package/dist/lib/parsers/parse-parquet-to-rows.d.ts +3 -2
  85. package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -1
  86. package/dist/lib/parsers/parse-parquet-to-rows.js +16 -19
  87. package/dist/lib/wasm/parse-parquet-wasm.d.ts +3 -3
  88. package/dist/lib/wasm/parse-parquet-wasm.d.ts.map +1 -1
  89. package/dist/lib/wasm/parse-parquet-wasm.js +3 -3
  90. package/dist/parquet-loader.d.ts +3 -14
  91. package/dist/parquet-loader.d.ts.map +1 -1
  92. package/dist/parquet-loader.js +14 -2
  93. package/dist/parquet-worker.js +31 -38
  94. package/dist/parquet-worker.js.map +3 -3
  95. package/dist/parquet-writer.d.ts +2 -1
  96. package/dist/parquet-writer.d.ts.map +1 -1
  97. package/dist/parquet-writer.js +1 -0
  98. package/dist/parquetjs/encoder/parquet-encoder.d.ts +4 -4
  99. package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -1
  100. package/dist/parquetjs/parser/decoders.d.ts +2 -2
  101. package/dist/parquetjs/parser/decoders.d.ts.map +1 -1
  102. package/dist/parquetjs/parser/parquet-reader.d.ts +6 -6
  103. package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
  104. package/dist/parquetjs/parser/parquet-reader.js +1 -1
  105. package/dist/parquetjs/schema/declare.d.ts +6 -5
  106. package/dist/parquetjs/schema/declare.d.ts.map +1 -1
  107. package/dist/parquetjs/schema/declare.js +3 -3
  108. package/dist/parquetjs/schema/schema.d.ts +4 -4
  109. package/dist/parquetjs/schema/schema.d.ts.map +1 -1
  110. package/dist/parquetjs/schema/schema.js +5 -5
  111. package/dist/parquetjs/schema/shred.d.ts +17 -111
  112. package/dist/parquetjs/schema/shred.d.ts.map +1 -1
  113. package/dist/parquetjs/schema/shred.js +127 -119
  114. package/package.json +8 -8
  115. package/src/index.ts +32 -9
  116. package/src/lib/arrow/convert-row-group-to-columns.ts +2 -2
  117. package/src/lib/arrow/convert-schema-from-parquet.ts +56 -66
  118. package/src/lib/arrow/convert-schema-to-parquet.ts +32 -44
  119. package/src/lib/geo/decode-geo-metadata.ts +17 -8
  120. package/src/lib/parsers/parse-parquet-to-columns.ts +22 -11
  121. package/src/lib/parsers/parse-parquet-to-rows.ts +28 -23
  122. package/src/lib/wasm/parse-parquet-wasm.ts +7 -7
  123. package/src/parquet-loader.ts +25 -2
  124. package/src/parquet-writer.ts +4 -1
  125. package/src/parquetjs/encoder/parquet-encoder.ts +11 -10
  126. package/src/parquetjs/parser/decoders.ts +3 -3
  127. package/src/parquetjs/parser/parquet-reader.ts +7 -7
  128. package/src/parquetjs/schema/declare.ts +6 -5
  129. package/src/parquetjs/schema/schema.ts +8 -8
  130. package/src/parquetjs/schema/shred.ts +142 -103
@@ -1 +1 @@
- {"version":3,"file":"convert-schema-from-parquet.js","names":["Schema","Struct","Field","Bool","Float64","Int32","Float32","Binary","Utf8","Int64","Uint16","Uint32","Uint64","Int8","Int16","PARQUET_TYPE_MAPPING","BOOLEAN","INT32","INT64","INT96","FLOAT","DOUBLE","BYTE_ARRAY","FIXED_LEN_BYTE_ARRAY","UTF8","DATE","TIME_MILLIS","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","UINT_32","UINT_64","INT_8","INT_16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","convertSchemaFromParquet","parquetSchema","parquetMetadata","fields","getFields","schema","metadata","getSchemaMetadata","name","field","childFields","nestedField","optional","push","FieldType","type","getFieldMetadata","arrowField","Map","key","value","stringify","set","keyValueList","key_value_metadata"],"sources":["../../../../src/lib/arrow/convert-schema-from-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {FieldDefinition, ParquetField, ParquetType} from '../../parquetjs/schema/declare';\nimport {FileMetaData} from '@loaders.gl/parquet/parquetjs/parquet-thrift';\n\nimport {\n Schema,\n Struct,\n Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interval type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertSchemaFromParquet(\n parquetSchema: ParquetSchema,\n parquetMetadata?: FileMetaData\n): Schema {\n const fields = getFields(parquetSchema.schema);\n const metadata = parquetMetadata && getSchemaMetadata(parquetMetadata);\n return new Schema(fields, metadata);\n}\n\nfunction getFields(schema: FieldDefinition): Field[] {\n const fields: Field[] = [];\n\n for (const name in schema) {\n const field = schema[name];\n\n if (field.fields) {\n const childFields = getFields(field.fields);\n const nestedField = new Field(name, new Struct(childFields), field.optional);\n fields.push(nestedField);\n } else {\n const FieldType = PARQUET_TYPE_MAPPING[field.type];\n const metadata = getFieldMetadata(field);\n const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n fields.push(arrowField);\n }\n }\n\n return fields;\n}\n\nfunction getFieldMetadata(field: ParquetField): Map<string, string> {\n const metadata = new Map();\n\n for (const key in field) {\n if (key !== 'name') {\n let value = field[key] || '';\n value = typeof field[key] !== 'string' ? 
JSON.stringify(field[key]) : field[key];\n metadata.set(key, value);\n }\n }\n\n return metadata;\n}\n\nfunction getSchemaMetadata(parquetMetadata: FileMetaData): Map<string, string> {\n const metadata = new Map();\n\n const keyValueList = parquetMetadata.key_value_metadata || [];\n for (const {key, value} of keyValueList) {\n if (typeof value === 'string') {\n metadata.set(key, value);\n }\n }\n\n return metadata;\n}\n"],"mappings":"AAMA,SACEA,MAAM,EACNC,MAAM,EACNC,KAAK,EAELC,IAAI,EACJC,OAAO,EACPC,KAAK,EACLC,OAAO,EACPC,MAAM,EACNC,IAAI,EACJC,KAAK,EACLC,MAAM,EACNC,MAAM,EACNC,MAAM,EACNC,IAAI,EACJC,KAAK,QACA,oBAAoB;AAE3B,OAAO,MAAMC,oBAA8D,GAAG;EAC5EC,OAAO,EAAEb,IAAI;EACbc,KAAK,EAAEZ,KAAK;EACZa,KAAK,EAAEd,OAAO;EACde,KAAK,EAAEf,OAAO;EACdgB,KAAK,EAAEd,OAAO;EACde,MAAM,EAAEjB,OAAO;EACfkB,UAAU,EAAEf,MAAM;EAClBgB,oBAAoB,EAAEhB,MAAM;EAC5BiB,IAAI,EAAEhB,IAAI;EACViB,IAAI,EAAEpB,KAAK;EACXqB,WAAW,EAAEjB,KAAK;EAClBkB,WAAW,EAAElB,KAAK;EAClBmB,gBAAgB,EAAEnB,KAAK;EACvBoB,gBAAgB,EAAEpB,KAAK;EACvBqB,MAAM,EAAEzB,KAAK;EACb0B,OAAO,EAAErB,MAAM;EACfsB,OAAO,EAAErB,MAAM;EACfsB,OAAO,EAAErB,MAAM;EACfsB,KAAK,EAAErB,IAAI;EACXsB,MAAM,EAAErB,KAAK;EACbsB,MAAM,EAAE/B,KAAK;EACbgC,MAAM,EAAE5B,KAAK;EACb6B,IAAI,EAAE/B,MAAM;EACZgC,IAAI,EAAEhC,MAAM;EAEZiC,QAAQ,EAAEjC,MAAM;EAChBkC,aAAa,EAAEnC,OAAO;EACtBoC,aAAa,EAAEtC,OAAO;EACtBuC,kBAAkB,EAAEvC,OAAO;EAC3BwC,4BAA4B,EAAExC;AAChC,CAAC;AAED,OAAO,SAASyC,wBAAwBA,CACtCC,aAA4B,EAC5BC,eAA8B,EACtB;EACR,MAAMC,MAAM,GAAGC,SAAS,CAACH,aAAa,CAACI,MAAM,CAAC;EAC9C,MAAMC,QAAQ,GAAGJ,eAAe,IAAIK,iBAAiB,CAACL,eAAe,CAAC;EACtE,OAAO,IAAI/C,MAAM,CAACgD,MAAM,EAAEG,QAAQ,CAAC;AACrC;AAEA,SAASF,SAASA,CAACC,MAAuB,EAAW;EACnD,MAAMF,MAAe,GAAG,EAAE;EAE1B,KAAK,MAAMK,IAAI,IAAIH,MAAM,EAAE;IACzB,MAAMI,KAAK,GAAGJ,MAAM,CAACG,IAAI,CAAC;IAE1B,IAAIC,KAAK,CAACN,MAAM,EAAE;MAChB,MAAMO,WAAW,GAAGN,SAAS,CAACK,KAAK,CAACN,MAAM,CAAC;MAC3C,MAAMQ,WAAW,GAAG,IAAItD,KAAK,CAACmD,IAAI,EAAE,IAAIpD,MAAM,CAACsD,WAAW,CAAC,EAAED,KAAK,CAACG,QAAQ,CAAC;MAC5ET,MAAM,CAACU,IAAI,CAACF,WAAW,CAAC;IAC1B,CAAC,MAAM;MACL,MAAMG,SAAS,GAAG5C,oBAAoB,CAACuC,KAAK,CAACM,IAAI,CAAC;MAClD,MAAMT,QAAQ,GAAGU,gBAAgB,CAACP,KAAK,CAAC;MACxC,MAAMQ,UAAU,GAAG,IAAI5D,KAAK,CAACmD,IAAI,EAAE,IAAIM,SAAS,CAAC,CAAC,EAAEL,KAAK,CAACG,QAAQ,EAAEN,QAAQ,CAAC;MAC7EH,MAAM,CAACU,IAAI,CAACI,UAAU,CAAC;IACzB;EACF;EAEA,OAAOd,MAAM;AACf;AAEA,SAASa,gBAAgBA,CAACP,KAAmB,EAAuB;EAClE,MAAMH,QAAQ,GAAG,IAAIY,GAAG,CAAC,CAAC;EAE1B,KAAK,MAAMC,GAAG,IAAIV,KAAK,EAAE;IACvB,IAAIU,GAAG,KAAK,MAAM,EAAE;MAClB,IAAIC,KAAK,GAAGX,KAAK,CAACU,GAAG,CAAC,IAAI,EAAE;MAC5BC,KAAK,GAAG,OAAOX,KAAK,CAACU,GAAG,CAAC,KAAK,QAAQ,GAAG1B,IAAI,CAAC4B,SAAS,CAACZ,KAAK,CAACU,GAAG,CAAC,CAAC,GAAGV,KAAK,CAACU,GAAG,CAAC;MAChFb,QAAQ,CAACgB,GAAG,CAACH,GAAG,EAAEC,KAAK,CAAC;IAC1B;EACF;EAEA,OAAOd,QAAQ;AACjB;AAEA,SAASC,iBAAiBA,CAACL,eAA6B,EAAuB;EAC7E,MAAMI,QAAQ,GAAG,IAAIY,GAAG,CAAC,CAAC;EAE1B,MAAMK,YAAY,GAAGrB,eAAe,CAACsB,kBAAkB,IAAI,EAAE;EAC7D,KAAK,MAAM;IAACL,GAAG;IAAEC;EAAK,CAAC,IAAIG,YAAY,EAAE;IACvC,IAAI,OAAOH,KAAK,KAAK,QAAQ,EAAE;MAC7Bd,QAAQ,CAACgB,GAAG,CAACH,GAAG,EAAEC,KAAK,CAAC;IAC1B;EACF;EAEA,OAAOd,QAAQ;AACjB"}
+ {"version":3,"file":"convert-schema-from-parquet.js","names":["PARQUET_TYPE_MAPPING","BOOLEAN","INT32","INT64","INT96","FLOAT","DOUBLE","BYTE_ARRAY","FIXED_LEN_BYTE_ARRAY","UTF8","DATE","TIME_MILLIS","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","UINT_32","UINT_64","INT_8","INT_16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","convertParquetSchema","parquetSchema","parquetMetadata","fields","getFields","schema","metadata","getSchemaMetadata","name","field","children","push","type","nullable","optional","getFieldMetadata","arrowField","key","value","stringify","keyValueList","key_value_metadata"],"sources":["../../../../src/lib/arrow/convert-schema-from-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport {Schema, Field, DataType} from '@loaders.gl/schema';\n\nimport type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {FieldDefinition, ParquetField, ParquetType} from '../../parquetjs/schema/declare';\nimport {FileMetaData} from '../../parquetjs/parquet-thrift';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: DataType} = {\n BOOLEAN: 'bool',\n INT32: 'int32',\n INT64: 'float64',\n INT96: 'float64',\n FLOAT: 'float32',\n DOUBLE: 'float64',\n BYTE_ARRAY: 'binary',\n FIXED_LEN_BYTE_ARRAY: 'binary',\n UTF8: 'utf8',\n DATE: 'int32',\n TIME_MILLIS: 'int64',\n TIME_MICROS: 'int64',\n TIMESTAMP_MILLIS: 'int64',\n TIMESTAMP_MICROS: 'int64',\n UINT_8: 'int32',\n UINT_16: 'uint16',\n UINT_32: 'uint32',\n UINT_64: 'uint64',\n INT_8: 'int8',\n INT_16: 'int16',\n INT_32: 'int32',\n INT_64: 'int64',\n JSON: 'binary',\n BSON: 'binary',\n // TODO check interal type\n INTERVAL: 'binary',\n DECIMAL_INT32: 'float32',\n DECIMAL_INT64: 'float64',\n DECIMAL_BYTE_ARRAY: 'float64',\n DECIMAL_FIXED_LEN_BYTE_ARRAY: 'float64'\n};\n\nexport function convertParquetSchema(\n parquetSchema: ParquetSchema,\n parquetMetadata: FileMetaData | null\n): Schema {\n const fields = getFields(parquetSchema.schema);\n const metadata = parquetMetadata && getSchemaMetadata(parquetMetadata);\n\n const schema: Schema = {\n fields,\n metadata: metadata || {}\n };\n\n return schema;\n}\n\nfunction getFields(schema: FieldDefinition): Field[] {\n const fields: Field[] = [];\n\n for (const name in schema) {\n const field = schema[name];\n\n if (field.fields) {\n const children = getFields(field.fields);\n fields.push({name, type: {type: 'struct', children}, nullable: field.optional});\n } else {\n const type = PARQUET_TYPE_MAPPING[field.type];\n const metadata = getFieldMetadata(field);\n const arrowField = {name, type, nullable: field.optional, metadata};\n fields.push(arrowField);\n }\n }\n\n return fields;\n}\n\nfunction getFieldMetadata(field: ParquetField): Record<string, string> | undefined {\n let metadata: Record<string, string> | undefined;\n\n for (const key in field) {\n if (key !== 'name') {\n let value = field[key] || '';\n value = typeof field[key] !== 'string' ? 
JSON.stringify(field[key]) : field[key];\n metadata = metadata || {};\n metadata[key] = value;\n }\n }\n\n return metadata;\n}\n\nfunction getSchemaMetadata(parquetMetadata: FileMetaData): Record<string, string> | undefined {\n let metadata: Record<string, string> | undefined;\n\n const keyValueList = parquetMetadata.key_value_metadata || [];\n for (const {key, value} of keyValueList) {\n if (typeof value === 'string') {\n metadata = metadata || {};\n metadata[key] = value;\n }\n }\n\n return metadata;\n}\n"],"mappings":"AAQA,OAAO,MAAMA,oBAAuD,GAAG;EACrEC,OAAO,EAAE,MAAM;EACfC,KAAK,EAAE,OAAO;EACdC,KAAK,EAAE,SAAS;EAChBC,KAAK,EAAE,SAAS;EAChBC,KAAK,EAAE,SAAS;EAChBC,MAAM,EAAE,SAAS;EACjBC,UAAU,EAAE,QAAQ;EACpBC,oBAAoB,EAAE,QAAQ;EAC9BC,IAAI,EAAE,MAAM;EACZC,IAAI,EAAE,OAAO;EACbC,WAAW,EAAE,OAAO;EACpBC,WAAW,EAAE,OAAO;EACpBC,gBAAgB,EAAE,OAAO;EACzBC,gBAAgB,EAAE,OAAO;EACzBC,MAAM,EAAE,OAAO;EACfC,OAAO,EAAE,QAAQ;EACjBC,OAAO,EAAE,QAAQ;EACjBC,OAAO,EAAE,QAAQ;EACjBC,KAAK,EAAE,MAAM;EACbC,MAAM,EAAE,OAAO;EACfC,MAAM,EAAE,OAAO;EACfC,MAAM,EAAE,OAAO;EACfC,IAAI,EAAE,QAAQ;EACdC,IAAI,EAAE,QAAQ;EAEdC,QAAQ,EAAE,QAAQ;EAClBC,aAAa,EAAE,SAAS;EACxBC,aAAa,EAAE,SAAS;EACxBC,kBAAkB,EAAE,SAAS;EAC7BC,4BAA4B,EAAE;AAChC,CAAC;AAED,OAAO,SAASC,oBAAoBA,CAClCC,aAA4B,EAC5BC,eAAoC,EAC5B;EACR,MAAMC,MAAM,GAAGC,SAAS,CAACH,aAAa,CAACI,MAAM,CAAC;EAC9C,MAAMC,QAAQ,GAAGJ,eAAe,IAAIK,iBAAiB,CAACL,eAAe,CAAC;EAEtE,MAAMG,MAAc,GAAG;IACrBF,MAAM;IACNG,QAAQ,EAAEA,QAAQ,IAAI,CAAC;EACzB,CAAC;EAED,OAAOD,MAAM;AACf;AAEA,SAASD,SAASA,CAACC,MAAuB,EAAW;EACnD,MAAMF,MAAe,GAAG,EAAE;EAE1B,KAAK,MAAMK,IAAI,IAAIH,MAAM,EAAE;IACzB,MAAMI,KAAK,GAAGJ,MAAM,CAACG,IAAI,CAAC;IAE1B,IAAIC,KAAK,CAACN,MAAM,EAAE;MAChB,MAAMO,QAAQ,GAAGN,SAAS,CAACK,KAAK,CAACN,MAAM,CAAC;MACxCA,MAAM,CAACQ,IAAI,CAAC;QAACH,IAAI;QAAEI,IAAI,EAAE;UAACA,IAAI,EAAE,QAAQ;UAAEF;QAAQ,CAAC;QAAEG,QAAQ,EAAEJ,KAAK,CAACK;MAAQ,CAAC,CAAC;IACjF,CAAC,MAAM;MACL,MAAMF,IAAI,GAAG1C,oBAAoB,CAACuC,KAAK,CAACG,IAAI,CAAC;MAC7C,MAAMN,QAAQ,GAAGS,gBAAgB,CAACN,KAAK,CAAC;MACxC,MAAMO,UAAU,GAAG;QAACR,IAAI;QAAEI,IAAI;QAAEC,QAAQ,EAAEJ,KAAK,CAACK,QAAQ;QAAER;MAAQ,CAAC;MACnEH,MAAM,CAACQ,IAAI,CAACK,UAAU,CAAC;IACzB;EACF;EAEA,OAAOb,MAAM;AACf;AAEA,SAASY,gBAAgBA,CAACN,KAAmB,EAAsC;EACjF,IAAIH,QAA4C;EAEhD,KAAK,MAAMW,GAAG,IAAIR,KAAK,EAAE;IACvB,IAAIQ,GAAG,KAAK,MAAM,EAAE;MAClB,IAAIC,KAAK,GAAGT,KAAK,CAACQ,GAAG,CAAC,IAAI,EAAE;MAC5BC,KAAK,GAAG,OAAOT,KAAK,CAACQ,GAAG,CAAC,KAAK,QAAQ,GAAGxB,IAAI,CAAC0B,SAAS,CAACV,KAAK,CAACQ,GAAG,CAAC,CAAC,GAAGR,KAAK,CAACQ,GAAG,CAAC;MAChFX,QAAQ,GAAGA,QAAQ,IAAI,CAAC,CAAC;MACzBA,QAAQ,CAACW,GAAG,CAAC,GAAGC,KAAK;IACvB;EACF;EAEA,OAAOZ,QAAQ;AACjB;AAEA,SAASC,iBAAiBA,CAACL,eAA6B,EAAsC;EAC5F,IAAII,QAA4C;EAEhD,MAAMc,YAAY,GAAGlB,eAAe,CAACmB,kBAAkB,IAAI,EAAE;EAC7D,KAAK,MAAM;IAACJ,GAAG;IAAEC;EAAK,CAAC,IAAIE,YAAY,EAAE;IACvC,IAAI,OAAOF,KAAK,KAAK,QAAQ,EAAE;MAC7BZ,QAAQ,GAAGA,QAAQ,IAAI,CAAC,CAAC;MACzBA,QAAQ,CAACW,GAAG,CAAC,GAAGC,KAAK;IACvB;EACF;EAEA,OAAOZ,QAAQ;AACjB"}
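The sourcesContent embedded in the new map above shows the API change behind this file: `convertSchemaFromParquet` is renamed `convertParquetSchema`, and fields become plain objects rather than `new Field(...)` instances, with nested definitions expressed as `{type: 'struct', children}`. A minimal sketch of that field shape, using local type stubs rather than the real `@loaders.gl/schema` typings:

```ts
// Local stand-ins for the v4 shapes visible in the embedded source above;
// the real @loaders.gl/schema typings are richer (DataType is a wide union).
type DataType = string | {type: 'struct'; children: Field[]};
type Field = {
  name: string;
  type: DataType;
  nullable?: boolean;
  metadata?: Record<string, string>;
};

// Mirrors getFields(): a nested definition becomes a struct field whose
// children are converted recursively; leaves map through PARQUET_TYPE_MAPPING.
function structField(name: string, children: Field[], optional?: boolean): Field {
  return {name, type: {type: 'struct', children}, nullable: optional};
}

const geometry = structField('geometry', [
  {name: 'x', type: 'float64', nullable: false},
  {name: 'y', type: 'float64', nullable: false}
]);
console.log(JSON.stringify(geometry, null, 2));
```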
@@ -1,37 +1,39 @@
- import { Schema, Bool, Float64, Int32, Float32, Binary, Utf8, Int64, Uint16, Uint32, Uint64, Int8, Int16 } from '@loaders.gl/schema';
  export const PARQUET_TYPE_MAPPING = {
- BOOLEAN: Bool,
- INT32: Int32,
- INT64: Float64,
- INT96: Float64,
- FLOAT: Float32,
- DOUBLE: Float64,
- BYTE_ARRAY: Binary,
- FIXED_LEN_BYTE_ARRAY: Binary,
- UTF8: Utf8,
- DATE: Int32,
- TIME_MILLIS: Int64,
- TIME_MICROS: Int64,
- TIMESTAMP_MILLIS: Int64,
- TIMESTAMP_MICROS: Int64,
- UINT_8: Int32,
- UINT_16: Uint16,
- UINT_32: Uint32,
- UINT_64: Uint64,
- INT_8: Int8,
- INT_16: Int16,
- INT_32: Int32,
- INT_64: Int64,
- JSON: Binary,
- BSON: Binary,
- INTERVAL: Binary,
- DECIMAL_INT32: Float32,
- DECIMAL_INT64: Float64,
- DECIMAL_BYTE_ARRAY: Float64,
- DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64
+ BOOLEAN: 'bool',
+ INT32: 'int32',
+ INT64: 'float64',
+ INT96: 'float64',
+ FLOAT: 'float32',
+ DOUBLE: 'float64',
+ BYTE_ARRAY: 'binary',
+ FIXED_LEN_BYTE_ARRAY: 'binary',
+ UTF8: 'utf8',
+ DATE: 'int32',
+ TIME_MILLIS: 'int64',
+ TIME_MICROS: 'int64',
+ TIMESTAMP_MILLIS: 'int64',
+ TIMESTAMP_MICROS: 'int64',
+ UINT_8: 'int32',
+ UINT_16: 'uint16',
+ UINT_32: 'uint32',
+ UINT_64: 'uint64',
+ INT_8: 'int8',
+ INT_16: 'int16',
+ INT_32: 'int32',
+ INT_64: 'int64',
+ JSON: 'binary',
+ BSON: 'binary',
+ INTERVAL: 'binary',
+ DECIMAL_INT32: 'float32',
+ DECIMAL_INT64: 'float64',
+ DECIMAL_BYTE_ARRAY: 'float64',
+ DECIMAL_FIXED_LEN_BYTE_ARRAY: 'float64'
  };
  export function convertToParquetSchema(schema) {
  const fields = [];
- return new Schema(fields);
+ return {
+ fields,
+ metadata: {}
+ };
  }
  //# sourceMappingURL=convert-schema-to-parquet.js.map
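For consumers, this hunk means `PARQUET_TYPE_MAPPING` now yields string type tags instead of Arrow class constructors, and `convertToParquetSchema` returns a plain `{fields, metadata}` object rather than a `Schema` class instance. A sketch of the structural checks this implies, with a simplified `Schema` stub (an assumption, not the library's exact typing):

```ts
// Simplified stub of the v4 schema object returned above.
type Schema = {fields: unknown[]; metadata: Record<string, string>};

const schema: Schema = {fields: [], metadata: {}};

// v3 allowed `schema instanceof Schema`; with plain objects, checks become
// structural, and schemas are directly JSON-serializable.
const isSchema = (value: unknown): value is Schema =>
  typeof value === 'object' && value !== null && Array.isArray((value as Schema).fields);

console.log(isSchema(schema)); // true
console.log(JSON.stringify(schema)); // {"fields":[],"metadata":{}}
```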
@@ -1 +1 @@
- {"version":3,"file":"convert-schema-to-parquet.js","names":["Schema","Bool","Float64","Int32","Float32","Binary","Utf8","Int64","Uint16","Uint32","Uint64","Int8","Int16","PARQUET_TYPE_MAPPING","BOOLEAN","INT32","INT64","INT96","FLOAT","DOUBLE","BYTE_ARRAY","FIXED_LEN_BYTE_ARRAY","UTF8","DATE","TIME_MILLIS","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","UINT_32","UINT_64","INT_8","INT_16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","convertToParquetSchema","schema","fields"],"sources":["../../../../src/lib/arrow/convert-schema-to-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {\n // FieldDefinition, ParquetField,\n ParquetType\n} from '../../parquetjs/schema/declare';\n\nimport {\n Schema,\n // Struct,\n // Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interval type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertToParquetSchema(schema: Schema): Schema {\n const fields = []; // getFields(schema.fields);\n\n // TODO add metadata if needed.\n return new Schema(fields);\n}\n\n// function getFields(schema: Field[]): Definition[] {\n// const fields: Field[] = [];\n\n// for (const name in schema) {\n// const field = schema[name];\n\n// // @ts-ignore\n// const children = field.children as DataType[];\n// if (children) {\n// const childField = getFields(field.fields);\n// const nestedField = new Field(name, new Struct(childField), field.optional);\n// fields.push(nestedField);\n// } else {\n// const FieldType = PARQUET_TYPE_MAPPING[field.type];\n// const metadata = getFieldMetadata(field);\n// const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n// fields.push(arrowField);\n// }\n// }\n\n// return fields;\n// }\n\n// function getFieldMetadata(field: ParquetField): Map<string, string> {\n// const metadata = new Map();\n\n// for (const key in field) {\n// if (key !== 'name') {\n// const value = typeof field[key] !== 'string' ? 
JSON.stringify(field[key]) : field[key];\n// metadata.set(key, value);\n// }\n// }\n\n// return metadata;\n// }\n"],"mappings":"AAQA,SACEA,MAAM,EAINC,IAAI,EACJC,OAAO,EACPC,KAAK,EACLC,OAAO,EACPC,MAAM,EACNC,IAAI,EACJC,KAAK,EACLC,MAAM,EACNC,MAAM,EACNC,MAAM,EACNC,IAAI,EACJC,KAAK,QACA,oBAAoB;AAE3B,OAAO,MAAMC,oBAA8D,GAAG;EAC5EC,OAAO,EAAEb,IAAI;EACbc,KAAK,EAAEZ,KAAK;EACZa,KAAK,EAAEd,OAAO;EACde,KAAK,EAAEf,OAAO;EACdgB,KAAK,EAAEd,OAAO;EACde,MAAM,EAAEjB,OAAO;EACfkB,UAAU,EAAEf,MAAM;EAClBgB,oBAAoB,EAAEhB,MAAM;EAC5BiB,IAAI,EAAEhB,IAAI;EACViB,IAAI,EAAEpB,KAAK;EACXqB,WAAW,EAAEjB,KAAK;EAClBkB,WAAW,EAAElB,KAAK;EAClBmB,gBAAgB,EAAEnB,KAAK;EACvBoB,gBAAgB,EAAEpB,KAAK;EACvBqB,MAAM,EAAEzB,KAAK;EACb0B,OAAO,EAAErB,MAAM;EACfsB,OAAO,EAAErB,MAAM;EACfsB,OAAO,EAAErB,MAAM;EACfsB,KAAK,EAAErB,IAAI;EACXsB,MAAM,EAAErB,KAAK;EACbsB,MAAM,EAAE/B,KAAK;EACbgC,MAAM,EAAE5B,KAAK;EACb6B,IAAI,EAAE/B,MAAM;EACZgC,IAAI,EAAEhC,MAAM;EAEZiC,QAAQ,EAAEjC,MAAM;EAChBkC,aAAa,EAAEnC,OAAO;EACtBoC,aAAa,EAAEtC,OAAO;EACtBuC,kBAAkB,EAAEvC,OAAO;EAC3BwC,4BAA4B,EAAExC;AAChC,CAAC;AAED,OAAO,SAASyC,sBAAsBA,CAACC,MAAc,EAAU;EAC7D,MAAMC,MAAM,GAAG,EAAE;EAGjB,OAAO,IAAI7C,MAAM,CAAC6C,MAAM,CAAC;AAC3B"}
+ {"version":3,"file":"convert-schema-to-parquet.js","names":["PARQUET_TYPE_MAPPING","BOOLEAN","INT32","INT64","INT96","FLOAT","DOUBLE","BYTE_ARRAY","FIXED_LEN_BYTE_ARRAY","UTF8","DATE","TIME_MILLIS","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","UINT_32","UINT_64","INT_8","INT_16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","convertToParquetSchema","schema","fields","metadata"],"sources":["../../../../src/lib/arrow/convert-schema-to-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {\n // FieldDefinition, ParquetField,\n ParquetType\n} from '../../parquetjs/schema/declare';\n\nimport {\n Schema,\n // Struct,\n // Field,\n DataType\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: DataType} = {\n BOOLEAN: 'bool',\n INT32: 'int32',\n INT64: 'float64',\n INT96: 'float64',\n FLOAT: 'float32',\n DOUBLE: 'float64',\n BYTE_ARRAY: 'binary',\n FIXED_LEN_BYTE_ARRAY: 'binary',\n UTF8: 'utf8',\n DATE: 'int32',\n TIME_MILLIS: 'int64',\n TIME_MICROS: 'int64',\n TIMESTAMP_MILLIS: 'int64',\n TIMESTAMP_MICROS: 'int64',\n UINT_8: 'int32',\n UINT_16: 'uint16',\n UINT_32: 'uint32',\n UINT_64: 'uint64',\n INT_8: 'int8',\n INT_16: 'int16',\n INT_32: 'int32',\n INT_64: 'int64',\n JSON: 'binary',\n BSON: 'binary',\n // TODO check interval type\n INTERVAL: 'binary',\n DECIMAL_INT32: 'float32',\n DECIMAL_INT64: 'float64',\n DECIMAL_BYTE_ARRAY: 'float64',\n DECIMAL_FIXED_LEN_BYTE_ARRAY: 'float64'\n};\n\nexport function convertToParquetSchema(schema: Schema): Schema {\n const fields = []; // getFields(schema.fields);\n\n // TODO add metadata if needed.\n return {fields, metadata: {}};\n}\n\n// function getFields(schema: Field[]): Definition[] {\n// const fields: Field[] = [];\n\n// for (const name in schema) {\n// const field = schema[name];\n\n// // @ts-ignore\n// const children = field.children as DataType[];\n// if (children) {\n// const childField = getFields(field.fields);\n// const nestedField = new Field(name, new Struct(childField), field.optional);\n// fields.push(nestedField);\n// } else {\n// const FieldType = PARQUET_TYPE_MAPPING[field.type];\n// const metadata = getFieldMetadata(field);\n// const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n// fields.push(arrowField);\n// }\n// }\n\n// return fields;\n// }\n\n// function getFieldMetadata(field: ParquetField): Map<string, string> {\n// const metadata = new Map();\n\n// for (const key in field) {\n// if (key !== 'name') {\n// const value = typeof field[key] !== 'string' ? 
JSON.stringify(field[key]) : field[key];\n// metadata.set(key, value);\n// }\n// }\n\n// return metadata;\n// }\n"],"mappings":"AAeA,OAAO,MAAMA,oBAAuD,GAAG;EACrEC,OAAO,EAAE,MAAM;EACfC,KAAK,EAAE,OAAO;EACdC,KAAK,EAAE,SAAS;EAChBC,KAAK,EAAE,SAAS;EAChBC,KAAK,EAAE,SAAS;EAChBC,MAAM,EAAE,SAAS;EACjBC,UAAU,EAAE,QAAQ;EACpBC,oBAAoB,EAAE,QAAQ;EAC9BC,IAAI,EAAE,MAAM;EACZC,IAAI,EAAE,OAAO;EACbC,WAAW,EAAE,OAAO;EACpBC,WAAW,EAAE,OAAO;EACpBC,gBAAgB,EAAE,OAAO;EACzBC,gBAAgB,EAAE,OAAO;EACzBC,MAAM,EAAE,OAAO;EACfC,OAAO,EAAE,QAAQ;EACjBC,OAAO,EAAE,QAAQ;EACjBC,OAAO,EAAE,QAAQ;EACjBC,KAAK,EAAE,MAAM;EACbC,MAAM,EAAE,OAAO;EACfC,MAAM,EAAE,OAAO;EACfC,MAAM,EAAE,OAAO;EACfC,IAAI,EAAE,QAAQ;EACdC,IAAI,EAAE,QAAQ;EAEdC,QAAQ,EAAE,QAAQ;EAClBC,aAAa,EAAE,SAAS;EACxBC,aAAa,EAAE,SAAS;EACxBC,kBAAkB,EAAE,SAAS;EAC7BC,4BAA4B,EAAE;AAChC,CAAC;AAED,OAAO,SAASC,sBAAsBA,CAACC,MAAc,EAAU;EAC7D,MAAMC,MAAM,GAAG,EAAE;EAGjB,OAAO;IAACA,MAAM;IAAEC,QAAQ,EAAE,CAAC;EAAC,CAAC;AAC/B"}
@@ -1,5 +1,5 @@
  export function getGeoMetadata(schema) {
- const stringifiedGeoMetadata = schema.metadata.get('geo');
+ const stringifiedGeoMetadata = schema.metadata.geo;
  if (!stringifiedGeoMetadata) {
  return null;
  }
@@ -12,7 +12,7 @@ export function getGeoMetadata(schema) {
  }
  export function setGeoMetadata(schema, geoMetadata) {
  const stringifiedGeoMetadata = JSON.stringify(geoMetadata);
- schema.metadata.set('geo', stringifiedGeoMetadata);
+ schema.metadata.geo = stringifiedGeoMetadata;
  }
  export function unpackGeoMetadata(schema) {
  const geoMetadata = getGeoMetadata(schema);
@@ -25,17 +25,17 @@ export function unpackGeoMetadata(schema) {
  columns
  } = geoMetadata;
  if (version) {
- schema.metadata.set('geo.version', version);
+ schema.metadata['geo.version'] = version;
  }
  if (primary_column) {
- schema.metadata.set('geo.primary_column', primary_column);
+ schema.metadata['geo.primary_column'] = primary_column;
  }
- schema.metadata.set('geo.columns', Object.keys(columns || {}).join(''));
+ schema.metadata['geo.columns'] = Object.keys(columns || {}).join('');
  for (const [columnName, columnMetadata] of Object.entries(columns || {})) {
  const field = schema.fields.find(field => field.name === columnName);
  if (field) {
  if (field.name === primary_column) {
- field.metadata.set('geo.primary_field', 'true');
+ setFieldMetadata(field, 'geo.primary_field', 'true');
  }
  unpackGeoFieldMetadata(field, columnMetadata);
  }
@@ -45,14 +45,18 @@ function unpackGeoFieldMetadata(field, columnMetadata) {
  for (const [key, value] of Object.entries(columnMetadata || {})) {
  switch (key) {
  case 'geometry_type':
- field.metadata.set("geo.".concat(key), value.join(','));
+ setFieldMetadata(field, "geo.".concat(key), value.join(','));
  break;
  case 'bbox':
  case 'crs':
  case 'edges':
  default:
- field.metadata.set("geo.".concat(key), typeof value === 'string' ? value : JSON.stringify(value));
+ setFieldMetadata(field, "geo.".concat(key), typeof value === 'string' ? value : JSON.stringify(value));
  }
  }
  }
+ function setFieldMetadata(field, key, value) {
+ field.metadata = field.metadata || {};
+ field.metadata[key] = value;
+ }
  //# sourceMappingURL=decode-geo-metadata.js.map
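The pattern in this hunk is that `schema.metadata` changes from a `Map` to a plain `Record<string, string>`, so `.get()`/`.set()` calls become property reads and assignments. A self-contained sketch of `getGeoMetadata` under that assumption, with a simplified `Schema` stub:

```ts
// Simplified stub: metadata is a plain record in v4 (a Map in v3).
type Schema = {
  fields: {name: string; metadata?: Record<string, string>}[];
  metadata: Record<string, string>;
};

function getGeoMetadata(schema: Schema): unknown {
  // v3: schema.metadata.get('geo')  ->  v4: schema.metadata.geo
  const stringified = schema.metadata.geo;
  if (!stringified) {
    return null;
  }
  try {
    return JSON.parse(stringified);
  } catch {
    return null; // malformed 'geo' metadata is treated as absent
  }
}

const schema: Schema = {
  fields: [],
  metadata: {geo: '{"version":"1.0.0","primary_column":"geometry","columns":{}}'}
};
console.log(getGeoMetadata(schema));
// { version: '1.0.0', primary_column: 'geometry', columns: {} }
```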
@@ -1 +1 @@
- {"version":3,"file":"decode-geo-metadata.js","names":["getGeoMetadata","schema","stringifiedGeoMetadata","metadata","get","geoMetadata","JSON","parse","setGeoMetadata","stringify","set","unpackGeoMetadata","version","primary_column","columns","Object","keys","join","columnName","columnMetadata","entries","field","fields","find","name","unpackGeoFieldMetadata","key","value","concat"],"sources":["../../../../src/lib/geo/decode-geo-metadata.ts"],"sourcesContent":["// loaders.gl, MIT license\nimport {Schema, Field} from '@loaders.gl/schema';\n\n/* eslint-disable camelcase */\n\n/** A geoarrow / geoparquet geo metadata object (stored in stringified form in the top level metadata 'geo' key) */\nexport type GeoMetadata = {\n version?: string;\n primary_column?: string;\n columns: Record<string, GeoColumnMetadata>;\n [key: string]: unknown;\n};\n\n/** A geoarrow / geoparquet geo metadata for one geometry column */\nexport type GeoColumnMetadata = {\n bounding_box?:\n | [number, number, number, number]\n | [number, number, number, number, number, number];\n crs?: string;\n geometry_type?: string[];\n edges?: string;\n [key: string]: unknown;\n};\n\n/**\n * Reads the GeoMetadata object from the metadata\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata */\nexport function getGeoMetadata(schema: Schema): GeoMetadata | null {\n const stringifiedGeoMetadata = schema.metadata.get('geo');\n if (!stringifiedGeoMetadata) {\n return null;\n }\n\n try {\n const geoMetadata = JSON.parse(stringifiedGeoMetadata) as GeoMetadata;\n return geoMetadata;\n } catch {\n return null;\n }\n}\n\n/**\n * Stores a geoarrow / geoparquet geo metadata object in the schema\n * @note geoarrow / geoparquet geo metadata is a single stringified JSON field\n */\nexport function setGeoMetadata(schema: Schema, geoMetadata: GeoMetadata): void {\n const stringifiedGeoMetadata = JSON.stringify(geoMetadata);\n schema.metadata.set('geo', stringifiedGeoMetadata);\n}\n\n/**\n * Unpacks geo metadata into separate metadata fields (parses the long JSON string)\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata\n */\nexport function unpackGeoMetadata(schema: Schema): void {\n const geoMetadata = getGeoMetadata(schema);\n if (!geoMetadata) {\n return;\n }\n\n // Store Parquet Schema Level Metadata\n\n const {version, primary_column, columns} = geoMetadata;\n if (version) {\n schema.metadata.set('geo.version', version);\n }\n\n if (primary_column) {\n schema.metadata.set('geo.primary_column', primary_column);\n }\n\n // store column names as comma separated list\n schema.metadata.set('geo.columns', Object.keys(columns || {}).join(''));\n\n for (const [columnName, columnMetadata] of Object.entries(columns || {})) {\n const field = schema.fields.find((field) => field.name === columnName);\n if (field) {\n if (field.name === primary_column) {\n field.metadata.set('geo.primary_field', 'true');\n }\n unpackGeoFieldMetadata(field, columnMetadata);\n }\n }\n}\n\nfunction unpackGeoFieldMetadata(field: Field, columnMetadata): void {\n for (const [key, value] of Object.entries(columnMetadata || {})) {\n switch (key) {\n case 'geometry_type':\n field.metadata.set(`geo.${key}`, (value as string[]).join(','));\n break;\n case 'bbox':\n case 'crs':\n case 'edges':\n default:\n field.metadata.set(`geo.${key}`, typeof value === 'string' ? 
value : JSON.stringify(value));\n }\n }\n}\n"],"mappings":"AA2BA,OAAO,SAASA,cAAcA,CAACC,MAAc,EAAsB;EACjE,MAAMC,sBAAsB,GAAGD,MAAM,CAACE,QAAQ,CAACC,GAAG,CAAC,KAAK,CAAC;EACzD,IAAI,CAACF,sBAAsB,EAAE;IAC3B,OAAO,IAAI;EACb;EAEA,IAAI;IACF,MAAMG,WAAW,GAAGC,IAAI,CAACC,KAAK,CAACL,sBAAsB,CAAgB;IACrE,OAAOG,WAAW;EACpB,CAAC,CAAC,MAAM;IACN,OAAO,IAAI;EACb;AACF;AAMA,OAAO,SAASG,cAAcA,CAACP,MAAc,EAAEI,WAAwB,EAAQ;EAC7E,MAAMH,sBAAsB,GAAGI,IAAI,CAACG,SAAS,CAACJ,WAAW,CAAC;EAC1DJ,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,KAAK,EAAER,sBAAsB,CAAC;AACpD;AAMA,OAAO,SAASS,iBAAiBA,CAACV,MAAc,EAAQ;EACtD,MAAMI,WAAW,GAAGL,cAAc,CAACC,MAAM,CAAC;EAC1C,IAAI,CAACI,WAAW,EAAE;IAChB;EACF;EAIA,MAAM;IAACO,OAAO;IAAEC,cAAc;IAAEC;EAAO,CAAC,GAAGT,WAAW;EACtD,IAAIO,OAAO,EAAE;IACXX,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,aAAa,EAAEE,OAAO,CAAC;EAC7C;EAEA,IAAIC,cAAc,EAAE;IAClBZ,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,oBAAoB,EAAEG,cAAc,CAAC;EAC3D;EAGAZ,MAAM,CAACE,QAAQ,CAACO,GAAG,CAAC,aAAa,EAAEK,MAAM,CAACC,IAAI,CAACF,OAAO,IAAI,CAAC,CAAC,CAAC,CAACG,IAAI,CAAC,EAAE,CAAC,CAAC;EAEvE,KAAK,MAAM,CAACC,UAAU,EAAEC,cAAc,CAAC,IAAIJ,MAAM,CAACK,OAAO,CAACN,OAAO,IAAI,CAAC,CAAC,CAAC,EAAE;IACxE,MAAMO,KAAK,GAAGpB,MAAM,CAACqB,MAAM,CAACC,IAAI,CAAEF,KAAK,IAAKA,KAAK,CAACG,IAAI,KAAKN,UAAU,CAAC;IACtE,IAAIG,KAAK,EAAE;MACT,IAAIA,KAAK,CAACG,IAAI,KAAKX,cAAc,EAAE;QACjCQ,KAAK,CAAClB,QAAQ,CAACO,GAAG,CAAC,mBAAmB,EAAE,MAAM,CAAC;MACjD;MACAe,sBAAsB,CAACJ,KAAK,EAAEF,cAAc,CAAC;IAC/C;EACF;AACF;AAEA,SAASM,sBAAsBA,CAACJ,KAAY,EAAEF,cAAc,EAAQ;EAClE,KAAK,MAAM,CAACO,GAAG,EAAEC,KAAK,CAAC,IAAIZ,MAAM,CAACK,OAAO,CAACD,cAAc,IAAI,CAAC,CAAC,CAAC,EAAE;IAC/D,QAAQO,GAAG;MACT,KAAK,eAAe;QAClBL,KAAK,CAAClB,QAAQ,CAACO,GAAG,QAAAkB,MAAA,CAAQF,GAAG,GAAKC,KAAK,CAAcV,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/D;MACF,KAAK,MAAM;MACX,KAAK,KAAK;MACV,KAAK,OAAO;MACZ;QACEI,KAAK,CAAClB,QAAQ,CAACO,GAAG,QAAAkB,MAAA,CAAQF,GAAG,GAAI,OAAOC,KAAK,KAAK,QAAQ,GAAGA,KAAK,GAAGrB,IAAI,CAACG,SAAS,CAACkB,KAAK,CAAC,CAAC;IAC/F;EACF;AACF"}
+ {"version":3,"file":"decode-geo-metadata.js","names":["getGeoMetadata","schema","stringifiedGeoMetadata","metadata","geo","geoMetadata","JSON","parse","setGeoMetadata","stringify","unpackGeoMetadata","version","primary_column","columns","Object","keys","join","columnName","columnMetadata","entries","field","fields","find","name","setFieldMetadata","unpackGeoFieldMetadata","key","value","concat"],"sources":["../../../../src/lib/geo/decode-geo-metadata.ts"],"sourcesContent":["// loaders.gl, MIT license\nimport {Schema, Field} from '@loaders.gl/schema';\n\n/* eslint-disable camelcase */\n\n/** A geoarrow / geoparquet geo metadata object (stored in stringified form in the top level metadata 'geo' key) */\nexport type GeoMetadata = {\n version?: string;\n primary_column?: string;\n columns: Record<string, GeoColumnMetadata>;\n [key: string]: unknown;\n};\n\n/** A geoarrow / geoparquet geo metadata for one geometry column */\nexport type GeoColumnMetadata = {\n bounding_box?:\n | [number, number, number, number]\n | [number, number, number, number, number, number];\n crs?: string;\n geometry_type?: string[];\n edges?: string;\n [key: string]: unknown;\n};\n\n/**\n * Reads the GeoMetadata object from the metadata\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata */\nexport function getGeoMetadata(schema: Schema): GeoMetadata | null {\n const stringifiedGeoMetadata = schema.metadata.geo;\n if (!stringifiedGeoMetadata) {\n return null;\n }\n\n try {\n const geoMetadata = JSON.parse(stringifiedGeoMetadata) as GeoMetadata;\n return geoMetadata;\n } catch {\n return null;\n }\n}\n\n/**\n * Stores a geoarrow / geoparquet geo metadata object in the schema\n * @note geoarrow / geoparquet geo metadata is a single stringified JSON field\n */\nexport function setGeoMetadata(schema: Schema, geoMetadata: GeoMetadata): void {\n const stringifiedGeoMetadata = JSON.stringify(geoMetadata);\n schema.metadata.geo = stringifiedGeoMetadata;\n}\n\n/**\n * Unpacks geo metadata into separate metadata fields (parses the long JSON string)\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata\n */\nexport function unpackGeoMetadata(schema: Schema): void {\n const geoMetadata = getGeoMetadata(schema);\n if (!geoMetadata) {\n return;\n }\n\n // Store Parquet Schema Level Metadata\n\n const {version, primary_column, columns} = geoMetadata;\n if (version) {\n schema.metadata['geo.version'] = version;\n }\n\n if (primary_column) {\n schema.metadata['geo.primary_column'] = primary_column;\n }\n\n // store column names as comma separated list\n schema.metadata['geo.columns'] = Object.keys(columns || {}).join('');\n\n for (const [columnName, columnMetadata] of Object.entries(columns || {})) {\n const field = schema.fields.find((field) => field.name === columnName);\n if (field) {\n if (field.name === primary_column) {\n setFieldMetadata(field, 'geo.primary_field', 'true');\n }\n unpackGeoFieldMetadata(field, columnMetadata);\n }\n }\n}\n\nfunction unpackGeoFieldMetadata(field: Field, columnMetadata): void {\n for (const [key, value] of Object.entries(columnMetadata || {})) {\n switch (key) {\n case 'geometry_type':\n setFieldMetadata(field, `geo.${key}`, (value as string[]).join(','));\n break;\n case 'bbox':\n case 'crs':\n case 'edges':\n default:\n setFieldMetadata(\n field,\n `geo.${key}`,\n typeof value === 'string' ? 
value : JSON.stringify(value)\n );\n }\n }\n}\n\nfunction setFieldMetadata(field: Field, key: string, value: string): void {\n field.metadata = field.metadata || {};\n field.metadata[key] = value;\n}\n"],"mappings":"AA2BA,OAAO,SAASA,cAAcA,CAACC,MAAc,EAAsB;EACjE,MAAMC,sBAAsB,GAAGD,MAAM,CAACE,QAAQ,CAACC,GAAG;EAClD,IAAI,CAACF,sBAAsB,EAAE;IAC3B,OAAO,IAAI;EACb;EAEA,IAAI;IACF,MAAMG,WAAW,GAAGC,IAAI,CAACC,KAAK,CAACL,sBAAsB,CAAgB;IACrE,OAAOG,WAAW;EACpB,CAAC,CAAC,MAAM;IACN,OAAO,IAAI;EACb;AACF;AAMA,OAAO,SAASG,cAAcA,CAACP,MAAc,EAAEI,WAAwB,EAAQ;EAC7E,MAAMH,sBAAsB,GAAGI,IAAI,CAACG,SAAS,CAACJ,WAAW,CAAC;EAC1DJ,MAAM,CAACE,QAAQ,CAACC,GAAG,GAAGF,sBAAsB;AAC9C;AAMA,OAAO,SAASQ,iBAAiBA,CAACT,MAAc,EAAQ;EACtD,MAAMI,WAAW,GAAGL,cAAc,CAACC,MAAM,CAAC;EAC1C,IAAI,CAACI,WAAW,EAAE;IAChB;EACF;EAIA,MAAM;IAACM,OAAO;IAAEC,cAAc;IAAEC;EAAO,CAAC,GAAGR,WAAW;EACtD,IAAIM,OAAO,EAAE;IACXV,MAAM,CAACE,QAAQ,CAAC,aAAa,CAAC,GAAGQ,OAAO;EAC1C;EAEA,IAAIC,cAAc,EAAE;IAClBX,MAAM,CAACE,QAAQ,CAAC,oBAAoB,CAAC,GAAGS,cAAc;EACxD;EAGAX,MAAM,CAACE,QAAQ,CAAC,aAAa,CAAC,GAAGW,MAAM,CAACC,IAAI,CAACF,OAAO,IAAI,CAAC,CAAC,CAAC,CAACG,IAAI,CAAC,EAAE,CAAC;EAEpE,KAAK,MAAM,CAACC,UAAU,EAAEC,cAAc,CAAC,IAAIJ,MAAM,CAACK,OAAO,CAACN,OAAO,IAAI,CAAC,CAAC,CAAC,EAAE;IACxE,MAAMO,KAAK,GAAGnB,MAAM,CAACoB,MAAM,CAACC,IAAI,CAAEF,KAAK,IAAKA,KAAK,CAACG,IAAI,KAAKN,UAAU,CAAC;IACtE,IAAIG,KAAK,EAAE;MACT,IAAIA,KAAK,CAACG,IAAI,KAAKX,cAAc,EAAE;QACjCY,gBAAgB,CAACJ,KAAK,EAAE,mBAAmB,EAAE,MAAM,CAAC;MACtD;MACAK,sBAAsB,CAACL,KAAK,EAAEF,cAAc,CAAC;IAC/C;EACF;AACF;AAEA,SAASO,sBAAsBA,CAACL,KAAY,EAAEF,cAAc,EAAQ;EAClE,KAAK,MAAM,CAACQ,GAAG,EAAEC,KAAK,CAAC,IAAIb,MAAM,CAACK,OAAO,CAACD,cAAc,IAAI,CAAC,CAAC,CAAC,EAAE;IAC/D,QAAQQ,GAAG;MACT,KAAK,eAAe;QAClBF,gBAAgB,CAACJ,KAAK,SAAAQ,MAAA,CAASF,GAAG,GAAKC,KAAK,CAAcX,IAAI,CAAC,GAAG,CAAC,CAAC;QACpE;MACF,KAAK,MAAM;MACX,KAAK,KAAK;MACV,KAAK,OAAO;MACZ;QACEQ,gBAAgB,CACdJ,KAAK,SAAAQ,MAAA,CACEF,GAAG,GACV,OAAOC,KAAK,KAAK,QAAQ,GAAGA,KAAK,GAAGrB,IAAI,CAACG,SAAS,CAACkB,KAAK,CAC1D,CAAC;IACL;EACF;AACF;AAEA,SAASH,gBAAgBA,CAACJ,KAAY,EAAEM,GAAW,EAAEC,KAAa,EAAQ;EACxEP,KAAK,CAACjB,QAAQ,GAAGiB,KAAK,CAACjB,QAAQ,IAAI,CAAC,CAAC;EACrCiB,KAAK,CAACjB,QAAQ,CAACuB,GAAG,CAAC,GAAGC,KAAK;AAC7B"}
@@ -1,29 +1,33 @@
  import { makeReadableFile } from '@loaders.gl/loader-utils';
  import { ParquetReader } from '../../parquetjs/parser/parquet-reader';
- import { convertSchemaFromParquet } from '../arrow/convert-schema-from-parquet';
- import { convertParquetRowGroupToColumns } from '../arrow/convert-row-group-to-columns';
+ import { convertParquetSchema } from '../arrow/convert-schema-from-parquet';
+ import { materializeColumns } from '../../parquetjs/schema/shred';
  import { unpackGeoMetadata } from '../geo/decode-geo-metadata';
  export async function parseParquetInColumns(arrayBuffer, options) {
  const blob = new Blob([arrayBuffer]);
  for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {
- return batch;
+ return {
+ shape: 'columnar-table',
+ schema: batch.schema,
+ data: batch.data
+ };
  }
- return null;
+ throw new Error('empty table');
  }
  export async function* parseParquetFileInColumnarBatches(blob, options) {
  const file = makeReadableFile(blob);
  const reader = new ParquetReader(file);
  const parquetSchema = await reader.getSchema();
  const parquetMetadata = await reader.getFileMetadata();
- const schema = convertSchemaFromParquet(parquetSchema, parquetMetadata);
+ const schema = convertParquetSchema(parquetSchema, parquetMetadata);
  unpackGeoMetadata(schema);
  const rowGroups = reader.rowGroupIterator(options === null || options === void 0 ? void 0 : options.parquet);
  for await (const rowGroup of rowGroups) {
- yield convertRowGroupToTableBatch(schema, rowGroup);
+ yield convertRowGroupToTableBatch(parquetSchema, rowGroup, schema);
  }
  }
- function convertRowGroupToTableBatch(schema, rowGroup) {
- const data = convertParquetRowGroupToColumns(schema, rowGroup);
+ function convertRowGroupToTableBatch(parquetSchema, rowGroup, schema) {
+ const data = materializeColumns(parquetSchema, rowGroup);
  return {
  shape: 'columnar-table',
  batchType: 'data',
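Per this hunk, `parseParquetInColumns` now resolves to a `ColumnarTable` and signals an empty file by throwing `'empty table'` instead of returning `null`, so callers move from a null check to try/catch. A sketch with a hypothetical stub standing in for the real parser:

```ts
// Simplified stand-in for the v4 return type.
type ColumnarTable = {shape: 'columnar-table'; schema: unknown; data: unknown};

// Stub with the v4 behavior shown above: no batches means an exception,
// not a null return. The real function lives in @loaders.gl/parquet.
async function parseParquetInColumns(arrayBuffer: ArrayBuffer): Promise<ColumnarTable> {
  throw new Error('empty table');
}

async function main(): Promise<void> {
  try {
    const table = await parseParquetInColumns(new ArrayBuffer(0));
    console.log(table.shape);
  } catch (error) {
    // v3 callers checked `if (!table)`; v4 callers handle the exception.
    console.log((error as Error).message); // 'empty table'
  }
}
main();
```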
@@ -1 +1 @@
- {"version":3,"file":"parse-parquet-to-columns.js","names":["makeReadableFile","ParquetReader","convertSchemaFromParquet","convertParquetRowGroupToColumns","unpackGeoMetadata","parseParquetInColumns","arrayBuffer","options","blob","Blob","batch","parseParquetFileInColumnarBatches","file","reader","parquetSchema","getSchema","parquetMetadata","getFileMetadata","schema","rowGroups","rowGroupIterator","parquet","rowGroup","convertRowGroupToTableBatch","data","shape","batchType","length","rowCount"],"sources":["../../../../src/lib/parsers/parse-parquet-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {ColumnarTableBatch, Schema} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {ParquetBuffer} from '../../parquetjs/schema/declare';\nimport {convertSchemaFromParquet} from '../arrow/convert-schema-from-parquet';\nimport {convertParquetRowGroupToColumns} from '../arrow/convert-row-group-to-columns';\nimport {unpackGeoMetadata} from '../geo/decode-geo-metadata';\n\nexport async function parseParquetInColumns(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInColumnarBatches(\n blob: Blob,\n options?: ParquetLoaderOptions\n): AsyncIterable<ColumnarTableBatch> {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const parquetSchema = await reader.getSchema();\n const parquetMetadata = await reader.getFileMetadata();\n const schema = convertSchemaFromParquet(parquetSchema, parquetMetadata);\n unpackGeoMetadata(schema);\n const rowGroups = reader.rowGroupIterator(options?.parquet);\n for await (const rowGroup of rowGroups) {\n yield convertRowGroupToTableBatch(schema, rowGroup);\n }\n}\n\nfunction convertRowGroupToTableBatch(schema: Schema, rowGroup: ParquetBuffer): ColumnarTableBatch {\n const data = convertParquetRowGroupToColumns(schema, rowGroup);\n return {\n shape: 'columnar-table',\n batchType: 'data',\n schema,\n data,\n length: rowGroup.rowCount\n 
};\n}\n"],"mappings":"AAIA,SAAQA,gBAAgB,QAAO,0BAA0B;AAEzD,SAAQC,aAAa,QAAO,uCAAuC;AAEnE,SAAQC,wBAAwB,QAAO,sCAAsC;AAC7E,SAAQC,+BAA+B,QAAO,uCAAuC;AACrF,SAAQC,iBAAiB,QAAO,4BAA4B;AAE5D,OAAO,eAAeC,qBAAqBA,CACzCC,WAAwB,EACxBC,OAA8B,EAC9B;EACA,MAAMC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,WAAW,CAAC,CAAC;EACpC,WAAW,MAAMI,KAAK,IAAIC,iCAAiC,CAACH,IAAI,EAAED,OAAO,CAAC,EAAE;IAC1E,OAAOG,KAAK;EACd;EACA,OAAO,IAAI;AACb;AAEA,OAAO,gBAAgBC,iCAAiCA,CACtDH,IAAU,EACVD,OAA8B,EACK;EACnC,MAAMK,IAAI,GAAGZ,gBAAgB,CAACQ,IAAI,CAAC;EACnC,MAAMK,MAAM,GAAG,IAAIZ,aAAa,CAACW,IAAI,CAAC;EACtC,MAAME,aAAa,GAAG,MAAMD,MAAM,CAACE,SAAS,CAAC,CAAC;EAC9C,MAAMC,eAAe,GAAG,MAAMH,MAAM,CAACI,eAAe,CAAC,CAAC;EACtD,MAAMC,MAAM,GAAGhB,wBAAwB,CAACY,aAAa,EAAEE,eAAe,CAAC;EACvEZ,iBAAiB,CAACc,MAAM,CAAC;EACzB,MAAMC,SAAS,GAAGN,MAAM,CAACO,gBAAgB,CAACb,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEc,OAAO,CAAC;EAC3D,WAAW,MAAMC,QAAQ,IAAIH,SAAS,EAAE;IACtC,MAAMI,2BAA2B,CAACL,MAAM,EAAEI,QAAQ,CAAC;EACrD;AACF;AAEA,SAASC,2BAA2BA,CAACL,MAAc,EAAEI,QAAuB,EAAsB;EAChG,MAAME,IAAI,GAAGrB,+BAA+B,CAACe,MAAM,EAAEI,QAAQ,CAAC;EAC9D,OAAO;IACLG,KAAK,EAAE,gBAAgB;IACvBC,SAAS,EAAE,MAAM;IACjBR,MAAM;IACNM,IAAI;IACJG,MAAM,EAAEL,QAAQ,CAACM;EACnB,CAAC;AACH"}
+ {"version":3,"file":"parse-parquet-to-columns.js","names":["makeReadableFile","ParquetReader","convertParquetSchema","materializeColumns","unpackGeoMetadata","parseParquetInColumns","arrayBuffer","options","blob","Blob","batch","parseParquetFileInColumnarBatches","shape","schema","data","Error","file","reader","parquetSchema","getSchema","parquetMetadata","getFileMetadata","rowGroups","rowGroupIterator","parquet","rowGroup","convertRowGroupToTableBatch","batchType","length","rowCount"],"sources":["../../../../src/lib/parsers/parse-parquet-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {ColumnarTable, ColumnarTableBatch, Schema} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {ParquetRowGroup} from '../../parquetjs/schema/declare';\nimport {ParquetSchema} from '../../parquetjs/schema/schema';\nimport {convertParquetSchema} from '../arrow/convert-schema-from-parquet';\nimport {materializeColumns} from '../../parquetjs/schema/shred';\n// import {convertParquetRowGroupToColumns} from '../arrow/convert-row-group-to-columns';\nimport {unpackGeoMetadata} from '../geo/decode-geo-metadata';\n\nexport async function parseParquetInColumns(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n): Promise<ColumnarTable> {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {\n return {\n shape: 'columnar-table',\n schema: batch.schema,\n data: batch.data\n };\n }\n throw new Error('empty table');\n}\n\nexport async function* parseParquetFileInColumnarBatches(\n blob: Blob,\n options?: ParquetLoaderOptions\n): AsyncIterable<ColumnarTableBatch> {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const parquetSchema = await reader.getSchema();\n const parquetMetadata = await reader.getFileMetadata();\n const schema = convertParquetSchema(parquetSchema, parquetMetadata);\n unpackGeoMetadata(schema);\n const rowGroups = reader.rowGroupIterator(options?.parquet);\n for await (const rowGroup of rowGroups) {\n yield convertRowGroupToTableBatch(parquetSchema, rowGroup, schema);\n }\n}\n\nfunction convertRowGroupToTableBatch(\n parquetSchema: ParquetSchema,\n rowGroup: ParquetRowGroup,\n schema: Schema\n): ColumnarTableBatch {\n // const data = convertParquetRowGroupToColumns(schema, rowGroup);\n const data = materializeColumns(parquetSchema, rowGroup);\n return {\n shape: 'columnar-table',\n batchType: 'data',\n schema,\n data,\n length: rowGroup.rowCount\n 
};\n}\n"],"mappings":"AAIA,SAAQA,gBAAgB,QAAO,0BAA0B;AAEzD,SAAQC,aAAa,QAAO,uCAAuC;AAGnE,SAAQC,oBAAoB,QAAO,sCAAsC;AACzE,SAAQC,kBAAkB,QAAO,8BAA8B;AAE/D,SAAQC,iBAAiB,QAAO,4BAA4B;AAE5D,OAAO,eAAeC,qBAAqBA,CACzCC,WAAwB,EACxBC,OAA8B,EACN;EACxB,MAAMC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,WAAW,CAAC,CAAC;EACpC,WAAW,MAAMI,KAAK,IAAIC,iCAAiC,CAACH,IAAI,EAAED,OAAO,CAAC,EAAE;IAC1E,OAAO;MACLK,KAAK,EAAE,gBAAgB;MACvBC,MAAM,EAAEH,KAAK,CAACG,MAAM;MACpBC,IAAI,EAAEJ,KAAK,CAACI;IACd,CAAC;EACH;EACA,MAAM,IAAIC,KAAK,CAAC,aAAa,CAAC;AAChC;AAEA,OAAO,gBAAgBJ,iCAAiCA,CACtDH,IAAU,EACVD,OAA8B,EACK;EACnC,MAAMS,IAAI,GAAGhB,gBAAgB,CAACQ,IAAI,CAAC;EACnC,MAAMS,MAAM,GAAG,IAAIhB,aAAa,CAACe,IAAI,CAAC;EACtC,MAAME,aAAa,GAAG,MAAMD,MAAM,CAACE,SAAS,CAAC,CAAC;EAC9C,MAAMC,eAAe,GAAG,MAAMH,MAAM,CAACI,eAAe,CAAC,CAAC;EACtD,MAAMR,MAAM,GAAGX,oBAAoB,CAACgB,aAAa,EAAEE,eAAe,CAAC;EACnEhB,iBAAiB,CAACS,MAAM,CAAC;EACzB,MAAMS,SAAS,GAAGL,MAAM,CAACM,gBAAgB,CAAChB,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEiB,OAAO,CAAC;EAC3D,WAAW,MAAMC,QAAQ,IAAIH,SAAS,EAAE;IACtC,MAAMI,2BAA2B,CAACR,aAAa,EAAEO,QAAQ,EAAEZ,MAAM,CAAC;EACpE;AACF;AAEA,SAASa,2BAA2BA,CAClCR,aAA4B,EAC5BO,QAAyB,EACzBZ,MAAc,EACM;EAEpB,MAAMC,IAAI,GAAGX,kBAAkB,CAACe,aAAa,EAAEO,QAAQ,CAAC;EACxD,OAAO;IACLb,KAAK,EAAE,gBAAgB;IACvBe,SAAS,EAAE,MAAM;IACjBd,MAAM;IACNC,IAAI;IACJc,MAAM,EAAEH,QAAQ,CAACI;EACnB,CAAC;AACH"}
@@ -2,17 +2,28 @@ import { makeReadableFile } from '@loaders.gl/loader-utils';
  import { ParquetReader } from '../../parquetjs/parser/parquet-reader';
  export async function parseParquet(arrayBuffer, options) {
  const blob = new Blob([arrayBuffer]);
+ const rows = [];
  for await (const batch of parseParquetFileInBatches(blob, options)) {
- return batch;
+ for (const row of batch.data) {
+ rows.push(row);
+ }
  }
- return null;
+ return {
+ shape: 'object-row-table',
+ data: rows
+ };
  }
  export async function* parseParquetFileInBatches(blob, options) {
  const file = makeReadableFile(blob);
  const reader = new ParquetReader(file);
  const rowBatches = reader.rowBatchIterator(options === null || options === void 0 ? void 0 : options.parquet);
  for await (const rows of rowBatches) {
- yield rows;
+ yield {
+ shape: 'object-row-table',
+ data: rows,
+ batchType: 'data',
+ length: rows.length
+ };
  }
  }
  //# sourceMappingURL=parse-parquet-to-rows.js.map
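`parseParquetFileInBatches` now yields a typed batch envelope (`{shape, data, batchType, length}`) instead of a bare row array. A consumer sketch against a stand-in generator; the envelope shape is copied from the hunk, while the generator itself is hypothetical:

```ts
// Batch envelope as yielded in v4 (simplified row type).
type ObjectRowTableBatch = {
  shape: 'object-row-table';
  data: Record<string, unknown>[];
  batchType: 'data';
  length: number;
};

// Hypothetical stand-in for parseParquetFileInBatches.
async function* batches(): AsyncIterable<ObjectRowTableBatch> {
  yield {shape: 'object-row-table', data: [{id: 1}, {id: 2}], batchType: 'data', length: 2};
}

async function main(): Promise<void> {
  let total = 0;
  for await (const batch of batches()) {
    // v3 yielded the raw rows array; v4 wraps it, so rows are now batch.data.
    total += batch.length;
  }
  console.log(total); // 2
}
main();
```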
@@ -1 +1 @@
- {"version":3,"file":"parse-parquet-to-rows.js","names":["makeReadableFile","ParquetReader","parseParquet","arrayBuffer","options","blob","Blob","batch","parseParquetFileInBatches","file","reader","rowBatches","rowBatchIterator","parquet","rows"],"sources":["../../../../src/lib/parsers/parse-parquet-to-rows.ts"],"sourcesContent":["// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n// import {ColumnarTableBatch} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\n\nexport async function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions) {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const rowBatches = reader.rowBatchIterator(options?.parquet);\n for await (const rows of rowBatches) {\n yield rows;\n }\n}\n\n// export async function* parseParquetFileInColumnarBatches(blob: Blob, options?: {columnList?: string[][]}): AsyncIterable<ColumnarTableBatch> {\n// const rowGroupReader = new ParquetRowGroupReader({data: blob, columnList: options?.columnList});\n// try {\n// for await (const rowGroup of rowGroupReader) {\n// yield convertRowGroupToTableBatch(rowGroup);\n// }\n// } finally {\n// await rowGroupReader.close();\n// }\n// }\n\n// function convertRowGroupToTableBatch(rowGroup): ColumnarTableBatch {\n// // @ts-expect-error\n// return {\n// data: rowGroup\n// };\n// }\n"],"mappings":"AAEA,SAAQA,gBAAgB,QAAO,0BAA0B;AAEzD,SAAQC,aAAa,QAAO,uCAAuC;AAEnE,OAAO,eAAeC,YAAYA,CAACC,WAAwB,EAAEC,OAA8B,EAAE;EAC3F,MAAMC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,WAAW,CAAC,CAAC;EACpC,WAAW,MAAMI,KAAK,IAAIC,yBAAyB,CAACH,IAAI,EAAED,OAAO,CAAC,EAAE;IAClE,OAAOG,KAAK;EACd;EACA,OAAO,IAAI;AACb;AAEA,OAAO,gBAAgBC,yBAAyBA,CAACH,IAAU,EAAED,OAA8B,EAAE;EAC3F,MAAMK,IAAI,GAAGT,gBAAgB,CAACK,IAAI,CAAC;EACnC,MAAMK,MAAM,GAAG,IAAIT,aAAa,CAACQ,IAAI,CAAC;EACtC,MAAME,UAAU,GAAGD,MAAM,CAACE,gBAAgB,CAACR,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAES,OAAO,CAAC;EAC5D,WAAW,MAAMC,IAAI,IAAIH,UAAU,EAAE;IACnC,MAAMG,IAAI;EACZ;AACF"}
+ {"version":3,"file":"parse-parquet-to-rows.js","names":["makeReadableFile","ParquetReader","parseParquet","arrayBuffer","options","blob","Blob","rows","batch","parseParquetFileInBatches","row","data","push","shape","file","reader","rowBatches","rowBatchIterator","parquet","batchType","length"],"sources":["../../../../src/lib/parsers/parse-parquet-to-rows.ts"],"sourcesContent":["// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n// import {ColumnarTableBatch} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport {ObjectRowTable, ObjectRowTableBatch} from '@loaders.gl/schema';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport type {ParquetRow} from '../../parquetjs/schema/declare';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\n\nexport async function parseParquet(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n): Promise<ObjectRowTable> {\n const blob = new Blob([arrayBuffer]);\n\n const rows: ParquetRow[] = [];\n for await (const batch of parseParquetFileInBatches(blob, options)) {\n // we have only one input batch so return\n for (const row of batch.data) {\n rows.push(row);\n }\n }\n\n return {\n shape: 'object-row-table',\n // TODO - spread can fail for very large number of batches\n data: rows\n };\n}\n\nexport async function* parseParquetFileInBatches(\n blob: Blob,\n options?: ParquetLoaderOptions\n): AsyncIterable<ObjectRowTableBatch> {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const rowBatches = reader.rowBatchIterator(options?.parquet);\n for await (const rows of rowBatches) {\n yield {\n shape: 'object-row-table',\n data: rows,\n batchType: 'data',\n length: rows.length\n };\n }\n}\n"],"mappings":"AAEA,SAAQA,gBAAgB,QAAO,0BAA0B;AAIzD,SAAQC,aAAa,QAAO,uCAAuC;AAEnE,OAAO,eAAeC,YAAYA,CAChCC,WAAwB,EACxBC,OAA8B,EACL;EACzB,MAAMC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,WAAW,CAAC,CAAC;EAEpC,MAAMI,IAAkB,GAAG,EAAE;EAC7B,WAAW,MAAMC,KAAK,IAAIC,yBAAyB,CAACJ,IAAI,EAAED,OAAO,CAAC,EAAE;IAElE,KAAK,MAAMM,GAAG,IAAIF,KAAK,CAACG,IAAI,EAAE;MAC5BJ,IAAI,CAACK,IAAI,CAACF,GAAG,CAAC;IAChB;EACF;EAEA,OAAO;IACLG,KAAK,EAAE,kBAAkB;IAEzBF,IAAI,EAAEJ;EACR,CAAC;AACH;AAEA,OAAO,gBAAgBE,yBAAyBA,CAC9CJ,IAAU,EACVD,OAA8B,EACM;EACpC,MAAMU,IAAI,GAAGd,gBAAgB,CAACK,IAAI,CAAC;EACnC,MAAMU,MAAM,GAAG,IAAId,aAAa,CAACa,IAAI,CAAC;EACtC,MAAME,UAAU,GAAGD,MAAM,CAACE,gBAAgB,CAACb,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAEc,OAAO,CAAC;EAC5D,WAAW,MAAMX,IAAI,IAAIS,UAAU,EAAE;IACnC,MAAM;MACJH,KAAK,EAAE,kBAAkB;MACzBF,IAAI,EAAEJ,IAAI;MACVY,SAAS,EAAE,MAAM;MACjBC,MAAM,EAAEb,IAAI,CAACa;IACf,CAAC;EACH;AACF"}
@@ -1,6 +1,6 @@
- import { Table, RecordBatchStreamReader } from 'apache-arrow';
+ import { Table as ArrowTable, RecordBatchStreamReader } from 'apache-arrow';
  import { loadWasm } from './load-wasm/load-wasm-node';
- export async function parseParquet(arrayBuffer, options) {
+ export async function parseParquetWasm(arrayBuffer, options) {
  var _options$parquet;
  const wasmUrl = options === null || options === void 0 ? void 0 : (_options$parquet = options.parquet) === null || _options$parquet === void 0 ? void 0 : _options$parquet.wasmUrl;
  const wasm = await loadWasm(wasmUrl);
@@ -16,6 +16,6 @@ function tableFromIPC(input) {
  for (const recordBatch of reader) {
  recordBatches.push(recordBatch);
  }
- return new Table(recordBatches);
+ return new ArrowTable(recordBatches);
  }
  //# sourceMappingURL=parse-parquet-wasm.js.map
@@ -1 +1 @@
- {"version":3,"file":"parse-parquet-wasm.js","names":["Table","RecordBatchStreamReader","loadWasm","parseParquet","arrayBuffer","options","_options$parquet","wasmUrl","parquet","wasm","arr","Uint8Array","arrowIPCUint8Arr","readParquet","arrowIPCBuffer","buffer","slice","byteOffset","byteLength","arrowTable","tableFromIPC","input","reader","from","recordBatches","recordBatch","push"],"sources":["../../../../src/lib/wasm/parse-parquet-wasm.ts"],"sourcesContent":["// eslint-disable\nimport type {RecordBatch} from 'apache-arrow';\nimport type {LoaderOptions} from '@loaders.gl/loader-utils';\nimport {Table, RecordBatchStreamReader} from 'apache-arrow';\nimport {loadWasm} from './load-wasm/load-wasm-node';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'arrow-table';\n wasmUrl?: string;\n };\n};\n\nexport async function parseParquet(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n): Promise<Table> {\n const wasmUrl = options?.parquet?.wasmUrl;\n const wasm = await loadWasm(wasmUrl);\n\n const arr = new Uint8Array(arrayBuffer);\n const arrowIPCUint8Arr = wasm.readParquet(arr);\n const arrowIPCBuffer = arrowIPCUint8Arr.buffer.slice(\n arrowIPCUint8Arr.byteOffset,\n arrowIPCUint8Arr.byteLength + arrowIPCUint8Arr.byteOffset\n );\n const arrowTable = tableFromIPC(arrowIPCBuffer);\n return arrowTable;\n}\n\n/**\n * Deserialize the IPC format into a {@link Table}. This function is a\n * convenience wrapper for {@link RecordBatchReader}. Opposite of {@link tableToIPC}.\n */\nfunction tableFromIPC(input: ArrayBuffer): Table {\n const reader = RecordBatchStreamReader.from(input);\n const recordBatches: RecordBatch[] = [];\n for (const recordBatch of reader) {\n recordBatches.push(recordBatch);\n }\n return new Table(recordBatches);\n}\n"],"mappings":"AAGA,SAAQA,KAAK,EAAEC,uBAAuB,QAAO,cAAc;AAC3D,SAAQC,QAAQ,QAAO,4BAA4B;AASnD,OAAO,eAAeC,YAAYA,CAChCC,WAAwB,EACxBC,OAA8B,EACd;EAAA,IAAAC,gBAAA;EAChB,MAAMC,OAAO,GAAGF,OAAO,aAAPA,OAAO,wBAAAC,gBAAA,GAAPD,OAAO,CAAEG,OAAO,cAAAF,gBAAA,uBAAhBA,gBAAA,CAAkBC,OAAO;EACzC,MAAME,IAAI,GAAG,MAAMP,QAAQ,CAACK,OAAO,CAAC;EAEpC,MAAMG,GAAG,GAAG,IAAIC,UAAU,CAACP,WAAW,CAAC;EACvC,MAAMQ,gBAAgB,GAAGH,IAAI,CAACI,WAAW,CAACH,GAAG,CAAC;EAC9C,MAAMI,cAAc,GAAGF,gBAAgB,CAACG,MAAM,CAACC,KAAK,CAClDJ,gBAAgB,CAACK,UAAU,EAC3BL,gBAAgB,CAACM,UAAU,GAAGN,gBAAgB,CAACK,UACjD,CAAC;EACD,MAAME,UAAU,GAAGC,YAAY,CAACN,cAAc,CAAC;EAC/C,OAAOK,UAAU;AACnB;AAMA,SAASC,YAAYA,CAACC,KAAkB,EAAS;EAC/C,MAAMC,MAAM,GAAGrB,uBAAuB,CAACsB,IAAI,CAACF,KAAK,CAAC;EAClD,MAAMG,aAA4B,GAAG,EAAE;EACvC,KAAK,MAAMC,WAAW,IAAIH,MAAM,EAAE;IAChCE,aAAa,CAACE,IAAI,CAACD,WAAW,CAAC;EACjC;EACA,OAAO,IAAIzB,KAAK,CAACwB,aAAa,CAAC;AACjC"}
+ {"version":3,"file":"parse-parquet-wasm.js","names":["Table","ArrowTable","RecordBatchStreamReader","loadWasm","parseParquetWasm","arrayBuffer","options","_options$parquet","wasmUrl","parquet","wasm","arr","Uint8Array","arrowIPCUint8Arr","readParquet","arrowIPCBuffer","buffer","slice","byteOffset","byteLength","arrowTable","tableFromIPC","input","reader","from","recordBatches","recordBatch","push"],"sources":["../../../../src/lib/wasm/parse-parquet-wasm.ts"],"sourcesContent":["// eslint-disable\nimport type {RecordBatch} from 'apache-arrow';\nimport type {LoaderOptions} from '@loaders.gl/loader-utils';\nimport {Table as ArrowTable, RecordBatchStreamReader} from 'apache-arrow';\nimport {loadWasm} from './load-wasm/load-wasm-node';\n\nexport type ParquetWasmLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'arrow-table';\n wasmUrl?: string;\n };\n};\n\nexport async function parseParquetWasm(\n arrayBuffer: ArrayBuffer,\n options?: ParquetWasmLoaderOptions\n): Promise<ArrowTable> {\n const wasmUrl = options?.parquet?.wasmUrl;\n const wasm = await loadWasm(wasmUrl);\n\n const arr = new Uint8Array(arrayBuffer);\n const arrowIPCUint8Arr = wasm.readParquet(arr);\n const arrowIPCBuffer = arrowIPCUint8Arr.buffer.slice(\n arrowIPCUint8Arr.byteOffset,\n arrowIPCUint8Arr.byteLength + arrowIPCUint8Arr.byteOffset\n );\n const arrowTable = tableFromIPC(arrowIPCBuffer);\n return arrowTable;\n}\n\n/**\n * Deserialize the IPC format into a {@link Table}. This function is a\n * convenience wrapper for {@link RecordBatchReader}. Opposite of {@link tableToIPC}.\n */\nfunction tableFromIPC(input: ArrayBuffer): ArrowTable {\n const reader = RecordBatchStreamReader.from(input);\n const recordBatches: RecordBatch[] = [];\n for (const recordBatch of reader) {\n recordBatches.push(recordBatch);\n }\n return new ArrowTable(recordBatches);\n}\n"],"mappings":"AAGA,SAAQA,KAAK,IAAIC,UAAU,EAAEC,uBAAuB,QAAO,cAAc;AACzE,SAAQC,QAAQ,QAAO,4BAA4B;AASnD,OAAO,eAAeC,gBAAgBA,CACpCC,WAAwB,EACxBC,OAAkC,EACb;EAAA,IAAAC,gBAAA;EACrB,MAAMC,OAAO,GAAGF,OAAO,aAAPA,OAAO,wBAAAC,gBAAA,GAAPD,OAAO,CAAEG,OAAO,cAAAF,gBAAA,uBAAhBA,gBAAA,CAAkBC,OAAO;EACzC,MAAME,IAAI,GAAG,MAAMP,QAAQ,CAACK,OAAO,CAAC;EAEpC,MAAMG,GAAG,GAAG,IAAIC,UAAU,CAACP,WAAW,CAAC;EACvC,MAAMQ,gBAAgB,GAAGH,IAAI,CAACI,WAAW,CAACH,GAAG,CAAC;EAC9C,MAAMI,cAAc,GAAGF,gBAAgB,CAACG,MAAM,CAACC,KAAK,CAClDJ,gBAAgB,CAACK,UAAU,EAC3BL,gBAAgB,CAACM,UAAU,GAAGN,gBAAgB,CAACK,UACjD,CAAC;EACD,MAAME,UAAU,GAAGC,YAAY,CAACN,cAAc,CAAC;EAC/C,OAAOK,UAAU;AACnB;AAMA,SAASC,YAAYA,CAACC,KAAkB,EAAc;EACpD,MAAMC,MAAM,GAAGrB,uBAAuB,CAACsB,IAAI,CAACF,KAAK,CAAC;EAClD,MAAMG,aAA4B,GAAG,EAAE;EACvC,KAAK,MAAMC,WAAW,IAAIH,MAAM,EAAE;IAChCE,aAAa,CAACE,IAAI,CAACD,WAAW,CAAC;EACjC;EACA,OAAO,IAAIzB,UAAU,CAACwB,aAAa,CAAC;AACtC"}
@@ -1,4 +1,4 @@
- const VERSION = typeof "3.4.6" !== 'undefined' ? "3.4.6" : 'latest';
+ const VERSION = typeof "4.0.0-alpha.10" !== 'undefined' ? "4.0.0-alpha.10" : 'latest';
  const DEFAULT_PARQUET_LOADER_OPTIONS = {
  parquet: {
  type: 'object-row-table',
@@ -20,5 +20,17 @@ export const ParquetLoader = {
  tests: ['PAR1', 'PARE'],
  options: DEFAULT_PARQUET_LOADER_OPTIONS
  };
- export const _typecheckParquetLoader = ParquetLoader;
+ export const ParqueColumnnartLoader = {
+ name: 'Apache Parquet',
+ id: 'parquet',
+ module: 'parquet',
+ version: VERSION,
+ worker: true,
+ category: 'table',
+ extensions: ['parquet'],
+ mimeTypes: ['application/octet-stream'],
+ binary: true,
+ tests: ['PAR1', 'PARE'],
+ options: DEFAULT_PARQUET_LOADER_OPTIONS
+ };
  //# sourceMappingURL=parquet-loader.js.map
@@ -1 +1 @@
- {"version":3,"file":"parquet-loader.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","url","undefined","columnList","geoparquet","ParquetLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","_typecheckParquetLoader"],"sources":["../../src/parquet-loader.ts"],"sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'object-row-table';\n url?: string;\n columnList?: string[] | string[][];\n geoparquet?: boolean;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'object-row-table',\n url: undefined,\n columnList: [],\n geoparquet: true\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetLoader;\n"],"mappings":"AAIA,MAAMA,OAAO,GAAG,cAAkB,KAAK,WAAW,aAAiB,QAAQ;AAW3E,MAAMC,8BAAoD,GAAG;EAC3DC,OAAO,EAAE;IACPC,IAAI,EAAE,kBAAkB;IACxBC,GAAG,EAAEC,SAAS;IACdC,UAAU,EAAE,EAAE;IACdC,UAAU,EAAE;EACd;AACF,CAAC;AAGD,OAAO,MAAMC,aAAa,GAAG;EAC3BC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,SAAS;EACbC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAEZ,OAAO;EAChBa,MAAM,EAAE,IAAI;EACZC,QAAQ,EAAE,OAAO;EACjBC,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,MAAM,EAAE,IAAI;EACZC,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;EACvBC,OAAO,EAAElB;AACX,CAAC;AAED,OAAO,MAAMmB,uBAA+B,GAAGZ,aAAa"}
+ {"version":3,"file":"parquet-loader.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","url","undefined","columnList","geoparquet","ParquetLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","ParqueColumnnartLoader"],"sources":["../../src/parquet-loader.ts"],"sourcesContent":["// loaders.gl, MIT license\nimport type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport type {\n ObjectRowTable,\n ObjectRowTableBatch,\n ColumnarTable,\n ColumnarTableBatch\n} from '@loaders.gl/schema';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'object-row-table';\n url?: string;\n columnList?: string[] | string[][];\n geoparquet?: boolean;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'object-row-table',\n url: undefined,\n columnList: [],\n geoparquet: true\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetLoader: Loader<ObjectRowTable, ObjectRowTableBatch, ParquetLoaderOptions> = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const ParqueColumnnartLoader: Loader<\n ColumnarTable,\n ColumnarTableBatch,\n ParquetLoaderOptions\n> = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n"],"mappings":"AAWA,MAAMA,OAAO,GAAG,uBAAkB,KAAK,WAAW,sBAAiB,QAAQ;AAW3E,MAAMC,8BAAoD,GAAG;EAC3DC,OAAO,EAAE;IACPC,IAAI,EAAE,kBAAkB;IACxBC,GAAG,EAAEC,SAAS;IACdC,UAAU,EAAE,EAAE;IACdC,UAAU,EAAE;EACd;AACF,CAAC;AAGD,OAAO,MAAMC,aAAgF,GAAG;EAC9FC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,SAAS;EACbC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAEZ,OAAO;EAChBa,MAAM,EAAE,IAAI;EACZC,QAAQ,EAAE,OAAO;EACjBC,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,MAAM,EAAE,IAAI;EACZC,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;EACvBC,OAAO,EAAElB;AACX,CAAC;AAED,OAAO,MAAMmB,sBAIZ,GAAG;EACFX,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,SAAS;EACbC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAEZ,OAAO;EAChBa,MAAM,EAAE,IAAI;EACZC,QAAQ,EAAE,OAAO;EACjBC,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,MAAM,EAAE,IAAI;EACZC,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;EACvBC,OAAO,EAAElB;AACX,CAAC"}
@@ -1,4 +1,4 @@
- const VERSION = typeof "3.4.6" !== 'undefined' ? "3.4.6" : 'latest';
+ const VERSION = typeof "4.0.0-alpha.10" !== 'undefined' ? "4.0.0-alpha.10" : 'latest';
  const DEFAULT_PARQUET_LOADER_OPTIONS = {
  parquet: {
  type: 'arrow-table',
@@ -1 +1 @@
- {"version":3,"file":"parquet-wasm-loader.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","wasmUrl","ParquetWasmLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","_typecheckParquetLoader"],"sources":["../../src/parquet-wasm-loader.ts"],"sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'arrow-table';\n wasmUrl?: string;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'arrow-table',\n wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetWasmLoader = {\n name: 'Apache Parquet',\n id: 'parquet-wasm',\n module: 'parquet',\n version: VERSION,\n worker: false,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetWasmLoader;\n"],"mappings":"AAIA,MAAMA,OAAO,GAAG,cAAkB,KAAK,WAAW,aAAiB,QAAQ;AAS3E,MAAMC,8BAAoD,GAAG;EAC3DC,OAAO,EAAE;IACPC,IAAI,EAAE,aAAa;IACnBC,OAAO,EAAE;EACX;AACF,CAAC;AAGD,OAAO,MAAMC,iBAAiB,GAAG;EAC/BC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,cAAc;EAClBC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAET,OAAO;EAChBU,MAAM,EAAE,KAAK;EACbC,QAAQ,EAAE,OAAO;EACjBC,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,MAAM,EAAE,IAAI;EACZC,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;EACvBC,OAAO,EAAEf;AACX,CAAC;AAED,OAAO,MAAMgB,uBAA+B,GAAGZ,iBAAiB"}
+ {"version":3,"file":"parquet-wasm-loader.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","parquet","type","wasmUrl","ParquetWasmLoader","name","id","module","version","worker","category","extensions","mimeTypes","binary","tests","options","_typecheckParquetLoader"],"sources":["../../src/parquet-wasm-loader.ts"],"sourcesContent":["import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetLoaderOptions = LoaderOptions & {\n parquet?: {\n type?: 'arrow-table';\n wasmUrl?: string;\n };\n};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {\n parquet: {\n type: 'arrow-table',\n wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'\n }\n};\n\n/** ParquetJS table loader */\nexport const ParquetWasmLoader = {\n name: 'Apache Parquet',\n id: 'parquet-wasm',\n module: 'parquet',\n version: VERSION,\n worker: false,\n category: 'table',\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n binary: true,\n tests: ['PAR1', 'PARE'],\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nexport const _typecheckParquetLoader: Loader = ParquetWasmLoader;\n"],"mappings":"AAIA,MAAMA,OAAO,GAAG,uBAAkB,KAAK,WAAW,sBAAiB,QAAQ;AAS3E,MAAMC,8BAAoD,GAAG;EAC3DC,OAAO,EAAE;IACPC,IAAI,EAAE,aAAa;IACnBC,OAAO,EAAE;EACX;AACF,CAAC;AAGD,OAAO,MAAMC,iBAAiB,GAAG;EAC/BC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,cAAc;EAClBC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAET,OAAO;EAChBU,MAAM,EAAE,KAAK;EACbC,QAAQ,EAAE,OAAO;EACjBC,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,MAAM,EAAE,IAAI;EACZC,KAAK,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;EACvBC,OAAO,EAAEf;AACX,CAAC;AAED,OAAO,MAAMgB,uBAA+B,GAAGZ,iBAAiB"}
@@ -1,5 +1,5 @@
  import { encode } from './lib/wasm/encode-parquet-wasm';
- const VERSION = typeof "3.4.6" !== 'undefined' ? "3.4.6" : 'latest';
+ const VERSION = typeof "4.0.0-alpha.10" !== 'undefined' ? "4.0.0-alpha.10" : 'latest';
  const DEFAULT_PARQUET_WRITER_OPTIONS = {
  parquet: {
  wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'
@@ -1 +1 @@
- {"version":3,"file":"parquet-wasm-writer.js","names":["encode","VERSION","DEFAULT_PARQUET_WRITER_OPTIONS","parquet","wasmUrl","ParquetWasmWriter","name","id","module","version","extensions","mimeTypes","binary","options"],"sources":["../../src/parquet-wasm-writer.ts"],"sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\nimport {encode, ParquetWriterOptions} from './lib/wasm/encode-parquet-wasm';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nconst DEFAULT_PARQUET_WRITER_OPTIONS: ParquetWriterOptions = {\n parquet: {\n wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'\n }\n};\n\nexport const ParquetWasmWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet-wasm',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encode,\n binary: true,\n options: DEFAULT_PARQUET_WRITER_OPTIONS\n};\n"],"mappings":"AACA,SAAQA,MAAM,QAA6B,gCAAgC;AAI3E,MAAMC,OAAO,GAAG,cAAkB,KAAK,WAAW,aAAiB,QAAQ;AAE3E,MAAMC,8BAAoD,GAAG;EAC3DC,OAAO,EAAE;IACPC,OAAO,EAAE;EACX;AACF,CAAC;AAED,OAAO,MAAMC,iBAAyB,GAAG;EACvCC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,cAAc;EAClBC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAER,OAAO;EAChBS,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCX,MAAM;EACNY,MAAM,EAAE,IAAI;EACZC,OAAO,EAAEX;AACX,CAAC"}
+ {"version":3,"file":"parquet-wasm-writer.js","names":["encode","VERSION","DEFAULT_PARQUET_WRITER_OPTIONS","parquet","wasmUrl","ParquetWasmWriter","name","id","module","version","extensions","mimeTypes","binary","options"],"sources":["../../src/parquet-wasm-writer.ts"],"sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\nimport {encode, ParquetWriterOptions} from './lib/wasm/encode-parquet-wasm';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nconst DEFAULT_PARQUET_WRITER_OPTIONS: ParquetWriterOptions = {\n parquet: {\n wasmUrl: 'https://unpkg.com/parquet-wasm@0.3.1/esm2/arrow1_bg.wasm'\n }\n};\n\nexport const ParquetWasmWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet-wasm',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encode,\n binary: true,\n options: DEFAULT_PARQUET_WRITER_OPTIONS\n};\n"],"mappings":"AACA,SAAQA,MAAM,QAA6B,gCAAgC;AAI3E,MAAMC,OAAO,GAAG,uBAAkB,KAAK,WAAW,sBAAiB,QAAQ;AAE3E,MAAMC,8BAAoD,GAAG;EAC3DC,OAAO,EAAE;IACPC,OAAO,EAAE;EACX;AACF,CAAC;AAED,OAAO,MAAMC,iBAAyB,GAAG;EACvCC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,cAAc;EAClBC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAER,OAAO;EAChBS,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCX,MAAM;EACNY,MAAM,EAAE,IAAI;EACZC,OAAO,EAAEX;AACX,CAAC"}
@@ -1,4 +1,4 @@
- const VERSION = typeof "3.4.6" !== 'undefined' ? "3.4.6" : 'latest';
+ const VERSION = typeof "4.0.0-alpha.10" !== 'undefined' ? "4.0.0-alpha.10" : 'latest';
  const DEFAULT_PARQUET_LOADER_OPTIONS = {};
  export const ParquetWriter = {
  name: 'Apache Parquet',
@@ -1 +1 @@
- {"version":3,"file":"parquet-writer.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","ParquetWriter","name","id","module","version","extensions","mimeTypes","encodeSync","binary","options","data","ArrayBuffer"],"sources":["../../src/parquet-writer.ts"],"sourcesContent":["import type {Writer} from '@loaders.gl/loader-utils';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetWriterOptions = {};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS = {};\n\nexport const ParquetWriter: Writer = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encodeSync,\n binary: true,\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nfunction encodeSync(data, options?: ParquetWriterOptions) {\n return new ArrayBuffer(0);\n}\n"],"mappings":"AAIA,MAAMA,OAAO,GAAG,cAAkB,KAAK,WAAW,aAAiB,QAAQ;AAI3E,MAAMC,8BAA8B,GAAG,CAAC,CAAC;AAEzC,OAAO,MAAMC,aAAqB,GAAG;EACnCC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,SAAS;EACbC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAEN,OAAO;EAChBO,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,UAAU;EACVC,MAAM,EAAE,IAAI;EACZC,OAAO,EAAEV;AACX,CAAC;AAED,SAASQ,UAAUA,CAACG,IAAI,EAAED,OAA8B,EAAE;EACxD,OAAO,IAAIE,WAAW,CAAC,CAAC,CAAC;AAC3B"}
+ {"version":3,"file":"parquet-writer.js","names":["VERSION","DEFAULT_PARQUET_LOADER_OPTIONS","ParquetWriter","name","id","module","version","extensions","mimeTypes","encodeSync","binary","options","data","ArrayBuffer"],"sources":["../../src/parquet-writer.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport type {Writer} from '@loaders.gl/loader-utils';\nimport {Table, TableBatch} from '@loaders.gl/schema';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport type ParquetWriterOptions = {};\n\nconst DEFAULT_PARQUET_LOADER_OPTIONS = {};\n\nexport const ParquetWriter: Writer<Table, TableBatch, ParquetWriterOptions> = {\n name: 'Apache Parquet',\n id: 'parquet',\n module: 'parquet',\n version: VERSION,\n extensions: ['parquet'],\n mimeTypes: ['application/octet-stream'],\n encodeSync,\n binary: true,\n options: DEFAULT_PARQUET_LOADER_OPTIONS\n};\n\nfunction encodeSync(data, options?: ParquetWriterOptions) {\n return new ArrayBuffer(0);\n}\n"],"mappings":"AAOA,MAAMA,OAAO,GAAG,uBAAkB,KAAK,WAAW,sBAAiB,QAAQ;AAI3E,MAAMC,8BAA8B,GAAG,CAAC,CAAC;AAEzC,OAAO,MAAMC,aAA8D,GAAG;EAC5EC,IAAI,EAAE,gBAAgB;EACtBC,EAAE,EAAE,SAAS;EACbC,MAAM,EAAE,SAAS;EACjBC,OAAO,EAAEN,OAAO;EAChBO,UAAU,EAAE,CAAC,SAAS,CAAC;EACvBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,UAAU;EACVC,MAAM,EAAE,IAAI;EACZC,OAAO,EAAEV;AACX,CAAC;AAED,SAASQ,UAAUA,CAACG,IAAI,EAAED,OAA8B,EAAE;EACxD,OAAO,IAAIE,WAAW,CAAC,CAAC,CAAC;AAC3B"}