iparq-0.2.0-py3-none-any.whl → iparq-0.2.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
iparq/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "0.0.1"
+ __version__ = "0.2.6"
iparq/py.typed CHANGED
@@ -1 +0,0 @@
- # This empty file marks the package as typed for mypy
iparq/source.py CHANGED
@@ -1,3 +1,5 @@
+ import json
+ from enum import Enum
  from typing import List, Optional

  import pyarrow.parquet as pq
@@ -7,10 +9,19 @@ from rich import print
  from rich.console import Console
  from rich.table import Table

- app = typer.Typer()
+ app = typer.Typer(
+     help="Inspect Parquet files for metadata, compression, and bloom filters"
+ )
  console = Console()


+ class OutputFormat(str, Enum):
+     """Enum for output format options."""
+
+     RICH = "rich"
+     JSON = "json"
+
+
  class ParquetMetaModel(BaseModel):
      """
      ParquetMetaModel is a data model representing metadata for a Parquet file.
@@ -227,20 +238,59 @@ def print_column_info_table(column_info: ParquetColumnInfo) -> None:
      console.print(table)


- @app.command()
- def main(filename: str):
+ def output_json(
+     meta_model: ParquetMetaModel,
+     column_info: ParquetColumnInfo,
+     compression_codecs: set,
+ ) -> None:
      """
-     Main function to read and print Parquet file metadata.
+     Outputs the parquet information in JSON format.

      Args:
-         filename (str): The path to the Parquet file.
-
-     Returns:
-         Metadata of the Parquet file and the compression codecs used.
+         meta_model: The Parquet metadata model
+         column_info: The column information model
+         compression_codecs: Set of compression codecs used
+     """
+     result = {
+         "metadata": meta_model.model_dump(),
+         "columns": [column.model_dump() for column in column_info.columns],
+         "compression_codecs": list(compression_codecs),
+     }
+
+     print(json.dumps(result, indent=2))
+
+
+ @app.command(name="")
+ @app.command(name="inspect")
+ def inspect(
+     filename: str = typer.Argument(..., help="Path to the Parquet file to inspect"),
+     format: OutputFormat = typer.Option(
+         OutputFormat.RICH, "--format", "-f", help="Output format (rich or json)"
+     ),
+     metadata_only: bool = typer.Option(
+         False,
+         "--metadata-only",
+         "-m",
+         help="Show only file metadata without column details",
+     ),
+     column_filter: Optional[str] = typer.Option(
+         None, "--column", "-c", help="Filter results to show only specific column"
+     ),
+ ):
+     """
+     Inspect a Parquet file and display its metadata, compression settings, and bloom filter information.
      """
      (parquet_metadata, compression) = read_parquet_metadata(filename)

-     print_parquet_metadata(parquet_metadata)
+     # Create metadata model
+     meta_model = ParquetMetaModel(
+         created_by=parquet_metadata.created_by,
+         num_columns=parquet_metadata.num_columns,
+         num_rows=parquet_metadata.num_rows,
+         num_row_groups=parquet_metadata.num_row_groups,
+         format_version=str(parquet_metadata.format_version),
+         serialized_size=parquet_metadata.serialized_size,
+     )

      # Create a model to store column information
      column_info = ParquetColumnInfo()
@@ -249,10 +299,27 @@ def main(filename: str):
      print_compression_types(parquet_metadata, column_info)
      print_bloom_filter_info(parquet_metadata, column_info)

-     # Print the information as a table
-     print_column_info_table(column_info)
-
-     print(f"Compression codecs: {compression}")
+     # Filter columns if requested
+     if column_filter:
+         column_info.columns = [
+             col for col in column_info.columns if col.column_name == column_filter
+         ]
+         if not column_info.columns:
+             console.print(
+                 f"No columns match the filter: {column_filter}", style="yellow"
+             )
+
+     # Output based on format selection
+     if format == OutputFormat.JSON:
+         output_json(meta_model, column_info, compression)
+     else:  # Rich format
+         # Print the metadata
+         console.print(meta_model)
+
+         # Print column details if not metadata only
+         if not metadata_only:
+             print_column_info_table(column_info)
+             console.print(f"Compression codecs: {compression}")


  if __name__ == "__main__":
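Editor's note: the new `output_json` path above serializes the Pydantic models with `model_dump()` and prints them with `json.dumps`. The snippet below is a minimal, self-contained sketch of that same pattern for readers who want to try it in isolation; the `MetaModel`/`ColumnModel` classes and `dump_as_json` helper are simplified stand-ins invented for illustration (only the top-level `metadata`/`columns`/`compression_codecs` key layout comes from the diff), and it assumes Pydantic v2.

```python
# Sketch of the serialization pattern used by the new output_json() function.
# Model names and fields here are simplified stand-ins, not iparq's classes.
import json
from typing import List

from pydantic import BaseModel


class MetaModel(BaseModel):  # stand-in for ParquetMetaModel
    created_by: str
    num_rows: int


class ColumnModel(BaseModel):  # stand-in for one column entry
    row_group: int
    column_name: str
    compression: str


def dump_as_json(meta: MetaModel, columns: List[ColumnModel], codecs: set) -> None:
    # Same shape as output_json(): "metadata", "columns", "compression_codecs".
    result = {
        "metadata": meta.model_dump(),
        "columns": [c.model_dump() for c in columns],
        "compression_codecs": list(codecs),
    }
    print(json.dumps(result, indent=2))


if __name__ == "__main__":
    dump_as_json(
        MetaModel(created_by="example-writer", num_rows=3),
        [ColumnModel(row_group=0, column_name="r", compression="SNAPPY")],
        {"SNAPPY"},
    )
```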
iparq-0.2.6.dist-info/METADATA ADDED
@@ -0,0 +1,145 @@
+ Metadata-Version: 2.4
+ Name: iparq
+ Version: 0.2.6
+ Summary: Display version compression and bloom filter information about a parquet file
+ Author-email: MiguelElGallo <miguel.zurcher@gmail.com>
+ License-File: LICENSE
+ Requires-Python: >=3.9
+ Requires-Dist: pyarrow
+ Requires-Dist: pydantic
+ Requires-Dist: rich
+ Requires-Dist: typer
+ Provides-Extra: checks
+ Requires-Dist: mypy>=1.14.1; extra == 'checks'
+ Requires-Dist: ruff>=0.9.3; extra == 'checks'
+ Provides-Extra: test
+ Requires-Dist: pytest>=7.0; extra == 'test'
+ Description-Content-Type: text/markdown
+
+ # iparq
+
+ [![Python package](https://github.com/MiguelElGallo/iparq/actions/workflows/python-package.yml/badge.svg)](https://github.com/MiguelElGallo/iparq/actions/workflows/python-package.yml)
+
+ [![Dependabot Updates](https://github.com/MiguelElGallo/iparq/actions/workflows/dependabot/dependabot-updates/badge.svg)](https://github.com/MiguelElGallo/iparq/actions/workflows/dependabot/dependabot-updates)
+
+ [![Upload Python Package](https://github.com/MiguelElGallo/iparq/actions/workflows/python-publish.yml/badge.svg)](https://github.com/MiguelElGallo/iparq/actions/workflows/python-publish.yml)
+
+ ![alt text](media/iparq.png)
+ After reading [this blog](https://duckdb.org/2025/01/22/parquet-encodings.html), I began to wonder which Parquet version and compression methods the everyday tools we rely on actually use, only to find that there's no straightforward way to determine this. That curiosity and the difficulty of quickly discovering such details motivated me to create iparq (Information Parquet). My goal with iparq is to help users easily identify the specifics of the Parquet files generated by different engines, making it clear which features—like newer encodings or certain compression algorithms—the creator of the parquet is using.
+
+ ***New*** Bloom filters information: Displays if there are bloom filters.
+ Read more about bloom filters in this [great article](https://duckdb.org/2025/03/07/parquet-bloom-filters-in-duckdb.html).
+
+ ## Installation
+
+ ### Zero installation - Recommended
+
+ 1) Make sure to have Astral's UV installed by following the steps here:
+
+ <https://docs.astral.sh/uv/getting-started/installation/>
+
+ 2) Execute the following command:
+
+ ```sh
+ uvx --refresh iparq inspect yourparquet.parquet
+ ```
+
+ ### Using pip
+
+ 1) Install the package using pip:
+
+ ```sh
+ pip install iparq
+ ```
+
+ 2) Verify the installation by running:
+
+ ```sh
+ iparq --help
+ ```
+
+ ### Using uv
+
+ 1) Make sure to have Astral's UV installed by following the steps here:
+
+ <https://docs.astral.sh/uv/getting-started/installation/>
+
+ 2) Execute the following command:
+
+ ```sh
+ uv pip install iparq
+ ```
+
+ 3) Verify the installation by running:
+
+ ```sh
+ iparq --help
+ ```
+
+ ### Using Homebrew in a MAC
+
+ 1) Run the following:
+
+ ```sh
+ brew tap MiguelElGallo/tap https://github.com/MiguelElGallo//homebrew-iparq.git
+ brew install MiguelElGallo/tap/iparq
+ iparq --help
+ ```
+
+ ## Usage
+
+ iparq now supports additional options:
+
+ ```sh
+ iparq inspect <filename> [OPTIONS]
+ ```
+
+ Options include:
+
+ - `--format`, `-f`: Output format, either `rich` (default) or `json`
+ - `--metadata-only`, `-m`: Show only file metadata without column details
+ - `--column`, `-c`: Filter results to show only a specific column
+
+ Examples:
+
+ ```sh
+ # Output in JSON format
+ iparq inspect yourfile.parquet --format json
+
+ # Show only metadata
+ iparq inspect yourfile.parquet --metadata-only
+
+ # Filter to show only a specific column
+ iparq inspect yourfile.parquet --column column_name
+ ```
+
+ Replace `<filename>` with the path to your .parquet file. The utility will read the metadata of the file and print the compression codecs used in the parquet file.
+
+ ## Example ouput - Bloom Filters
+
+ ```log
+ ParquetMetaModel(
+     created_by='DuckDB version v1.2.1 (build 8e52ec4395)',
+     num_columns=1,
+     num_rows=100000000,
+     num_row_groups=10,
+     format_version='1.0',
+     serialized_size=1196
+ )
+ Parquet Column Information
+ ┏━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┓
+ ┃ Row Group ┃ Column Name ┃ Index ┃ Compression ┃ Bloom Filter ┃
+ ┡━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━┩
+ │ 0 │ r │ 0 │ SNAPPY │ ✅ │
+ │ 1 │ r │ 0 │ SNAPPY │ ✅ │
+ │ 2 │ r │ 0 │ SNAPPY │ ✅ │
+ │ 3 │ r │ 0 │ SNAPPY │ ✅ │
+ │ 4 │ r │ 0 │ SNAPPY │ ✅ │
+ │ 5 │ r │ 0 │ SNAPPY │ ✅ │
+ │ 6 │ r │ 0 │ SNAPPY │ ✅ │
+ │ 7 │ r │ 0 │ SNAPPY │ ✅ │
+ │ 8 │ r │ 0 │ SNAPPY │ ✅ │
+ │ 9 │ r │ 0 │ SNAPPY │ ✅ │
+ └───────────┴─────────────┴───────┴─────────────┴──────────────┘
+ Compression codecs: {'SNAPPY'}
+ ```
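Editor's note: the README above documents the new `--format json` option but only shows the rich table output. Below is a hedged sketch (not from the package docs) of consuming that JSON output from Python with the standard library; it assumes `iparq` is installed and on PATH, and the `inspect_parquet` helper name is invented here. The CLI invocation and the `metadata`/`compression_codecs` keys come from the README and the source diff above.

```python
# Sketch: call `iparq inspect <file> --format json` and parse the result.
# Assumes iparq is installed; key names follow output_json() in the source diff.
import json
import subprocess
import sys


def inspect_parquet(path: str) -> dict:
    # Capture the JSON emitted by the CLI instead of the rich table.
    proc = subprocess.run(
        ["iparq", "inspect", path, "--format", "json"],
        capture_output=True,
        text=True,
        check=True,
    )
    return json.loads(proc.stdout)


if __name__ == "__main__":
    info = inspect_parquet(sys.argv[1])
    print("Rows:", info["metadata"]["num_rows"])
    print("Codecs:", ", ".join(info["compression_codecs"]))
```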
iparq-0.2.6.dist-info/RECORD ADDED
@@ -0,0 +1,8 @@
+ iparq/__init__.py,sha256=Oz5HbwHMyE87nmwV80AZzpkJPf-wBg7eDuJr_BXZkhU,22
+ iparq/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ iparq/source.py,sha256=huC6I0hqwyv4BZ5xjI6FMZs9KH60xVHEKbmX6X8hhiA,10721
+ iparq-0.2.6.dist-info/METADATA,sha256=LtiLJlVCHuOlx0gOOTqJ97S8baPFhnd_lOmKG2a-94g,5496
+ iparq-0.2.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ iparq-0.2.6.dist-info/entry_points.txt,sha256=vrE2lwvuheySWTOJdr_gh9AT47ck02WCHo0muRq5HS8,43
+ iparq-0.2.6.dist-info/licenses/LICENSE,sha256=apqXCIYD_rrtbJVE-Ex1-1X7N0cBwZTOm4KL3TEFmYA,1067
+ iparq-0.2.6.dist-info/RECORD,,
iparq-0.2.0.dist-info/METADATA DELETED
@@ -1,229 +0,0 @@
- Metadata-Version: 2.4
- Name: iparq
- Version: 0.2.0
- Summary: Display version compression and bloom filter information about a parquet file
- Author-email: MiguelElGallo <miguel.zurcher@gmail.com>
- License-File: LICENSE
- Requires-Python: >=3.9
- Requires-Dist: pyarrow
- Requires-Dist: pydantic
- Requires-Dist: rich
- Requires-Dist: typer[all]
- Provides-Extra: checks
- Requires-Dist: mypy>=1.14.1; extra == 'checks'
- Requires-Dist: ruff>=0.9.3; extra == 'checks'
- Provides-Extra: test
- Requires-Dist: pytest>=7.0; extra == 'test'
- Description-Content-Type: text/markdown
-
- # iparq
-
- [![Python package](https://github.com/MiguelElGallo/iparq/actions/workflows/python-package.yml/badge.svg)](https://github.com/MiguelElGallo/iparq/actions/workflows/python-package.yml)
-
- [![Dependabot Updates](https://github.com/MiguelElGallo/iparq/actions/workflows/dependabot/dependabot-updates/badge.svg)](https://github.com/MiguelElGallo/iparq/actions/workflows/dependabot/dependabot-updates)
-
- [![Upload Python Package](https://github.com/MiguelElGallo/iparq/actions/workflows/python-publish.yml/badge.svg)](https://github.com/MiguelElGallo/iparq/actions/workflows/python-publish.yml)
-
- ![alt text](media/iparq.png)
- After reading [this blog](https://duckdb.org/2025/01/22/parquet-encodings.html), I began to wonder which Parquet version and compression methods the everyday tools we rely on actually use, only to find that there’s no straightforward way to determine this. That curiosity and the difficulty of quickly discovering such details motivated me to create iparq (Information Parquet). My goal with iparq is to help users easily identify the specifics of the Parquet files generated by different engines, making it clear which features—like newer encodings or certain compression algorithms—the creator of the parquet is using.
-
- ***New*** Bloom filters information: Displays if there are bloom filters.
- Read more about bloom filters in this [great article](https://duckdb.org/2025/03/07/parquet-bloom-filters-in-duckdb.html).
-
-
- ## Installation
-
- ### Zero installation - Recommended
-
- 1) Make sure to have Astral’s UV installed by following the steps here:
-
- <https://docs.astral.sh/uv/getting-started/installation/>
-
- 2) Execute the following command:
-
- ```sh
- uvx iparq yourparquet.parquet
- ```
-
- ### Using pip
-
- 1) Install the package using pip:
-
- ```sh
- pip install iparq
- ```
-
- 2) Verify the installation by running:
-
- ```sh
- iparq --help
- ```
-
- ### Using uv
-
- 1) Make sure to have Astral’s UV installed by following the steps here:
-
- <https://docs.astral.sh/uv/getting-started/installation/>
-
- 2) Execute the following command:
-
- ```sh
- uv pip install iparq
- ```
-
- 3) Verify the installation by running:
-
- ```sh
- iparq --help
- ```
-
- ### Using Homebrew in a MAC
-
- 1) Run the following:
-
- ```sh
- brew tap MiguelElGallo/tap https://github.com/MiguelElGallo//homebrew-iparq.git
- brew install MiguelElGallo/tap/iparq
- iparq —help
- ```
-
- ## Usage
-
- Run
-
- ```sh
- iparq <filename>
- ```
-
- Replace `<filename>` with the path to your .parquet file. The utility will read the metadata of the file and print the compression codecs used in the parquet file.
-
- ## Example ouput - Bloom Filters
-
- ```log
- ParquetMetaModel(
- created_by='DuckDB version v1.2.1 (build 8e52ec4395)',
- num_columns=1,
- num_rows=100000000,
- num_row_groups=10,
- format_version='1.0',
- serialized_size=1196
- )
- Column Compression Info:
- Row Group 0:
- Column 'r' (Index 0): SNAPPY
- Row Group 1:
- Column 'r' (Index 0): SNAPPY
- Row Group 2:
- Column 'r' (Index 0): SNAPPY
- Row Group 3:
- Column 'r' (Index 0): SNAPPY
- Row Group 4:
- Column 'r' (Index 0): SNAPPY
- Row Group 5:
- Column 'r' (Index 0): SNAPPY
- Row Group 6:
- Column 'r' (Index 0): SNAPPY
- Row Group 7:
- Column 'r' (Index 0): SNAPPY
- Row Group 8:
- Column 'r' (Index 0): SNAPPY
- Row Group 9:
- Column 'r' (Index 0): SNAPPY
- Bloom Filter Info:
- Row Group 0:
- Column 'r' (Index 0): Has bloom filter
- Row Group 1:
- Column 'r' (Index 0): Has bloom filter
- Row Group 2:
- Column 'r' (Index 0): Has bloom filter
- Row Group 3:
- Column 'r' (Index 0): Has bloom filter
- Row Group 4:
- Column 'r' (Index 0): Has bloom filter
- Row Group 5:
- Column 'r' (Index 0): Has bloom filter
- Row Group 6:
- Column 'r' (Index 0): Has bloom filter
- Row Group 7:
- Column 'r' (Index 0): Has bloom filter
- Row Group 8:
- Column 'r' (Index 0): Has bloom filter
- Row Group 9:
- Column 'r' (Index 0): Has bloom filter
- Compression codecs: {'SNAPPY'}
- ```
-
- ## Example output
-
- ```log
- ParquetMetaModel(
- created_by='parquet-cpp-arrow version 14.0.2',
- num_columns=19,
- num_rows=2964624,
- num_row_groups=3,
- format_version='2.6',
- serialized_size=6357
- )
- Column Compression Info:
- Row Group 0:
- Column 'VendorID' (Index 0): ZSTD
- Column 'tpep_pickup_datetime' (Index 1): ZSTD
- Column 'tpep_dropoff_datetime' (Index 2): ZSTD
- Column 'passenger_count' (Index 3): ZSTD
- Column 'trip_distance' (Index 4): ZSTD
- Column 'RatecodeID' (Index 5): ZSTD
- Column 'store_and_fwd_flag' (Index 6): ZSTD
- Column 'PULocationID' (Index 7): ZSTD
- Column 'DOLocationID' (Index 8): ZSTD
- Column 'payment_type' (Index 9): ZSTD
- Column 'fare_amount' (Index 10): ZSTD
- Column 'extra' (Index 11): ZSTD
- Column 'mta_tax' (Index 12): ZSTD
- Column 'tip_amount' (Index 13): ZSTD
- Column 'tolls_amount' (Index 14): ZSTD
- Column 'improvement_surcharge' (Index 15): ZSTD
- Column 'total_amount' (Index 16): ZSTD
- Column 'congestion_surcharge' (Index 17): ZSTD
- Column 'Airport_fee' (Index 18): ZSTD
- Row Group 1:
- Column 'VendorID' (Index 0): ZSTD
- Column 'tpep_pickup_datetime' (Index 1): ZSTD
- Column 'tpep_dropoff_datetime' (Index 2): ZSTD
- Column 'passenger_count' (Index 3): ZSTD
- Column 'trip_distance' (Index 4): ZSTD
- Column 'RatecodeID' (Index 5): ZSTD
- Column 'store_and_fwd_flag' (Index 6): ZSTD
- Column 'PULocationID' (Index 7): ZSTD
- Column 'DOLocationID' (Index 8): ZSTD
- Column 'payment_type' (Index 9): ZSTD
- Column 'fare_amount' (Index 10): ZSTD
- Column 'extra' (Index 11): ZSTD
- Column 'mta_tax' (Index 12): ZSTD
- Column 'tip_amount' (Index 13): ZSTD
- Column 'tolls_amount' (Index 14): ZSTD
- Column 'improvement_surcharge' (Index 15): ZSTD
- Column 'total_amount' (Index 16): ZSTD
- Column 'congestion_surcharge' (Index 17): ZSTD
- Column 'Airport_fee' (Index 18): ZSTD
- Row Group 2:
- Column 'VendorID' (Index 0): ZSTD
- Column 'tpep_pickup_datetime' (Index 1): ZSTD
- Column 'tpep_dropoff_datetime' (Index 2): ZSTD
- Column 'passenger_count' (Index 3): ZSTD
- Column 'trip_distance' (Index 4): ZSTD
- Column 'RatecodeID' (Index 5): ZSTD
- Column 'store_and_fwd_flag' (Index 6): ZSTD
- Column 'PULocationID' (Index 7): ZSTD
- Column 'DOLocationID' (Index 8): ZSTD
- Column 'payment_type' (Index 9): ZSTD
- Column 'fare_amount' (Index 10): ZSTD
- Column 'extra' (Index 11): ZSTD
- Column 'mta_tax' (Index 12): ZSTD
- Column 'tip_amount' (Index 13): ZSTD
- Column 'tolls_amount' (Index 14): ZSTD
- Column 'improvement_surcharge' (Index 15): ZSTD
- Column 'total_amount' (Index 16): ZSTD
- Column 'congestion_surcharge' (Index 17): ZSTD
- Column 'Airport_fee' (Index 18): ZSTD
- Compression codecs: {'ZSTD'}
- ```
iparq-0.2.0.dist-info/RECORD DELETED
@@ -1,8 +0,0 @@
- iparq/__init__.py,sha256=sXLh7g3KC4QCFxcZGBTpG2scR7hmmBsMjq6LqRptkRg,22
- iparq/py.typed,sha256=bOHAx3O6ryp453lBypAaF78WipxsJDO9hH0PZFTAWYs,54
- iparq/source.py,sha256=qyBNysMLX0FkjZVw5dPSRuhswX3GuRXvM79v7g7emWM,8482
- iparq-0.2.0.dist-info/METADATA,sha256=TwQ7wiLtdEwh3NwY2talWs4BM-oeJw55LCzgMnlr5Gc,7163
- iparq-0.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- iparq-0.2.0.dist-info/entry_points.txt,sha256=vrE2lwvuheySWTOJdr_gh9AT47ck02WCHo0muRq5HS8,43
- iparq-0.2.0.dist-info/licenses/LICENSE,sha256=apqXCIYD_rrtbJVE-Ex1-1X7N0cBwZTOm4KL3TEFmYA,1067
- iparq-0.2.0.dist-info/RECORD,,
File without changes