iparq 0.1.4__py3-none-any.whl → 0.1.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- iparq/source.py +68 -3
- iparq-0.1.7.dist-info/METADATA +216 -0
- iparq-0.1.7.dist-info/RECORD +7 -0
- iparq-0.1.4.dist-info/METADATA +0 -96
- iparq-0.1.4.dist-info/RECORD +0 -7
- {iparq-0.1.4.dist-info → iparq-0.1.7.dist-info}/WHEEL +0 -0
- {iparq-0.1.4.dist-info → iparq-0.1.7.dist-info}/entry_points.txt +0 -0
- {iparq-0.1.4.dist-info → iparq-0.1.7.dist-info}/licenses/LICENSE +0 -0
iparq/source.py
CHANGED
@@ -94,6 +94,71 @@ def print_parquet_metadata(parquet_metadata):
         pass


+def print_compression_types(parquet_metadata) -> None:
+    """
+    Prints the compression type for each column in each row group of the Parquet file.
+    """
+    try:
+        num_row_groups = parquet_metadata.num_row_groups
+        num_columns = parquet_metadata.num_columns
+        console.print("[bold underline]Column Compression Info:[/bold underline]")
+        for i in range(num_row_groups):
+            console.print(f"[bold]Row Group {i}:[/bold]")
+            for j in range(num_columns):
+                column_chunk = parquet_metadata.row_group(i).column(j)
+                compression = column_chunk.compression
+                column_name = parquet_metadata.schema.column(j).name
+                console.print(
+                    f"  Column '{column_name}' (Index {j}): [italic]{compression}[/italic]"
+                )
+    except Exception as e:
+        console.print(
+            f"Error while printing compression types: {e}",
+            style="blink bold red underline on white",
+        )
+    finally:
+        pass
+
+
+def print_bloom_filter_info(parquet_metadata) -> None:
+    """
+    Prints information about bloom filters for each column in each row group of the Parquet file.
+    """
+    try:
+        num_row_groups = parquet_metadata.num_row_groups
+        num_columns = parquet_metadata.num_columns
+        has_bloom_filters = False
+
+        console.print("[bold underline]Bloom Filter Info:[/bold underline]")
+
+        for i in range(num_row_groups):
+            row_group = parquet_metadata.row_group(i)
+            bloom_filters_in_group = False
+
+            for j in range(num_columns):
+                column_chunk = row_group.column(j)
+                column_name = parquet_metadata.schema.column(j).name
+
+                # Check if this column has bloom filters using is_stats_set
+                if hasattr(column_chunk, "is_stats_set") and column_chunk.is_stats_set:
+                    if not bloom_filters_in_group:
+                        console.print(f"[bold]Row Group {i}:[/bold]")
+                        bloom_filters_in_group = True
+                    has_bloom_filters = True
+                    console.print(
+                        f"  Column '{column_name}' (Index {j}): [green]Has bloom filter[/green]"
+                    )
+
+        if not has_bloom_filters:
+            console.print("  [italic]No bloom filters found in any column[/italic]")
+
+    except Exception as e:
+        console.print(
+            f"Error while printing bloom filter information: {e}",
+            style="blink bold red underline on white",
+        )
+
+
 @app.command()
 def main(filename: str):
     """
@@ -107,9 +172,9 @@ def main(filename: str):
     """
     (parquet_metadata, compression) = read_parquet_metadata(filename)

-    print_parquet_metadata(
-
-    )
+    print_parquet_metadata(parquet_metadata)
+    print_compression_types(parquet_metadata)
+    print_bloom_filter_info(parquet_metadata)
     print(f"Compression codecs: {compression}")

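The two helpers added above walk only the footer metadata of the file; no column data is read. For context, a minimal sketch of obtaining and iterating that metadata object (assuming pyarrow and rich are installed; `load_metadata` is a hypothetical stand-in for the package's existing `read_parquet_metadata`):

```python
# Sketch only: build the FileMetaData object that print_compression_types /
# print_bloom_filter_info iterate over. Assumes pyarrow and rich are installed;
# load_metadata is a hypothetical stand-in for read_parquet_metadata.
import pyarrow.parquet as pq
from rich.console import Console

console = Console()


def load_metadata(path: str) -> pq.FileMetaData:
    """Read only the Parquet footer metadata, not the column data."""
    return pq.ParquetFile(path).metadata


metadata = load_metadata("example.parquet")
for i in range(metadata.num_row_groups):
    for j in range(metadata.num_columns):
        chunk = metadata.row_group(i).column(j)   # ColumnChunkMetaData
        name = metadata.schema.column(j).name     # column name from the Parquet schema
        console.print(f"Row group {i}, column '{name}': {chunk.compression}")
```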
|
iparq-0.1.7.dist-info/METADATA
ADDED
@@ -0,0 +1,216 @@
Metadata-Version: 2.4
Name: iparq
Version: 0.1.7
Summary: Display version and compression information about a parquet file
Author-email: MiguelElGallo <miguel.zurcher@gmail.com>
License-File: LICENSE
Requires-Python: >=3.9
Requires-Dist: pyarrow>=19.0.0
Requires-Dist: pydantic>=2.10.6
Requires-Dist: typer>=0.15.1
Provides-Extra: checks
Requires-Dist: mypy>=1.14.1; extra == 'checks'
Requires-Dist: ruff>=0.9.3; extra == 'checks'
Provides-Extra: test
Requires-Dist: pytest>=7.0; extra == 'test'
Description-Content-Type: text/markdown

# iparq

[](https://github.com/MiguelElGallo/iparq/actions/workflows/python-package.yml)

[](https://github.com/MiguelElGallo/iparq/actions/workflows/dependabot/dependabot-updates)

[](https://github.com/MiguelElGallo/iparq/actions/workflows/python-publish.yml)

After reading [this blog](https://duckdb.org/2025/01/22/parquet-encodings.html), I began to wonder which Parquet version and compression methods the everyday tools we rely on actually use, only to find that there’s no straightforward way to determine this. That curiosity and the difficulty of quickly discovering such details motivated me to create iparq (Information Parquet). My goal with iparq is to help users easily identify the specifics of the Parquet files generated by different engines, making it clear which features (like newer encodings or certain compression algorithms) the creator of the Parquet file is using.

***New*** Bloom filter information: displays whether bloom filters are present.
Read more about bloom filters in this [great article](https://duckdb.org/2025/03/07/parquet-bloom-filters-in-duckdb.html).

## Installation

### Using pip

1) Install the package using pip:

```sh
pip install iparq
```

2) Verify the installation by running:

```sh
iparq --help
```

### Using uv

1) Make sure to have Astral’s uv installed by following the steps here:

<https://docs.astral.sh/uv/getting-started/installation/>

2) Execute the following command:

```sh
uv pip install iparq
```

3) Verify the installation by running:

```sh
iparq --help
```

### Using Homebrew on a Mac

1) Run the following:

```sh
brew tap MiguelElGallo/tap https://github.com/MiguelElGallo//homebrew-iparq.git
brew install MiguelElGallo/tap/iparq
iparq --help
```

## Usage

Run

```sh
iparq <filename>
```

Replace `<filename>` with the path to your .parquet file. The utility will read the metadata of the file and print the compression codecs used in the parquet file.

## Example output - Bloom Filters

```log
ParquetMetaModel(
    created_by='DuckDB version v1.2.1 (build 8e52ec4395)',
    num_columns=1,
    num_rows=100000000,
    num_row_groups=10,
    format_version='1.0',
    serialized_size=1196
)
Column Compression Info:
Row Group 0:
  Column 'r' (Index 0): SNAPPY
Row Group 1:
  Column 'r' (Index 0): SNAPPY
Row Group 2:
  Column 'r' (Index 0): SNAPPY
Row Group 3:
  Column 'r' (Index 0): SNAPPY
Row Group 4:
  Column 'r' (Index 0): SNAPPY
Row Group 5:
  Column 'r' (Index 0): SNAPPY
Row Group 6:
  Column 'r' (Index 0): SNAPPY
Row Group 7:
  Column 'r' (Index 0): SNAPPY
Row Group 8:
  Column 'r' (Index 0): SNAPPY
Row Group 9:
  Column 'r' (Index 0): SNAPPY
Bloom Filter Info:
Row Group 0:
  Column 'r' (Index 0): Has bloom filter
Row Group 1:
  Column 'r' (Index 0): Has bloom filter
Row Group 2:
  Column 'r' (Index 0): Has bloom filter
Row Group 3:
  Column 'r' (Index 0): Has bloom filter
Row Group 4:
  Column 'r' (Index 0): Has bloom filter
Row Group 5:
  Column 'r' (Index 0): Has bloom filter
Row Group 6:
  Column 'r' (Index 0): Has bloom filter
Row Group 7:
  Column 'r' (Index 0): Has bloom filter
Row Group 8:
  Column 'r' (Index 0): Has bloom filter
Row Group 9:
  Column 'r' (Index 0): Has bloom filter
Compression codecs: {'SNAPPY'}
```

## Example output

```log
ParquetMetaModel(
    created_by='parquet-cpp-arrow version 14.0.2',
    num_columns=19,
    num_rows=2964624,
    num_row_groups=3,
    format_version='2.6',
    serialized_size=6357
)
Column Compression Info:
Row Group 0:
  Column 'VendorID' (Index 0): ZSTD
  Column 'tpep_pickup_datetime' (Index 1): ZSTD
  Column 'tpep_dropoff_datetime' (Index 2): ZSTD
  Column 'passenger_count' (Index 3): ZSTD
  Column 'trip_distance' (Index 4): ZSTD
  Column 'RatecodeID' (Index 5): ZSTD
  Column 'store_and_fwd_flag' (Index 6): ZSTD
  Column 'PULocationID' (Index 7): ZSTD
  Column 'DOLocationID' (Index 8): ZSTD
  Column 'payment_type' (Index 9): ZSTD
  Column 'fare_amount' (Index 10): ZSTD
  Column 'extra' (Index 11): ZSTD
  Column 'mta_tax' (Index 12): ZSTD
  Column 'tip_amount' (Index 13): ZSTD
  Column 'tolls_amount' (Index 14): ZSTD
  Column 'improvement_surcharge' (Index 15): ZSTD
  Column 'total_amount' (Index 16): ZSTD
  Column 'congestion_surcharge' (Index 17): ZSTD
  Column 'Airport_fee' (Index 18): ZSTD
Row Group 1:
  Column 'VendorID' (Index 0): ZSTD
  Column 'tpep_pickup_datetime' (Index 1): ZSTD
  Column 'tpep_dropoff_datetime' (Index 2): ZSTD
  Column 'passenger_count' (Index 3): ZSTD
  Column 'trip_distance' (Index 4): ZSTD
  Column 'RatecodeID' (Index 5): ZSTD
  Column 'store_and_fwd_flag' (Index 6): ZSTD
  Column 'PULocationID' (Index 7): ZSTD
  Column 'DOLocationID' (Index 8): ZSTD
  Column 'payment_type' (Index 9): ZSTD
  Column 'fare_amount' (Index 10): ZSTD
  Column 'extra' (Index 11): ZSTD
  Column 'mta_tax' (Index 12): ZSTD
  Column 'tip_amount' (Index 13): ZSTD
  Column 'tolls_amount' (Index 14): ZSTD
  Column 'improvement_surcharge' (Index 15): ZSTD
  Column 'total_amount' (Index 16): ZSTD
  Column 'congestion_surcharge' (Index 17): ZSTD
  Column 'Airport_fee' (Index 18): ZSTD
Row Group 2:
  Column 'VendorID' (Index 0): ZSTD
  Column 'tpep_pickup_datetime' (Index 1): ZSTD
  Column 'tpep_dropoff_datetime' (Index 2): ZSTD
  Column 'passenger_count' (Index 3): ZSTD
  Column 'trip_distance' (Index 4): ZSTD
  Column 'RatecodeID' (Index 5): ZSTD
  Column 'store_and_fwd_flag' (Index 6): ZSTD
  Column 'PULocationID' (Index 7): ZSTD
  Column 'DOLocationID' (Index 8): ZSTD
  Column 'payment_type' (Index 9): ZSTD
  Column 'fare_amount' (Index 10): ZSTD
  Column 'extra' (Index 11): ZSTD
  Column 'mta_tax' (Index 12): ZSTD
  Column 'tip_amount' (Index 13): ZSTD
  Column 'tolls_amount' (Index 14): ZSTD
  Column 'improvement_surcharge' (Index 15): ZSTD
  Column 'total_amount' (Index 16): ZSTD
  Column 'congestion_surcharge' (Index 17): ZSTD
  Column 'Airport_fee' (Index 18): ZSTD
Compression codecs: {'ZSTD'}
```
iparq-0.1.7.dist-info/RECORD
ADDED
@@ -0,0 +1,7 @@
iparq/__init__.py,sha256=sXLh7g3KC4QCFxcZGBTpG2scR7hmmBsMjq6LqRptkRg,22
iparq/source.py,sha256=crKRTuZY6W2zEhFfAzb4XWopaVy9qnEkFqz4jbyGmeM,6439
iparq-0.1.7.dist-info/METADATA,sha256=ku4ZsLQ1Iq2ovPzKqv8aGqBGBkn3nTviW6hFzFsP6bw,6884
iparq-0.1.7.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
iparq-0.1.7.dist-info/entry_points.txt,sha256=vrE2lwvuheySWTOJdr_gh9AT47ck02WCHo0muRq5HS8,43
iparq-0.1.7.dist-info/licenses/LICENSE,sha256=apqXCIYD_rrtbJVE-Ex1-1X7N0cBwZTOm4KL3TEFmYA,1067
iparq-0.1.7.dist-info/RECORD,,
iparq-0.1.4.dist-info/METADATA
DELETED
@@ -1,96 +0,0 @@
Metadata-Version: 2.4
Name: iparq
Version: 0.1.4
Summary: Display version and compression information about a parquet file
Author-email: MiguelElGallo <miguel.zurcher@gmail.com>
License-File: LICENSE
Requires-Python: >=3.9
Requires-Dist: pyarrow>=19.0.0
Requires-Dist: pydantic>=2.10.6
Requires-Dist: typer>=0.15.1
Provides-Extra: checks
Requires-Dist: mypy>=1.14.1; extra == 'checks'
Requires-Dist: ruff>=0.9.3; extra == 'checks'
Provides-Extra: test
Requires-Dist: pytest>=7.0; extra == 'test'
Description-Content-Type: text/markdown

# iparq

[](https://github.com/MiguelElGallo/iparq/actions/workflows/python-package.yml)

[](https://github.com/MiguelElGallo/iparq/actions/workflows/dependabot/dependabot-updates)

[](https://github.com/MiguelElGallo/iparq/actions/workflows/python-publish.yml)

After reading [this blog](https://duckdb.org/2025/01/22/parquet-encodings.html), I began to wonder which Parquet version and compression methods the everyday tools we rely on actually use, only to find that there’s no straightforward way to determine this. That curiosity and the difficulty of quickly discovering such details motivated me to create iparq (Information Parquet). My goal with iparq is to help users easily identify the specifics of the Parquet files generated by different engines, making it clear which features—like newer encodings or certain compression algorithms—the creator of the parquet is using.

## Installation

### Using pip

1) Install the package using pip:

```sh
pip install iparq
```

2) Verify the installation by running:

```sh
iparq --help
```

### Using uv

1) Make sure to have Astral’s UV installed by following the steps here:

<https://docs.astral.sh/uv/getting-started/installation/>

2) Execute the following command:

```sh
uv pip install iparq
```

3) Verify the installation by running:

```sh
iparq --help
```

### Using Homebrew in a MAC

1) Run the following:

```sh
brew tap MiguelElGallo/tap https://github.com/MiguelElGallo//homebrew-iparq.git
brew install MiguelElGallo/tap/iparq
iparq —help
```

## Usage

Run

```sh
iparq <filename>
```

Replace `<filename>` with the path to your .parquet file. The utility will read the metadata of the file and print the compression codecs used in the parquet file.

## Example output

```log
ParquetMetaModel(
    created_by='parquet-cpp-arrow version 14.0.2',
    num_columns=3,
    num_rows=3,
    num_row_groups=1,
    format_version='2.6',
    serialized_size=2223
)
Compression codecs: {'SNAPPY'}
```
iparq-0.1.4.dist-info/RECORD
DELETED
@@ -1,7 +0,0 @@
iparq/__init__.py,sha256=sXLh7g3KC4QCFxcZGBTpG2scR7hmmBsMjq6LqRptkRg,22
iparq/source.py,sha256=Jo_q4vo39MyJHF1GAqxW6DAJ47pqP5VNYJ2xvlLqUdk,3784
iparq-0.1.4.dist-info/METADATA,sha256=aU7lRS8-sSIak88j-31FTh9mz4XhmbbHUGurjinbD9k,2950
iparq-0.1.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
iparq-0.1.4.dist-info/entry_points.txt,sha256=vrE2lwvuheySWTOJdr_gh9AT47ck02WCHo0muRq5HS8,43
iparq-0.1.4.dist-info/licenses/LICENSE,sha256=apqXCIYD_rrtbJVE-Ex1-1X7N0cBwZTOm4KL3TEFmYA,1067
iparq-0.1.4.dist-info/RECORD,,
{iparq-0.1.4.dist-info → iparq-0.1.7.dist-info}/WHEEL
File without changes

{iparq-0.1.4.dist-info → iparq-0.1.7.dist-info}/entry_points.txt
File without changes

{iparq-0.1.4.dist-info → iparq-0.1.7.dist-info}/licenses/LICENSE
File without changes