pydeflate-2.1.2-py3-none-any.whl → pydeflate-2.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pydeflate/__init__.py +64 -20
- pydeflate/cache.py +139 -0
- pydeflate/constants.py +121 -0
- pydeflate/context.py +211 -0
- pydeflate/core/api.py +34 -12
- pydeflate/core/source.py +92 -11
- pydeflate/deflate/deflators.py +1 -1
- pydeflate/deflate/legacy_deflate.py +1 -1
- pydeflate/exceptions.py +166 -0
- pydeflate/exchange/exchangers.py +1 -2
- pydeflate/plugins.py +289 -0
- pydeflate/protocols.py +168 -0
- pydeflate/pydeflate_config.py +77 -6
- pydeflate/schemas.py +297 -0
- pydeflate/sources/common.py +60 -107
- pydeflate/sources/dac.py +39 -52
- pydeflate/sources/imf.py +51 -38
- pydeflate/sources/world_bank.py +44 -117
- pydeflate/utils.py +14 -9
- {pydeflate-2.1.2.dist-info → pydeflate-2.2.0.dist-info}/METADATA +119 -18
- pydeflate-2.2.0.dist-info/RECORD +32 -0
- pydeflate-2.2.0.dist-info/WHEEL +4 -0
- {pydeflate-2.1.2.dist-info → pydeflate-2.2.0.dist-info/licenses}/LICENSE +1 -1
- pydeflate-2.1.2.dist-info/RECORD +0 -25
- pydeflate-2.1.2.dist-info/WHEEL +0 -4
pydeflate/sources/world_bank.py
CHANGED
@@ -1,15 +1,17 @@
+from __future__ import annotations
+
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
+from typing import Callable

import pandas as pd
import wbgapi as wb

-from pydeflate.
+from pydeflate.cache import CacheEntry, cache_manager
+from pydeflate.pydeflate_config import logger
from pydeflate.sources.common import (
-    enforce_pyarrow_types,
-    today,
    compute_exchange_deflator,
-
+    enforce_pyarrow_types,
    prefix_pydeflate_to_columns,
)
from pydeflate.utils import emu
@@ -56,8 +58,8 @@ def get_wb_indicator(series: str, value_name: str | None = None) -> pd.DataFrame
        labels=True,
    )
    .reset_index()
-    .sort_values(by=["economy", "Time"])
-    .drop(columns=["Time"])
+    .sort_values(by=["economy", "Time"])
+    .drop(columns=["Time"])
    .rename(
        columns={
            "economy": "entity_code",
@@ -66,7 +68,7 @@ def get_wb_indicator(series: str, value_name: str | None = None) -> pd.DataFrame
            series: value_name or series,
        }
    )
-    .reset_index(drop=True)
+    .reset_index(drop=True)
)


@@ -119,22 +121,17 @@ def _parallel_download_indicators(indicators: dict) -> list[pd.DataFrame]:

    # Use ThreadPoolExecutor to fetch indicators in parallel
    with ThreadPoolExecutor() as executor:
-        # Submit all tasks to the executor (downloading indicators in parallel)
        future_to_series = {
            executor.submit(get_wb_indicator, series, value_name): series
            for series, value_name in indicators.items()
        }
-
-        # Collect the results as they complete
        for future in as_completed(future_to_series):
            series = future_to_series[future]
            try:
                df_ = future.result().set_index(["year", "entity_code", "entity"])
                dfs.append(df_)
-            except Exception as exc:
-
-                logger.warning(f"Error downloading series {series}: {exc}")
-
+            except Exception as exc:  # pragma: no cover - defensive logging
+                logger.warning("Error downloading series %s: %s", series, exc)
    return dfs


@@ -151,140 +148,70 @@ def _add_ppp_ppp_exchange(df: pd.DataFrame) -> pd.DataFrame:
    """
    ppp = df.loc[lambda d: d["entity_code"] == "USA"].copy()
    ppp[["entity_code", "entity", "pydeflate_iso3"]] = "PPP"
+    return pd.concat([df, ppp], ignore_index=True)

-    df = pd.concat([df, ppp], ignore_index=True)

-
-
-
-def _download_wb(
-    indicators: dict, prefix: str = "wb", add_ppp_exchange: bool = False
+def _download_wb_dataset(
+    indicators: dict, output_path: Path, add_ppp_exchange: bool = False
) -> None:
-    """Download
-
-    This function fetches all indicators defined in _INDICATORS in parallel, concatenates
-    them into a single DataFrame, and saves the result as a parquet file using today's date as a suffix.
-    """
-    logger.info("Downloading the latest World Bank data...")
-
-    indicators_data = _parallel_download_indicators(indicators=indicators)
+    """Download and materialise a World Bank dataset to ``output_path``."""

-
+    logger.info("Downloading World Bank indicators for %s", output_path.name)
+    indicators_data = _parallel_download_indicators(indicators)
    df = pd.concat(indicators_data, axis=1).reset_index()
-
-    # cleaning
    df = (
        df.pipe(_eur_series_fix)
        .pipe(compute_exchange_deflator, base_year_measure="NGDP_D")
        .assign(pydeflate_iso3=lambda d: d.entity_code)
        .sort_values(by=["year", "entity_code"])
    )
-
    if add_ppp_exchange:
        df = df.pipe(_add_ppp_ppp_exchange)
-
    df = (
        df.pipe(prefix_pydeflate_to_columns)
        .pipe(enforce_pyarrow_types)
        .reset_index(drop=True)
    )
-
-    # Get today's date to use as a file suffix
-    suffix = today()
-
-    # Save the DataFrame as a parquet file
-    output_path = PYDEFLATE_PATHS.data / f"{prefix}_{suffix}.parquet"
+    output_path.parent.mkdir(parents=True, exist_ok=True)
    df.to_parquet(output_path)
+    logger.info("Saved World Bank data to %s", output_path)

-    logger.info(f"Saved World Bank data to {prefix}_{suffix}.parquet")
-
-
-def download_wb() -> None:
-    """Download the latest World Bank data."""
-    _download_wb(indicators=_INDICATORS, prefix="wb")
-
-
-def download_wb_lcu_ppp() -> None:
-    """Download the latest World Bank data (PPP)."""
-    _download_wb(
-        indicators=_INDICATORS_LCU_PPP, prefix="wb_lcu_ppp", add_ppp_exchange=True
-    )
-
-
-def download_wb_usd_ppp() -> None:
-    """Download the latest World Bank data (PPP)."""
-    _download_wb(
-        indicators=_INDICATORS_USD_PPP, prefix="wb_usd_ppp", add_ppp_exchange=True
-    )

+def _entry(
+    key: str, filename: str, fetcher: Callable[[Path], None], ttl_days: int = 30
+) -> CacheEntry:
+    return CacheEntry(key=key, filename=filename, fetcher=fetcher, ttl_days=ttl_days)

-def _find_wb_files_in_path(path: Path) -> list:
-    """Find all WB parquet files in the specified directory.

-
-
-
-
-
-    ""
-
-
-
-
-    ""
-
-
-        path (Path): The directory path to search for WB parquet files.
-
-    Returns:
-        list: List of WB parquet files found in the directory.
-    """
-    return list(path.glob(f"wb_lcu_ppp_*.parquet"))
-
-
-def _find_wb_usd_ppp_files_in_path(path: Path) -> list:
-    """Find all WB PPP parquet files in the specified directory.
-
-    Args:
-        path (Path): The directory path to search for WB parquet files.
-
-    Returns:
-        list: List of WB parquet files found in the directory.
-    """
-    return list(path.glob(f"wb_usd_ppp_*.parquet"))
+_WB_ENTRY = _entry(
+    "world_bank", "wb.parquet", lambda p: _download_wb_dataset(_INDICATORS, p)
+)
+_WB_LCU_PPP_ENTRY = _entry(
+    "world_bank_lcu_ppp",
+    "wb_lcu_ppp.parquet",
+    lambda p: _download_wb_dataset(_INDICATORS_LCU_PPP, p, add_ppp_exchange=True),
+)
+_WB_USD_PPP_ENTRY = _entry(
+    "world_bank_usd_ppp",
+    "wb_usd_ppp.parquet",
+    lambda p: _download_wb_dataset(_INDICATORS_USD_PPP, p, add_ppp_exchange=True),
+)


def read_wb(update: bool = False) -> pd.DataFrame:
-
-    return
-        file_finder_func=_find_wb_files_in_path,
-        download_func=download_wb,
-        data_name="World Bank",
-        update=update,
-    )
+    path = cache_manager().ensure(_WB_ENTRY, refresh=update)
+    return pd.read_parquet(path)


def read_wb_lcu_ppp(update: bool = False) -> pd.DataFrame:
-
-    return
-        file_finder_func=_find_wb_lcu_ppp_files_in_path,
-        download_func=download_wb_lcu_ppp,
-        data_name="World Bank",
-        update=update,
-    )
+    path = cache_manager().ensure(_WB_LCU_PPP_ENTRY, refresh=update)
+    return pd.read_parquet(path)


def read_wb_usd_ppp(update: bool = False) -> pd.DataFrame:
-
-    return
-        file_finder_func=_find_wb_usd_ppp_files_in_path,
-        download_func=download_wb_usd_ppp,
-        data_name="World Bank",
-        update=update,
-    )
+    path = cache_manager().ensure(_WB_USD_PPP_ENTRY, refresh=update)
+    return pd.read_parquet(path)


-if __name__ == "__main__":
-
-    df_usd = read_wb_usd_ppp(False)
-    df_lcu = read_wb_lcu_ppp(False)
+if __name__ == "__main__":  # pragma: no cover
+    read_wb(update=True)
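The change above replaces date-suffixed parquet files (and the `_find_wb_*_files_in_path` helpers) with a declarative cache: each dataset is described by a `CacheEntry` (key, filename, fetcher, ttl_days) and resolved through `cache_manager().ensure(entry, refresh=update)`. The new `pydeflate/cache.py` module is not included in this excerpt, so the snippet below is only a minimal sketch of that pattern, assuming a simple mtime-based staleness check; the `CacheManager` class and its constructor are illustrative, while the `CacheEntry` fields and the `ensure(entry, refresh=...)` signature mirror what the diff uses.

```python
from dataclasses import dataclass
from datetime import datetime, timedelta
from pathlib import Path
from typing import Callable


@dataclass(frozen=True)
class CacheEntry:
    """Declarative description of one cached dataset."""

    key: str                          # stable identifier, e.g. "world_bank"
    filename: str                     # file name inside the cache dir, e.g. "wb.parquet"
    fetcher: Callable[[Path], None]   # callable that writes the dataset to the given path
    ttl_days: int = 30                # refetch automatically after this many days


class CacheManager:
    """Hypothetical manager: resolves entries to files, fetching when missing or stale."""

    def __init__(self, cache_dir: Path):
        self.cache_dir = cache_dir

    def ensure(self, entry: CacheEntry, refresh: bool = False) -> Path:
        path = self.cache_dir / entry.filename
        stale = False
        if path.exists():
            age = datetime.now() - datetime.fromtimestamp(path.stat().st_mtime)
            stale = age > timedelta(days=entry.ttl_days)
        if refresh or stale or not path.exists():
            # e.g. lambda p: _download_wb_dataset(_INDICATORS, p)
            entry.fetcher(path)
        return path
```

Keeping the download logic inside the entry's fetcher is what lets each `read_wb*` function shrink to two lines: ensure the cached file, then `pd.read_parquet` it.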
pydeflate/utils.py
CHANGED
@@ -1,4 +1,5 @@
import json
+import re

import numpy as np
import pandas as pd
@@ -22,18 +23,25 @@ def emu() -> list:


def clean_number(number):
-    """Clean a number and return as float
-
+    """Clean a number-like value and return it as a float.
+
+    Preserves leading signs and scientific notation while stripping
+    formatting artifacts such as commas or surrounding text.
+    """

    if not isinstance(number, str):
        number = str(number)

-
+    normalized = number.replace(",", "").strip()
+    match = re.search(r"[-+]?\d*\.?\d+(?:[eE][-+]?\d+)?", normalized)

-    if
+    if not match:
        return np.nan

-
+    try:
+        return float(match.group())
+    except ValueError:
+        return np.nan


def create_pydeflate_year(
@@ -65,9 +73,7 @@ def _use_implied_dac_rates(
    data.loc[
        lambda d: ~d[f"temp_{entity_column}"].isin(pydeflate_data[ix[-1]].unique()),
        f"temp_{entity_column}",
-    ] =
-        20001 if source_codes else "DAC"
-    )
+    ] = 20001 if source_codes else "DAC"

    # Log the fact that implied rates are being used
    flag_missing_pydeflate_data(
@@ -90,7 +96,6 @@ def merge_user_and_pydeflate_data(
    source_codes: bool = True,
    dac: bool = False,
) -> pd.DataFrame:
-
    data[f"temp_{entity_column}"] = data[entity_column]

    if dac:
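The rewritten `clean_number` is shown in full in the hunk above. The snippet below simply reproduces that logic standalone and runs it on a few representative inputs, to illustrate what the new regex accepts (comma-formatted numbers, signed values, scientific notation, numbers embedded in text) and when it falls back to `NaN`.

```python
import re

import numpy as np

# Same pattern as the new pydeflate.utils.clean_number, precompiled here for clarity.
_NUMBER_RE = re.compile(r"[-+]?\d*\.?\d+(?:[eE][-+]?\d+)?")


def clean_number(number):
    """Standalone reproduction of the new clean_number logic."""
    if not isinstance(number, str):
        number = str(number)
    normalized = number.replace(",", "").strip()
    match = _NUMBER_RE.search(normalized)
    if not match:
        return np.nan
    try:
        return float(match.group())
    except ValueError:
        return np.nan


print(clean_number("1,234.56"))    # 1234.56  (commas stripped)
print(clean_number("-3.2e5 USD"))  # -320000.0 (sign and exponent preserved)
print(clean_number("n/a"))         # nan      (no digits found)
```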
{pydeflate-2.1.2.dist-info → pydeflate-2.2.0.dist-info}/METADATA
CHANGED

@@ -1,24 +1,26 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
Name: pydeflate
-Version: 2.1.2
+Version: 2.2.0
Summary: Package to convert current prices figures to constant prices and vice versa
-License: MIT
Author: Jorge Rivera
-Author-email:
-
-
-
-
-
-
-
-
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-
-Requires-
+Author-email: Jorge Rivera <Jorge.Rivera@one.org>
+License-Expression: MIT
+License-File: LICENSE
+Requires-Dist: hdx-python-country>=3.9.8
+Requires-Dist: imf-reader>=1.3.0
+Requires-Dist: oda-reader>=1.2.2
+Requires-Dist: pandas>=2.0
+Requires-Dist: pandera>=0.20.0
+Requires-Dist: pyarrow>=17.0
+Requires-Dist: requests>=2.32.5
+Requires-Dist: wbgapi>=1.0.12
+Requires-Dist: platformdirs>=3.0.0
+Requires-Dist: filelock>=3.15.0
+Maintainer: Jorge Rivera
+Requires-Python: >=3.11
+Project-URL: Homepage, https://github.com/jm-rivera/pydeflate
+Project-URL: Issues, https://github.com/jm-rivera/pydeflate/issues
+Project-URL: Repository, https://github.com/jm-rivera/pydeflate
Description-Content-Type: text/markdown

# pydeflate
@@ -280,5 +282,104 @@ Pydeflate relies on data from external sources. If there are missing values in t

Pydeflate periodically updates its underlying data from the World Bank, IMF, and OECD. If the data on your system is older than 50 days, pydeflate will display a warning upon import.

+## Advanced Features

+### Error Handling
+
+Pydeflate v2.1.3+ provides specific exception types for better error handling:
+
+```python
+from pydeflate import imf_gdp_deflate
+from pydeflate.exceptions import NetworkError, ConfigurationError, MissingDataError
+
+try:
+    result = imf_gdp_deflate(df, base_year=2015, source_currency="USA", target_currency="EUR")
+except NetworkError as e:
+    # Handle network failures (retry, fallback to cached data, etc.)
+    print(f"Network error: {e}")
+    # Implement retry logic
+except ConfigurationError as e:
+    # Handle invalid parameters (wrong currency codes, missing columns, etc.)
+    print(f"Configuration error: {e}")
+    raise
+except MissingDataError as e:
+    # Handle missing deflator/exchange data for specific country-year combinations
+    print(f"Missing data: {e}")
+    # Use alternative source or fill gaps
+```
+
+Available exception types:
+- `PydeflateError`: Base exception for all pydeflate errors
+- `NetworkError`: Network-related failures
+- `ConfigurationError`: Invalid parameters or configuration
+- `DataSourceError`: Issues loading or parsing data from sources
+- `CacheError`: Cache operation failures
+- `MissingDataError`: Required deflator/exchange data unavailable
+- `SchemaValidationError`: Data validation failures
+
+### Custom Data Sources (Plugin System)
+
+You can register custom data sources without modifying pydeflate's code:
+
+```python
+from pydeflate.plugins import register_source, list_sources
+
+# Define your custom source
+@register_source("my_central_bank")
+class MyCentralBankSource:
+    def __init__(self, update: bool = False):
+        self.name = "my_central_bank"
+        self.data = self.load_my_data(update)  # Your data loading logic
+        self._idx = ["pydeflate_year", "pydeflate_entity_code", "pydeflate_iso3"]
+
+    def lcu_usd_exchange(self):
+        # Return exchange rate data
+        return self.data.filter(self._idx + ["pydeflate_EXCHANGE"])
+
+    def price_deflator(self, kind="NGDP_D"):
+        # Return deflator data
+        return self.data.filter(self._idx + [f"pydeflate_{kind}"])
+
+    def validate(self):
+        # Validate data format
+        if self.data.empty:
+            raise ValueError("No data loaded")
+
+# List all available sources
+print(list_sources())  # ['DAC', 'IMF', 'World Bank', 'my_central_bank', ...]
+
+# Your custom source is now available for use with pydeflate
+```
+
+### Advanced Configuration
+
+For advanced use cases, you can use context managers to customize pydeflate's behavior:
+
+```python
+from pydeflate.context import pydeflate_session
+import logging
+
+# Use a custom cache directory and logging level
+with pydeflate_session(data_dir="/tmp/my_cache", log_level=logging.DEBUG) as ctx:
+    result = imf_gdp_deflate(df, base_year=2015, ...)
+    # Data is cached in /tmp/my_cache
+    # Debug logging is enabled
+
+# Or set a default context for your entire application
+from pydeflate.context import PydeflateContext, set_default_context
+
+ctx = PydeflateContext.create(
+    data_dir="/app/data/pydeflate_cache",
+    log_level=logging.INFO
+)
+set_default_context(ctx)
+
+# All subsequent pydeflate operations use this configuration
+```
+
+This is useful for:
+- Using different cache directories for different projects
+- Running multiple pydeflate operations in parallel without cache conflicts
+- Customizing logging verbosity
+- Testing with temporary cache directories

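The new README section above lists the exception types exported by `pydeflate.exceptions`, but the module itself is not shown in this diff. The sketch below is a hypothetical rendering of that hierarchy, assuming every listed class subclasses `PydeflateError` as its description ("base exception for all pydeflate errors") suggests; only the class names and docstrings come from the release notes.

```python
# Hypothetical sketch: class names from the README above, bodies and inheritance assumed.
class PydeflateError(Exception):
    """Base exception for all pydeflate errors."""


class NetworkError(PydeflateError):
    """Network-related failures."""


class ConfigurationError(PydeflateError):
    """Invalid parameters or configuration."""


class DataSourceError(PydeflateError):
    """Issues loading or parsing data from sources."""


class CacheError(PydeflateError):
    """Cache operation failures."""


class MissingDataError(PydeflateError):
    """Required deflator/exchange data unavailable."""


class SchemaValidationError(PydeflateError):
    """Data validation failures."""


# With such a hierarchy, one broad handler still separates pydeflate
# problems from unrelated exceptions:
try:
    raise MissingDataError("no deflator for this country-year combination")
except PydeflateError as err:
    print(f"pydeflate failed: {err}")
```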
pydeflate-2.2.0.dist-info/RECORD
ADDED

@@ -0,0 +1,32 @@
+pydeflate/.pydeflate_data/README.md,sha256=atNtUL9dD8G184YSd6juFib8TgEQBcSLogiz99APPVs,25
+pydeflate/__init__.py,sha256=5-aroo2bVhvtGkDc-1JA8ZgI1fffm9sENh4x1xkojxA,1955
+pydeflate/cache.py,sha256=jfNilTrzGucdFpaTn6AwQWCFMT5123ZgcYwhDgtsCK8,4755
+pydeflate/constants.py,sha256=-xxH2skAWHHMQxIJSFWOayU-GmRlqowjkIhIzvyUuac,2972
+pydeflate/context.py,sha256=0HBHnaHcpESDbyfZLo-5W_Kg03WI5Itf0LCABa2w08E,6565
+pydeflate/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pydeflate/core/api.py,sha256=W-MFdxsbO1dNG0pX1e2xxtjQL3qmYxTqiLLISVkizQA,15941
+pydeflate/core/deflator.py,sha256=Ax3dmOF3tYRZnkIfFvMMo3SOLgAJHkXSmA-OtIUZkp0,5932
+pydeflate/core/exchange.py,sha256=br6RVgTGa7LW09XemUJZ4Koazf65zuXPQKYKGhS6ROM,8535
+pydeflate/core/source.py,sha256=dZiMqVdUXD6KQ0RuzR4clb7sCPYcCMZOkg9GhhjqppI,4908
+pydeflate/deflate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pydeflate/deflate/deflators.py,sha256=qd6afJfVNHpNb12xDfS0TGHg5p4ngH5Tb_CWk6555Us,7768
+pydeflate/deflate/legacy_deflate.py,sha256=N9tIuKzVOQtSw5QBeJG6Um86nheLG0yXgdvPVZCmrIA,3900
+pydeflate/exceptions.py,sha256=31LzEG0aOT41e6S21NvsR2Lt_pozQmVgonqLwvOcn7I,4711
+pydeflate/exchange/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pydeflate/exchange/exchangers.py,sha256=HxKM2cqDEArQplww2d-lm-EAbccSMxEovErpkgTunkk,6721
+pydeflate/plugins.py,sha256=fZxO79v4cbbHvsYAvstIC7kk2uSPVCiurXqrS2mukqI,8872
+pydeflate/protocols.py,sha256=0jVSynQItjt-yv8HSU19RPQrfo1Vgo7Ws1CQfX5ZIaI,4694
+pydeflate/pydeflate_config.py,sha256=9r8tu-k-XhFMM2x3dQwEjk2dHj-pwdUf6pdvEowbE2I,2725
+pydeflate/schemas.py,sha256=XBfBVIQjie6IK5rsEn6MSBqv5IxRkKxa7xnc7UaH9kU,7779
+pydeflate/settings/emu.json,sha256=BIvbiMUeHUtCESR3sMcBNrS028yp2YraCJdhDJGvAAo,133
+pydeflate/settings/oecd_codes.json,sha256=jAKI1EgQP4rttjoG3Z-44r1tUJrIEzPCZF5V2aboQhE,911
+pydeflate/sources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pydeflate/sources/common.py,sha256=37HuExGr86RtCD-ZkF0NiIIfRbGgPbImZCQeZjYSzhk,8509
+pydeflate/sources/dac.py,sha256=RkO_nFZcp8AeSJNFzdBrGqaL2kLVUeXBHTPO6PkUFy0,3824
+pydeflate/sources/imf.py,sha256=Nu8Gm4wT-FdBpTuaYUrzOC2V3BNP9srNOIuQJ9E16_0,6712
+pydeflate/sources/world_bank.py,sha256=1DzDlr0Xe0DA0OUsycjwiq-4yMasnP0TneXEK0VmcRw,7281
+pydeflate/utils.py,sha256=HRVMUuWKS1IpU8lmkvQN1jIMA5-LEFzyEd0ReZSx65k,4252
+pydeflate-2.2.0.dist-info/licenses/LICENSE,sha256=8ymAThz7Z4JhjqL6vDwTaoq29tFytQRDkoW3rnTGstI,1075
+pydeflate-2.2.0.dist-info/WHEEL,sha256=n2u5OFBbdZvCiUKAmfnY1Po2j3FB_NWfuUlt5WiAjrk,79
+pydeflate-2.2.0.dist-info/METADATA,sha256=uPc4iQXHbryXusjnzdR9gSoV7ShzcAh93s0NYUPRGKA,15802
+pydeflate-2.2.0.dist-info/RECORD,,
pydeflate-2.1.2.dist-info/RECORD
DELETED
@@ -1,25 +0,0 @@
-pydeflate/.pydeflate_data/README.md,sha256=atNtUL9dD8G184YSd6juFib8TgEQBcSLogiz99APPVs,25
-pydeflate/__init__.py,sha256=LFrWXqC2bHJU0QJSUOUtWPW_YJ3e31Jg54zGXZUt0jc,1013
-pydeflate/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pydeflate/core/api.py,sha256=mD5zhGlAIvlpT6KcmrHW5rmYwtEq5x2Pqo-jQCQMGpo,14687
-pydeflate/core/deflator.py,sha256=Ax3dmOF3tYRZnkIfFvMMo3SOLgAJHkXSmA-OtIUZkp0,5932
-pydeflate/core/exchange.py,sha256=br6RVgTGa7LW09XemUJZ4Koazf65zuXPQKYKGhS6ROM,8535
-pydeflate/core/source.py,sha256=n603ocgGjthXNcBWwgADkefxWmSFuN77Km9Z0T2zpIg,2027
-pydeflate/deflate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pydeflate/deflate/deflators.py,sha256=hyLFoX46BAn5m8gTLjw-TFv3x3W6CTQmvmUvq5a_cio,7768
-pydeflate/deflate/legacy_deflate.py,sha256=9pfqsi5KeWgP1yhXeI6K7bAjUeFY-fmRxrpDB7Zu0zo,3900
-pydeflate/exchange/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pydeflate/exchange/exchangers.py,sha256=_3QA3gzUQJgtR6o5n7qrVq8WKLL7x01A8V_DtW8ocPI,6768
-pydeflate/pydeflate_config.py,sha256=5s4SLJf5is5XcUgJHDRx4f27pPiaVh0H2BL8w9QjW0k,1097
-pydeflate/settings/emu.json,sha256=BIvbiMUeHUtCESR3sMcBNrS028yp2YraCJdhDJGvAAo,133
-pydeflate/settings/oecd_codes.json,sha256=jAKI1EgQP4rttjoG3Z-44r1tUJrIEzPCZF5V2aboQhE,911
-pydeflate/sources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pydeflate/sources/common.py,sha256=aQ0p8fOccFZnq93tTmHJbPyF13BJbZep8QP-yzizfcE,9941
-pydeflate/sources/dac.py,sha256=ngFiApGZ_tIQg74ogGVTIbGUA0efnF1SYwfUuqGofOQ,3791
-pydeflate/sources/imf.py,sha256=10vc8xhNJvANb7RDD1WFn9oaZ8g53yUV5LxCQCz6ImM,6337
-pydeflate/sources/world_bank.py,sha256=uMHidFVgEpj1HVUnNhIZV-rV64hsCBV9ZAbYKk6H0Vw,9333
-pydeflate/utils.py,sha256=tJBd271WzZxVhW9ZMiIRHR0fZWr_MagAYLdmXXaUY3M,3983
-pydeflate-2.1.2.dist-info/LICENSE,sha256=q5tm9mQxwSbV5Ivvjxs7MMqBgan6DM8I4r4irPvmqZM,1075
-pydeflate-2.1.2.dist-info/METADATA,sha256=1uagncn8PNwpR-ID4-S1CC2cv4ybE6AbpDndTjKM6zo,12437
-pydeflate-2.1.2.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-pydeflate-2.1.2.dist-info/RECORD,,
pydeflate-2.1.2.dist-info/WHEEL
DELETED