pyconvexity 0.1.1__tar.gz → 0.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyconvexity might be problematic. Click here for more details.

Files changed (52)
  1. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/PKG-INFO +5 -2
  2. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/pyproject.toml +6 -2
  3. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/__init__.py +30 -6
  4. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/_version.py +1 -1
  5. pyconvexity-0.1.3/src/pyconvexity/data/README.md +101 -0
  6. pyconvexity-0.1.3/src/pyconvexity/data/__init__.py +18 -0
  7. pyconvexity-0.1.3/src/pyconvexity/data/__pycache__/__init__.cpython-313.pyc +0 -0
  8. pyconvexity-0.1.3/src/pyconvexity/data/loaders/__init__.py +3 -0
  9. pyconvexity-0.1.3/src/pyconvexity/data/loaders/__pycache__/__init__.cpython-313.pyc +0 -0
  10. pyconvexity-0.1.3/src/pyconvexity/data/loaders/__pycache__/cache.cpython-313.pyc +0 -0
  11. pyconvexity-0.1.3/src/pyconvexity/data/loaders/cache.py +212 -0
  12. pyconvexity-0.1.3/src/pyconvexity/data/sources/__init__.py +5 -0
  13. pyconvexity-0.1.3/src/pyconvexity/data/sources/__pycache__/__init__.cpython-313.pyc +0 -0
  14. pyconvexity-0.1.3/src/pyconvexity/data/sources/__pycache__/gem.cpython-313.pyc +0 -0
  15. pyconvexity-0.1.3/src/pyconvexity/data/sources/gem.py +412 -0
  16. pyconvexity-0.1.3/src/pyconvexity/io/__init__.py +32 -0
  17. pyconvexity-0.1.3/src/pyconvexity/io/excel_exporter.py +991 -0
  18. pyconvexity-0.1.3/src/pyconvexity/io/excel_importer.py +1112 -0
  19. pyconvexity-0.1.3/src/pyconvexity/io/netcdf_exporter.py +192 -0
  20. pyconvexity-0.1.3/src/pyconvexity/io/netcdf_importer.py +599 -0
  21. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/models/__init__.py +7 -0
  22. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/models/attributes.py +3 -1
  23. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/models/components.py +3 -0
  24. pyconvexity-0.1.3/src/pyconvexity/models/scenarios.py +177 -0
  25. pyconvexity-0.1.3/src/pyconvexity/solvers/__init__.py +29 -0
  26. pyconvexity-0.1.3/src/pyconvexity/solvers/pypsa/__init__.py +24 -0
  27. pyconvexity-0.1.3/src/pyconvexity/solvers/pypsa/api.py +398 -0
  28. pyconvexity-0.1.3/src/pyconvexity/solvers/pypsa/batch_loader.py +311 -0
  29. pyconvexity-0.1.3/src/pyconvexity/solvers/pypsa/builder.py +656 -0
  30. pyconvexity-0.1.3/src/pyconvexity/solvers/pypsa/constraints.py +321 -0
  31. pyconvexity-0.1.3/src/pyconvexity/solvers/pypsa/solver.py +1255 -0
  32. pyconvexity-0.1.3/src/pyconvexity/solvers/pypsa/storage.py +2207 -0
  33. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity.egg-info/PKG-INFO +5 -2
  34. pyconvexity-0.1.3/src/pyconvexity.egg-info/SOURCES.txt +49 -0
  35. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity.egg-info/requires.txt +5 -1
  36. pyconvexity-0.1.1/src/pyconvexity.egg-info/SOURCES.txt +0 -24
  37. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/README.md +0 -0
  38. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/setup.cfg +0 -0
  39. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/core/__init__.py +0 -0
  40. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/core/database.py +0 -0
  41. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/core/errors.py +0 -0
  42. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/core/types.py +0 -0
  43. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/data/schema/01_core_schema.sql +0 -0
  44. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/data/schema/02_data_metadata.sql +0 -0
  45. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/data/schema/03_validation_data.sql +0 -0
  46. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/data/schema/04_scenario_schema.sql +0 -0
  47. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/models/network.py +0 -0
  48. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/validation/__init__.py +0 -0
  49. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity/validation/rules.py +0 -0
  50. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity.egg-info/dependency_links.txt +0 -0
  51. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/src/pyconvexity.egg-info/top_level.txt +0 -0
  52. {pyconvexity-0.1.1 → pyconvexity-0.1.3}/tests/test_core_types.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pyconvexity
3
- Version: 0.1.1
3
+ Version: 0.1.3
4
4
  Summary: Python library for energy system modeling and optimization with PyPSA
5
5
  Author-email: Convexity Team <info@convexity.com>
6
6
  License: MIT
@@ -32,6 +32,9 @@ Requires-Dist: xlsxwriter>=3.0.0; extra == "excel"
32
32
  Provides-Extra: netcdf
33
33
  Requires-Dist: netcdf4>=1.6.0; extra == "netcdf"
34
34
  Requires-Dist: xarray>=2022.3.0; extra == "netcdf"
35
+ Provides-Extra: data
36
+ Requires-Dist: country-converter>=1.0.0; extra == "data"
37
+ Requires-Dist: pyyaml>=6.0.0; extra == "data"
35
38
  Provides-Extra: dev
36
39
  Requires-Dist: pytest>=7.0.0; extra == "dev"
37
40
  Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
@@ -40,7 +43,7 @@ Requires-Dist: isort>=5.10.0; extra == "dev"
40
43
  Requires-Dist: mypy>=1.0.0; extra == "dev"
41
44
  Requires-Dist: pre-commit>=2.20.0; extra == "dev"
42
45
  Provides-Extra: all
43
- Requires-Dist: pyconvexity[excel,netcdf,pypsa]; extra == "all"
46
+ Requires-Dist: pyconvexity[data,excel,netcdf,pypsa]; extra == "all"
44
47
 
45
48
  # PyConvexity
46
49
 
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "pyconvexity"
7
- version = "0.1.1"
7
+ version = "0.1.3"
8
8
  description = "Python library for energy system modeling and optimization with PyPSA"
9
9
  readme = "README.md"
10
10
  license = {text = "MIT"}
@@ -44,6 +44,10 @@ netcdf = [
44
44
  "netcdf4>=1.6.0",
45
45
  "xarray>=2022.3.0",
46
46
  ]
47
+ data = [
48
+ "country-converter>=1.0.0",
49
+ "pyyaml>=6.0.0",
50
+ ]
47
51
  dev = [
48
52
  "pytest>=7.0.0",
49
53
  "pytest-cov>=4.0.0",
@@ -53,7 +57,7 @@ dev = [
53
57
  "pre-commit>=2.20.0",
54
58
  ]
55
59
  all = [
56
- "pyconvexity[pypsa,excel,netcdf]",
60
+ "pyconvexity[pypsa,excel,netcdf,data]",
57
61
  ]
58
62
 
59
63
  [project.urls]
@@ -48,6 +48,9 @@ from pyconvexity.models import (
48
48
  create_network, get_network_info, get_network_time_periods, list_networks,
49
49
  create_carrier, list_carriers, get_network_config, set_network_config,
50
50
  get_master_scenario_id, resolve_scenario_id,
51
+
52
+ # Scenario operations
53
+ create_scenario, list_scenarios, get_scenario, delete_scenario,
51
54
  )
52
55
 
53
56
  from pyconvexity.validation import (
@@ -93,28 +96,49 @@ __all__ = [
93
96
  "create_carrier", "list_carriers", "get_network_config", "set_network_config",
94
97
  "get_master_scenario_id", "resolve_scenario_id",
95
98
 
99
+ # Scenario operations
100
+ "create_scenario", "list_scenarios", "get_scenario", "delete_scenario",
101
+
96
102
  # Validation
97
103
  "get_validation_rule", "list_validation_rules", "validate_timeseries_alignment",
98
104
  ]
99
105
 
106
+ # Data module imports
107
+ try:
108
+ from pyconvexity import data
109
+ __all__.append("data")
110
+ except ImportError:
111
+ # Data dependencies not available
112
+ pass
113
+
100
114
  # Optional imports with graceful fallbacks
101
115
  try:
102
- from pyconvexity.solvers.pypsa import PyPSASolver
103
- __all__.append("PyPSASolver")
116
+ from pyconvexity.solvers.pypsa import (
117
+ solve_network, build_pypsa_network, solve_pypsa_network,
118
+ load_network_components, apply_constraints, store_solve_results
119
+ )
120
+ __all__.extend([
121
+ "solve_network", "build_pypsa_network", "solve_pypsa_network",
122
+ "load_network_components", "apply_constraints", "store_solve_results"
123
+ ])
104
124
  except ImportError:
105
125
  # PyPSA not available
106
126
  pass
107
127
 
128
+ # Excel I/O functionality
108
129
  try:
109
- from pyconvexity.io.excel import ExcelImporter, ExcelExporter
110
- __all__.extend(["ExcelImporter", "ExcelExporter"])
130
+ from pyconvexity.io import ExcelModelExporter, ExcelModelImporter
131
+ __all__.extend([
132
+ "ExcelModelExporter", "ExcelModelImporter"
133
+ ])
111
134
  except ImportError:
112
135
  # Excel dependencies not available
113
136
  pass
114
137
 
138
+
115
139
  try:
116
- from pyconvexity.io.netcdf import NetCDFImporter, NetCDFExporter
117
- __all__.extend(["NetCDFImporter", "NetCDFExporter"])
140
+ from pyconvexity.io import NetCDFModelExporter, NetCDFModelImporter
141
+ __all__.extend(["NetCDFModelExporter", "NetCDFModelImporter"])
118
142
  except ImportError:
119
143
  # NetCDF dependencies not available
120
144
  pass
@@ -1,2 +1,2 @@
1
1
  # This file is automatically updated by GitHub Actions during release
2
- __version__ = "0.1.1" # Default version for local development
2
+ __version__ = "0.1.2" # Default version for local development
@@ -0,0 +1,101 @@
1
+ # PyConvexity Data Module
2
+
3
+ The `pyconvexity.data` module provides functions for loading external energy data and integrating it with PyConvexity models. This is a simple, expert-friendly toolbox for working with real-world energy data.
4
+
5
+ ## Installation
6
+
7
+ Install PyConvexity with data dependencies:
8
+
9
+ ```bash
10
+ pip install pyconvexity[data]
11
+ ```
12
+
13
+ ## Current Data Sources
14
+
15
+ ### Global Energy Monitor (GEM)
16
+
17
+ Load power plant data from GEM's Global Integrated Power dataset.
18
+
19
+ **Setup:**
20
+ 1. Download the GEM Excel file: `Global-Integrated-Power-August-2025.xlsx`
21
+ 2. Place it in a `data/raw/global-energy-monitor/` directory, or set the path manually
22
+
23
+ **Usage:**
24
+
25
+ ```python
26
+ import pyconvexity as px
27
+
28
+ # Load generators for a specific country
29
+ generators = px.data.get_generators_from_gem(
30
+ country="USA", # ISO 3-letter country code
31
+ technology_types=["solar", "wind", "nuclear"], # Optional filter
32
+ min_capacity_mw=100.0 # Optional minimum capacity
33
+ )
34
+
35
+ # Create a network and add generators
36
+ px.create_database_with_schema("my_model.db")
37
+
38
+ with px.database_context("my_model.db") as conn:
39
+ network_id = px.create_network(conn, network_req)
40
+
41
+ # Create carriers
42
+ carriers = {}
43
+ for carrier_name in generators['carrier'].unique():
44
+ carriers[carrier_name] = px.create_carrier(conn, network_id, carrier_name)
45
+
46
+ # Add generators to network
47
+ generator_ids = px.data.add_gem_generators_to_network(
48
+ conn, network_id, generators, carrier_mapping=carriers
49
+ )
50
+ ```
51
+
52
+ ## Data Output Format
53
+
54
+ The `get_generators_from_gem()` function returns a pandas DataFrame with these columns:
55
+
56
+ - `plant_name`: Name of the power plant
57
+ - `country_iso_3`: ISO 3-letter country code
58
+ - `category`: Energy category (nuclear, thermal, renewables, storage, etc.)
59
+ - `carrier`: Energy carrier (coal, gas, solar, wind, nuclear, etc.)
60
+ - `type`: Technology type (subcritical, combined-cycle, photovoltaic, etc.)
61
+ - `capacity_mw`: Capacity in megawatts
62
+ - `start_year`: Year the plant started operation
63
+ - `latitude`: Latitude coordinate
64
+ - `longitude`: Longitude coordinate
65
+
66
+ ## Technology Mapping
67
+
68
+ GEM technologies are automatically mapped to a standardized schema:
69
+
70
+ - **Nuclear**: pressurized-water-reactor, boiling-water-reactor, small-modular-reactor
71
+ - **Thermal**: subcritical, supercritical, combined-cycle, gas-turbine
72
+ - **Renewables**: photovoltaic, thermal (solar), onshore/offshore (wind), run-of-river (hydro)
73
+ - **Storage**: lithium-ion (battery), pumped-hydro
74
+ - **Bioenergy**: biomass, biogas
75
+
76
+ ## Caching
77
+
78
+ Data is automatically cached for 7 days to improve performance. You can:
79
+
80
+ ```python
81
+ # Disable caching
82
+ generators = px.data.get_generators_from_gem(country="USA", use_cache=False)
83
+
84
+ # Clear cache
85
+ cache = px.data.DataCache()
86
+ cache.clear_cache('gem_generators')
87
+ ```
88
+
89
+ ## Examples
90
+
91
+ See `examples/gem_data_example.py` for a complete working example.
92
+
93
+ ## Future Data Sources
94
+
95
+ The framework is designed to be extensible. Planned additions include:
96
+
97
+ - IRENA Global Energy Atlas (renewable resource data)
98
+ - World Bank energy statistics
99
+ - IEA World Energy Outlook data
100
+ - OpenStreetMap transmission infrastructure
101
+ - NASA weather data for renewable profiles
@@ -0,0 +1,18 @@
1
+ """
2
+ PyConvexity Data Module
3
+
4
+ Provides functions for loading external energy data and integrating it with PyConvexity models.
5
+ This module offers a simple, expert-friendly toolbox for working with real-world energy data.
6
+ """
7
+
8
+ from .sources.gem import get_generators_from_gem, add_gem_generators_to_network
9
+ from .loaders.cache import DataCache
10
+
11
+ __all__ = [
12
+ # GEM (Global Energy Monitor) functions
13
+ "get_generators_from_gem",
14
+ "add_gem_generators_to_network",
15
+
16
+ # Caching utilities
17
+ "DataCache",
18
+ ]
@@ -0,0 +1,3 @@
1
+ """
2
+ Data loaders and caching utilities for PyConvexity.
3
+ """
@@ -0,0 +1,212 @@
1
+ """
2
+ Caching functionality for PyConvexity data operations.
3
+
4
+ This module handles caching of processed datasets to improve performance.
5
+ """
6
+
7
+ import pandas as pd
8
+ import hashlib
9
+ import json
10
+ from pathlib import Path
11
+ from typing import Dict, Any, Optional
12
+ import logging
13
+ from datetime import datetime, timedelta
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+ class DataCache:
18
+ """Manages caching of processed datasets."""
19
+
20
+ def __init__(self, cache_dir: Optional[str] = None):
21
+ """
22
+ Initialize the cache manager.
23
+
24
+ Args:
25
+ cache_dir: Directory to store cache files. Defaults to 'data/cache'
26
+ """
27
+ if cache_dir is None:
28
+ cache_dir = "data/cache"
29
+
30
+ self.cache_dir = Path(cache_dir)
31
+ self.cache_dir.mkdir(parents=True, exist_ok=True)
32
+
33
+ # Cache metadata file
34
+ self.metadata_file = self.cache_dir / "cache_metadata.json"
35
+ self._load_metadata()
36
+
37
+ def _load_metadata(self):
38
+ """Load cache metadata from file."""
39
+ if self.metadata_file.exists():
40
+ try:
41
+ with open(self.metadata_file, 'r') as f:
42
+ self.metadata = json.load(f)
43
+ except (json.JSONDecodeError, FileNotFoundError):
44
+ self.metadata = {}
45
+ else:
46
+ self.metadata = {}
47
+
48
+ def _save_metadata(self):
49
+ """Save cache metadata to file."""
50
+ with open(self.metadata_file, 'w') as f:
51
+ json.dump(self.metadata, f, indent=2)
52
+
53
+ def _get_cache_key(self, dataset_name: str, filters: Dict[str, Any]) -> str:
54
+ """Generate a unique cache key for a dataset and filters combination."""
55
+ # Create a hash of the filters
56
+ filters_str = json.dumps(filters, sort_keys=True)
57
+ filters_hash = hashlib.md5(filters_str.encode()).hexdigest()
58
+
59
+ return f"{dataset_name}_{filters_hash}"
60
+
61
+ def _get_cache_file_path(self, cache_key: str) -> Path:
62
+ """Get the file path for a cache key."""
63
+ return self.cache_dir / f"{cache_key}.parquet"
64
+
65
+ def get_cached_data(
66
+ self,
67
+ dataset_name: str,
68
+ filters: Dict[str, Any]
69
+ ) -> Optional[pd.DataFrame]:
70
+ """
71
+ Retrieve cached data if available and not expired.
72
+
73
+ Args:
74
+ dataset_name: Name of the dataset
75
+ filters: Filters applied to the dataset
76
+
77
+ Returns:
78
+ pandas.DataFrame or None: Cached data if available and valid
79
+ """
80
+ cache_key = self._get_cache_key(dataset_name, filters)
81
+ cache_file = self._get_cache_file_path(cache_key)
82
+
83
+ # Check if cache file exists
84
+ if not cache_file.exists():
85
+ return None
86
+
87
+ # Check if cache entry exists in metadata
88
+ if cache_key not in self.metadata:
89
+ # Clean up orphaned cache file
90
+ cache_file.unlink(missing_ok=True)
91
+ return None
92
+
93
+ # Check if cache is expired (default: 7 days)
94
+ cache_info = self.metadata[cache_key]
95
+ created_time = datetime.fromisoformat(cache_info['created'])
96
+ max_age = timedelta(days=cache_info.get('max_age_days', 7))
97
+
98
+ if datetime.now() - created_time > max_age:
99
+ logger.info(f"Cache expired for '{dataset_name}', removing...")
100
+ self._remove_cache_entry(cache_key)
101
+ return None
102
+
103
+ # Load cached data
104
+ try:
105
+ cached_data = pd.read_parquet(cache_file)
106
+ logger.info(f"Loaded cached data for '{dataset_name}' ({len(cached_data)} rows)")
107
+ return cached_data
108
+ except Exception as e:
109
+ logger.warning(f"Failed to load cached data for '{dataset_name}': {e}")
110
+ self._remove_cache_entry(cache_key)
111
+ return None
112
+
113
+ def cache_data(
114
+ self,
115
+ dataset_name: str,
116
+ data: pd.DataFrame,
117
+ filters: Dict[str, Any],
118
+ max_age_days: int = 7
119
+ ):
120
+ """
121
+ Cache processed data.
122
+
123
+ Args:
124
+ dataset_name: Name of the dataset
125
+ data: Processed pandas DataFrame
126
+ filters: Filters applied to the dataset
127
+ max_age_days: Maximum age of cache in days
128
+ """
129
+ cache_key = self._get_cache_key(dataset_name, filters)
130
+ cache_file = self._get_cache_file_path(cache_key)
131
+
132
+ # Save data to parquet file
133
+ data.to_parquet(cache_file, index=False)
134
+
135
+ # Update metadata
136
+ self.metadata[cache_key] = {
137
+ 'dataset_name': dataset_name,
138
+ 'filters': filters,
139
+ 'created': datetime.now().isoformat(),
140
+ 'max_age_days': max_age_days,
141
+ 'rows': len(data),
142
+ 'columns': list(data.columns)
143
+ }
144
+
145
+ self._save_metadata()
146
+ logger.info(f"Cached data for '{dataset_name}' ({len(data)} rows)")
147
+
148
+ def _remove_cache_entry(self, cache_key: str):
149
+ """Remove a cache entry and its file."""
150
+ cache_file = self._get_cache_file_path(cache_key)
151
+ cache_file.unlink(missing_ok=True)
152
+
153
+ if cache_key in self.metadata:
154
+ del self.metadata[cache_key]
155
+ self._save_metadata()
156
+
157
+ def clear_cache(self, dataset_name: Optional[str] = None):
158
+ """
159
+ Clear cache entries.
160
+
161
+ Args:
162
+ dataset_name: If provided, only clear cache for this dataset
163
+ """
164
+ keys_to_remove = []
165
+
166
+ for cache_key, info in self.metadata.items():
167
+ if dataset_name is None or info['dataset_name'] == dataset_name:
168
+ keys_to_remove.append(cache_key)
169
+
170
+ for key in keys_to_remove:
171
+ self._remove_cache_entry(key)
172
+
173
+ logger.info(f"Cleared {len(keys_to_remove)} cache entries")
174
+
175
+ def get_cache_info(self) -> Dict[str, Any]:
176
+ """Get information about the cache."""
177
+ total_size = 0
178
+ dataset_counts = {}
179
+
180
+ for cache_key, info in self.metadata.items():
181
+ dataset_name = info['dataset_name']
182
+ dataset_counts[dataset_name] = dataset_counts.get(dataset_name, 0) + 1
183
+
184
+ cache_file = self._get_cache_file_path(cache_key)
185
+ if cache_file.exists():
186
+ total_size += cache_file.stat().st_size
187
+
188
+ return {
189
+ 'total_entries': len(self.metadata),
190
+ 'total_size_mb': round(total_size / (1024 * 1024), 2),
191
+ 'dataset_counts': dataset_counts,
192
+ 'cache_dir': str(self.cache_dir)
193
+ }
194
+
195
+ def cleanup_expired_cache(self):
196
+ """Remove expired cache entries."""
197
+ expired_keys = []
198
+
199
+ for cache_key, info in self.metadata.items():
200
+ created_time = datetime.fromisoformat(info['created'])
201
+ max_age = timedelta(days=info.get('max_age_days', 7))
202
+
203
+ if datetime.now() - created_time > max_age:
204
+ expired_keys.append(cache_key)
205
+
206
+ for key in expired_keys:
207
+ self._remove_cache_entry(key)
208
+
209
+ if expired_keys:
210
+ logger.info(f"Cleaned up {len(expired_keys)} expired cache entries")
211
+ else:
212
+ logger.info("No expired cache entries found")
@@ -0,0 +1,5 @@
1
+ """
2
+ Data sources for PyConvexity.
3
+
4
+ This module contains integrations with external energy data sources.
5
+ """