pyconvexity 0.4.2.post1__tar.gz → 0.4.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyconvexity might be problematic.
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/PKG-INFO +1 -1
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/pyproject.toml +2 -2
- pyconvexity-0.4.3/src/pyconvexity/_version.py +1 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/models/network.py +93 -2
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/solvers/pypsa/api.py +15 -10
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/solvers/pypsa/builder.py +16 -12
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/solvers/pypsa/solver.py +19 -231
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/solvers/pypsa/storage.py +22 -254
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity.egg-info/PKG-INFO +1 -1
- pyconvexity-0.4.2.post1/src/pyconvexity/_version.py +0 -1
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/setup.cfg +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/__init__.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/core/__init__.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/core/database.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/core/errors.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/core/types.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/data/README.md +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/data/__init__.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/data/loaders/__init__.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/data/loaders/cache.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/data/schema/01_core_schema.sql +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/data/schema/02_data_metadata.sql +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/data/schema/03_validation_data.sql +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/data/sources/__init__.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/data/sources/gem.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/io/__init__.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/io/excel_exporter.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/io/excel_importer.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/io/netcdf_exporter.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/io/netcdf_importer.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/models/__init__.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/models/attributes.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/models/carriers.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/models/components.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/models/results.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/models/scenarios.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/solvers/__init__.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/solvers/pypsa/__init__.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/solvers/pypsa/batch_loader.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/solvers/pypsa/constraints.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/timeseries.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/validation/__init__.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/validation/rules.py +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity.egg-info/SOURCES.txt +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity.egg-info/dependency_links.txt +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity.egg-info/requires.txt +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity.egg-info/top_level.txt +0 -0
- {pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/tests/test_core_types.py +0 -0
{pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/pyproject.toml

```diff
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "pyconvexity"
-version = "0.4.2.post1"
+version = "0.4.3"
 description = "Python library for energy system modeling and optimization with PyPSA"
 readme = "README.md"
 license = {text = "MIT"}
@@ -81,7 +81,7 @@ profile = "black"
 line_length = 88
 
 [tool.mypy]
-python_version = "0.4.2.post1"
+python_version = "0.4.3"
 warn_return_any = true
 warn_unused_configs = true
 disallow_untyped_defs = true
```
pyconvexity-0.4.3/src/pyconvexity/_version.py

```diff
@@ -0,0 +1 @@
+__version__ = "0.4.3"
```
{pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/models/network.py

```diff
@@ -19,7 +19,7 @@ logger = logging.getLogger(__name__)
 
 def create_network(conn: sqlite3.Connection, request: CreateNetworkRequest) -> None:
     """
-    Create network metadata (single network per database).
+    Create network metadata and time periods (single network per database).
 
     Args:
         conn: Database connection
@@ -36,6 +36,8 @@ def create_network(conn: sqlite3.Connection, request: CreateNetworkRequest) -> N
     if not request.end_time:
         raise ValidationError("end_time is required")
 
+    time_resolution = request.time_resolution or "PT1H"
+
     # Insert into network_metadata table (single row per database)
     conn.execute(
         """
@@ -48,10 +50,99 @@ def create_network(conn: sqlite3.Connection, request: CreateNetworkRequest) -> N
             or f"Created on {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
             request.start_time,
             request.end_time,
-
+            time_resolution,
         ),
     )
 
+    # Automatically create time periods from the request parameters
+    _create_time_periods_from_request(conn, request.start_time, request.end_time, time_resolution)
+
+
+def _create_time_periods_from_request(
+    conn: sqlite3.Connection,
+    start_time: str,
+    end_time: str,
+    time_resolution: str,
+) -> None:
+    """
+    Create time periods from network request parameters.
+
+    Args:
+        conn: Database connection
+        start_time: Start time string (YYYY-MM-DD HH:MM:SS)
+        end_time: End time string (YYYY-MM-DD HH:MM:SS)
+        time_resolution: ISO 8601 duration (PT1H, PT30M, PT2H, etc.)
+    """
+    # Parse start and end times
+    start_dt = datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S")
+    end_dt = datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S")
+
+    # Parse time resolution to seconds
+    interval_seconds = _parse_iso8601_duration_to_seconds(time_resolution)
+
+    # Calculate period count (inclusive of both start and end)
+    total_seconds = int((end_dt - start_dt).total_seconds())
+    period_count = (total_seconds // interval_seconds) + 1
+
+    # Get Unix timestamp for start
+    start_timestamp = int(start_dt.timestamp())
+
+    # Insert time periods
+    conn.execute(
+        """
+        INSERT INTO network_time_periods (period_count, start_timestamp, interval_seconds)
+        VALUES (?, ?, ?)
+        """,
+        (period_count, start_timestamp, interval_seconds),
+    )
+
+
+def _parse_iso8601_duration_to_seconds(duration: str) -> int:
+    """
+    Parse ISO 8601 duration string to seconds.
+
+    Supports: PT1H (1 hour), PT30M (30 minutes), PT2H (2 hours), PT15M (15 minutes), etc.
+
+    Args:
+        duration: ISO 8601 duration string
+
+    Returns:
+        Duration in seconds
+
+    Raises:
+        ValidationError: If duration format is invalid
+    """
+    if not duration.startswith("PT"):
+        raise ValidationError(f"Invalid ISO 8601 duration format: {duration}. Must start with 'PT'")
+
+    remaining = duration[2:]  # Remove 'PT' prefix
+    total_seconds = 0
+
+    # Parse hours
+    if "H" in remaining:
+        parts = remaining.split("H")
+        hours = int(parts[0]) if parts[0] else 0
+        total_seconds += hours * 3600
+        remaining = parts[1] if len(parts) > 1 else ""
+
+    # Parse minutes
+    if "M" in remaining:
+        parts = remaining.split("M")
+        minutes = int(parts[0]) if parts[0] else 0
+        total_seconds += minutes * 60
+        remaining = parts[1] if len(parts) > 1 else ""
+
+    # Parse seconds
+    if "S" in remaining:
+        parts = remaining.split("S")
+        seconds = int(parts[0]) if parts[0] else 0
+        total_seconds += seconds
+
+    if total_seconds == 0:
+        raise ValidationError(f"Invalid ISO 8601 duration: {duration}. Could not parse any time components.")
+
+    return total_seconds
+
 
 def get_network_info(conn: sqlite3.Connection) -> Dict[str, Any]:
     """
```
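The added helpers default `time_resolution` to "PT1H" and count periods inclusively at both endpoints. A standalone sketch of that arithmetic (the `parse_duration` name and the 2024 dates are illustrative, not part of the package):

```python
from datetime import datetime

def parse_duration(duration: str) -> int:
    # Mirrors _parse_iso8601_duration_to_seconds above (hypothetical name).
    assert duration.startswith("PT"), f"bad duration: {duration}"
    total, remaining = 0, duration[2:]
    for unit, factor in (("H", 3600), ("M", 60), ("S", 1)):
        if unit in remaining:
            head, _, remaining = remaining.partition(unit)
            total += (int(head) if head else 0) * factor
    return total

print(parse_duration("PT1H"))     # 3600
print(parse_duration("PT30M"))    # 1800
print(parse_duration("PT1H30M"))  # 5400

# Period count is inclusive of both endpoints, matching the hunk above:
start = datetime.strptime("2024-01-01 00:00:00", "%Y-%m-%d %H:%M:%S")
end = datetime.strptime("2024-12-31 23:00:00", "%Y-%m-%d %H:%M:%S")
periods = int((end - start).total_seconds()) // parse_duration("PT1H") + 1
print(periods)  # 8784 hourly periods in leap-year 2024
```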
{pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/solvers/pypsa/api.py

```diff
@@ -27,6 +27,7 @@ def solve_network(
     return_detailed_results: bool = True,
     custom_solver_config: Optional[Dict[str, Any]] = None,
     include_unmet_loads: bool = True,
+    verbose: bool = False,
 ) -> Dict[str, Any]:
     """
     Complete solve workflow: build PyPSA network from database, solve, store results (single network per database).
@@ -48,6 +49,7 @@ def solve_network(
             Format: {"solver": "actual_solver_name", "solver_options": {...}}
             Example: {"solver": "gurobi", "solver_options": {"Method": 2, "Crossover": 0}}
         include_unmet_loads: Whether to include unmet load components in the network (default: True)
+        verbose: Enable detailed logging output (default: False)
 
     Returns:
         Dictionary with solve results - comprehensive if return_detailed_results=True, simple status otherwise
@@ -75,15 +77,12 @@ def solve_network(
         if discount_rate is not None
         else network_config.get("discount_rate")
     )
-    logger.info(
-        f"Using discount rate: {effective_discount_rate} (from {'parameter override' if discount_rate is not None else 'network config'})"
-    )
 
     # Build network
     if progress_callback:
         progress_callback(10, "Building PyPSA network...")
 
-    builder = NetworkBuilder()
+    builder = NetworkBuilder(verbose=verbose)
     network = builder.build_network(
         conn, scenario_id, progress_callback, include_unmet_loads
     )
@@ -109,7 +108,7 @@
     if progress_callback:
         progress_callback(70, f"Solving with {solver_name}...")
 
-    solver = NetworkSolver()
+    solver = NetworkSolver(verbose=verbose)
     solve_result = solver.solve_network(
         network,
         solver_name=solver_name,
@@ -125,7 +124,7 @@
         progress_callback(85, "Storing results...")
 
     # Store results - ALWAYS store results regardless of return_detailed_results flag
-    storage = ResultStorage()
+    storage = ResultStorage(verbose=verbose)
     storage_result = storage.store_results(conn, network, solve_result, scenario_id)
 
     if progress_callback:
@@ -194,6 +193,7 @@ def build_pypsa_network(
     db_path: str,
     scenario_id: Optional[int] = None,
     progress_callback: Optional[Callable[[int, str], None]] = None,
+    verbose: bool = False,
 ) -> "pypsa.Network":
     """
     Build PyPSA network object from database (single network per database).
@@ -206,6 +206,7 @@ def build_pypsa_network(
         db_path: Path to the database file
         scenario_id: Optional scenario ID (NULL for base network)
         progress_callback: Optional callback for progress updates
+        verbose: Enable detailed logging output (default: False)
 
     Returns:
         PyPSA Network object ready for solving
@@ -216,7 +217,7 @@ def build_pypsa_network(
         ImportError: If PyPSA is not available
     """
     with database_context(db_path) as conn:
-        builder = NetworkBuilder()
+        builder = NetworkBuilder(verbose=verbose)
         return builder.build_network(conn, scenario_id, progress_callback)
 
 
@@ -230,6 +231,7 @@ def solve_pypsa_network(
     store_results: bool = True,
     progress_callback: Optional[Callable[[int, str], None]] = None,
     custom_solver_config: Optional[Dict[str, Any]] = None,
+    verbose: bool = False,
 ) -> Dict[str, Any]:
     """
     Solve PyPSA network and optionally store results back to database (single network per database).
@@ -248,6 +250,7 @@ def solve_pypsa_network(
         progress_callback: Optional callback for progress updates
         custom_solver_config: Optional custom solver configuration when solver_name="custom"
             Format: {"solver": "actual_solver_name", "solver_options": {...}}
+        verbose: Enable detailed logging output (default: False)
 
     Returns:
         Dictionary with solve results and statistics
@@ -260,7 +263,7 @@
         progress_callback(0, f"Solving network with {solver_name}...")
 
     # Solve network
-    solver = NetworkSolver()
+    solver = NetworkSolver(verbose=verbose)
     solve_result = solver.solve_network(
         network,
         solver_name=solver_name,
@@ -278,7 +281,7 @@
         progress_callback(80, "Storing results...")
 
     with database_context(db_path) as conn:
-        storage = ResultStorage()
+        storage = ResultStorage(verbose=verbose)
         storage_result = storage.store_results(
             conn, network, solve_result, scenario_id
         )
@@ -348,6 +351,7 @@ def store_solve_results(
     db_path: str,
     scenario_id: Optional[int],
     solve_metadata: Dict[str, Any],
+    verbose: bool = False,
 ) -> Dict[str, Any]:
     """
     Store PyPSA solve results back to database (single network per database).
@@ -361,6 +365,7 @@ def store_solve_results(
         db_path: Path to the database file
         scenario_id: Scenario ID for result storage (NULL for base network)
        solve_metadata: Dictionary with solve metadata (solver_name, solve_time, etc.)
+        verbose: Enable detailed logging output (default: False)
 
     Returns:
         Dictionary with storage statistics
@@ -369,7 +374,7 @@
         DatabaseError: If database operations fail
     """
     with database_context(db_path) as conn:
-        storage = ResultStorage()
+        storage = ResultStorage(verbose=verbose)
         return storage.store_results(conn, network, solve_metadata, scenario_id)
 
 
```
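The `verbose` flag added throughout this module defaults to `False`, so the per-step `logger.info` output that earlier releases always emitted is now opt-in. A minimal usage sketch, assuming the import path implied by the package layout; `model.db` is a placeholder:

```python
# Sketch only: import path inferred from src/pyconvexity/solvers/pypsa/api.py;
# "model.db" is a placeholder database file.
from pyconvexity.solvers.pypsa.api import build_pypsa_network

network = build_pypsa_network(
    db_path="model.db",
    scenario_id=None,        # None selects the base network
    progress_callback=None,
    verbose=True,            # forwarded to NetworkBuilder for detailed logging
)
print(len(network.snapshots))
```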
{pyconvexity-0.4.2.post1 → pyconvexity-0.4.3}/src/pyconvexity/solvers/pypsa/builder.py

```diff
@@ -26,7 +26,15 @@ class NetworkBuilder:
     even for single-year models.
     """
 
-    def __init__(self):
+    def __init__(self, verbose: bool = False):
+        """
+        Initialize NetworkBuilder.
+
+        Args:
+            verbose: Enable detailed logging output
+        """
+        self.verbose = verbose
+
         # Import PyPSA with error handling
         try:
             import pypsa
@@ -188,7 +196,6 @@ class NetworkBuilder:
 
             # Extract unique years for investment periods
             years = sorted(list(set([ts.year for ts in timestamps])))
-            logger.info(f"Found {len(years)} investment periods: {years}")
 
             # Always create MultiIndex following PyPSA multi-investment tutorial format
             # First level: investment periods (years), Second level: timesteps
@@ -206,7 +213,6 @@ class NetworkBuilder:
                     f"Created MultiIndex is not unique! Check timestamp generation."
                 )
 
-            logger.info(f"Created MultiIndex with {len(multi_index)} snapshots")
             network.set_snapshots(multi_index)
 
             # Set investment periods for multi-period optimization
@@ -216,9 +222,8 @@ class NetworkBuilder:
                 network._available_years = years
 
             logger.info(
-                f"
+                f"Time index: {len(multi_index)} snapshots across {len(years)} investment periods: {years}"
             )
-            logger.info(f"Investment periods: {network.investment_periods}")
 
         except Exception as e:
             logger.error(f"Failed to set time index: {e}")
```
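The snapshot index the builder creates follows the PyPSA multi-investment-period convention: first level is the investment period (year), second level the timestep. An illustrative sketch with made-up years and four hourly timesteps per period:

```python
import pandas as pd
import pypsa

# Illustrative timestamps spanning two investment periods (2030 and 2040).
timestamps = pd.date_range("2030-01-01", periods=4, freq="h").append(
    pd.date_range("2040-01-01", periods=4, freq="h")
)
years = sorted({ts.year for ts in timestamps})

# First level: period year, second level: timestep, as in the hunks above.
multi_index = pd.MultiIndex.from_arrays(
    [[ts.year for ts in timestamps], timestamps], names=["period", "timestep"]
)

network = pypsa.Network()
network.set_snapshots(multi_index)
network.investment_periods = years  # enables multi-period optimization
print(network.snapshots)
```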
```diff
@@ -376,12 +381,14 @@ class NetworkBuilder:
         if include_unmet_loads:
             unmet_loads = list_components_by_type(conn, "UNMET_LOAD")
             all_generators = generators + unmet_loads
-
-
-
+            if self.verbose:
+                logger.info(
+                    f"Loading {len(generators)} generators and {len(unmet_loads)} unmet loads"
+                )
         else:
             all_generators = generators
-
+            if self.verbose:
+                logger.info(f"Loading {len(generators)} generators (unmet loads disabled)")
 
         generator_ids = [gen.id for gen in all_generators]
 
@@ -628,9 +635,6 @@ class NetworkBuilder:
             network.snapshot_weightings.loc[:, "objective"] = weightings
             network.snapshot_weightings.loc[:, "generators"] = weightings
             network.snapshot_weightings.loc[:, "stores"] = weightings
-            logger.info(
-                f"Set snapshot weightings for {len(weightings)} time periods (objective, generators, stores)"
-            )
         else:
             logger.warning(
                 f"Mismatch between weightings ({len(weightings)}) and snapshots ({len(network.snapshots)})"
```