pyconvexity 0.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyconvexity/__init__.py +226 -0
- pyconvexity/_version.py +1 -0
- pyconvexity/core/__init__.py +60 -0
- pyconvexity/core/database.py +485 -0
- pyconvexity/core/errors.py +106 -0
- pyconvexity/core/types.py +400 -0
- pyconvexity/data/README.md +101 -0
- pyconvexity/data/__init__.py +17 -0
- pyconvexity/data/loaders/__init__.py +3 -0
- pyconvexity/data/loaders/cache.py +213 -0
- pyconvexity/data/schema/01_core_schema.sql +420 -0
- pyconvexity/data/schema/02_data_metadata.sql +120 -0
- pyconvexity/data/schema/03_validation_data.sql +506 -0
- pyconvexity/data/sources/__init__.py +5 -0
- pyconvexity/data/sources/gem.py +442 -0
- pyconvexity/io/__init__.py +26 -0
- pyconvexity/io/excel_exporter.py +1226 -0
- pyconvexity/io/excel_importer.py +1381 -0
- pyconvexity/io/netcdf_exporter.py +197 -0
- pyconvexity/io/netcdf_importer.py +1833 -0
- pyconvexity/models/__init__.py +195 -0
- pyconvexity/models/attributes.py +730 -0
- pyconvexity/models/carriers.py +159 -0
- pyconvexity/models/components.py +611 -0
- pyconvexity/models/network.py +503 -0
- pyconvexity/models/results.py +148 -0
- pyconvexity/models/scenarios.py +234 -0
- pyconvexity/solvers/__init__.py +29 -0
- pyconvexity/solvers/pypsa/__init__.py +24 -0
- pyconvexity/solvers/pypsa/api.py +460 -0
- pyconvexity/solvers/pypsa/batch_loader.py +307 -0
- pyconvexity/solvers/pypsa/builder.py +675 -0
- pyconvexity/solvers/pypsa/constraints.py +405 -0
- pyconvexity/solvers/pypsa/solver.py +1509 -0
- pyconvexity/solvers/pypsa/storage.py +2048 -0
- pyconvexity/timeseries.py +330 -0
- pyconvexity/validation/__init__.py +25 -0
- pyconvexity/validation/rules.py +312 -0
- pyconvexity-0.4.3.dist-info/METADATA +47 -0
- pyconvexity-0.4.3.dist-info/RECORD +42 -0
- pyconvexity-0.4.3.dist-info/WHEEL +5 -0
- pyconvexity-0.4.3.dist-info/top_level.txt +1 -0
pyconvexity/io/excel_importer.py
@@ -0,0 +1,1381 @@
"""
Excel importer for PyConvexity energy system models.
Imports network models from Excel workbooks with multiple sheets.
"""

import logging
import pandas as pd
import numpy as np
from typing import Dict, Any, Optional, List, Tuple
from pathlib import Path
from datetime import datetime
import json

# Import functions directly from pyconvexity
from pyconvexity.core.database import open_connection
from pyconvexity.core.types import StaticValue, CreateNetworkRequest
from pyconvexity.core.errors import AttributeNotFound, ValidationError
from pyconvexity.models import (
    list_components_by_type,
    create_component,
    update_component,
    create_network,
    set_network_config,
    create_carrier,
    get_network_time_periods,
    list_carriers,
    set_static_attribute,
    set_timeseries_attribute,
    get_bus_name_to_id_map,
    get_network_info,
    delete_attribute,
)
from pyconvexity.validation import get_validation_rule
from pyconvexity.timeseries import set_timeseries
from pyconvexity.models.attributes import (
    set_timeseries_attribute as set_timeseries_conn,
)

logger = logging.getLogger(__name__)

class ExcelModelImporter:
    """Import network model from Excel workbook"""

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def import_model_from_excel(
        self,
        db_path: str,
        excel_path: str,
        network_name: Optional[str] = None,
        network_description: Optional[str] = None,
        scenario_id: Optional[int] = None,
        progress_callback: Optional[callable] = None,
    ) -> Dict[str, Any]:
        """
        Import network model from Excel workbook

        Args:
            db_path: Database path
            excel_path: Excel file path

            network_name: Name for new network (if creating new)
            network_description: Description for new network (if creating new)
            scenario_id: Scenario ID (defaults to master scenario)
            update_existing: Whether to update existing components
            add_new: Whether to add new components
            progress_callback: Optional callback for progress updates

        Returns:
            Import statistics and metadata
        """

        conn = None
        try:
            if progress_callback:
                progress_callback(0, "Starting Excel import...")

            # Connect to database
            conn = open_connection(db_path)

            # Single network per database - always update existing network metadata
            # Check if network already exists
            try:
                existing_network = get_network_info(conn)
                create_new_network = False
            except Exception:
                create_new_network = True

            if create_new_network:
                if progress_callback:
                    progress_callback(3, "Reading Excel Overview sheet...")

                # Read network configuration from Overview sheet
                overview_df = pd.read_excel(excel_path, sheet_name="Overview")
                network_config = self._read_overview_sheet(overview_df)

                self.logger.info(f"Network config from Overview: {network_config}")

                # Extract network name from Excel if not provided
                excel_network_name = network_config.get("name")
                if excel_network_name:
                    final_network_name = excel_network_name
                    self.logger.info(
                        f"Using network name from Excel: '{final_network_name}'"
                    )
                elif network_name:
                    final_network_name = network_name
                    self.logger.info(
                        f"Using provided network name: '{final_network_name}'"
                    )
                else:
                    # Fallback to filename if no name in Excel or provided
                    final_network_name = Path(excel_path).stem
                    self.logger.info(
                        f"Using filename as network name: '{final_network_name}'"
                    )

                # Extract description from Excel if not provided
                excel_description = network_config.get("description")
                if excel_description:
                    final_description = excel_description
                    self.logger.info(
                        f"Using description from Excel: '{final_description}'"
                    )
                elif network_description:
                    final_description = network_description
                    self.logger.info(
                        f"Using provided description: '{final_description}'"
                    )
                else:
                    final_description = f"Imported from {Path(excel_path).name}"
                    self.logger.info(
                        f"Using default description: '{final_description}'"
                    )

                if progress_callback:
                    progress_callback(5, f"Creating network '{final_network_name}'...")

                # Create new network
                network_request = CreateNetworkRequest(
                    name=final_network_name,
                    description=final_description,
                    time_resolution=network_config.get("time_resolution", "H"),
                    start_time=network_config.get("start_time"),
                    end_time=network_config.get("end_time"),
                )

                # Validate that we have the required time information
                if not network_request.start_time or not network_request.end_time:
                    missing_fields = []
                    if not network_request.start_time:
                        missing_fields.append("Time Start")
                    if not network_request.end_time:
                        missing_fields.append("Time End")

                    self.logger.error(
                        f"Missing required time information in Overview sheet: {missing_fields}"
                    )
                    self.logger.error(f"Available overview data: {network_config}")
                    raise ValueError(
                        f"Excel file is missing required time information: {', '.join(missing_fields)}. "
                        f"Please ensure the Overview sheet contains 'Time Start' and 'Time End' fields."
                    )

                self.logger.info(
                    f"Creating network with: name='{network_request.name}', "
                    f"start_time='{network_request.start_time}', "
                    f"end_time='{network_request.end_time}', "
                    f"time_resolution='{network_request.time_resolution}'"
                )

                create_network(conn, network_request)

                # Generate time periods for the network
                self._generate_time_periods(
                    conn,
                    network_request.start_time,
                    network_request.end_time,
                    network_request.time_resolution,
                )

                # Verify time periods were created
                verification_periods = get_network_time_periods(conn)
                self.logger.info(
                    f"Network now has {len(verification_periods)} time periods"
                )

                conn.commit()

                if progress_callback:
                    progress_callback(5, f"Updated network '{final_network_name}'")
            else:
                if progress_callback:
                    progress_callback(3, f"Updating existing network")

                # For existing networks, validate time axis compatibility
                if progress_callback:
                    progress_callback(5, "Validating time axis compatibility...")

                # Read network configuration from Overview sheet to compare
                try:
                    overview_df = pd.read_excel(excel_path, sheet_name="Overview")
                    excel_time_config = self._read_overview_sheet(overview_df)
                except Exception as e:
                    self.logger.warning(f"Could not read Overview sheet: {e}")
                    self.logger.warning(
                        "Skipping time axis validation - assuming Excel is compatible"
                    )
                    excel_time_config = {}

                # Validate time axis matches existing network
                self._validate_time_axis_compatibility(conn, excel_time_config)

                self.logger.info(
                    "Time axis validation passed - Excel matches existing network"
                )

            # Set import behavior based on whether this is a new or existing network
            # Always add all components for single network per database
            if True:
                # New network: Always add all components from Excel
                actual_update_existing = False  # No existing components to update
                actual_add_new = True  # Add everything from Excel
                self.logger.info(
                    "Import mode: NEW NETWORK - Adding all components from Excel"
                )
            else:
                # Existing network: Always update existing and add new (user's requirement)
                actual_update_existing = True  # Update components that exist
                actual_add_new = True  # Add components that don't exist
                self.logger.info(
                    "Import mode: EXISTING NETWORK - Update existing + add new components"
                )

            if progress_callback:
                progress_callback(8, "Reading Excel file...")

            # Read Excel file
            excel_data = self._read_excel_file(excel_path)

            if progress_callback:
                progress_callback(18, "Processing carriers...")

            # Import carriers first
            carriers_df = excel_data.get("Carriers", pd.DataFrame())
            carriers_imported = self._import_carriers(conn, carriers_df)

            if progress_callback:
                progress_callback(28, "Processing components...")

            # Import components by type
            component_types = [
                "Buses",
                "Generators",
                "Loads",
                "Lines",
                "Links",
                "Storage Units",
                "Stores",
                "Constraints",
            ]
            components_imported = {}

            for sheet_name in component_types:
                if sheet_name in excel_data:
                    comp_type = self._get_component_type_from_sheet(sheet_name)
                    self.logger.info(
                        f"Processing sheet '{sheet_name}' as component type '{comp_type}' with {len(excel_data[sheet_name])} rows"
                    )
                    components_imported[comp_type] = self._import_components(
                        conn,
                        comp_type,
                        excel_data[sheet_name],
                        scenario_id,
                        actual_update_existing,
                        actual_add_new,
                    )

            if progress_callback:
                progress_callback(78, "Processing timeseries data...")

            # Import timeseries data
            timeseries_imported = self._import_timeseries_data(
                conn, excel_data, scenario_id
            )

            if progress_callback:
                progress_callback(93, "Processing network configuration...")

            # Import network configuration
            network_config_df = excel_data.get("Network Config", pd.DataFrame())
            config_imported = self._import_network_config(conn, network_config_df)

            conn.commit()

            if progress_callback:
                progress_callback(100, "Excel import completed")

            # Calculate statistics
            stats = self._calculate_import_stats(
                carriers_imported,
                components_imported,
                timeseries_imported,
                config_imported,
            )
            # network_id no longer needed in stats
            stats["created_new_network"] = False  # Single network per database

            return {
                "success": True,
                "message": f"Network updated from Excel: {excel_path}",
                "stats": stats,
            }

        except Exception as e:
            self.logger.error(f"Excel import failed: {e}", exc_info=True)
            if progress_callback:
                progress_callback(None, f"Import failed: {str(e)}")
            raise
        finally:
            # Always close the connection, even on error
            if conn is not None:
                try:
                    conn.close()
                except Exception as e:
                    self.logger.warning(f"Failed to close database connection: {e}")

    def _generate_time_periods(
        self, conn, start_time: str, end_time: str, time_resolution: str
    ) -> None:
        """Generate and insert time periods for the network"""
        import pandas as pd
        from datetime import datetime

        try:
            # Parse start and end times
            start_dt = pd.to_datetime(start_time)
            end_dt = pd.to_datetime(end_time)

            # Convert time_resolution to pandas frequency string
            if time_resolution == "H":
                freq_str = "H"
            elif time_resolution == "D":
                freq_str = "D"
            elif time_resolution.endswith("H"):
                hours = int(time_resolution[:-1])
                freq_str = f"{hours}H"
            elif time_resolution.endswith("min"):
                minutes = int(time_resolution[:-3])
                freq_str = f"{minutes}min"
            else:
                self.logger.warning(
                    f"Unknown time resolution '{time_resolution}', defaulting to hourly"
                )
                freq_str = "H"

            # Generate timestamps
            timestamps = pd.date_range(
                start=start_dt, end=end_dt, freq=freq_str, inclusive="both"
            )

            self.logger.info(
                f"Generating {len(timestamps)} time periods from {start_time} to {end_time} at {time_resolution} resolution"
            )

            # Insert optimized time periods metadata
            period_count = len(timestamps)
            start_timestamp = int(timestamps[0].timestamp())

            # Calculate interval in seconds
            if len(timestamps) > 1:
                interval_seconds = int((timestamps[1] - timestamps[0]).total_seconds())
            else:
                interval_seconds = 3600  # Default to hourly

            conn.execute(
                """
                INSERT INTO network_time_periods (period_count, start_timestamp, interval_seconds)
                VALUES (?, ?, ?)
            """,
                (period_count, start_timestamp, interval_seconds),
            )

            self.logger.info(f"Successfully created {len(timestamps)} time periods")

        except Exception as e:
            self.logger.error(f"Failed to generate time periods: {e}")
            raise

    def _read_overview_sheet(self, overview_df: pd.DataFrame) -> Dict[str, Any]:
        """Extract network configuration from Overview sheet"""
        config = {}

        if overview_df.empty:
            self.logger.warning("Overview sheet is empty")
            return config

        self.logger.info(
            f"Overview sheet has {len(overview_df)} rows and columns: {list(overview_df.columns)}"
        )
        self.logger.info(f"First few rows of overview sheet:\n{overview_df.head()}")

        # Convert to a simple key-value lookup
        overview_data = {}

        # Handle both old single-column format and new two-column format
        if "Property" in overview_df.columns and "Value" in overview_df.columns:
            # New two-column format
            for _, row in overview_df.iterrows():
                key = str(row["Property"]).strip() if pd.notna(row["Property"]) else ""
                value = str(row["Value"]).strip() if pd.notna(row["Value"]) else ""
                if key and value and value != "nan":
                    overview_data[key] = value
                    self.logger.debug(f"Parsed overview data: '{key}' = '{value}'")
        elif len(overview_df.columns) >= 2:
            # Old format - try to read from first two columns
            for i, row in overview_df.iterrows():
                key = str(row.iloc[0]).strip() if pd.notna(row.iloc[0]) else ""
                value = str(row.iloc[1]).strip() if pd.notna(row.iloc[1]) else ""
                if key and value and value != "nan":
                    overview_data[key] = value
                    self.logger.debug(f"Parsed overview data: '{key}' = '{value}'")
        else:
            self.logger.error(
                f"Overview sheet format not recognized. Columns: {list(overview_df.columns)}"
            )
            return config

        self.logger.info(f"Parsed overview data: {overview_data}")

        # Extract network configuration
        if "Name" in overview_data:
            config["name"] = overview_data["Name"]
        if "Description" in overview_data:
            config["description"] = overview_data["Description"]
        if "Time Start" in overview_data:
            config["start_time"] = overview_data["Time Start"]
            self.logger.info(f"Found Time Start: {config['start_time']}")
        if "Time End" in overview_data:
            config["end_time"] = overview_data["Time End"]
            self.logger.info(f"Found Time End: {config['end_time']}")
        if "Time Interval" in overview_data:
            # Convert time interval format to our format
            interval = overview_data["Time Interval"].strip()
            self.logger.info(f"Found Time Interval: '{interval}'")

            if interval == "P1D":
                config["time_resolution"] = "D"  # Daily
            elif interval == "PT1H" or interval == "h" or interval == "H":
                config["time_resolution"] = "H"  # Hourly
            elif interval.startswith("PT") and interval.endswith("H"):
                # Extract hours (e.g., 'PT3H' -> '3H')
                hours = interval[2:-1]
                config["time_resolution"] = f"{hours}H"
            elif interval.endswith("h") or interval.endswith("H"):
                # Handle simple formats like '2h', '3H'
                if interval[:-1].isdigit():
                    hours = interval[:-1]
                    config["time_resolution"] = f"{hours}H"
                else:
                    config["time_resolution"] = "H"  # Default to hourly
            else:
                self.logger.warning(
                    f"Unknown time interval format '{interval}', defaulting to hourly"
                )
                config["time_resolution"] = "H"  # Default to hourly

        self.logger.info(f"Final network config from Overview sheet: {config}")
        return config

    def _read_excel_file(self, excel_path: str) -> Dict[str, pd.DataFrame]:
        """Read Excel file and return dictionary of DataFrames by sheet name"""
        excel_data = {}

        try:
            # Read all sheets
            excel_file = pd.ExcelFile(excel_path)

            self.logger.info(f"Excel file contains sheets: {excel_file.sheet_names}")

            for sheet_name in excel_file.sheet_names:
                if sheet_name == "Overview":
                    continue  # Skip overview sheet

                df = pd.read_excel(excel_path, sheet_name=sheet_name)
                if not df.empty:
                    excel_data[sheet_name] = df
                    self.logger.info(f"Loaded sheet '{sheet_name}' with {len(df)} rows")
                else:
                    self.logger.info(f"Skipped empty sheet '{sheet_name}'")

        except Exception as e:
            raise ValueError(f"Failed to read Excel file: {e}")

        return excel_data

    def _get_component_type_from_sheet(self, sheet_name: str) -> str:
        """Convert sheet name to component type"""
        mapping = {
            "Buses": "BUS",
            "Generators": "GENERATOR",
            "Loads": "LOAD",
            "Lines": "LINE",
            "Links": "LINK",
            "Storage Units": "STORAGE_UNIT",
            "Stores": "STORE",
            "Constraints": "CONSTRAINT",
        }
        return mapping.get(sheet_name, sheet_name.upper())

    def _import_carriers(self, conn, carriers_df: pd.DataFrame) -> Dict[str, Any]:
        """Import carriers from Excel data"""
        imported = {"created": 0, "updated": 0, "errors": 0}

        if carriers_df.empty:
            return imported

        # Get existing carriers
        existing_carriers = list_carriers(conn)
        existing_names = {carrier.name for carrier in existing_carriers}

        for _, row in carriers_df.iterrows():
            try:
                carrier_name = str(row.get("name", "")).strip()
                if not carrier_name:
                    continue

                # Check if carrier exists
                if carrier_name in existing_names:
                    imported["updated"] += 1
                else:
                    # Create new carrier
                    create_carrier(
                        conn,
                        carrier_name,
                        co2_emissions=row.get("co2_emissions", 0.0),
                        color=row.get("color", "#ffffff"),
                        nice_name=row.get("nice_name", carrier_name),
                    )
                    imported["created"] += 1

            except Exception as e:
                self.logger.error(f"Failed to import carrier {carrier_name}: {e}")
                imported["errors"] += 1

        return imported

    def _import_components(
        self,
        conn,
        component_type: str,
        components_df: pd.DataFrame,
        scenario_id: int,
        update_existing: bool,
        add_new: bool,
    ) -> Dict[str, Any]:
        """Import components of a specific type"""
        imported = {"created": 0, "updated": 0, "errors": 0}

        if components_df.empty:
            return imported

        # Get existing components of this type
        existing_components = list_components_by_type(conn, component_type)
        existing_names = {comp.name for comp in existing_components}

        # Get carriers and buses for foreign key resolution
        carriers = list_carriers(conn)
        buses = list_components_by_type(conn, "BUS")

        carrier_name_to_id = {carrier.name: carrier.id for carrier in carriers}
        bus_name_to_id = {bus.name: bus.id for bus in buses}

        for _, row in components_df.iterrows():
            try:
                component_name = str(row.get("name", "")).strip()
                if not component_name:
                    continue

                # Debug logging for CONSTRAINT components (reduced verbosity)
                if component_type == "CONSTRAINT":
                    self.logger.debug(f"Processing CONSTRAINT '{component_name}'")

                # Resolve foreign keys
                carrier_id = None
                # CONSTRAINT components don't have carriers
                if row.get("carrier") and component_type != "CONSTRAINT":
                    carrier_name = str(row["carrier"]).strip()
                    carrier_id = carrier_name_to_id.get(carrier_name)
                    self.logger.info(
                        f"Component '{component_name}' has carrier '{carrier_name}', resolved to carrier_id: {carrier_id}"
                    )
                    if carrier_id is None:
                        self.logger.warning(
                            f"Carrier '{carrier_name}' not found for component '{component_name}'. Available carriers: {list(carrier_name_to_id.keys())}"
                        )
                elif component_type == "CONSTRAINT":
                    self.logger.debug(
                        f"CONSTRAINT '{component_name}' - skipping carrier resolution"
                    )

                bus_id = None
                # CONSTRAINT components don't connect to buses
                if row.get("bus") and component_type != "CONSTRAINT":
                    bus_name = str(row["bus"]).strip()
                    bus_id = bus_name_to_id.get(bus_name)
                    if bus_id is None:
                        self.logger.warning(
                            f"Bus '{bus_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}"
                        )

                bus0_id = None
                if row.get("bus0") and component_type != "CONSTRAINT":
                    bus0_name = str(row["bus0"]).strip()
                    bus0_id = bus_name_to_id.get(bus0_name)
                    if bus0_id is None:
                        self.logger.warning(
                            f"Bus0 '{bus0_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}"
                        )

                bus1_id = None
                if row.get("bus1") and component_type != "CONSTRAINT":
                    bus1_name = str(row["bus1"]).strip()
                    bus1_id = bus_name_to_id.get(bus1_name)
                    if bus1_id is None:
                        self.logger.warning(
                            f"Bus1 '{bus1_name}' not found for component '{component_name}'. Available buses: {list(bus_name_to_id.keys())}"
                        )

                # Check if component exists
                if component_name in existing_names and update_existing:
                    # Update existing component
                    existing_comp = next(
                        c for c in existing_components if c.name == component_name
                    )

                    try:
                        # Update component using the proper function
                        # CONSTRAINT components must have carrier_id=None per database schema
                        final_carrier_id = (
                            None if component_type == "CONSTRAINT" else carrier_id
                        )
                        update_component(
                            conn,
                            existing_comp.id,
                            carrier_id=final_carrier_id,
                            bus_id=bus_id,
                            bus0_id=bus0_id,
                            bus1_id=bus1_id,
                            latitude=row.get("latitude"),
                            longitude=row.get("longitude"),
                        )

                        # Update attributes
                        self._update_component_attributes(
                            conn, existing_comp.id, row, scenario_id
                        )
                        imported["updated"] += 1

                    except Exception as e:
                        self.logger.error(
                            f"Failed to update component '{component_name}': {e}"
                        )
                        imported["errors"] += 1
                        continue

                elif component_name not in existing_names and add_new:
                    # Create new component using the proper function
                    # CONSTRAINT components must have carrier_id=None per database schema
                    final_carrier_id = (
                        None if component_type == "CONSTRAINT" else carrier_id
                    )

                    # Handle latitude/longitude - CONSTRAINT components don't have location
                    if component_type == "CONSTRAINT":
                        lat_val = None
                        lon_val = None
                        self.logger.debug(
                            f"CONSTRAINT '{component_name}' - setting latitude/longitude to None"
                        )
                    else:
                        # Clean empty strings for other component types
                        lat_val = row.get("latitude")
                        lon_val = row.get("longitude")
                        if lat_val == "" or (
                            isinstance(lat_val, str) and lat_val.strip() == ""
                        ):
                            lat_val = None
                        if lon_val == "" or (
                            isinstance(lon_val, str) and lon_val.strip() == ""
                        ):
                            lon_val = None

                    component_id = create_component(
                        conn,
                        component_type,
                        component_name,
                        longitude=lon_val,
                        latitude=lat_val,
                        carrier_id=final_carrier_id,
                        bus_id=bus_id,
                        bus0_id=bus0_id,
                        bus1_id=bus1_id,
                    )

                    # Set attributes
                    self._set_component_attributes(conn, component_id, row, scenario_id)
                    imported["created"] += 1

            except Exception as e:
                self.logger.error(
                    f"Failed to import component '{component_name}' of type '{component_type}': {e}"
                )
                self.logger.error(
                    f"Component data: name='{component_name}', carrier_id={carrier_id}, bus_id={bus_id}, bus0_id={bus0_id}, bus1_id={bus1_id}"
                )
                imported["errors"] += 1

        return imported

    def _update_component_attributes(
        self, conn, component_id: int, row: pd.Series, scenario_id: int
    ):
        """Update attributes for an existing component"""
        # Get validation rules for this component type
        cursor = conn.execute(
            "SELECT component_type FROM components WHERE id = ?", (component_id,)
        )
        component_type = cursor.fetchone()[0]

        # Process each column as potential attribute
        for column, value in row.items():
            if column in [
                "name",
                "carrier",
                "bus",
                "bus0",
                "bus1",
                "latitude",
                "longitude",
                "type",
            ]:
                continue  # Skip basic fields

            if value == "[timeseries]":
                continue  # Skip timeseries markers

            # Check if this is a valid attribute
            validation_rule = get_validation_rule(conn, component_type, column)
            if validation_rule:
                # Handle blank cells (empty strings or NaN) - these should unset the attribute
                if pd.isna(value) or value == "":
                    try:
                        delete_attribute(conn, component_id, column, scenario_id)
                        self.logger.debug(
                            f"Unset attribute '{column}' for component {component_id} due to blank cell"
                        )
                    except Exception as e:
                        # Attribute might not exist, which is fine
                        self.logger.debug(
                            f"Could not unset attribute '{column}' for component {component_id}: {e}"
                        )
                else:
                    # Set the attribute with the provided value
                    self._set_single_attribute(
                        conn, component_id, column, value, validation_rule, scenario_id
                    )

    def _set_component_attributes(
        self, conn, component_id: int, row: pd.Series, scenario_id: int
    ):
        """Set attributes for a new component"""
        # Get validation rules for this component type
        cursor = conn.execute(
            "SELECT component_type FROM components WHERE id = ?", (component_id,)
        )
        component_type = cursor.fetchone()[0]

        # Process each column as potential attribute
        for column, value in row.items():
            if column in [
                "name",
                "carrier",
                "bus",
                "bus0",
                "bus1",
                "latitude",
                "longitude",
                "type",
            ]:
                continue  # Skip basic fields

            if value == "[timeseries]":
                continue  # Skip timeseries markers

            # Check if this is a valid attribute
            validation_rule = get_validation_rule(conn, component_type, column)
            if validation_rule:
                # For new components, only set attributes that have actual values
                # Blank cells (empty strings or NaN) are left unset (which is the default state)
                if not (pd.isna(value) or value == ""):
                    # Set the attribute with the provided value
                    self._set_single_attribute(
                        conn, component_id, column, value, validation_rule, scenario_id
                    )

    def _set_single_attribute(
        self,
        conn,
        component_id: int,
        attr_name: str,
        value: Any,
        validation_rule: Dict,
        scenario_id: int,
    ):
        """Set a single attribute with proper type conversion"""
        data_type = (
            validation_rule.data_type
            if hasattr(validation_rule, "data_type")
            else validation_rule.get("data_type", "string")
        )

        try:
            if data_type == "float":
                static_value = StaticValue(float(value))
                set_static_attribute(
                    conn, component_id, attr_name, static_value, scenario_id
                )
            elif data_type == "int":
                static_value = StaticValue(int(value))
                set_static_attribute(
                    conn, component_id, attr_name, static_value, scenario_id
                )
            elif data_type == "boolean":
                bool_value = str(value).lower() in ["true", "1", "yes"]
                static_value = StaticValue(bool_value)
                set_static_attribute(
                    conn, component_id, attr_name, static_value, scenario_id
                )
            else:  # string
                static_value = StaticValue(str(value))
                set_static_attribute(
                    conn, component_id, attr_name, static_value, scenario_id
                )
        except (AttributeNotFound, ValidationError):
            # Skip missing attributes or validation errors silently (same as PyPSA solver)
            pass
        except Exception as e:
            self.logger.warning(
                f"Failed to set attribute {attr_name} for component {component_id}: {e}"
            )

    def _import_timeseries_data(
        self, conn, excel_data: Dict, scenario_id: int
    ) -> Dict[str, Any]:
        """Import timeseries data from Excel sheets"""
        imported = {"attributes": 0, "errors": 0}

        # Get network time periods for timestamp mapping
        network_time_periods = get_network_time_periods(conn)
        time_period_map = {
            period.formatted_time: period for period in network_time_periods
        }

        expected_length = len(network_time_periods)
        self.logger.info(
            f"TIMESERIES DEBUG: Network has {expected_length} time periods for timeseries import"
        )
        if network_time_periods:
            self.logger.info(
                f"TIMESERIES DEBUG: Time period range: {network_time_periods[0].formatted_time} to {network_time_periods[-1].formatted_time}"
            )
        else:
            self.logger.error(
                "TIMESERIES DEBUG: NO TIME PERIODS FOUND! Timeseries import will fail."
            )
            return imported

        # Look for timeseries sheets
        for sheet_name, df in excel_data.items():
            if "Timeseries" in sheet_name and not df.empty:
                self.logger.info(
                    f"TIMESERIES DEBUG: Processing sheet '{sheet_name}' with {len(df)} rows"
                )
                component_type = self._get_component_type_from_sheet(
                    sheet_name.replace(" Timeseries", "")
                )

                # Get timestamps
                timestamps = df.get("timestamp", [])
                if timestamps.empty:
                    self.logger.warning(
                        f"TIMESERIES DEBUG: No timestamp column found in {sheet_name}"
                    )
                    continue

                excel_ts_length = len(timestamps)
                self.logger.info(
                    f"TIMESERIES DEBUG: Sheet '{sheet_name}' has {excel_ts_length} timestamps (expected: {expected_length})"
                )
                if excel_ts_length != expected_length:
                    self.logger.warning(
                        f"TIMESERIES DEBUG: LENGTH MISMATCH in sheet '{sheet_name}': Excel has {excel_ts_length}, network expects {expected_length} (difference: {excel_ts_length - expected_length})"
                    )

                # Log timestamp range for debugging
                if len(timestamps) > 0:
                    first_ts = str(timestamps.iloc[0]).strip()
                    last_ts = str(timestamps.iloc[-1]).strip()
                    self.logger.info(
                        f"TIMESERIES DEBUG: Sheet timestamp range: '{first_ts}' to '{last_ts}'"
                    )

                # Process each column (except timestamp)
                for column in df.columns:
                    if column == "timestamp":
                        continue

                    # Parse component name and attribute from column name
                    # Format: "Component Name_attribute_name"
                    # We need to find the last underscore that separates component name from attribute
                    if "_" in column:
                        # Find all components of this type to match against
                        components = list_components_by_type(conn, component_type)
                        component_names = [c.name for c in components]

                        # Try to find the component name by matching against known components
                        component_name = None
                        attr_name = None

                        for comp_name in component_names:
                            # Check if column starts with component name + underscore
                            prefix = f"{comp_name}_"
                            if column.startswith(prefix):
                                component_name = comp_name
                                attr_name = column[len(prefix) :]
                                break

                        if component_name and attr_name:
                            # Find component by name
                            component = next(
                                (c for c in components if c.name == component_name),
                                None,
                            )

                            if component:
                                # Create timeseries data using efficient array format
                                timeseries_values = []
                                filled_missing_values = 0

                                # Debug: Show first few timestamps for comparison
                                if len(timestamps) > 0 and len(network_time_periods) > 0:
                                    excel_first = str(timestamps.iloc[0]).strip()
                                    excel_last = (
                                        str(timestamps.iloc[-1]).strip()
                                        if len(timestamps) > 1
                                        else excel_first
                                    )
                                    network_first = network_time_periods[0].formatted_time
                                    network_last = (
                                        network_time_periods[-1].formatted_time
                                        if len(network_time_periods) > 1
                                        else network_first
                                    )

                                    self.logger.info(
                                        f"TIMESERIES DEBUG: Timestamp comparison for '{component_name}.{attr_name}':"
                                    )
                                    self.logger.info(
                                        f"  Excel range: '{excel_first}' to '{excel_last}' ({len(timestamps)} periods)"
                                    )
                                    self.logger.info(
                                        f"  Network range: '{network_first}' to '{network_last}' ({len(network_time_periods)} periods)"
                                    )

                                # Take the first N values from Excel where N = expected network periods
                                # This puts responsibility on user to format Excel correctly
                                max_periods = min(
                                    len(timestamps),
                                    len(network_time_periods),
                                    len(df[column]),
                                )

                                for i in range(max_periods):
                                    value = df[column].iloc[i]

                                    # Handle missing values - use 0.0 as default
                                    if pd.isna(value):
                                        actual_value = 0.0
                                        filled_missing_values += 1
                                    else:
                                        try:
                                            actual_value = float(value)
                                        except (ValueError, TypeError):
                                            actual_value = 0.0
                                            filled_missing_values += 1

                                    timeseries_values.append(actual_value)

                                final_ts_length = len(timeseries_values)
                                self.logger.info(
                                    f"TIMESERIES DEBUG: Component '{component_name}.{attr_name}': "
                                    f"Excel rows={excel_ts_length}, "
                                    f"Network periods={expected_length}, "
                                    f"Used={max_periods}, "
                                    f"Filled missing={filled_missing_values}, "
                                    f"Final length={final_ts_length}"
                                )

                                if filled_missing_values > 0:
                                    self.logger.warning(
                                        f"TIMESERIES DEBUG: Filled {filled_missing_values} missing/invalid values with 0.0 for '{component_name}.{attr_name}'"
                                    )

                                if excel_ts_length != expected_length:
                                    self.logger.warning(
                                        f"TIMESERIES DEBUG: LENGTH MISMATCH for '{component_name}.{attr_name}': "
                                        f"Excel has {excel_ts_length} rows, network expects {expected_length} periods"
                                    )

                                if final_ts_length != expected_length:
                                    self.logger.warning(
                                        f"TIMESERIES DEBUG: FINAL LENGTH MISMATCH for '{component_name}.{attr_name}': "
                                        f"Expected {expected_length}, got {final_ts_length} (difference: {final_ts_length - expected_length})"
                                    )

                                if timeseries_values:
                                    try:
                                        # Use new efficient timeseries API
                                        set_timeseries_conn(
                                            conn,
                                            component.id,
                                            attr_name,
                                            timeseries_values,
                                            scenario_id,
                                        )
                                        imported["attributes"] += 1
                                        self.logger.info(
                                            f"TIMESERIES DEBUG: Successfully imported {final_ts_length} points for '{component_name}.{attr_name}'"
                                        )
                                    except Exception as e:
                                        self.logger.error(
                                            f"TIMESERIES DEBUG: Failed to set timeseries attribute {attr_name} for {component_name}: {e}"
                                        )
                                        imported["errors"] += 1
                                else:
                                    self.logger.warning(
                                        f"TIMESERIES DEBUG: No valid timeseries data found for {component_name}.{attr_name}"
                                    )
                            else:
                                self.logger.warning(
                                    f"TIMESERIES DEBUG: Component '{component_name}' not found for timeseries import"
                                )
                        else:
                            self.logger.warning(
                                f"TIMESERIES DEBUG: Could not parse column '{column}' into component and attribute names"
                            )
                    else:
                        self.logger.warning(
                            f"TIMESERIES DEBUG: Column '{column}' does not contain underscore separator"
                        )

        return imported

    def _import_network_config(self, conn, config_df: pd.DataFrame) -> Dict[str, Any]:
        """Import network configuration from Excel"""
        imported = {"parameters": 0, "errors": 0}

        # Handle case where config_df might be a list (when sheet doesn't exist)
        if not isinstance(config_df, pd.DataFrame):
            self.logger.info(
                "No Network Config sheet found, using default configuration"
            )
            # Set default network configuration
            default_config = {
                "unmet_load_active": True,
                "discount_rate": 0.01,
                "solver_name": "highs",
                "currency": "USD",
            }

            for param_name, param_value in default_config.items():
                try:
                    if isinstance(param_value, bool):
                        param_type = "boolean"
                    elif isinstance(param_value, float):
                        param_type = "real"
                    elif isinstance(param_value, int):
                        param_type = "integer"
                    else:
                        param_type = "string"

                    set_network_config(
                        conn,
                        param_name,
                        param_value,
                        param_type,
                        scenario_id=None,  # Network default
                        description=f"Default {param_name} setting",
                    )
                    imported["parameters"] += 1
                    self.logger.info(
                        f"Set default network config: {param_name} = {param_value}"
                    )

                except Exception as e:
                    self.logger.error(
                        f"Failed to set default network config parameter {param_name}: {e}"
                    )
                    imported["errors"] += 1

            return imported

        if config_df.empty:
            self.logger.info(
                "Network Config sheet is empty, using default configuration"
            )
            # Set default network configuration
            default_config = {
                "unmet_load_active": True,
                "discount_rate": 0.01,
                "solver_name": "default",
                "currency": "USD",
            }

            for param_name, param_value in default_config.items():
                try:
                    if isinstance(param_value, bool):
                        param_type = "boolean"
                    elif isinstance(param_value, float):
                        param_type = "real"
                    elif isinstance(param_value, int):
                        param_type = "integer"
                    else:
                        param_type = "string"

                    set_network_config(
                        conn,
                        param_name,
                        param_value,
                        param_type,
                        scenario_id=None,  # Network default
                        description=f"Default {param_name} setting",
                    )
                    imported["parameters"] += 1
                    self.logger.info(
                        f"Set default network config: {param_name} = {param_value}"
                    )

                except Exception as e:
                    self.logger.error(
                        f"Failed to set default network config parameter {param_name}: {e}"
                    )
                    imported["errors"] += 1

            return imported

        for _, row in config_df.iterrows():
            try:
                param_name = str(row.get("Parameter", "")).strip()
                param_value = row.get("Value", "")
                param_type = str(row.get("Type", "string")).strip()
                param_description = str(row.get("Description", "")).strip()

                if not param_name:
                    continue

                # Validate parameter type and map Python types to database types
                valid_types = {"boolean", "real", "integer", "string", "json"}

                # Map Python type names to database type names
                type_mapping = {
                    "bool": "boolean",
                    "float": "real",
                    "int": "integer",
                    "str": "string",
                }

                # Convert Python type name to database type name if needed
                if param_type in type_mapping:
                    param_type = type_mapping[param_type]

                if param_type not in valid_types:
                    self.logger.error(
                        f"Invalid parameter type '{param_type}' for parameter '{param_name}'. Must be one of {valid_types}"
                    )
                    imported["errors"] += 1
                    continue

                # Convert value based on type
                try:
                    if param_type == "boolean":
                        # Handle various boolean representations
                        if isinstance(param_value, bool):
                            converted_value = param_value
                        elif isinstance(param_value, str):
                            converted_value = param_value.lower() in {
                                "true",
                                "1",
                                "yes",
                                "on",
                            }
                        elif isinstance(param_value, (int, float)):
                            converted_value = bool(param_value)
                        else:
                            converted_value = False
                    elif param_type == "real":
                        converted_value = float(param_value)
                    elif param_type == "integer":
                        converted_value = int(
                            float(param_value)
                        )  # Handle float strings like "1.0"
                    elif param_type == "json":
                        if isinstance(param_value, str):
                            import json

                            converted_value = json.loads(param_value)
                        else:
                            converted_value = param_value
                    else:  # string
                        converted_value = str(param_value)
                except (ValueError, TypeError, json.JSONDecodeError) as e:
                    self.logger.error(
                        f"Failed to convert parameter '{param_name}' value '{param_value}' to type '{param_type}': {e}"
                    )
                    imported["errors"] += 1
                    continue

                # Use the proper set_network_config function from pyconvexity
                set_network_config(
                    conn,
                    param_name,
                    converted_value,
                    param_type,
                    scenario_id=None,  # Network default
                    description=param_description if param_description else None,
                )
                imported["parameters"] += 1

            except Exception as e:
                self.logger.error(
                    f"Failed to import network config parameter {param_name}: {e}"
                )
                imported["errors"] += 1

        return imported

    def _validate_time_axis_compatibility(
        self, conn, excel_time_config: Dict[str, str]
    ) -> None:
        """Validate that Excel time axis matches existing network time axis"""
        try:
            # Get existing network info
            existing_network = get_network_info(conn)

            # Compare time axis parameters
            excel_start = excel_time_config.get("start_time", "").strip()
            excel_end = excel_time_config.get("end_time", "").strip()
            excel_interval = excel_time_config.get("time_resolution", "").strip()

            existing_start = existing_network.get("time_start", "").strip()
            existing_end = existing_network.get("time_end", "").strip()
            existing_interval = existing_network.get("time_interval", "").strip()

            self.logger.info(f"TIME AXIS DEBUG: Validating time axis compatibility")
            self.logger.info(
                f"TIME AXIS DEBUG: Excel: {excel_start} to {excel_end}, interval: {excel_interval}"
            )
            self.logger.info(
                f"TIME AXIS DEBUG: Network: {existing_start} to {existing_end}, interval: {existing_interval}"
            )

            # Skip validation if Excel doesn't have time information (allow partial updates)
            if not excel_start or not excel_end or not excel_interval:
                self.logger.warning(
                    "TIME AXIS DEBUG: Excel Overview sheet missing time axis information - skipping validation"
                )
                self.logger.warning(
                    "TIME AXIS DEBUG: Assuming Excel data is compatible with existing network time axis"
                )
                return

            # Normalize case and format for time interval comparison
            excel_interval_normalized = self._normalize_time_interval(excel_interval)
            existing_interval_normalized = self._normalize_time_interval(
                existing_interval
            )

            self.logger.info(
                f"TIME AXIS DEBUG: Normalized intervals - Excel: '{excel_interval_normalized}', Network: '{existing_interval_normalized}'"
            )

            # Check if they match
            if (
                excel_start != existing_start
                or excel_end != existing_end
                or excel_interval_normalized != existing_interval_normalized
            ):

                self.logger.error(f"TIME AXIS DEBUG: MISMATCH DETECTED!")
                self.logger.error(
                    f"TIME AXIS DEBUG: Start times - Excel: '{excel_start}', Network: '{existing_start}' (match: {excel_start == existing_start})"
                )
                self.logger.error(
                    f"TIME AXIS DEBUG: End times - Excel: '{excel_end}', Network: '{existing_end}' (match: {excel_end == existing_end})"
                )
                self.logger.error(
                    f"TIME AXIS DEBUG: Intervals - Excel: '{excel_interval_normalized}', Network: '{existing_interval_normalized}' (match: {excel_interval_normalized == existing_interval_normalized})"
                )

                raise ValueError(
                    f"Time axis mismatch! "
                    f"Excel has {excel_start} to {excel_end} ({excel_interval}), "
                    f"but existing network has {existing_start} to {existing_end} ({existing_interval}). "
                    f"Time axis must match exactly when importing into an existing network."
                )
            else:
                self.logger.info(
                    f"TIME AXIS DEBUG: Time axis validation PASSED - Excel and network time axes match"
                )

        except Exception as e:
            if "Time axis mismatch" in str(e):
                raise  # Re-raise validation errors
            else:
                # Log other errors but don't fail the import
                self.logger.error(f"Error during time axis validation: {e}")
                self.logger.warning(
                    "Continuing with import despite time axis validation error"
                )

    def _normalize_time_interval(self, interval: str) -> str:
        """Normalize time interval format for comparison"""
        interval = interval.strip().upper()

        # Handle common variations
        if interval in ["H", "1H", "PT1H", "HOURLY"]:
            return "H"
        elif interval in ["D", "1D", "P1D", "DAILY"]:
            return "D"
        elif interval.endswith("H") and interval[:-1].isdigit():
            return interval  # Already normalized (e.g., '2H', '3H')
        elif interval.startswith("PT") and interval.endswith("H"):
            # Convert PT3H -> 3H
            hours = interval[2:-1]
            return f"{hours}H"

        return interval

    def _calculate_import_stats(
        self,
        carriers_imported: Dict,
        components_imported: Dict,
        timeseries_imported: Dict,
        config_imported: Dict,
    ) -> Dict[str, Any]:
        """Calculate import statistics"""
        total_created = carriers_imported["created"] + sum(
            comp["created"] for comp in components_imported.values()
        )
        total_updated = carriers_imported["updated"] + sum(
            comp["updated"] for comp in components_imported.values()
        )
        total_errors = (
            carriers_imported["errors"]
            + sum(comp["errors"] for comp in components_imported.values())
            + timeseries_imported["errors"]
            + config_imported["errors"]
        )

        return {
            "total_created": total_created,
            "total_updated": total_updated,
            "total_errors": total_errors,
            "carriers": carriers_imported,
            "components": components_imported,
            "timeseries": timeseries_imported,
            "network_config": config_imported,
        }
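
For orientation, a minimal usage sketch of the importer shown above. The import path pyconvexity.io.excel_importer is assumed from the file's location in this wheel, and the database and workbook paths are hypothetical placeholders, not values taken from the package.

import logging

from pyconvexity.io.excel_importer import ExcelModelImporter  # assumed module path

logging.basicConfig(level=logging.INFO)

def report(pct, msg):
    # Progress callback passed to import_model_from_excel; pct is None when the import fails.
    print(f"[{pct}] {msg}")

importer = ExcelModelImporter()
result = importer.import_model_from_excel(
    db_path="model.db",         # hypothetical network database path
    excel_path="network.xlsx",  # hypothetical workbook with Overview, Carriers, Buses, ... sheets
    progress_callback=report,
)
print(result["stats"]["total_created"], "carriers/components created")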