ras-commander 0.44.0__py3-none-any.whl → 0.46.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ras_commander/HdfPlan.py CHANGED
@@ -198,3 +198,8 @@ class HdfPlan:
              return HdfUtils.get_attrs(hdf_path, "Geometry")
          except Exception as e:
              raise ValueError(f"Failed to get geometry attributes: {str(e)}")
+
+
+
+
+
ras_commander/HdfPump.py ADDED
@@ -0,0 +1,269 @@
+ import h5py
+ import numpy as np
+ import pandas as pd
+ import geopandas as gpd
+ import xarray as xr
+ from pathlib import Path
+ from shapely.geometry import Point
+ from typing import List, Dict, Any, Optional, Union
+ from .HdfUtils import HdfUtils
+ from .HdfBase import HdfBase
+ from .Decorators import standardize_input, log_call
+ from .LoggingConfig import get_logger
+
+ logger = get_logger(__name__)
+
+ class HdfPump:
+     """
+     A class for handling pump station related data from HEC-RAS HDF files.
+     """
+
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def get_pump_stations(hdf_path: Path) -> gpd.GeoDataFrame:
+         """
+         Extract pump station data from the HDF file.
+
+         Args:
+             hdf_path (Path): Path to the HDF file.
+
+         Returns:
+             gpd.GeoDataFrame: GeoDataFrame containing pump station data.
+
+         Raises:
+             KeyError: If the required datasets are not found in the HDF file.
+         """
+         try:
+             with h5py.File(hdf_path, 'r') as hdf:
+                 # Extract pump station data
+                 attributes = hdf['/Geometry/Pump Stations/Attributes'][()]
+                 points = hdf['/Geometry/Pump Stations/Points'][()]
+
+                 # Create geometries
+                 geometries = [Point(x, y) for x, y in points]
+
+                 # Create GeoDataFrame
+                 gdf = gpd.GeoDataFrame(geometry=geometries)
+                 gdf['station_id'] = range(len(gdf))
+
+                 # Add attributes and decode byte strings
+                 attr_df = pd.DataFrame(attributes)
+                 string_columns = attr_df.select_dtypes([object]).columns
+                 for col in string_columns:
+                     attr_df[col] = attr_df[col].apply(lambda x: x.decode('utf-8') if isinstance(x, bytes) else x)
+
+                 for col in attr_df.columns:
+                     gdf[col] = attr_df[col]
+
+                 # Set CRS if available
+                 crs = HdfUtils.projection(hdf_path)
+                 if crs:
+                     gdf.set_crs(crs, inplace=True)
+
+                 return gdf
+
+         except KeyError as e:
+             logger.error(f"Required dataset not found in HDF file: {e}")
+             raise
+         except Exception as e:
+             logger.error(f"Error extracting pump station data: {e}")
+             raise
+
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def get_pump_groups(hdf_path: Path) -> pd.DataFrame:
+         """
+         Extract pump group data from the HDF file.
+
+         Args:
+             hdf_path (Path): Path to the HDF file.
+
+         Returns:
+             pd.DataFrame: DataFrame containing pump group data.
+
+         Raises:
+             KeyError: If the required datasets are not found in the HDF file.
+         """
+         try:
+             with h5py.File(hdf_path, 'r') as hdf:
+                 # Extract pump group data
+                 attributes = hdf['/Geometry/Pump Stations/Pump Groups/Attributes'][()]
+                 efficiency_curves_info = hdf['/Geometry/Pump Stations/Pump Groups/Efficiency Curves Info'][()]
+                 efficiency_curves_values = hdf['/Geometry/Pump Stations/Pump Groups/Efficiency Curves Values'][()]
+
+                 # Create DataFrame and decode byte strings
+                 df = pd.DataFrame(attributes)
+                 string_columns = df.select_dtypes([object]).columns
+                 for col in string_columns:
+                     df[col] = df[col].apply(lambda x: x.decode('utf-8') if isinstance(x, bytes) else x)
+
+                 # Add efficiency curve data
+                 df['efficiency_curve_start'] = efficiency_curves_info[:, 0]
+                 df['efficiency_curve_count'] = efficiency_curves_info[:, 1]
+
+                 # Process efficiency curves
+                 def get_efficiency_curve(start, count):
+                     return efficiency_curves_values[start:start+count].tolist()
+
+                 df['efficiency_curve'] = df.apply(lambda row: get_efficiency_curve(row['efficiency_curve_start'], row['efficiency_curve_count']), axis=1)
+
+                 return df
+
+         except KeyError as e:
+             logger.error(f"Required dataset not found in HDF file: {e}")
+             raise
+         except Exception as e:
+             logger.error(f"Error extracting pump group data: {e}")
+             raise
+
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def get_pump_station_timeseries(hdf_path: Path, pump_station: str) -> xr.DataArray:
+         """
+         Extract timeseries data for a specific pump station.
+
+         Args:
+             hdf_path (Path): Path to the HDF file.
+             pump_station (str): Name of the pump station.
+
+         Returns:
+             xr.DataArray: DataArray containing the timeseries data.
+
+         Raises:
+             KeyError: If the required datasets are not found in the HDF file.
+             ValueError: If the specified pump station is not found.
+         """
+         try:
+             with h5py.File(hdf_path, 'r') as hdf:
+                 # Check if the pump station exists
+                 pumping_stations_path = "/Results/Unsteady/Output/Output Blocks/DSS Hydrograph Output/Unsteady Time Series/Pumping Stations"
+                 if pump_station not in hdf[pumping_stations_path]:
+                     raise ValueError(f"Pump station '{pump_station}' not found in HDF file")
+
+                 # Extract timeseries data
+                 data_path = f"{pumping_stations_path}/{pump_station}/Structure Variables"
+                 data = hdf[data_path][()]
+
+                 # Extract time information
+                 time = HdfBase._get_unsteady_datetimes(hdf)
+
+                 # Create DataArray
+                 da = xr.DataArray(
+                     data=data,
+                     dims=['time', 'variable'],
+                     coords={'time': time, 'variable': ['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on']},
+                     name=pump_station
+                 )
+
+                 # Add attributes and decode byte strings
+                 units = hdf[data_path].attrs.get('Variable_Unit', b'')
+                 da.attrs['units'] = units.decode('utf-8') if isinstance(units, bytes) else units
+                 da.attrs['pump_station'] = pump_station
+
+                 return da
+
+         except KeyError as e:
+             logger.error(f"Required dataset not found in HDF file: {e}")
+             raise
+         except ValueError as e:
+             logger.error(str(e))
+             raise
+         except Exception as e:
+             logger.error(f"Error extracting pump station timeseries data: {e}")
+             raise
+
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def get_pump_station_summary(hdf_path: Path) -> pd.DataFrame:
+         """
+         Extract summary data for pump stations from the HDF file.
+
+         Args:
+             hdf_path (Path): Path to the HDF file.
+
+         Returns:
+             pd.DataFrame: DataFrame containing pump station summary data.
+
+         Raises:
+             KeyError: If the required datasets are not found in the HDF file.
+         """
+         try:
+             with h5py.File(hdf_path, 'r') as hdf:
+                 # Extract summary data
+                 summary_path = "/Results/Unsteady/Summary/Pump Station"
+                 if summary_path not in hdf:
+                     logger.warning("Pump Station summary data not found in HDF file")
+                     return pd.DataFrame()
+
+                 summary_data = hdf[summary_path][()]
+
+                 # Create DataFrame and decode byte strings
+                 df = pd.DataFrame(summary_data)
+                 string_columns = df.select_dtypes([object]).columns
+                 for col in string_columns:
+                     df[col] = df[col].apply(lambda x: x.decode('utf-8') if isinstance(x, bytes) else x)
+
+                 return df
+
+         except KeyError as e:
+             logger.error(f"Required dataset not found in HDF file: {e}")
+             raise
+         except Exception as e:
+             logger.error(f"Error extracting pump station summary data: {e}")
+             raise
+
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def get_pump_operation_data(hdf_path: Path, pump_station: str) -> pd.DataFrame:
+         """
+         Extract pump operation data for a specific pump station.
+
+         Args:
+             hdf_path (Path): Path to the HDF file.
+             pump_station (str): Name of the pump station.
+
+         Returns:
+             pd.DataFrame: DataFrame containing pump operation data.
+
+         Raises:
+             KeyError: If the required datasets are not found in the HDF file.
+             ValueError: If the specified pump station is not found.
+         """
+         try:
+             with h5py.File(hdf_path, 'r') as hdf:
+                 # Check if the pump station exists
+                 pump_stations_path = "/Results/Unsteady/Output/Output Blocks/DSS Profile Output/Unsteady Time Series/Pumping Stations"
+                 if pump_station not in hdf[pump_stations_path]:
+                     raise ValueError(f"Pump station '{pump_station}' not found in HDF file")
+
+                 # Extract pump operation data
+                 data_path = f"{pump_stations_path}/{pump_station}/Structure Variables"
+                 data = hdf[data_path][()]
+
+                 # Extract time information
+                 time = HdfBase._get_unsteady_datetimes(hdf)
+
+                 # Create DataFrame and decode byte strings
+                 df = pd.DataFrame(data, columns=['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on'])
+                 string_columns = df.select_dtypes([object]).columns
+                 for col in string_columns:
+                     df[col] = df[col].apply(lambda x: x.decode('utf-8') if isinstance(x, bytes) else x)
+
+                 df['Time'] = time
+
+                 return df
+
+         except KeyError as e:
+             logger.error(f"Required dataset not found in HDF file: {e}")
+             raise
+         except ValueError as e:
+             logger.error(str(e))
+             raise
+         except Exception as e:
+             logger.error(f"Error extracting pump operation data: {e}")
+             raise
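
For orientation, here is a minimal usage sketch of the new HdfPump API. It assumes HdfPump is exported at the ras_commander package top level; the plan HDF path and pump station name are hypothetical placeholders for values from a real HEC-RAS project:

    from pathlib import Path
    from ras_commander import HdfPump

    plan_hdf = Path("MyProject.p01.hdf")  # hypothetical plan HDF path

    # Pump station locations as a GeoDataFrame (CRS attached when available)
    stations = HdfPump.get_pump_stations(plan_hdf)

    # Pump group attributes, including decoded efficiency curves
    groups = HdfPump.get_pump_groups(plan_hdf)

    # Timeseries for one station: dims are ('time', 'variable')
    ts = HdfPump.get_pump_station_timeseries(plan_hdf, pump_station="PS 1")  # hypothetical name
    flow = ts.sel(variable="Flow")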
ras_commander/HdfResultsMesh.py CHANGED
@@ -14,7 +14,7 @@ import xarray as xr
  from pathlib import Path
  import h5py
  from typing import Union, List, Optional, Dict, Any, Tuple
-
+ from .HdfMesh import HdfMesh
  from .HdfBase import HdfBase
  from .HdfUtils import HdfUtils
  from .Decorators import log_call, standardize_input
@@ -233,7 +233,7 @@ class HdfResultsMesh:
      @standardize_input(file_type='plan_hdf')
      def mesh_max_face_v(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
          """
-         Get maximum face velocity for each mesh cell.
+         Get maximum face velocity for each mesh face.

          Args:
              hdf_path (Path): Path to the HDF file.
@@ -363,27 +363,22 @@ class HdfResultsMesh:
          Raises:
              ValueError: If there's an error processing the timeseries output data.
          """
-         TIME_SERIES_OUTPUT_VARS_CELLS = [
-             "Water Surface",
-             "Depth",
-             "Velocity",
-             "Velocity X",
-             "Velocity Y",
-             "Froude Number",
-             "Courant Number",
-             "Shear Stress",
-             "Bed Elevation",
-             "Precipitation Rate",
-             "Infiltration Rate",
-             "Evaporation Rate",
-             "Percolation Rate",
-             "Groundwater Elevation",
-             "Groundwater Depth",
-             "Groundwater Flow",
-             "Groundwater Velocity",
-             "Groundwater Velocity X",
-             "Groundwater Velocity Y",
-         ]
+         TIME_SERIES_OUTPUT_VARS = {
+             "cell": [
+                 "Water Surface", "Depth", "Velocity", "Velocity X", "Velocity Y",
+                 "Froude Number", "Courant Number", "Shear Stress", "Bed Elevation",
+                 "Precipitation Rate", "Infiltration Rate", "Evaporation Rate",
+                 "Percolation Rate", "Groundwater Elevation", "Groundwater Depth",
+                 "Groundwater Flow", "Groundwater Velocity", "Groundwater Velocity X",
+                 "Groundwater Velocity Y"
+             ],
+             "face": [
+                 "Face Velocity", "Face Flow", "Face Water Surface", "Face Courant",
+                 "Face Cumulative Volume", "Face Eddy Viscosity", "Face Flow Period Average",
+                 "Face Friction Term", "Face Pressure Gradient Term", "Face Shear Stress",
+                 "Face Tangential Velocity"
+             ]
+         }

          try:
              start_time = HdfBase._get_simulation_start_time(hdf_file)
@@ -397,7 +392,7 @@ class HdfResultsMesh:
              if var:
                  variables = [var]
              else:
-                 variables = TIME_SERIES_OUTPUT_VARS_CELLS
+                 variables = TIME_SERIES_OUTPUT_VARS["cell"] + TIME_SERIES_OUTPUT_VARS["face"]

              datasets = {}
              for mesh_name in mesh_names:
@@ -417,13 +412,16 @@ class HdfResultsMesh:
                          truncated_time_stamps = time_stamps

                      if values.shape[0] != len(truncated_time_stamps):
-                         logger.warning(f"Mismatch between number of time steps ({len(truncated_time_stamps)}) and data shape ({values.shape}) for variable {variable}")
+                         logger.warning(f"Mismatch between time steps ({len(truncated_time_stamps)}) and data shape ({values.shape}) for variable {variable}")
                          continue

+                     # Determine if this is a face-based or cell-based variable
+                     id_dim = "face_id" if any(face_var in variable for face_var in TIME_SERIES_OUTPUT_VARS["face"]) else "cell_id"
+
                      data_vars[variable] = xr.DataArray(
                          data=values,
-                         dims=['time', 'cell_id'],
-                         coords={'time': truncated_time_stamps, 'cell_id': np.arange(values.shape[1])},
+                         dims=['time', id_dim],
+                         coords={'time': truncated_time_stamps, id_dim: np.arange(values.shape[1])},
                          attrs={'units': units}
                      )
                  except KeyError:
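
The id_dim logic above routes face-named variables onto a face_id dimension while cell variables keep cell_id. A self-contained sketch of that routing, using synthetic values rather than data read from an HDF file:

    import numpy as np
    import pandas as pd
    import xarray as xr

    face_vars = ["Face Velocity", "Face Flow"]  # subset of TIME_SERIES_OUTPUT_VARS["face"]
    variable = "Face Velocity"
    values = np.zeros((4, 3))                   # synthetic (time, face) array
    times = pd.date_range("2024-01-01", periods=4, freq="h")

    # Same test as in the diff: does any face variable name occur in `variable`?
    id_dim = "face_id" if any(v in variable for v in face_vars) else "cell_id"
    da = xr.DataArray(values, dims=["time", id_dim],
                      coords={"time": times, id_dim: np.arange(values.shape[1])})
    assert da.dims == ("time", "face_id")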
@@ -454,16 +452,7 @@ class HdfResultsMesh:
          Args:
              hdf_file (h5py.File): Open HDF file object.
              mesh_name (str): Name of the mesh.
-             var (str): Variable name to retrieve. Valid options include:
-                 "Water Surface", "Face Velocity", "Cell Velocity X", "Cell Velocity Y",
-                 "Face Flow", "Face Water Surface", "Cell Volume", "Cell Volume Error",
-                 "Cell Water Surface Error", "Cell Courant", "Face Courant",
-                 "Cell Hydraulic Depth", "Cell Invert Depth",
-                 "Cell Cumulative Precipitation Depth", "Cell Divergence Term",
-                 "Cell Eddy Viscosity X", "Cell Eddy Viscosity Y", "Cell Flow Balance",
-                 "Cell Storage Term", "Cell Water Source Term", "Face Cumulative Volume",
-                 "Face Eddy Viscosity", "Face Flow Period Average", "Face Friction Term",
-                 "Face Pressure Gradient Term", "Face Shear Stress", "Face Tangential Velocity"
+             var (str): Variable name to retrieve.
              truncate (bool): Whether to truncate the output to remove trailing zeros (default True).

          Returns:
@@ -478,7 +467,6 @@ class HdfResultsMesh:
          if path not in hdf_file:
              raise ValueError(f"Path {path} not found in HDF file")

-         # Use h5py to get the dataset
          dataset = hdf_file[path]
          values = dataset[:]
          units = dataset.attrs.get("Units", "").decode("utf-8")
@@ -491,11 +479,12 @@ class HdfResultsMesh:
              values = values[start:end]
              times = times[start:end]

-         # Create xarray DataArray
-         dims = ["time", "cell"] if values.ndim == 2 else ["time"]
+         # Determine if this is a face-based or cell-based variable
+         id_dim = "face_id" if "Face" in var else "cell_id"
+         dims = ["time", id_dim] if values.ndim == 2 else ["time"]
          coords = {"time": times}
          if values.ndim == 2:
-             coords["cell"] = np.arange(values.shape[1])
+             coords[id_dim] = np.arange(values.shape[1])

          return xr.DataArray(
              values,
@@ -548,26 +537,30 @@ class HdfResultsMesh:
              if isinstance(hdf_file[f"{base_path}/{name}"], h5py.Group):
                  mesh_names.append(name)
          return mesh_names
-
+
      @staticmethod
      def _get_mesh_summary_output(hdf_file: h5py.File, var: str, round_to: str = "100ms") -> pd.DataFrame:
          """
          Get the summary output data for a given variable from the HDF file.

-         This method retrieves summary output data for all 2D flow areas (meshes) in the HDF file
-         for a specified variable. It handles both 1D and 2D datasets.
-         Group attributes are added as metadata to the DataFrame.
-
-         Args:
-             hdf_file (h5py.File): Open HDF file object.
-             var (str): The summary output variable to retrieve.
-             round_to (str): The time unit to round the datetimes to. Default is "100ms".
-
-         Returns:
-             pd.DataFrame: A DataFrame containing the summary output data with attributes as metadata.
-
-         Raises:
-             ValueError: If the HDF file cannot be opened or read, or if the requested data is not found.
+         Parameters
+         ----------
+         hdf_file : h5py.File
+             Open HDF file object.
+         var : str
+             The summary output variable to retrieve.
+         round_to : str, optional
+             The time unit to round the datetimes to. Default is "100ms".
+
+         Returns
+         -------
+         pd.DataFrame
+             A DataFrame containing the summary output data with attributes as metadata.
+
+         Raises
+         ------
+         ValueError
+             If the HDF file cannot be opened or read, or if the requested data is not found.
          """
          try:
              dfs = []
@@ -584,28 +577,44 @@ class HdfResultsMesh:
                  logger.debug(f"Attributes: {dict(group.attrs)}")

                  if data.ndim == 2 and data.shape[0] == 2:
-                     # This is the case for "Maximum Water Surface"
+                     # Handle 2D datasets (e.g. Maximum Water Surface)
                      row_variables = group.attrs.get('Row Variables', [b'Value', b'Time'])
                      row_variables = [v.decode('utf-8').strip() for v in row_variables]

                      df = pd.DataFrame({
                          "mesh_name": [mesh_name] * data.shape[1],
-                         "cell_id": range(data.shape[1]),
+                         "cell_id" if "Face" not in var else "face_id": range(data.shape[1]),
                          f"{var.lower().replace(' ', '_')}": data[0, :],
                          f"{var.lower().replace(' ', '_')}_time": HdfUtils._ras_timesteps_to_datetimes(
                              data[1, :], start_time, time_unit="days", round_to=round_to
                          )
                      })
+
                  elif data.ndim == 1:
-                     # Handle 1D datasets (like Cell Last Iteration)
+                     # Handle 1D datasets (e.g. Cell Last Iteration)
                      df = pd.DataFrame({
                          "mesh_name": [mesh_name] * len(data),
-                         "cell_id": range(len(data)),
+                         "cell_id" if "Face" not in var else "face_id": range(len(data)),
                          var.lower().replace(' ', '_'): data
                      })
+
                  else:
                      raise ValueError(f"Unexpected data shape for {var} in {mesh_name}. "
-                         f"Got shape {data.shape}")
+                                      f"Got shape {data.shape}")
+
+                 # Add geometry based on variable type
+                 if "Face" in var:
+                     face_df = HdfMesh.mesh_cell_faces(hdf_file)
+                     if not face_df.empty:
+                         df = df.merge(face_df[['mesh_name', 'face_id', 'geometry']],
+                                       on=['mesh_name', 'face_id'],
+                                       how='left')
+                 else:
+                     cell_df = HdfMesh.mesh_cell_points(hdf_file)
+                     if not cell_df.empty:
+                         df = df.merge(cell_df[['mesh_name', 'cell_id', 'geometry']],
+                                       on=['mesh_name', 'cell_id'],
+                                       how='left')

                  # Add group attributes as metadata
                  df.attrs['mesh_name'] = mesh_name
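
The block above left-joins geometry onto each summary row. A small synthetic illustration of that join (frames, column names, and values here are made up for the sketch):

    import pandas as pd
    from shapely.geometry import Point

    summary = pd.DataFrame({"mesh_name": ["Mesh1", "Mesh1"],
                            "cell_id": [0, 1],
                            "maximum_water_surface": [10.2, 10.5]})  # synthetic
    cells = pd.DataFrame({"mesh_name": ["Mesh1", "Mesh1"],
                          "cell_id": [0, 1],
                          "geometry": [Point(0.0, 0.0), Point(1.0, 0.0)]})

    # Each summary row now carries a Point usable for mapping or plotting
    joined = summary.merge(cells[["mesh_name", "cell_id", "geometry"]],
                           on=["mesh_name", "cell_id"], how="left")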
@@ -618,6 +627,9 @@ class HdfResultsMesh:

                  dfs.append(df)

+             if not dfs:
+                 return pd.DataFrame()
+
              result = pd.concat(dfs, ignore_index=True)

              # Combine attributes from all meshes
@@ -634,7 +646,7 @@ class HdfResultsMesh:
              logger.info(f"Processed {len(result)} rows of summary output data")
              return result

-         except (KeyError, ValueError, AttributeError) as e:
+         except Exception as e:
              logger.error(f"Error processing summary output data: {e}")
              raise ValueError(f"Error processing summary output data: {e}")

@@ -660,3 +672,64 @@ class HdfResultsMesh:
          if output_item is None:
              raise ValueError(f"Could not find HDF group or dataset at path '{output_path}'")
          return output_item
+
+     @staticmethod
+     def plot_mesh_variable(variable_df: pd.DataFrame, variable_name: str, colormap: str = 'viridis', point_size: int = 10) -> None:
+         """
+         Plot any mesh variable with consistent styling.
+
+         Args:
+             variable_df (pd.DataFrame): DataFrame containing the variable data and a 'geometry' column
+             variable_name (str): Name of the variable (for labels)
+             colormap (str): Matplotlib colormap to use. Default: 'viridis'
+             point_size (int): Size of the scatter points. Default: 10
+
+         Returns:
+             None
+
+         Raises:
+             ImportError: If matplotlib is not installed
+             ValueError: If required columns are missing from variable_df
+         """
+         try:
+             import matplotlib.pyplot as plt
+         except ImportError:
+             logger.error("matplotlib is required for plotting. Please install it with 'pip install matplotlib'")
+             raise ImportError("matplotlib is required for plotting")
+
+         # Geometry must be supplied by the caller (e.g. merged in by
+         # _get_mesh_summary_output); no HDF path is available in this scope
+         if 'geometry' not in variable_df.columns:
+             raise ValueError("variable_df must contain a 'geometry' column")
+         merged_df = variable_df
+
+         # Extract coordinates, handling None values
+         merged_df = merged_df.dropna(subset=['geometry'])
+         merged_df['x'] = merged_df['geometry'].apply(lambda geom: geom.x if geom is not None else None)
+         merged_df['y'] = merged_df['geometry'].apply(lambda geom: geom.y if geom is not None else None)
+
+         # Drop any rows with None coordinates
+         merged_df = merged_df.dropna(subset=['x', 'y'])
+
+         if len(merged_df) == 0:
+             logger.error("No valid coordinates found for plotting")
+             raise ValueError("No valid coordinates found for plotting")
+
+         # Create plot
+         fig, ax = plt.subplots(figsize=(12, 8))
+         scatter = ax.scatter(merged_df['x'], merged_df['y'],
+                              c=merged_df[variable_name],
+                              cmap=colormap,
+                              s=point_size)
+
+         # Customize plot
+         ax.set_title(f'{variable_name} per Cell')
+         ax.set_xlabel('X Coordinate')
+         ax.set_ylabel('Y Coordinate')
+         plt.colorbar(scatter, label=variable_name)
+         ax.grid(True, linestyle='--', alpha=0.7)
+         plt.rcParams.update({'font.size': 12})
+         plt.tight_layout()
+         plt.show()
+
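
A hedged usage sketch for the new plot_mesh_variable helper. mesh_max_face_v appears earlier in this diff; the plan HDF path is a placeholder, and the value column name is an assumption based on the var.lower().replace(' ', '_') convention used in _get_mesh_summary_output:

    from pathlib import Path
    from ras_commander import HdfResultsMesh

    plan_hdf = Path("MyProject.p01.hdf")  # hypothetical plan HDF path

    # Summary DataFrame now includes a 'geometry' column (merged in above)
    max_face_v = HdfResultsMesh.mesh_max_face_v(plan_hdf)

    # Column name assumed from the naming convention; check df.columns first
    HdfResultsMesh.plot_mesh_variable(max_face_v, "maximum_face_velocity",
                                      colormap="plasma", point_size=8)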
ras_commander/HdfResultsPlan.py CHANGED
@@ -396,3 +396,6 @@ class HdfResultsPlan:
          except Exception as e:
              logger.error(f"Error in reference_summary_output: {str(e)}")
              return pd.DataFrame()  # Return an empty DataFrame on general error
+
+
+