ras-commander 0.42.0__py3-none-any.whl → 0.43.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,648 @@
1
+ import numpy as np
2
+ import pandas as pd
3
+ import xarray as xr
4
+ from pathlib import Path
5
+ import h5py
6
+ from typing import Union, List, Optional, Dict, Any, Tuple
7
+
8
+ from .HdfBase import HdfBase
9
+ from .HdfUtils import HdfUtils
10
+ from .Decorators import log_call, standardize_input
11
+ from .LoggingConfig import setup_logging, get_logger
12
+
13
+ logger = get_logger(__name__)
14
+
15
class HdfResultsMesh:
    """
    A class for handling mesh-related results from HEC-RAS HDF files.

    This class provides methods to extract and analyze mesh summary outputs,
    timeseries data, and various mesh-specific results such as water surface
    elevations, velocities, and errors.

    The class works with HEC-RAS plan HDF files and uses HdfBase and HdfUtils
    for common operations and utilities. All methods are static; the class is
    a namespace and is never instantiated.

    Public methods in this class use the @log_call decorator for logging and
    the @standardize_input decorator to handle different input types (e.g.,
    plan number, file path).

    Attributes:
        None

    Note:
        This class is designed to work with HEC-RAS version 6.0 and later.
    """
36
+
37
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_summary_output(hdf_path: Path, var: str, round_to: str = "0.1 s") -> pd.DataFrame:
    """
    Read the summary output table for one variable across all meshes.

    Args:
        hdf_path (Path): Path to the HEC-RAS plan HDF file.
        var (str): The summary output variable to retrieve.
        round_to (str): The time unit to round the datetimes to. Default: "0.1 s" (seconds).

    Returns:
        pd.DataFrame: DataFrame containing the summary output data.

    Raises:
        ValueError: If there's an error processing the summary output data.
    """
    try:
        # The DataFrame is fully materialized by the helper, so the file
        # handle can be released before returning.
        with h5py.File(hdf_path, 'r') as source:
            summary = HdfResultsMesh._get_mesh_summary_output(source, var, round_to)
        return summary
    except Exception as e:
        logger.error(f"Error in mesh_summary_output: {str(e)}")
        logger.error(f"Variable: {var}")
        raise ValueError(f"Failed to get summary output: {str(e)}")
62
+
63
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_timeseries_output(hdf_path: Path, mesh_name: str, var: str, truncate: bool = True) -> xr.DataArray:
    """
    Retrieve a single variable's timeseries for one mesh.

    Args:
        hdf_path (Path): Path to the HDF file.
        mesh_name (str): Name of the mesh.
        var (str): Variable to retrieve. Valid options include:
            "Water Surface", "Face Velocity", "Cell Velocity X", "Cell Velocity Y",
            "Face Flow", "Face Water Surface", "Cell Volume", "Cell Volume Error",
            "Cell Water Surface Error", "Cell Courant", "Face Courant",
            "Cell Hydraulic Depth", "Cell Invert Depth",
            "Cell Cumulative Precipitation Depth", "Cell Divergence Term",
            "Cell Eddy Viscosity X", "Cell Eddy Viscosity Y", "Cell Flow Balance",
            "Cell Storage Term", "Cell Water Source Term", "Face Cumulative Volume",
            "Face Eddy Viscosity", "Face Flow Period Average", "Face Friction Term",
            "Face Pressure Gradient Term", "Face Shear Stress", "Face Tangential Velocity"
        truncate (bool): Whether to strip the all-zero span outside the first
            and last nonzero timesteps (default True).

    Returns:
        xr.DataArray: DataArray containing the timeseries output.
    """
    # Thin wrapper: open the file, delegate all parsing to the private helper.
    with h5py.File(hdf_path, 'r') as open_hdf:
        return HdfResultsMesh._get_mesh_timeseries_output(open_hdf, mesh_name, var, truncate)
90
+
91
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_faces_timeseries_output(hdf_path: Path, mesh_name: str) -> xr.Dataset:
    """
    Collect every face-based timeseries variable for one mesh into a Dataset.

    Args:
        hdf_path (Path): Path to the HDF file.
        mesh_name (str): Name of the mesh.

    Returns:
        xr.Dataset: Dataset containing the timeseries output for all face-based
        variables; empty Dataset if nothing could be read or merged.
    """
    collected: List[xr.DataArray] = []

    for var in ("Face Velocity", "Face Flow"):
        try:
            array = HdfResultsMesh.mesh_timeseries_output(hdf_path, mesh_name, var)
            # Dataset variable names must be valid identifiers, so the
            # human-readable variable label is snake_cased.
            array.name = var.lower().replace(' ', '_')
            collected.append(array)
        except Exception as e:
            # Best-effort: a missing variable should not abort the others.
            logger.warning(f"Failed to process {var} for mesh {mesh_name}: {str(e)}")

    if not collected:
        logger.error(f"No valid data found for mesh {mesh_name}")
        return xr.Dataset()

    try:
        return xr.merge(collected)
    except Exception as e:
        logger.error(f"Failed to merge datasets: {str(e)}")
        return xr.Dataset()
126
+
127
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_cells_timeseries_output(hdf_path: Path, mesh_names: Optional[Union[str, List[str]]] = None, var: Optional[str] = None, truncate: bool = False, ras_object: Optional[Any] = None) -> Dict[str, xr.Dataset]:
    """
    Get mesh cells timeseries output for specified meshes and variables.

    Args:
        hdf_path (Path): Path to the HDF file.
        mesh_names (Optional[Union[str, List[str]]]): Name(s) of the mesh(es).
            If None, processes all available meshes.
        var (Optional[str]): Name of the variable to retrieve. If None,
            retrieves all known cell variables.
        truncate (bool): If True, truncates the output to remove trailing zeros.
        ras_object (Optional[Any]): RAS object, if available (accepted for API
            compatibility; not used directly here).

    Returns:
        Dict[str, xr.Dataset]: One xarray Dataset per mesh containing the
        mesh cells timeseries output.

    Raises:
        ValueError: If there's an error processing the timeseries output data.
    """
    try:
        with h5py.File(hdf_path, 'r') as open_file:
            # All heavy lifting happens in the private helper while the
            # file handle is still open.
            return HdfResultsMesh._mesh_cells_timeseries_output(open_file, mesh_names, var, truncate)
    except Exception as e:
        logger.error(f"Error in mesh_cells_timeseries_output: {str(e)}")
        raise ValueError(f"Error processing timeseries output data: {e}")
153
+
154
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_last_iter(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
    """
    Get last iteration count for each mesh cell.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "0.1 s").

    Returns:
        pd.DataFrame: DataFrame containing last iteration counts.

    Raises:
        ValueError: If there's an error processing the iteration data.
    """
    # Fix: _get_mesh_summary_output expects an *open* h5py.File, not a Path.
    # The previous version passed hdf_path straight through, which failed as
    # soon as the helper tried to read the simulation start time. Open the
    # file first, exactly like every other summary accessor in this class.
    try:
        with h5py.File(hdf_path, 'r') as hdf_file:
            return HdfResultsMesh._get_mesh_summary_output(hdf_file, "Cell Last Iteration", round_to)
    except Exception as e:
        logger.error(f"Error in mesh_last_iter: {str(e)}")
        raise ValueError(f"Failed to get last iteration count: {str(e)}")
168
+
169
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_max_ws(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
    """
    Get maximum water surface elevation for each mesh cell.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "0.1 s").

    Returns:
        pd.DataFrame: DataFrame with columns "mesh_name", "cell_id",
        "maximum_water_surface" and "maximum_water_surface_time".

    Raises:
        ValueError: If there's an error processing the maximum water surface data.
    """
    # Fix: delegate to the shared summary-output reader (same as mesh_min_ws)
    # instead of re-implementing the HDF parsing inline. The column names it
    # derives from the variable label are identical to the old inline ones.
    # The previous inline version also logged `group` in its error handler,
    # which raised NameError (masking the real error) whenever the failure
    # occurred before `group` was first assigned.
    try:
        with h5py.File(hdf_path, 'r') as hdf_file:
            return HdfResultsMesh._get_mesh_summary_output(hdf_file, "Maximum Water Surface", round_to)
    except Exception as e:
        logger.error(f"Error in mesh_max_ws: {str(e)}")
        raise ValueError(f"Failed to get maximum water surface: {str(e)}")
220
+
221
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_min_ws(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
    """
    Get minimum water surface elevation for each mesh cell.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "0.1 s").

    Returns:
        pd.DataFrame: DataFrame containing minimum water surface elevations.

    Raises:
        ValueError: If there's an error processing the minimum water surface data.
    """
    try:
        # The helper returns a plain DataFrame, so closing the file before
        # returning is safe.
        with h5py.File(hdf_path, 'r') as hdf:
            frame = HdfResultsMesh._get_mesh_summary_output(hdf, "Minimum Water Surface", round_to)
        return frame
    except Exception as e:
        logger.error(f"Error in mesh_min_ws: {str(e)}")
        raise ValueError(f"Failed to get minimum water surface: {str(e)}")
241
+
242
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_max_face_v(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
    """
    Get maximum face velocity for each mesh cell.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "0.1 s").

    Returns:
        pd.DataFrame: DataFrame containing maximum face velocities.

    Raises:
        ValueError: If there's an error processing the maximum face velocity data.
    """
    # Summary variable label as written by HEC-RAS into the plan HDF.
    summary_var = "Maximum Face Velocity"
    try:
        with h5py.File(hdf_path, 'r') as hdf:
            return HdfResultsMesh._get_mesh_summary_output(hdf, summary_var, round_to)
    except Exception as e:
        logger.error(f"Error in mesh_max_face_v: {str(e)}")
        raise ValueError(f"Failed to get maximum face velocity: {str(e)}")
265
+
266
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_min_face_v(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
    """
    Get minimum face velocity for each mesh cell.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "0.1 s").

    Returns:
        pd.DataFrame: DataFrame containing minimum face velocities.

    Raises:
        ValueError: If there's an error processing the minimum face velocity data.
    """
    # Summary variable label as written by HEC-RAS into the plan HDF.
    summary_var = "Minimum Face Velocity"
    try:
        with h5py.File(hdf_path, 'r') as hdf:
            return HdfResultsMesh._get_mesh_summary_output(hdf, summary_var, round_to)
    except Exception as e:
        logger.error(f"Error in mesh_min_face_v: {str(e)}")
        raise ValueError(f"Failed to get minimum face velocity: {str(e)}")
289
+
290
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_max_ws_err(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
    """
    Get maximum water surface error for each mesh cell.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "0.1 s").

    Returns:
        pd.DataFrame: DataFrame containing maximum water surface errors.

    Raises:
        ValueError: If there's an error processing the maximum water surface error data.
    """
    try:
        # Delegate to the shared summary reader; the DataFrame is fully
        # built before the file handle closes.
        with h5py.File(hdf_path, 'r') as hdf:
            result = HdfResultsMesh._get_mesh_summary_output(hdf, "Cell Maximum Water Surface Error", round_to)
        return result
    except Exception as e:
        logger.error(f"Error in mesh_max_ws_err: {str(e)}")
        raise ValueError(f"Failed to get maximum water surface error: {str(e)}")
313
+
314
+
315
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_max_iter(hdf_path: Path, round_to: str = "0.1 s") -> pd.DataFrame:
    """
    Get maximum iteration count for each mesh cell.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "0.1 s").

    Returns:
        pd.DataFrame: DataFrame containing maximum iteration counts.

    Raises:
        ValueError: If there's an error processing the maximum iteration data.

    Note: The Maximum Iteration is labeled as "Cell Last Iteration" in the HDF file
    """
    try:
        # "Cell Last Iteration" is the dataset name HEC-RAS uses for this
        # quantity in the plan HDF.
        with h5py.File(hdf_path, 'r') as hdf:
            iterations = HdfResultsMesh._get_mesh_summary_output(hdf, "Cell Last Iteration", round_to)
        return iterations
    except Exception as e:
        logger.error(f"Error in mesh_max_iter: {str(e)}")
        raise ValueError(f"Failed to get maximum iteration count: {str(e)}")
340
+
341
+
342
+
343
+
344
@staticmethod
def _get_mesh_timeseries_output(hdf_file: h5py.File, mesh_name: str, var: str, truncate: bool = True) -> xr.DataArray:
    """
    Get timeseries output for a specific mesh and variable.

    Args:
        hdf_file (h5py.File): Open HDF file object.
        mesh_name (str): Name of the mesh.
        var (str): Variable name to retrieve. Valid options include:
            "Water Surface", "Face Velocity", "Cell Velocity X", "Cell Velocity Y",
            "Face Flow", "Face Water Surface", "Cell Volume", "Cell Volume Error",
            "Cell Water Surface Error", "Cell Courant", "Face Courant",
            "Cell Hydraulic Depth", "Cell Invert Depth",
            "Cell Cumulative Precipitation Depth", "Cell Divergence Term",
            "Cell Eddy Viscosity X", "Cell Eddy Viscosity Y", "Cell Flow Balance",
            "Cell Storage Term", "Cell Water Source Term", "Face Cumulative Volume",
            "Face Eddy Viscosity", "Face Flow Period Average", "Face Friction Term",
            "Face Pressure Gradient Term", "Face Shear Stress", "Face Tangential Velocity"
        truncate (bool): Whether to truncate the output to remove trailing zeros (default True).

    Returns:
        xr.DataArray: DataArray containing the timeseries output.

    Raises:
        ValueError: If the specified path is not found in the HDF file or if there's an error processing the data.
    """
    try:
        path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, var)

        # Fail early with a clear message rather than a raw h5py KeyError.
        if path not in hdf_file:
            raise ValueError(f"Path {path} not found in HDF file")

        values, units = HdfResultsMesh._get_mesh_timeseries_output_values_units(hdf_file, mesh_name, var)
        times = HdfBase._get_unsteady_datetimes(hdf_file)

        if truncate:
            # Indices along the first (time) axis of every nonzero entry;
            # used to clip the all-zero spans before the first and after
            # the last nonzero timestep from both values and times.
            non_zero = np.nonzero(values)[0]
            if len(non_zero) > 0:
                start, end = non_zero[0], non_zero[-1] + 1
                values = values[start:end]
                times = times[start:end]

        # Check if values is 2D and adjust accordingly
        if values.ndim == 2:
            dims = ["time", "cell"]
        else:
            dims = ["time"]

        # Only the "time" dimension gets a coordinate; the cell axis (when
        # present) is left as a bare dimension.
        return xr.DataArray(
            values,
            coords={"time": times},
            dims=dims,
            attrs={"units": units, "mesh_name": mesh_name, "variable": var},
        )
    except Exception as e:
        logger.error(f"Error in get_mesh_timeseries_output: {str(e)}")
        raise ValueError(f"Failed to get timeseries output: {str(e)}")
401
+
402
@staticmethod
def _get_mesh_timeseries_output_path(mesh_name: str, var_name: str) -> str:
    """
    Build the HDF path of a mesh timeseries dataset.

    Args:
        mesh_name (str): Name of the mesh.
        var_name (str): Name of the variable.

    Returns:
        str: Absolute HDF path for the specified mesh and variable.
    """
    # Fixed location of unsteady 2D timeseries output within a plan HDF.
    base = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/2D Flow Areas"
    return f"{base}/{mesh_name}/{var_name}"
415
+
416
@staticmethod
def _get_mesh_timeseries_output_values_units(hdf_file: h5py.File, mesh_name: str, var: str) -> Tuple[np.ndarray, str]:
    """
    Get the mesh timeseries output values and units for a specific variable from the HDF file.

    Args:
        hdf_file (h5py.File): Open HDF file object.
        mesh_name (str): Name of the mesh.
        var (str): Variable name to retrieve.

    Returns:
        Tuple[np.ndarray, str]: A tuple containing the output values and the
        units string (None if the dataset carries no "Units" attribute).
    """
    path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, var)
    dataset = hdf_file[path]
    values = dataset[:]
    units = dataset.attrs.get("Units")
    # Fix: depending on how the attribute was written, h5py returns either
    # bytes (fixed-length strings) or str (variable-length UTF-8). Decoding
    # unconditionally raised AttributeError on str, so decode only bytes.
    if isinstance(units, bytes):
        units = units.decode("utf-8")
    return values, units
436
+
437
@staticmethod
def _mesh_cells_timeseries_output(hdf_file: h5py.File, mesh_names: Optional[Union[str, List[str]]] = None, var: Optional[str] = None, truncate: bool = False) -> Dict[str, xr.Dataset]:
    """
    Get mesh cells timeseries output for specified meshes and variables.

    Args:
        hdf_file (h5py.File): Open HDF file object.
        mesh_names (Optional[Union[str, List[str]]]): Name(s) of the mesh(es). If None, processes all available meshes.
        var (Optional[str]): Name of the variable to retrieve. If None, retrieves all variables.
        truncate (bool): If True, truncates the output to remove trailing zeros.

    Returns:
        Dict[str, xr.Dataset]: A dictionary of xarray Datasets, one for each mesh, containing the mesh cells timeseries output.

    Raises:
        ValueError: If there's an error processing the timeseries output data.
    """
    # Cell-centered variables known to appear in HEC-RAS unsteady output.
    TIME_SERIES_OUTPUT_VARS_CELLS = [
        "Water Surface",
        "Depth",
        "Velocity",
        "Velocity X",
        "Velocity Y",
        "Froude Number",
        "Courant Number",
        "Shear Stress",
        "Bed Elevation",
        "Precipitation Rate",
        "Infiltration Rate",
        "Evaporation Rate",
        "Percolation Rate",
        "Groundwater Elevation",
        "Groundwater Depth",
        "Groundwater Flow",
        "Groundwater Velocity",
        "Groundwater Velocity X",
        "Groundwater Velocity Y",
    ]

    try:
        start_time = HdfBase._get_simulation_start_time(hdf_file)
        time_stamps = HdfBase._get_unsteady_datetimes(hdf_file)

        # Normalize mesh selection: default to every mesh in the geometry.
        if mesh_names is None:
            mesh_names = HdfResultsMesh._get_available_meshes(hdf_file)
        elif isinstance(mesh_names, str):
            mesh_names = [mesh_names]

        if var:
            variables = [var]
        else:
            variables = TIME_SERIES_OUTPUT_VARS_CELLS

        datasets = {}
        for mesh_name in mesh_names:
            data_vars = {}
            for variable in variables:
                try:
                    values, units = HdfResultsMesh._get_mesh_timeseries_output_values_units(hdf_file, mesh_name, variable)
                    logger.info(f"Variable: {variable}")
                    logger.info(f"Original values shape: {values.shape}")
                    logger.info(f"Units: {units}")
                    logger.info(f"Number of time stamps: {len(time_stamps)}")

                    if truncate:
                        # Fix: truncate along the TIME axis (axis 0), matching
                        # the time stamps that are truncated with it. The old
                        # code sliced the cell axis (values[:, :n]) while
                        # trimming time_stamps, so truncated variables always
                        # failed the shape check below; it also crashed with
                        # "max of empty sequence" on an all-zero array.
                        nonzero_steps = np.nonzero(values)[0]
                        last_step = int(nonzero_steps.max()) + 1 if nonzero_steps.size > 0 else 0
                        values = values[:last_step]
                        truncated_time_stamps = time_stamps[:last_step]
                    else:
                        truncated_time_stamps = time_stamps

                    # Ensure the data shape matches our expectations
                    if values.shape[0] != len(truncated_time_stamps):
                        logger.warning(f"Mismatch between number of time steps ({len(truncated_time_stamps)}) and data shape ({values.shape}) for variable {variable}")
                        continue

                    logger.info(f"Final values shape: {values.shape}")

                    data_vars[variable] = xr.DataArray(
                        data=values,
                        dims=['time', 'cell_id'],
                        coords={'time': truncated_time_stamps, 'cell_id': np.arange(values.shape[1])},
                        attrs={'units': units}
                    )
                except KeyError:
                    # Not every variable is written for every plan; skip quietly.
                    logger.warning(f"Variable '{variable}' not found in the HDF file for mesh '{mesh_name}'. Skipping.")
                except Exception as e:
                    logger.error(f"Error processing variable '{variable}' for mesh '{mesh_name}': {str(e)}")

            if data_vars:
                datasets[mesh_name] = xr.Dataset(
                    data_vars=data_vars,
                    attrs={'mesh_name': mesh_name, 'start_time': start_time}
                )
            else:
                logger.warning(f"No valid data variables found for mesh '{mesh_name}'")

        return datasets
    except Exception as e:
        logger.error(f"Error in _mesh_cells_timeseries_output: {str(e)}")
        raise ValueError(f"Error processing timeseries output data: {e}")
538
+
539
@staticmethod
def _get_available_meshes(hdf_file: h5py.File) -> List[str]:
    """
    List the names of all 2D flow area meshes present in the HDF file.

    Args:
        hdf_file (h5py.File): Open HDF file object.

    Returns:
        List[str]: Mesh names; empty if the geometry group is absent.
    """
    area_root = "Geometry/2D Flow Areas"
    if area_root not in hdf_file:
        return []
    flow_areas = hdf_file[area_root]
    # Only sub-groups represent meshes; datasets at this level (e.g.
    # attribute tables) are excluded.
    return [name for name in flow_areas if isinstance(flow_areas[name], h5py.Group)]
557
+
558
+
559
@staticmethod
def _get_mesh_summary_output(hdf_file: h5py.File, var: str, round_to: str = "0.1 s") -> pd.DataFrame:
    """
    Get the summary output data for a given variable from the HDF file.

    This method retrieves summary output data for all 2D flow areas (meshes) in the HDF file
    for a specified variable. It handles different data structures that may be present in
    different versions of HEC-RAS HDF files.

    Args:
        hdf_file (h5py.File): Open HDF file object.
        var (str): The summary output variable to retrieve.
        round_to (str): The time unit to round the datetimes to. Default is "0.1 s".

    Returns:
        pd.DataFrame: A DataFrame with columns "mesh_name", "cell_id",
        "<var>" and "<var>_time" (var lower-cased, spaces -> underscores),
        concatenated over all meshes.

    Raises:
        ValueError: If the HDF file cannot be opened or read, or if the requested data is not found.
    """
    try:
        dfs = []
        start_time = HdfBase._get_simulation_start_time(hdf_file)

        logger.info(f"Processing summary output for variable: {var}")
        for mesh_name, cell_count in HdfBase._get_2d_flow_area_names_and_counts(hdf_file):
            logger.debug(f"Processing mesh: {mesh_name} with {cell_count} cells")
            group = HdfResultsMesh._get_mesh_summary_output_group(hdf_file, mesh_name, var)

            # Handle different data structures
            if isinstance(group, h5py.Dataset):
                # Flat dataset: 2D => column 0 is values, column 1 (if any)
                # is times; 1D => values only, synthetic time indices.
                data = group[:]
                if data.ndim == 2:
                    values = data[:cell_count, 0]
                    times = data[:cell_count, 1] if data.shape[1] > 1 else np.arange(cell_count)
                else:
                    values = data[:cell_count]
                    times = np.arange(cell_count)
            else:
                # Group layout: separate 'Values' and 'Time' datasets.
                try:
                    values = group['Values'][:cell_count]
                    times = group['Time'][:cell_count]
                except KeyError:
                    # If 'Values' and 'Time' don't exist, assume it's a simple dataset
                    values = group[:][:cell_count]
                    times = np.arange(cell_count)

            # Convert times to datetime objects
            # NOTE(review): times from the no-timestamp branches are plain
            # indices, yet they are still converted as day offsets here —
            # confirm against the HDF layouts this path actually sees.
            times = HdfUtils._ras_timesteps_to_datetimes(times, start_time, time_unit="days", round_to=round_to)

            # Create DataFrame for this mesh
            df = pd.DataFrame({
                "mesh_name": [mesh_name] * len(values),
                "cell_id": range(len(values)),
                f"{var.lower().replace(' ', '_')}": values,
                f"{var.lower().replace(' ', '_')}_time": times
            })
            dfs.append(df)

        # Combine all mesh DataFrames
        result = pd.concat(dfs, ignore_index=True)
        logger.info(f"Processed {len(result)} rows of summary output data")
        return result

    except (KeyError, ValueError, AttributeError) as e:
        logger.error(f"Error processing summary output data: {e}")
        raise ValueError(f"Error processing summary output data: {e}")
626
+
627
+
628
@staticmethod
def _get_mesh_summary_output_group(hdf_file: h5py.File, mesh_name: str, var: str) -> Union[h5py.Group, h5py.Dataset]:
    """
    Look up the summary-output node for a mesh/variable pair.

    Args:
        hdf_file (h5py.File): Open HDF file object.
        mesh_name (str): Name of the mesh.
        var (str): Name of the summary output variable.

    Returns:
        Union[h5py.Group, h5py.Dataset]: The HDF node for the specified mesh
        and variable (may be a group or a dataset depending on HEC-RAS version).

    Raises:
        ValueError: If the specified group or dataset is not found in the HDF file.
    """
    output_path = (
        "Results/Unsteady/Output/Output Blocks/Base Output/"
        f"Summary Output/2D Flow Areas/{mesh_name}/{var}"
    )
    if output_path not in hdf_file:
        raise ValueError(f"Could not find HDF group or dataset at path '{output_path}'")
    return hdf_file[output_path]