ras-commander 0.42.0__py3-none-any.whl → 0.44.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ras_commander/Decorators.py +111 -0
- ras_commander/HdfBase.py +197 -0
- ras_commander/HdfBndry.py +505 -0
- ras_commander/HdfMesh.py +308 -0
- ras_commander/HdfPlan.py +200 -0
- ras_commander/HdfResultsMesh.py +662 -0
- ras_commander/HdfResultsPlan.py +398 -0
- ras_commander/HdfResultsXsec.py +237 -0
- ras_commander/HdfStruc.py +147 -0
- ras_commander/HdfUtils.py +467 -0
- ras_commander/HdfXsec.py +282 -0
- ras_commander/RasCmdr.py +2 -1
- ras_commander/RasExamples.py +49 -116
- ras_commander/RasGeo.py +2 -2
- ras_commander/RasGpt.py +6 -129
- ras_commander/RasPlan.py +2 -2
- ras_commander/RasPrj.py +55 -9
- ras_commander/RasUnsteady.py +2 -1
- ras_commander/RasUtils.py +198 -73
- ras_commander/__init__.py +31 -9
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/METADATA +9 -2
- ras_commander-0.44.0.dist-info/RECORD +26 -0
- ras_commander/RasHdf.py +0 -1619
- ras_commander-0.42.0.dist-info/RECORD +0 -16
- /ras_commander/{logging_config.py → LoggingConfig.py} +0 -0
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/LICENSE +0 -0
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/WHEEL +0 -0
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,662 @@
|
|
1
|
+
"""
|
2
|
+
Class: HdfResultsMesh
|
3
|
+
|
4
|
+
Attribution: A substantial amount of code in this file is sourced or derived
|
5
|
+
from the https://github.com/fema-ffrd/rashdf library,
|
6
|
+
released under MIT license and Copyright (c) 2024 fema-ffrd
|
7
|
+
|
8
|
+
The file has been forked and modified for use in RAS Commander.
|
9
|
+
"""
|
10
|
+
|
11
|
+
import numpy as np
|
12
|
+
import pandas as pd
|
13
|
+
import xarray as xr
|
14
|
+
from pathlib import Path
|
15
|
+
import h5py
|
16
|
+
from typing import Union, List, Optional, Dict, Any, Tuple
|
17
|
+
|
18
|
+
from .HdfBase import HdfBase
|
19
|
+
from .HdfUtils import HdfUtils
|
20
|
+
from .Decorators import log_call, standardize_input
|
21
|
+
from .LoggingConfig import setup_logging, get_logger
|
22
|
+
|
23
|
+
logger = get_logger(__name__)
|
24
|
+
|
25
|
+
class HdfResultsMesh:
|
26
|
+
"""
|
27
|
+
A class for handling mesh-related results from HEC-RAS HDF files.
|
28
|
+
|
29
|
+
This class provides methods to extract and analyze mesh summary outputs,
|
30
|
+
timeseries data, and various mesh-specific results such as water surface
|
31
|
+
elevations, velocities, and errors.
|
32
|
+
|
33
|
+
The class works with HEC-RAS plan HDF files and uses HdfBase and HdfUtils
|
34
|
+
for common operations and utilities.
|
35
|
+
|
36
|
+
Methods in this class use the @log_call decorator for logging and the
|
37
|
+
@standardize_input decorator to handle different input types (e.g.,
|
38
|
+
plan number, file path).
|
39
|
+
|
40
|
+
Attributes:
|
41
|
+
None
|
42
|
+
|
43
|
+
Note:
|
44
|
+
This class is designed to work with HEC-RAS version 6.0 and later.
|
45
|
+
"""
|
46
|
+
|
47
|
+
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_summary_output(hdf_path: Path, var: str, round_to: str = "100ms") -> pd.DataFrame:
    """
    Retrieve summary output for one variable across all meshes in a plan HDF file.

    Args:
        hdf_path (Path): Path to the HEC-RAS plan HDF file.
        var (str): The summary output variable to retrieve.
        round_to (str): Unit used to round output datetimes (default "100ms").

    Returns:
        pd.DataFrame: Summary output rows for every mesh.

    Raises:
        ValueError: If the summary output data cannot be processed.
    """
    try:
        with h5py.File(hdf_path, 'r') as hdf_file:
            result = HdfResultsMesh._get_mesh_summary_output(hdf_file, var, round_to)
        return result
    except Exception as e:
        logger.error(f"Error in mesh_summary_output: {str(e)}")
        logger.error(f"Variable: {var}")
        raise ValueError(f"Failed to get summary output: {str(e)}")
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_timeseries_output(hdf_path: Path, mesh_name: str, var: str, truncate: bool = True) -> xr.DataArray:
    """
    Retrieve a timeseries DataArray for one mesh and one variable.

    Args:
        hdf_path (Path): Path to the HDF file.
        mesh_name (str): Name of the mesh.
        var (str): Variable to retrieve. Valid options include:
            "Water Surface", "Face Velocity", "Cell Velocity X", "Cell Velocity Y",
            "Face Flow", "Face Water Surface", "Cell Volume", "Cell Volume Error",
            "Cell Water Surface Error", "Cell Courant", "Face Courant",
            "Cell Hydraulic Depth", "Cell Invert Depth",
            "Cell Cumulative Precipitation Depth", "Cell Divergence Term",
            "Cell Eddy Viscosity X", "Cell Eddy Viscosity Y", "Cell Flow Balance",
            "Cell Storage Term", "Cell Water Source Term", "Face Cumulative Volume",
            "Face Eddy Viscosity", "Face Flow Period Average", "Face Friction Term",
            "Face Pressure Gradient Term", "Face Shear Stress", "Face Tangential Velocity"
        truncate (bool): Whether to truncate the output (default True).

    Returns:
        xr.DataArray: DataArray containing the timeseries output.
    """
    # Open the file only for the duration of the read; the helper copies the
    # data into memory, so the array stays valid after the file closes.
    with h5py.File(hdf_path, 'r') as hdf_file:
        data_array = HdfResultsMesh._get_mesh_timeseries_output(hdf_file, mesh_name, var, truncate)
    return data_array
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_faces_timeseries_output(hdf_path: Path, mesh_name: str) -> xr.Dataset:
    """
    Collect timeseries output for all face-based variables of one mesh.

    Args:
        hdf_path (Path): Path to the HDF file.
        mesh_name (str): Name of the mesh.

    Returns:
        xr.Dataset: Dataset with one data variable per face-based quantity;
        empty Dataset if nothing could be read or merged.
    """
    collected = []
    for face_var in ("Face Velocity", "Face Flow"):
        try:
            array = HdfResultsMesh.mesh_timeseries_output(hdf_path, mesh_name, face_var)
            # Name each DataArray after its variable so the merge produces
            # distinct dataset variables.
            array.name = face_var.lower().replace(' ', '_')
            collected.append(array)
        except Exception as e:
            logger.warning(f"Failed to process {face_var} for mesh {mesh_name}: {str(e)}")

    if not collected:
        logger.error(f"No valid data found for mesh {mesh_name}")
        return xr.Dataset()

    try:
        return xr.merge(collected)
    except Exception as e:
        logger.error(f"Failed to merge datasets: {str(e)}")
        return xr.Dataset()
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_cells_timeseries_output(hdf_path: Path, mesh_names: Optional[Union[str, List[str]]] = None, var: Optional[str] = None, truncate: bool = False, ras_object: Optional[Any] = None) -> Dict[str, xr.Dataset]:
    """
    Retrieve cell timeseries output for the requested meshes and variables.

    Args:
        hdf_path (Union[str, Path]): Path to the HDF file.
        mesh_names (Optional[Union[str, List[str]]]): Mesh name(s); all meshes when None.
        var (Optional[str]): Variable name; all known cell variables when None.
        truncate (bool): If True, truncates the output to remove trailing zeros.
        ras_object (Optional[Any]): RAS object, if available.

    Returns:
        Dict[str, xr.Dataset]: One xarray Dataset per mesh.

    Raises:
        ValueError: If there's an error processing the timeseries output data.
    """
    try:
        hdf_file = h5py.File(hdf_path, 'r')
        try:
            return HdfResultsMesh._mesh_cells_timeseries_output(hdf_file, mesh_names, var, truncate)
        finally:
            hdf_file.close()
    except Exception as e:
        logger.error(f"Error in mesh_cells_timeseries_output: {str(e)}")
        raise ValueError(f"Error processing timeseries output data: {e}")
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_last_iter(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
    """
    Get last iteration count for each mesh cell.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "100ms");
            added for consistency with the other summary-output wrappers.

    Returns:
        pd.DataFrame: DataFrame containing last iteration counts.

    Raises:
        ValueError: If there's an error processing the last iteration data.
    """
    # Bug fix: _get_mesh_summary_output expects an open h5py.File (it calls
    # HdfBase._get_simulation_start_time on it), but this wrapper previously
    # passed the Path straight through. Open the file here, matching every
    # sibling wrapper (mesh_max_ws, mesh_max_iter, ...).
    try:
        with h5py.File(hdf_path, 'r') as hdf_file:
            return HdfResultsMesh._get_mesh_summary_output(hdf_file, "Cell Last Iteration", round_to)
    except Exception as e:
        logger.error(f"Error in mesh_last_iter: {str(e)}")
        raise ValueError(f"Failed to get last iteration count: {str(e)}")
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_max_ws(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
    """
    Get maximum water surface elevation for each mesh cell.

    (Docstring fix: this method reads the "Maximum Water Surface" summary
    dataset; the previous docstring wrongly described it as "maximum
    iteration count" with a note about "Cell Last Iteration".)

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "100ms").

    Returns:
        pd.DataFrame: DataFrame containing maximum water surface elevations
        and the times at which they occurred.

    Raises:
        ValueError: If there's an error processing the maximum water surface data.
    """
    try:
        with h5py.File(hdf_path, 'r') as hdf_file:
            return HdfResultsMesh._get_mesh_summary_output(hdf_file, "Maximum Water Surface", round_to)
    except Exception as e:
        logger.error(f"Error in mesh_max_ws: {str(e)}")
        raise ValueError(f"Failed to get maximum water surface: {str(e)}")
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_min_ws(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
    """
    Retrieve the minimum water surface elevation reached in each mesh cell.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "100ms").

    Returns:
        pd.DataFrame: DataFrame containing minimum water surface elevations.

    Raises:
        ValueError: If the minimum water surface data cannot be processed.
    """
    try:
        with h5py.File(hdf_path, 'r') as hdf_file:
            summary = HdfResultsMesh._get_mesh_summary_output(hdf_file, "Minimum Water Surface", round_to)
        return summary
    except Exception as e:
        logger.error(f"Error in mesh_min_ws: {str(e)}")
        raise ValueError(f"Failed to get minimum water surface: {str(e)}")
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_max_face_v(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
    """
    Retrieve the maximum face velocity recorded for each mesh face.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "100ms").

    Returns:
        pd.DataFrame: DataFrame containing maximum face velocities.

    Raises:
        ValueError: If the maximum face velocity data cannot be processed.
    """
    try:
        hdf_file = h5py.File(hdf_path, 'r')
        try:
            return HdfResultsMesh._get_mesh_summary_output(hdf_file, "Maximum Face Velocity", round_to)
        finally:
            hdf_file.close()
    except Exception as e:
        logger.error(f"Error in mesh_max_face_v: {str(e)}")
        raise ValueError(f"Failed to get maximum face velocity: {str(e)}")
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_min_face_v(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
    """
    Retrieve the minimum face velocity recorded for each mesh face.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "100ms").

    Returns:
        pd.DataFrame: DataFrame containing minimum face velocities.

    Raises:
        ValueError: If the minimum face velocity data cannot be processed.
    """
    try:
        hdf_file = h5py.File(hdf_path, 'r')
        try:
            return HdfResultsMesh._get_mesh_summary_output(hdf_file, "Minimum Face Velocity", round_to)
        finally:
            hdf_file.close()
    except Exception as e:
        logger.error(f"Error in mesh_min_face_v: {str(e)}")
        raise ValueError(f"Failed to get minimum face velocity: {str(e)}")
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_max_ws_err(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
    """
    Retrieve the maximum water surface error recorded for each mesh cell.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "100ms").

    Returns:
        pd.DataFrame: DataFrame containing maximum water surface errors.

    Raises:
        ValueError: If the maximum water surface error data cannot be processed.
    """
    try:
        with h5py.File(hdf_path, 'r') as hdf_file:
            summary = HdfResultsMesh._get_mesh_summary_output(hdf_file, "Cell Maximum Water Surface Error", round_to)
        return summary
    except Exception as e:
        logger.error(f"Error in mesh_max_ws_err: {str(e)}")
        raise ValueError(f"Failed to get maximum water surface error: {str(e)}")
@staticmethod
@log_call
@standardize_input(file_type='plan_hdf')
def mesh_max_iter(hdf_path: Path, round_to: str = "100ms") -> pd.DataFrame:
    """
    Retrieve the maximum iteration count for each mesh cell.

    Note: the maximum iteration is stored under "Cell Last Iteration"
    in the HDF file.

    Args:
        hdf_path (Path): Path to the HDF file.
        round_to (str): Time rounding specification (default "100ms").

    Returns:
        pd.DataFrame: DataFrame containing maximum iteration counts.

    Raises:
        ValueError: If the maximum iteration data cannot be processed.
    """
    try:
        with h5py.File(hdf_path, 'r') as hdf_file:
            summary = HdfResultsMesh._get_mesh_summary_output(hdf_file, "Cell Last Iteration", round_to)
        return summary
    except Exception as e:
        logger.error(f"Error in mesh_max_iter: {str(e)}")
        raise ValueError(f"Failed to get maximum iteration count: {str(e)}")
@staticmethod
def _get_mesh_timeseries_output_path(mesh_name: str, var_name: str) -> str:
    """
    Build the HDF dataset path for a mesh timeseries variable.

    Args:
        mesh_name (str): Name of the mesh.
        var_name (str): Name of the variable.

    Returns:
        str: The HDF path for the specified mesh and variable.
    """
    base = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/2D Flow Areas"
    return f"{base}/{mesh_name}/{var_name}"
@staticmethod
def _mesh_cells_timeseries_output(hdf_file: h5py.File, mesh_names: Optional[Union[str, List[str]]] = None, var: Optional[str] = None, truncate: bool = False) -> Dict[str, xr.Dataset]:
    """
    Get mesh cells timeseries output for specified meshes and variables.

    Args:
        hdf_file (h5py.File): Open HDF file object.
        mesh_names (Optional[Union[str, List[str]]]): Name(s) of the mesh(es). If None, processes all available meshes.
        var (Optional[str]): Name of the variable to retrieve. If None, retrieves all variables.
        truncate (bool): If True, truncates the output to remove trailing all-zero time steps.

    Returns:
        Dict[str, xr.Dataset]: A dictionary of xarray Datasets, one for each mesh, containing the mesh cells timeseries output.

    Raises:
        ValueError: If there's an error processing the timeseries output data.
    """
    # Cell-based variables HEC-RAS writes under
    # ".../Unsteady Time Series/2D Flow Areas/<mesh>/<var>".
    TIME_SERIES_OUTPUT_VARS_CELLS = [
        "Water Surface",
        "Depth",
        "Velocity",
        "Velocity X",
        "Velocity Y",
        "Froude Number",
        "Courant Number",
        "Shear Stress",
        "Bed Elevation",
        "Precipitation Rate",
        "Infiltration Rate",
        "Evaporation Rate",
        "Percolation Rate",
        "Groundwater Elevation",
        "Groundwater Depth",
        "Groundwater Flow",
        "Groundwater Velocity",
        "Groundwater Velocity X",
        "Groundwater Velocity Y",
    ]

    try:
        start_time = HdfBase._get_simulation_start_time(hdf_file)
        time_stamps = HdfBase._get_unsteady_datetimes(hdf_file)

        if mesh_names is None:
            mesh_names = HdfResultsMesh._get_available_meshes(hdf_file)
        elif isinstance(mesh_names, str):
            mesh_names = [mesh_names]

        variables = [var] if var else TIME_SERIES_OUTPUT_VARS_CELLS

        datasets = {}
        for mesh_name in mesh_names:
            data_vars = {}
            for variable in variables:
                try:
                    path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, variable)
                    dataset = hdf_file[path]
                    values = dataset[:]
                    # Bug fix: a missing "Units" attribute previously fell back
                    # to a str default and crashed on .decode(); accept bytes
                    # or str transparently.
                    raw_units = dataset.attrs.get("Units", b"")
                    units = raw_units.decode("utf-8") if isinstance(raw_units, bytes) else str(raw_units)

                    if truncate:
                        # Bug fix: the old code used column (cell) indices
                        # (np.nonzero(values)[1]) to slice the cell axis AND
                        # the time-stamp list, desynchronizing the time
                        # coordinate from the data. Trim trailing all-zero
                        # TIME steps (axis 0) so values and times stay aligned.
                        nonzero_steps = np.nonzero(values)[0]
                        last_step = nonzero_steps.max() + 1 if nonzero_steps.size > 0 else 0
                        values = values[:last_step]
                        truncated_time_stamps = time_stamps[:last_step]
                    else:
                        truncated_time_stamps = time_stamps

                    if values.shape[0] != len(truncated_time_stamps):
                        logger.warning(f"Mismatch between number of time steps ({len(truncated_time_stamps)}) and data shape ({values.shape}) for variable {variable}")
                        continue

                    data_vars[variable] = xr.DataArray(
                        data=values,
                        dims=['time', 'cell_id'],
                        coords={'time': truncated_time_stamps, 'cell_id': np.arange(values.shape[1])},
                        attrs={'units': units}
                    )
                except KeyError:
                    logger.warning(f"Variable '{variable}' not found in the HDF file for mesh '{mesh_name}'. Skipping.")
                except Exception as e:
                    logger.error(f"Error processing variable '{variable}' for mesh '{mesh_name}': {str(e)}")

            if data_vars:
                datasets[mesh_name] = xr.Dataset(
                    data_vars=data_vars,
                    attrs={'mesh_name': mesh_name, 'start_time': start_time}
                )
            else:
                logger.warning(f"No valid data variables found for mesh '{mesh_name}'")

        return datasets
    except Exception as e:
        logger.error(f"Error in _mesh_cells_timeseries_output: {str(e)}")
        raise ValueError(f"Error processing timeseries output data: {e}")
@staticmethod
def _get_mesh_timeseries_output(hdf_file: h5py.File, mesh_name: str, var: str, truncate: bool = True) -> xr.DataArray:
    """
    Get timeseries output for a specific mesh and variable.

    Args:
        hdf_file (h5py.File): Open HDF file object.
        mesh_name (str): Name of the mesh.
        var (str): Variable name to retrieve. Valid options include:
            "Water Surface", "Face Velocity", "Cell Velocity X", "Cell Velocity Y",
            "Face Flow", "Face Water Surface", "Cell Volume", "Cell Volume Error",
            "Cell Water Surface Error", "Cell Courant", "Face Courant",
            "Cell Hydraulic Depth", "Cell Invert Depth",
            "Cell Cumulative Precipitation Depth", "Cell Divergence Term",
            "Cell Eddy Viscosity X", "Cell Eddy Viscosity Y", "Cell Flow Balance",
            "Cell Storage Term", "Cell Water Source Term", "Face Cumulative Volume",
            "Face Eddy Viscosity", "Face Flow Period Average", "Face Friction Term",
            "Face Pressure Gradient Term", "Face Shear Stress", "Face Tangential Velocity"
        truncate (bool): Whether to truncate the output to remove leading/trailing all-zero steps (default True).

    Returns:
        xr.DataArray: DataArray containing the timeseries output.

    Raises:
        ValueError: If the specified path is not found in the HDF file or if there's an error processing the data.
    """
    try:
        path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, var)

        if path not in hdf_file:
            raise ValueError(f"Path {path} not found in HDF file")

        # Read the whole dataset into memory via h5py.
        dataset = hdf_file[path]
        values = dataset[:]
        # Bug fix: a missing "Units" attribute previously fell back to a str
        # default and crashed on .decode(); accept bytes or str transparently.
        raw_units = dataset.attrs.get("Units", b"")
        units = raw_units.decode("utf-8") if isinstance(raw_units, bytes) else str(raw_units)
        times = HdfBase._get_unsteady_datetimes(hdf_file)

        if truncate:
            # np.nonzero(values)[0] yields time-step (row) indices; trim
            # leading and trailing all-zero steps along with their stamps.
            non_zero = np.nonzero(values)[0]
            if len(non_zero) > 0:
                start, end = non_zero[0], non_zero[-1] + 1
                values = values[start:end]
                times = times[start:end]

        # Build the DataArray; 2D data carries a per-cell coordinate.
        dims = ["time", "cell"] if values.ndim == 2 else ["time"]
        coords = {"time": times}
        if values.ndim == 2:
            coords["cell"] = np.arange(values.shape[1])

        return xr.DataArray(
            values,
            coords=coords,
            dims=dims,
            attrs={"units": units, "mesh_name": mesh_name, "variable": var},
        )
    except Exception as e:
        logger.error(f"Error in get_mesh_timeseries_output: {str(e)}")
        raise ValueError(f"Failed to get timeseries output: {str(e)}")
@staticmethod
def _get_mesh_timeseries_output_values_units(hdf_file: h5py.File, mesh_name: str, var: str) -> Tuple[np.ndarray, str]:
    """
    Read the raw timeseries values and their units string for one variable.

    Args:
        hdf_file (h5py.File): Open HDF file object.
        mesh_name (str): Name of the mesh.
        var (str): Variable name to retrieve.

    Returns:
        Tuple[np.ndarray, str]: The output values and the units
        (None when the dataset carries no "Units" attribute).
    """
    path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, var)
    dataset = hdf_file[path]
    values = dataset[:]
    raw_units = dataset.attrs.get("Units")
    # A missing attribute stays None; otherwise decode the bytes value.
    units = raw_units.decode("utf-8") if raw_units is not None else raw_units
    return values, units
@staticmethod
def _get_available_meshes(hdf_file: h5py.File) -> List[str]:
    """
    List the names of all 2D flow area meshes present in the HDF file.

    Args:
        hdf_file (h5py.File): Open HDF file object.

    Returns:
        List[str]: A list of mesh names (empty when the geometry group is absent).
    """
    base_path = "Geometry/2D Flow Areas"
    if base_path not in hdf_file:
        return []
    area_group = hdf_file[base_path]
    # Only sub-groups represent meshes; skip plain datasets at this level.
    return [name for name in area_group if isinstance(area_group[name], h5py.Group)]
@staticmethod
def _get_mesh_summary_output(hdf_file: h5py.File, var: str, round_to: str = "100ms") -> pd.DataFrame:
    """
    Collect the summary output for one variable across every 2D flow area.

    Handles both the two-row (value, time) layout used by variables such as
    "Maximum Water Surface" and plain 1D layouts such as "Cell Last
    Iteration". HDF group attributes are attached to the DataFrame's attrs.

    Args:
        hdf_file (h5py.File): Open HDF file object.
        var (str): The summary output variable to retrieve.
        round_to (str): The time unit to round the datetimes to. Default is "100ms".

    Returns:
        pd.DataFrame: Summary output rows for all meshes, with attributes as metadata.

    Raises:
        ValueError: If the HDF file cannot be opened or read, or if the requested data is not found.
    """
    try:
        frames = []
        start_time = HdfBase._get_simulation_start_time(hdf_file)
        # Column name derived once from the variable name.
        column = var.lower().replace(' ', '_')

        logger.info(f"Processing summary output for variable: {var}")
        for mesh_name, cell_count in HdfBase._get_2d_flow_area_names_and_counts(hdf_file):
            logger.debug(f"Processing mesh: {mesh_name} with {cell_count} cells")
            group = HdfResultsMesh._get_mesh_summary_output_group(hdf_file, mesh_name, var)

            data = group[:]
            logger.debug(f"Data shape for {var} in {mesh_name}: {data.shape}")
            logger.debug(f"Data type: {data.dtype}")
            logger.debug(f"Attributes: {dict(group.attrs)}")

            if data.ndim == 2 and data.shape[0] == 2:
                # Two-row layout: row 0 holds values, row 1 holds time offsets
                # (e.g. "Maximum Water Surface").
                row_variables = group.attrs.get('Row Variables', [b'Value', b'Time'])
                row_variables = [v.decode('utf-8').strip() for v in row_variables]

                frame = pd.DataFrame({
                    "mesh_name": [mesh_name] * data.shape[1],
                    "cell_id": range(data.shape[1]),
                    column: data[0, :],
                    f"{column}_time": HdfUtils._ras_timesteps_to_datetimes(
                        data[1, :], start_time, time_unit="days", round_to=round_to
                    )
                })
            elif data.ndim == 1:
                # Plain per-cell layout (e.g. "Cell Last Iteration").
                frame = pd.DataFrame({
                    "mesh_name": [mesh_name] * len(data),
                    "cell_id": range(len(data)),
                    column: data
                })
            else:
                raise ValueError(f"Unexpected data shape for {var} in {mesh_name}. "
                                 f"Got shape {data.shape}")

            # Carry group attributes along as DataFrame metadata.
            frame.attrs['mesh_name'] = mesh_name
            for attr_name, attr_value in group.attrs.items():
                if isinstance(attr_value, bytes):
                    attr_value = attr_value.decode('utf-8')
                elif isinstance(attr_value, np.ndarray):
                    attr_value = attr_value.tolist()
                frame.attrs[attr_name] = attr_value

            frames.append(frame)

        result = pd.concat(frames, ignore_index=True)

        # Merge per-mesh metadata; flag keys whose values disagree.
        combined_attrs = {}
        for frame in frames:
            for key, value in frame.attrs.items():
                if key not in combined_attrs:
                    combined_attrs[key] = value
                elif combined_attrs[key] != value:
                    combined_attrs[key] = f"Multiple values: {combined_attrs[key]}, {value}"

        result.attrs.update(combined_attrs)

        logger.info(f"Processed {len(result)} rows of summary output data")
        return result

    except (KeyError, ValueError, AttributeError) as e:
        logger.error(f"Error processing summary output data: {e}")
        raise ValueError(f"Error processing summary output data: {e}")
@staticmethod
def _get_mesh_summary_output_group(hdf_file: h5py.File, mesh_name: str, var: str) -> Union[h5py.Group, h5py.Dataset]:
    """
    Look up the HDF node holding a mesh's summary output variable.

    Args:
        hdf_file (h5py.File): Open HDF file object.
        mesh_name (str): Name of the mesh.
        var (str): Name of the summary output variable.

    Returns:
        Union[h5py.Group, h5py.Dataset]: The HDF node for the mesh/variable pair.

    Raises:
        ValueError: If no node exists at the expected path.
    """
    output_path = (
        "Results/Unsteady/Output/Output Blocks/Base Output/Summary Output/"
        f"2D Flow Areas/{mesh_name}/{var}"
    )
    # .get returns None instead of raising when the path is absent.
    output_item = hdf_file.get(output_path)
    if output_item is None:
        raise ValueError(f"Could not find HDF group or dataset at path '{output_path}'")
    return output_item