ras-commander 0.42.0__py3-none-any.whl → 0.44.0__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
- ras_commander/Decorators.py +111 -0
- ras_commander/HdfBase.py +197 -0
- ras_commander/HdfBndry.py +505 -0
- ras_commander/HdfMesh.py +308 -0
- ras_commander/HdfPlan.py +200 -0
- ras_commander/HdfResultsMesh.py +662 -0
- ras_commander/HdfResultsPlan.py +398 -0
- ras_commander/HdfResultsXsec.py +237 -0
- ras_commander/HdfStruc.py +147 -0
- ras_commander/HdfUtils.py +467 -0
- ras_commander/HdfXsec.py +282 -0
- ras_commander/RasCmdr.py +2 -1
- ras_commander/RasExamples.py +49 -116
- ras_commander/RasGeo.py +2 -2
- ras_commander/RasGpt.py +6 -129
- ras_commander/RasPlan.py +2 -2
- ras_commander/RasPrj.py +55 -9
- ras_commander/RasUnsteady.py +2 -1
- ras_commander/RasUtils.py +198 -73
- ras_commander/__init__.py +31 -9
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/METADATA +9 -2
- ras_commander-0.44.0.dist-info/RECORD +26 -0
- ras_commander/RasHdf.py +0 -1619
- ras_commander-0.42.0.dist-info/RECORD +0 -16
- /ras_commander/{logging_config.py → LoggingConfig.py} +0 -0
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/LICENSE +0 -0
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/WHEEL +0 -0
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/top_level.txt +0 -0
--- /dev/null
+++ b/ras_commander/HdfResultsPlan.py
@@ -0,0 +1,398 @@
+"""
+Class: HdfResultsPlan
+
+Attribution: A substantial amount of code in this file is sourced or derived
+from the https://github.com/fema-ffrd/rashdf library,
+released under MIT license and Copyright (c) 2024 fema-ffrd
+
+The file has been forked and modified for use in RAS Commander.
+"""
+
+from typing import Dict, List, Union, Optional
+from pathlib import Path
+import h5py
+import pandas as pd
+import xarray as xr
+from .Decorators import standardize_input, log_call
+from .HdfBase import HdfBase
+from .HdfResultsXsec import HdfResultsXsec
+from .LoggingConfig import get_logger
+import numpy as np
+from datetime import datetime
+
+logger = get_logger(__name__)
+
+
+class HdfResultsPlan:
+    """
+    A class for handling HEC-RAS plan HDF file results related to unsteady flow and reference line/point outputs.
+
+    This class provides methods for extracting and analyzing data from HEC-RAS plan HDF files,
+    focusing on unsteady flow results, volume accounting, and reference line/point time series outputs.
+
+    Methods in this class use the @standardize_input decorator to handle different input types
+    (e.g., plan number, file path) and the @log_call decorator for logging method calls.
+
+    Attributes:
+        None
+
+    Note:
+        This class is designed to work with HEC-RAS plan HDF files and requires the HdfBase class
+        for some of its operations.
+    """
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_results_unsteady_attrs(hdf_path: Path) -> Dict:
+        """
+        Get unsteady attributes from a HEC-RAS HDF plan file.
+
+        Args:
+            hdf_path (Path): Path to the HEC-RAS plan HDF file.
+
+        Returns:
+            Dict: A dictionary containing the unsteady attributes.
+
+        Raises:
+            FileNotFoundError: If the specified HDF file is not found.
+            KeyError: If the "Results/Unsteady" group is not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                if "Results/Unsteady" not in hdf_file:
+                    raise KeyError("Results/Unsteady group not found in the HDF file.")
+                return dict(hdf_file["Results/Unsteady"].attrs)
+        except FileNotFoundError:
+            raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+        except Exception as e:
+            raise RuntimeError(f"Error reading unsteady attributes: {str(e)}")
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_results_unsteady_summary_attrs(hdf_path: Path) -> Dict:
+        """
+        Get results unsteady summary attributes from a HEC-RAS HDF plan file.
+
+        Args:
+            hdf_path (Path): Path to the HEC-RAS plan HDF file.
+
+        Returns:
+            Dict: A dictionary containing the results unsteady summary attributes.
+
+        Raises:
+            FileNotFoundError: If the specified HDF file is not found.
+            KeyError: If the "Results/Unsteady/Summary" group is not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                if "Results/Unsteady/Summary" not in hdf_file:
+                    raise KeyError("Results/Unsteady/Summary group not found in the HDF file.")
+                return dict(hdf_file["Results/Unsteady/Summary"].attrs)
+        except FileNotFoundError:
+            raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+        except Exception as e:
+            raise RuntimeError(f"Error reading unsteady summary attributes: {str(e)}")
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_results_volume_accounting_attrs(hdf_path: Path) -> Dict:
+        """
+        Get volume accounting attributes from a HEC-RAS HDF plan file.
+
+        Args:
+            hdf_path (Path): Path to the HEC-RAS plan HDF file.
+
+        Returns:
+            Dict: A dictionary containing the volume accounting attributes.
+
+        Raises:
+            FileNotFoundError: If the specified HDF file is not found.
+            KeyError: If the "Results/Unsteady/Summary/Volume Accounting" group is not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                if "Results/Unsteady/Summary/Volume Accounting" not in hdf_file:
+                    raise KeyError("Results/Unsteady/Summary/Volume Accounting group not found in the HDF file.")
+                return dict(hdf_file["Results/Unsteady/Summary/Volume Accounting"].attrs)
+        except FileNotFoundError:
+            raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+        except Exception as e:
+            raise RuntimeError(f"Error reading volume accounting attributes: {str(e)}")
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_runtime_data(hdf_path: Path) -> Optional[pd.DataFrame]:
+        """
+        Extract runtime and compute time data from a single HDF file.
+
+        Args:
+            hdf_path (Path): The full path to the HDF file.
+
+        Returns:
+            Optional[pd.DataFrame]: DataFrame containing runtime and compute time data, or None if data extraction fails.
+        """
+        if hdf_path is None:
+            logger.error(f"Could not find HDF file for input")
+            return None
+
+        with h5py.File(hdf_path, 'r') as hdf_file:
+            logger.info(f"Extracting Plan Information from: {Path(hdf_file.filename).name}")
+            plan_info = hdf_file.get('/Plan Data/Plan Information')
+            if plan_info is None:
+                logger.warning("Group '/Plan Data/Plan Information' not found.")
+                return None
+
+            plan_name = plan_info.attrs.get('Plan Name', 'Unknown')
+            plan_name = plan_name.decode('utf-8') if isinstance(plan_name, bytes) else plan_name
+            logger.info(f"Plan Name: {plan_name}")
+
+            start_time_str = plan_info.attrs.get('Simulation Start Time', 'Unknown')
+            end_time_str = plan_info.attrs.get('Simulation End Time', 'Unknown')
+            start_time_str = start_time_str.decode('utf-8') if isinstance(start_time_str, bytes) else start_time_str
+            end_time_str = end_time_str.decode('utf-8') if isinstance(end_time_str, bytes) else end_time_str
+
+            start_time = datetime.strptime(start_time_str, "%d%b%Y %H:%M:%S")
+            end_time = datetime.strptime(end_time_str, "%d%b%Y %H:%M:%S")
+            simulation_duration = end_time - start_time
+            simulation_hours = simulation_duration.total_seconds() / 3600
+
+            logger.info(f"Simulation Start Time: {start_time_str}")
+            logger.info(f"Simulation End Time: {end_time_str}")
+            logger.info(f"Simulation Duration (hours): {simulation_hours}")
+
+            compute_processes = hdf_file.get('/Results/Summary/Compute Processes')
+            if compute_processes is None:
+                logger.warning("Dataset '/Results/Summary/Compute Processes' not found.")
+                return None
+
+            process_names = [name.decode('utf-8') for name in compute_processes['Process'][:]]
+            filenames = [filename.decode('utf-8') for filename in compute_processes['Filename'][:]]
+            completion_times = compute_processes['Compute Time (ms)'][:]
+
+            compute_processes_df = pd.DataFrame({
+                'Process': process_names,
+                'Filename': filenames,
+                'Compute Time (ms)': completion_times,
+                'Compute Time (s)': completion_times / 1000,
+                'Compute Time (hours)': completion_times / (1000 * 3600)
+            })
+
+            logger.debug("Compute processes DataFrame:")
+            logger.debug(compute_processes_df)
+
+            compute_processes_summary = {
+                'Plan Name': [plan_name],
+                'File Name': [Path(hdf_file.filename).name],
+                'Simulation Start Time': [start_time_str],
+                'Simulation End Time': [end_time_str],
+                'Simulation Duration (s)': [simulation_duration.total_seconds()],
+                'Simulation Time (hr)': [simulation_hours],
+                'Completing Geometry (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Completing Geometry']['Compute Time (hours)'].values[0] if 'Completing Geometry' in compute_processes_df['Process'].values else 'N/A'],
+                'Preprocessing Geometry (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Preprocessing Geometry']['Compute Time (hours)'].values[0] if 'Preprocessing Geometry' in compute_processes_df['Process'].values else 'N/A'],
+                'Completing Event Conditions (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Completing Event Conditions']['Compute Time (hours)'].values[0] if 'Completing Event Conditions' in compute_processes_df['Process'].values else 'N/A'],
+                'Unsteady Flow Computations (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Unsteady Flow Computations']['Compute Time (hours)'].values[0] if 'Unsteady Flow Computations' in compute_processes_df['Process'].values else 'N/A'],
+                'Complete Process (hr)': [compute_processes_df['Compute Time (hours)'].sum()]
+            }
+
+            compute_processes_summary['Unsteady Flow Speed (hr/hr)'] = [simulation_hours / compute_processes_summary['Unsteady Flow Computations (hr)'][0] if compute_processes_summary['Unsteady Flow Computations (hr)'][0] != 'N/A' else 'N/A']
+            compute_processes_summary['Complete Process Speed (hr/hr)'] = [simulation_hours / compute_processes_summary['Complete Process (hr)'][0] if compute_processes_summary['Complete Process (hr)'][0] != 'N/A' else 'N/A']
+
+            compute_summary_df = pd.DataFrame(compute_processes_summary)
+            logger.debug("Compute summary DataFrame:")
+            logger.debug(compute_summary_df)
+
+            return compute_summary_df
+
+
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def reference_timeseries_output(hdf_path: Path, reftype: str = "lines") -> xr.Dataset:
+        """
+        Get timeseries output for reference lines or points.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+            reftype (str): Type of reference, either "lines" or "points" (default "lines").
+
+        Returns:
+            xr.Dataset: Dataset containing the timeseries output for reference lines or points.
+
+        Raises:
+            FileNotFoundError: If the specified HDF file is not found.
+            ValueError: If an invalid reftype is provided.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                return HdfResultsPlan._reference_timeseries_output(hdf_file, reftype)
+        except FileNotFoundError:
+            raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+        except ValueError as ve:
+            raise ValueError(f"Invalid reftype: {str(ve)}")
+        except Exception as e:
+            raise RuntimeError(f"Error getting reference timeseries output: {str(e)}")
+
+
+    @staticmethod
+    def _reference_timeseries_output(hdf_file: h5py.File, reftype: str = "lines") -> xr.Dataset:
+        """
+        Private method to return timeseries output data for reference lines or points from a HEC-RAS HDF plan file.
+
+        Parameters
+        ----------
+        hdf_file : h5py.File
+            Open HDF file object.
+        reftype : str, optional
+            The type of reference data to retrieve. Must be either "lines" or "points".
+            (default: "lines")
+
+        Returns
+        -------
+        xr.Dataset
+            An xarray Dataset with reference line or point timeseries data.
+            Returns an empty Dataset if the reference output data is not found.
+
+        Raises
+        ------
+        ValueError
+            If reftype is not "lines" or "points".
+        """
+        if reftype == "lines":
+            output_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/Reference Lines"
+            abbrev = "refln"
+        elif reftype == "points":
+            output_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/Reference Points"
+            abbrev = "refpt"
+        else:
+            raise ValueError('reftype must be either "lines" or "points".')
+
+        try:
+            reference_group = hdf_file[output_path]
+        except KeyError:
+            logger.error(f"Could not find HDF group at path '{output_path}'. "
+                         f"The Plan HDF file may not contain reference {reftype[:-1]} output data.")
+            return xr.Dataset()
+
+        reference_names = reference_group["Name"][:]
+        names = []
+        mesh_areas = []
+        for s in reference_names:
+            name, mesh_area = s.decode("utf-8").split("|")
+            names.append(name)
+            mesh_areas.append(mesh_area)
+
+        times = HdfBase._get_unsteady_datetimes(hdf_file)
+
+        das = {}
+        for var in ["Flow", "Velocity", "Water Surface"]:
+            group = reference_group.get(var)
+            if group is None:
+                continue
+            values = group[:]
+            units = group.attrs["Units"].decode("utf-8")
+            da = xr.DataArray(
+                values,
+                name=var,
+                dims=["time", f"{abbrev}_id"],
+                coords={
+                    "time": times,
+                    f"{abbrev}_id": range(values.shape[1]),
+                    f"{abbrev}_name": (f"{abbrev}_id", names),
+                    "mesh_name": (f"{abbrev}_id", mesh_areas),
+                },
+                attrs={"units": units, "hdf_path": f"{output_path}/{var}"},
+            )
+            das[var] = da
+        return xr.Dataset(das)
+
+
+
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def reference_lines_timeseries_output(hdf_path: Path) -> xr.Dataset:
+        """
+        Get timeseries output for reference lines.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            xr.Dataset: Dataset containing the timeseries output for reference lines.
+
+        Raises:
+            FileNotFoundError: If the specified HDF file is not found.
+        """
+        return HdfResultsPlan.reference_timeseries_output(hdf_path, reftype="lines")
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def reference_points_timeseries_output(hdf_path: Path) -> xr.Dataset:
+        """
+        Get timeseries output for reference points.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            xr.Dataset: Dataset containing the timeseries output for reference points.
+
+        Raises:
+            FileNotFoundError: If the specified HDF file is not found.
+        """
+        return HdfResultsPlan.reference_timeseries_output(hdf_path, reftype="points")
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def reference_summary_output(hdf_path: Path, reftype: str = "lines") -> pd.DataFrame:
+        """
+        Get summary output for reference lines or points.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+            reftype (str): Type of reference, either "lines" or "points" (default "lines").
+
+        Returns:
+            pd.DataFrame: DataFrame containing the summary output for reference lines or points.
+
+        Raises:
+            ValueError: If an invalid reftype is provided.
+        """
+        if not hdf_path.exists():
+            logger.error(f"HDF file not found: {hdf_path}")
+            return pd.DataFrame()  # Return an empty DataFrame if the path doesn't exist
+
+        try:
+            # Get the timeseries output
+            ds = HdfResultsPlan.reference_timeseries_output(hdf_path, reftype)
+
+            if 'station' not in ds.dims:
+                logger.error("No 'station' dimension found in the dataset.")
+                return pd.DataFrame()  # Return an empty DataFrame if 'station' dimension is missing
+
+            # Calculate summary statistics
+            summary = ds.groupby('station').agg({
+                'WSE': ['min', 'max', 'mean'],
+                'Q': ['min', 'max', 'mean']
+            })
+
+            # Flatten column names
+            summary.columns = ['_'.join(col).strip() for col in summary.columns.values]
+
+            # Reset index to make 'station' a column
+            summary = summary.reset_index()
+
+            return summary
+        except ValueError as ve:
+            logger.error(f"Invalid reftype: {str(ve)}")
+            return pd.DataFrame()  # Return an empty DataFrame on ValueError
+        except Exception as e:
+            logger.error(f"Error in reference_summary_output: {str(e)}")
+            return pd.DataFrame()  # Return an empty DataFrame on general error
--- /dev/null
+++ b/ras_commander/HdfResultsXsec.py
@@ -0,0 +1,237 @@
+"""
+Class: HdfResultsXsec
+
+Attribution: A substantial amount of code in this file is sourced or derived
+from the https://github.com/fema-ffrd/rashdf library,
+released under MIT license and Copyright (c) 2024 fema-ffrd
+
+The file has been forked and modified for use in RAS Commander.
+"""
+
+import h5py
+import numpy as np
+import pandas as pd
+from pathlib import Path
+from typing import Union, Optional, List
+from .HdfBase import HdfBase
+from .HdfUtils import HdfUtils
+from .Decorators import standardize_input, log_call
+from .LoggingConfig import setup_logging, get_logger
+import xarray as xr
+
+logger = get_logger(__name__)
+
+
+class HdfResultsXsec:
+    """
+    A class for handling cross-section results from HEC-RAS HDF files.
+
+    This class provides methods to extract and process steady flow simulation results
+    for cross-sections, including water surface elevations, flow rates, energy grades,
+    and additional parameters such as encroachment stations and velocities.
+
+    The class relies on the HdfBase and HdfUtils classes for core HDF file operations
+    and utility functions.
+
+    Attributes:
+        None
+
+    Methods:
+        steady_profile_xs_output: Extract steady profile cross-section output for a specified variable.
+        cross_sections_wsel: Get water surface elevation data for cross-sections.
+        cross_sections_flow: Get flow data for cross-sections.
+        cross_sections_energy_grade: Get energy grade data for cross-sections.
+        cross_sections_additional_enc_station_left: Get left encroachment station data for cross-sections.
+        cross_sections_additional_enc_station_right: Get right encroachment station data for cross-sections.
+        cross_sections_additional_area_total: Get total ineffective area data for cross-sections.
+        cross_sections_additional_velocity_total: Get total velocity data for cross-sections.
+    """
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def steady_profile_xs_output(hdf_path: Path, var: str, round_to: int = 2) -> pd.DataFrame:
+        """
+        Create a DataFrame from steady cross section results based on the specified variable.
+
+        Parameters:
+        ----------
+        hdf_path : Path
+            Path to the HEC-RAS plan HDF file.
+        var : str
+            The variable to extract from the steady cross section results.
+        round_to : int, optional
+            Number of decimal places to round the results to (default is 2).
+
+        Returns:
+        -------
+        pd.DataFrame
+            DataFrame containing the steady cross section results for the specified variable.
+        """
+        XS_STEADY_OUTPUT_ADDITIONAL = [
+            "Additional Encroachment Station Left",
+            "Additional Encroachment Station Right",
+            "Additional Area Ineffective Total",
+            "Additional Velocity Total",
+        ]
+
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                # Determine the correct path based on the variable
+                if var in XS_STEADY_OUTPUT_ADDITIONAL:
+                    path = f"/Results/Steady/Cross Sections/Additional Output/{var}"
+                else:
+                    path = f"/Results/Steady/Cross Sections/{var}"
+
+                # Check if the path exists in the HDF file
+                if path not in hdf_file:
+                    return pd.DataFrame()
+
+                # Get the profile names
+                profiles = HdfBase.steady_flow_names(hdf_path)
+
+                # Extract the steady data
+                steady_data = hdf_file[path]
+
+                # Create a DataFrame with profiles as index
+                df = pd.DataFrame(steady_data, index=profiles)
+
+                # Transpose the DataFrame and round values
+                df_t = df.T.copy()
+                for p in profiles:
+                    df_t[p] = df_t[p].apply(lambda x: round(x, round_to))
+
+                return df_t
+        except Exception as e:
+            HdfUtils.logger.error(f"Failed to get steady profile cross section output: {str(e)}")
+            return pd.DataFrame()
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def cross_sections_wsel(hdf_path: Path) -> pd.DataFrame:
+        """
+        Return the water surface elevation information for each 1D Cross Section.
+
+        Parameters:
+        ----------
+        hdf_path : Path
+            Path to the HEC-RAS plan HDF file.
+
+        Returns:
+        -------
+        pd.DataFrame
+            A DataFrame containing the water surface elevations for each cross section and event.
+        """
+        return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Water Surface")
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def cross_sections_flow(hdf_path: Path) -> pd.DataFrame:
+        """
+        Return the Flow information for each 1D Cross Section.
+
+        Parameters:
+        ----------
+        hdf_path : Path
+            Path to the HEC-RAS plan HDF file.
+
+        Returns:
+        -------
+        pd.DataFrame
+            A DataFrame containing the flow for each cross section and event.
+        """
+        return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Flow")
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def cross_sections_energy_grade(hdf_path: Path) -> pd.DataFrame:
+        """
+        Return the energy grade information for each 1D Cross Section.
+
+        Parameters:
+        ----------
+        hdf_path : Path
+            Path to the HEC-RAS plan HDF file.
+
+        Returns:
+        -------
+        pd.DataFrame
+            A DataFrame containing the energy grade for each cross section and event.
+        """
+        return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Energy Grade")
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def cross_sections_additional_enc_station_left(hdf_path: Path) -> pd.DataFrame:
+        """
+        Return the left side encroachment information for a floodway plan hdf.
+
+        Parameters:
+        ----------
+        hdf_path : Path
+            Path to the HEC-RAS plan HDF file.
+
+        Returns:
+        -------
+        pd.DataFrame
+            A DataFrame containing the cross sections left side encroachment stations.
+        """
+        return HdfResultsXsec.steady_profile_xs_output(
+            hdf_path, "Encroachment Station Left"
+        )
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def cross_sections_additional_enc_station_right(hdf_path: Path) -> pd.DataFrame:
+        """
+        Return the right side encroachment information for a floodway plan hdf.
+
+        Parameters:
+        ----------
+        hdf_path : Path
+            Path to the HEC-RAS plan HDF file.
+
+        Returns:
+        -------
+        pd.DataFrame
+            A DataFrame containing the cross sections right side encroachment stations.
+        """
+        return HdfResultsXsec.steady_profile_xs_output(
+            hdf_path, "Encroachment Station Right"
+        )
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def cross_sections_additional_area_total(hdf_path: Path) -> pd.DataFrame:
+        """
+        Return the 1D cross section area for each profile.
+
+        Parameters:
+        ----------
+        hdf_path : Path
+            Path to the HEC-RAS plan HDF file.
+
+        Returns:
+        -------
+        pd.DataFrame
+            A DataFrame containing the wet area inside the cross sections.
+        """
+        return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Area Ineffective Total")
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def cross_sections_additional_velocity_total(hdf_path: Path) -> pd.DataFrame:
+        """
+        Return the 1D cross section velocity for each profile.
+
+        Parameters:
+        ----------
+        hdf_path : Path
+            Path to the HEC-RAS plan HDF file.
+
+        Returns:
+        -------
+        pd.DataFrame
+            A DataFrame containing the velocity inside the cross sections.
+        """
+        return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Velocity Total")
+
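
Likewise, a minimal, hedged sketch of the steady cross-section helpers from the `HdfResultsXsec` hunk above (illustration only; the path is a placeholder, and a steady-flow plan with computed results is assumed so that the `/Results/Steady/Cross Sections` datasets exist):

```python
# Hypothetical usage sketch (not part of the diff). The path is a placeholder and a
# steady-flow plan HDF with computed results is assumed; each helper returns an empty
# DataFrame when the requested dataset is absent (e.g., no floodway encroachments).
from pathlib import Path
from ras_commander.HdfResultsXsec import HdfResultsXsec

plan_hdf = Path(r"C:/Projects/ExampleProject/ExampleProject.p02.hdf")  # placeholder path

wsel_df = HdfResultsXsec.cross_sections_wsel(plan_hdf)            # water surface elevation per profile
flow_df = HdfResultsXsec.cross_sections_flow(plan_hdf)            # flow per profile
energy_df = HdfResultsXsec.cross_sections_energy_grade(plan_hdf)  # energy grade per profile
enc_left_df = HdfResultsXsec.cross_sections_additional_enc_station_left(plan_hdf)

print(wsel_df.head())
```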