ras-commander 0.44.0__tar.gz → 0.45.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ras_commander-0.44.0/ras_commander.egg-info → ras_commander-0.45.0}/PKG-INFO +1 -1
- ras_commander-0.45.0/ras_commander/HdfPipe.py +262 -0
- ras_commander-0.45.0/ras_commander/HdfPump.py +255 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/HdfResultsPlan.py +3 -0
- ras_commander-0.45.0/ras_commander/HdfResultsXsec.py +443 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/__init__.py +4 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0/ras_commander.egg-info}/PKG-INFO +1 -1
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander.egg-info/SOURCES.txt +2 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/setup.py +1 -1
- ras_commander-0.44.0/ras_commander/HdfResultsXsec.py +0 -237
- {ras_commander-0.44.0 → ras_commander-0.45.0}/LICENSE +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/README.md +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/pyproject.toml +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/Decorators.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/HdfBase.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/HdfBndry.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/HdfMesh.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/HdfPlan.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/HdfResultsMesh.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/HdfStruc.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/HdfUtils.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/HdfXsec.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/LoggingConfig.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/RasCmdr.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/RasExamples.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/RasGeo.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/RasGpt.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/RasPlan.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/RasPrj.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/RasUnsteady.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander/RasUtils.py +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander.egg-info/dependency_links.txt +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/ras_commander.egg-info/top_level.txt +0 -0
- {ras_commander-0.44.0 → ras_commander-0.45.0}/setup.cfg +0 -0
@@ -0,0 +1,262 @@
|
|
1
|
+
import h5py
|
2
|
+
import numpy as np
|
3
|
+
import pandas as pd
|
4
|
+
import geopandas as gpd
|
5
|
+
import xarray as xr
|
6
|
+
from pathlib import Path
|
7
|
+
from shapely.geometry import LineString, Point
|
8
|
+
from typing import List, Dict, Any, Optional, Union
|
9
|
+
from .HdfBase import HdfBase
|
10
|
+
from .HdfUtils import HdfUtils
|
11
|
+
from .Decorators import standardize_input, log_call
|
12
|
+
from .LoggingConfig import get_logger
|
13
|
+
|
14
|
+
logger = get_logger(__name__)
|
15
|
+
|
16
|
+
class HdfPipe:
    """
    A class for handling pipe network related data from HEC-RAS HDF files.

    All methods are static, take a path to a plan HDF file, and return
    pandas / GeoPandas / xarray objects built from the raw HDF datasets.
    """

    @staticmethod
    def _decode(value):
        """Decode an HDF byte string to str; pass any other value through unchanged."""
        return value.decode('utf-8') if isinstance(value, bytes) else value

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pipe_conduits(hdf_path: Path) -> gpd.GeoDataFrame:
        """
        Extract pipe conduit data from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            gpd.GeoDataFrame: GeoDataFrame containing pipe conduit centerlines,
            a sequential ``conduit_id``, and the start/count indices into the
            terrain-profile values dataset (see :meth:`get_pipe_profile`).

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                # Raw geometry datasets describing the conduit centerlines.
                polyline_info = hdf['/Geometry/Pipe Conduits/Polyline Info'][()]
                polyline_points = hdf['/Geometry/Pipe Conduits/Polyline Points'][()]
                terrain_profiles_info = hdf['/Geometry/Pipe Conduits/Terrain Profiles Info'][()]

                # Each polyline_info row carries four fields; only the first two
                # (point start index, point count) are needed to build the line.
                geometries = [
                    LineString(polyline_points[start:start + count])
                    for start, count, _, _ in polyline_info
                ]

                gdf = gpd.GeoDataFrame(geometry=geometries)
                gdf['conduit_id'] = range(len(gdf))
                gdf['terrain_profile_start'] = terrain_profiles_info[:, 0]
                gdf['terrain_profile_count'] = terrain_profiles_info[:, 1]

                # Set CRS if the HDF file declares a projection.
                crs = HdfUtils.projection(hdf_path)
                if crs:
                    gdf.set_crs(crs, inplace=True)

                return gdf

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pipe conduit data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pipe_nodes(hdf_path: Path) -> gpd.GeoDataFrame:
        """
        Extract pipe node data from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            gpd.GeoDataFrame: GeoDataFrame of pipe node points with a
            sequential ``node_id`` plus one column per attribute field.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                points = hdf['/Geometry/Pipe Nodes/Points'][()]
                attributes = hdf['/Geometry/Pipe Nodes/Attributes'][()]

                geometries = [Point(x, y) for x, y in points]

                gdf = gpd.GeoDataFrame(geometry=geometries)
                gdf['node_id'] = range(len(gdf))

                # Copy every field of the structured attributes array into
                # its own GeoDataFrame column.
                for name in attributes.dtype.names:
                    gdf[name] = attributes[name]

                # Set CRS if the HDF file declares a projection.
                crs = HdfUtils.projection(hdf_path)
                if crs:
                    gdf.set_crs(crs, inplace=True)

                return gdf

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pipe node data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pipe_network_timeseries(hdf_path: Path, variable: str,
                                    network_name: str = "Davis") -> xr.DataArray:
        """
        Extract timeseries data for a specific variable in the pipe network.

        Args:
            hdf_path (Path): Path to the HDF file.
            variable (str): Variable to extract (e.g., "Cell Courant", "Cell Water Surface").
            network_name (str): Name of the pipe network group inside the results.
                Defaults to "Davis", which was previously hard-coded.

        Returns:
            xr.DataArray: DataArray containing the timeseries data, with
            'time' and 'location' dimensions.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
            ValueError: If an invalid variable is specified.
        """
        valid_variables = [
            "Cell Courant", "Cell Water Surface", "Face Flow", "Face Velocity",
            "Face Water Surface", "Pipes/Pipe Flow DS", "Pipes/Pipe Flow US",
            "Pipes/Vel DS", "Pipes/Vel US", "Nodes/Depth", "Nodes/Drop Inlet Flow",
            "Nodes/Water Surface"
        ]

        if variable not in valid_variables:
            raise ValueError(f"Invalid variable. Must be one of: {', '.join(valid_variables)}")

        try:
            with h5py.File(hdf_path, 'r') as hdf:
                data_path = (
                    "/Results/Unsteady/Output/Output Blocks/DSS Hydrograph Output/"
                    f"Unsteady Time Series/Pipe Networks/{network_name}/{variable}"
                )
                data = hdf[data_path][()]

                # Timestamps for the unsteady output, one per data row.
                time = HdfBase._get_unsteady_datetimes(hdf)

                da = xr.DataArray(
                    data=data,
                    dims=['time', 'location'],
                    coords={'time': time, 'location': range(data.shape[1])},
                    name=variable
                )

                # BUG FIX: the Units attribute may already be str depending on
                # the h5py version; decode only when it is bytes.
                da.attrs['units'] = HdfPipe._decode(hdf[data_path].attrs.get('Units', b''))
                da.attrs['variable'] = variable

                return da

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pipe network timeseries data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pipe_network_summary(hdf_path: Path) -> pd.DataFrame:
        """
        Extract summary data for pipe networks from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            pd.DataFrame: DataFrame containing pipe network summary data;
            empty if the summary table is not present.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                summary_path = "/Results/Unsteady/Summary/Pipe Network"
                if summary_path not in hdf:
                    logger.warning("Pipe Network summary data not found in HDF file")
                    return pd.DataFrame()

                summary_data = hdf[summary_path][()]

                df = pd.DataFrame(summary_data)

                # BUG FIX: structured-array column names arrive as str in
                # pandas; the unconditional .decode() raised AttributeError.
                # Decode only genuine byte strings, in names and cell values.
                df.columns = [HdfPipe._decode(col) for col in df.columns]
                for col in df.columns:
                    if df[col].dtype == object:
                        df[col] = df[col].map(HdfPipe._decode)

                return df

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pipe network summary data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pipe_profile(hdf_path: Path, conduit_id: int) -> pd.DataFrame:
        """
        Extract the terrain profile data for a specific pipe conduit.

        Args:
            hdf_path (Path): Path to the HDF file.
            conduit_id (int): ID of the conduit to extract profile for.

        Returns:
            pd.DataFrame: DataFrame with 'Station' and 'Elevation' columns.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
            IndexError: If the specified conduit_id is out of range.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                terrain_profiles_info = hdf['/Geometry/Pipe Conduits/Terrain Profiles Info'][()]

                if conduit_id >= len(terrain_profiles_info):
                    raise IndexError(f"conduit_id {conduit_id} is out of range")

                # (start index, point count) into the values dataset.
                start, count = terrain_profiles_info[conduit_id]

                profile_values = hdf['/Geometry/Pipe Conduits/Terrain Profiles Values'][start:start + count]

                return pd.DataFrame(profile_values, columns=['Station', 'Elevation'])

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except IndexError as e:
            logger.error(f"Invalid conduit_id: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pipe profile data: {e}")
            raise
|
@@ -0,0 +1,255 @@
|
|
1
|
+
import h5py
import numpy as np
import pandas as pd
import geopandas as gpd
import xarray as xr
from pathlib import Path
from shapely.geometry import Point
from typing import List, Dict, Any, Optional, Union

from .HdfBase import HdfBase
from .HdfUtils import HdfUtils
from .Decorators import standardize_input, log_call
from .LoggingConfig import get_logger
|
12
|
+
|
13
|
+
logger = get_logger(__name__)
|
14
|
+
|
15
|
+
class HdfPump:
    """
    A class for handling pump station related data from HEC-RAS HDF files.

    All methods are static, take a path to a plan HDF file, and return
    pandas / GeoPandas / xarray objects built from the raw HDF datasets.
    """

    @staticmethod
    def _decode(value):
        """Decode an HDF byte string to str; pass any other value through unchanged."""
        return value.decode('utf-8') if isinstance(value, bytes) else value

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pump_stations(hdf_path: Path) -> gpd.GeoDataFrame:
        """
        Extract pump station data from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            gpd.GeoDataFrame: GeoDataFrame of pump station points with a
            sequential ``station_id`` plus one column per attribute field.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                attributes = hdf['/Geometry/Pump Stations/Attributes'][()]
                points = hdf['/Geometry/Pump Stations/Points'][()]

                geometries = [Point(x, y) for x, y in points]

                gdf = gpd.GeoDataFrame(geometry=geometries)
                gdf['station_id'] = range(len(gdf))

                # Copy every field of the structured attributes array into
                # its own GeoDataFrame column.
                for name in attributes.dtype.names:
                    gdf[name] = attributes[name]

                # Set CRS if the HDF file declares a projection.
                crs = HdfUtils.projection(hdf_path)
                if crs:
                    gdf.set_crs(crs, inplace=True)

                return gdf

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pump station data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pump_groups(hdf_path: Path) -> pd.DataFrame:
        """
        Extract pump group data from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            pd.DataFrame: DataFrame of pump group attributes with each row's
            efficiency curve attached as a list in the 'efficiency_curve' column.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                attributes = hdf['/Geometry/Pump Stations/Pump Groups/Attributes'][()]
                efficiency_curves_info = hdf['/Geometry/Pump Stations/Pump Groups/Efficiency Curves Info'][()]
                efficiency_curves_values = hdf['/Geometry/Pump Stations/Pump Groups/Efficiency Curves Values'][()]

                df = pd.DataFrame(attributes)

                # (start index, point count) into the curve values dataset.
                df['efficiency_curve_start'] = efficiency_curves_info[:, 0]
                df['efficiency_curve_count'] = efficiency_curves_info[:, 1]

                # Slice each group's efficiency curve out of the flat values array.
                def get_efficiency_curve(start, count):
                    return efficiency_curves_values[start:start + count].tolist()

                df['efficiency_curve'] = df.apply(
                    lambda row: get_efficiency_curve(row['efficiency_curve_start'],
                                                     row['efficiency_curve_count']),
                    axis=1)

                return df

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pump group data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pump_station_timeseries(hdf_path: Path, pump_station: str) -> xr.DataArray:
        """
        Extract timeseries data for a specific pump station.

        Args:
            hdf_path (Path): Path to the HDF file.
            pump_station (str): Name of the pump station.

        Returns:
            xr.DataArray: DataArray with 'time' and 'variable' dimensions.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
            ValueError: If the specified pump station is not found.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                pumping_stations_path = "/Results/Unsteady/Output/Output Blocks/DSS Hydrograph Output/Unsteady Time Series/Pumping Stations"
                if pump_station not in hdf[pumping_stations_path]:
                    raise ValueError(f"Pump station '{pump_station}' not found in HDF file")

                data_path = f"{pumping_stations_path}/{pump_station}/Structure Variables"
                data = hdf[data_path][()]

                # Timestamps for the unsteady output, one per data row.
                # NOTE: requires the HdfBase import (previously missing,
                # which made this method raise NameError at runtime).
                time = HdfBase._get_unsteady_datetimes(hdf)

                # NOTE(review): assumes the Structure Variables table has
                # exactly these five columns — confirm against the writer.
                da = xr.DataArray(
                    data=data,
                    dims=['time', 'variable'],
                    coords={'time': time, 'variable': ['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on']},
                    name=pump_station
                )

                # BUG FIX: the attribute may already be str depending on the
                # h5py version; decode only when it is bytes.
                da.attrs['units'] = HdfPump._decode(hdf[data_path].attrs.get('Variable_Unit', b''))
                da.attrs['pump_station'] = pump_station

                return da

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except ValueError as e:
            logger.error(str(e))
            raise
        except Exception as e:
            logger.error(f"Error extracting pump station timeseries data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pump_station_summary(hdf_path: Path) -> pd.DataFrame:
        """
        Extract summary data for pump stations from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            pd.DataFrame: DataFrame containing pump station summary data;
            empty if the summary table is not present.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                summary_path = "/Results/Unsteady/Summary/Pump Station"
                if summary_path not in hdf:
                    logger.warning("Pump Station summary data not found in HDF file")
                    return pd.DataFrame()

                summary_data = hdf[summary_path][()]

                df = pd.DataFrame(summary_data)

                # BUG FIX: structured-array column names arrive as str in
                # pandas; the unconditional .decode() raised AttributeError.
                # Decode only genuine byte strings, in names and cell values.
                df.columns = [HdfPump._decode(col) for col in df.columns]
                for col in df.columns:
                    if df[col].dtype == object:
                        df[col] = df[col].map(HdfPump._decode)

                return df

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pump station summary data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pump_operation_data(hdf_path: Path, pump_station: str) -> pd.DataFrame:
        """
        Extract pump operation data for a specific pump station.

        Args:
            hdf_path (Path): Path to the HDF file.
            pump_station (str): Name of the pump station.

        Returns:
            pd.DataFrame: DataFrame of operation records with a 'Time' column.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
            ValueError: If the specified pump station is not found.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                pump_stations_path = "/Results/Unsteady/Output/Output Blocks/DSS Profile Output/Unsteady Time Series/Pumping Stations"
                if pump_station not in hdf[pump_stations_path]:
                    raise ValueError(f"Pump station '{pump_station}' not found in HDF file")

                data_path = f"{pump_stations_path}/{pump_station}/Structure Variables"
                data = hdf[data_path][()]

                # Timestamps for the unsteady output, one per data row.
                time = HdfBase._get_unsteady_datetimes(hdf)

                # NOTE(review): assumes the same five-column layout as
                # get_pump_station_timeseries — confirm against the writer.
                df = pd.DataFrame(data, columns=['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on'])
                df['Time'] = time

                return df

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except ValueError as e:
            logger.error(str(e))
            raise
        except Exception as e:
            logger.error(f"Error extracting pump operation data: {e}")
            raise
|
@@ -0,0 +1,443 @@
|
|
1
|
+
"""
|
2
|
+
Class: HdfResultsXsec
|
3
|
+
|
4
|
+
Attribution: A substantial amount of code in this file is sourced or derived
|
5
|
+
from the https://github.com/fema-ffrd/rashdf library,
|
6
|
+
released under MIT license and Copyright (c) 2024 fema-ffrd
|
7
|
+
|
8
|
+
The file has been forked and modified for use in RAS Commander.
|
9
|
+
"""
|
10
|
+
|
11
|
+
from pathlib import Path
|
12
|
+
from typing import Union, Optional, List
|
13
|
+
|
14
|
+
import h5py
|
15
|
+
import numpy as np
|
16
|
+
import pandas as pd
|
17
|
+
import xarray as xr
|
18
|
+
|
19
|
+
from .HdfBase import HdfBase
|
20
|
+
from .HdfUtils import HdfUtils
|
21
|
+
from .Decorators import standardize_input, log_call
|
22
|
+
from .LoggingConfig import get_logger
|
23
|
+
|
24
|
+
logger = get_logger(__name__)
|
25
|
+
|
26
|
+
class HdfResultsXsec:
|
27
|
+
"""
|
28
|
+
A class for handling cross-section results from HEC-RAS HDF files.
|
29
|
+
|
30
|
+
This class provides methods to extract and process steady flow simulation results
|
31
|
+
for cross-sections, including water surface elevations, flow rates, energy grades,
|
32
|
+
and additional parameters such as encroachment stations and velocities.
|
33
|
+
|
34
|
+
The class relies on the HdfBase and HdfUtils classes for core HDF file operations
|
35
|
+
and utility functions.
|
36
|
+
|
37
|
+
Attributes:
|
38
|
+
None
|
39
|
+
|
40
|
+
Methods:
|
41
|
+
steady_profile_xs_output: Extract steady profile cross-section output for a specified variable.
|
42
|
+
cross_sections_wsel: Get water surface elevation data for cross-sections.
|
43
|
+
cross_sections_flow: Get flow data for cross-sections.
|
44
|
+
cross_sections_energy_grade: Get energy grade data for cross-sections.
|
45
|
+
cross_sections_additional_enc_station_left: Get left encroachment station data for cross-sections.
|
46
|
+
cross_sections_additional_enc_station_right: Get right encroachment station data for cross-sections.
|
47
|
+
cross_sections_additional_area_total: Get total ineffective area data for cross-sections.
|
48
|
+
cross_sections_additional_velocity_total: Get total velocity data for cross-sections.
|
49
|
+
"""
|
50
|
+
|
51
|
+
@staticmethod
|
52
|
+
@standardize_input(file_type='plan_hdf')
|
53
|
+
def steady_profile_xs_output(hdf_path: Path, var: str, round_to: int = 2) -> pd.DataFrame:
|
54
|
+
"""
|
55
|
+
Create a DataFrame from steady cross section results based on the specified variable.
|
56
|
+
|
57
|
+
Parameters:
|
58
|
+
----------
|
59
|
+
hdf_path : Path
|
60
|
+
Path to the HEC-RAS plan HDF file.
|
61
|
+
var : str
|
62
|
+
The variable to extract from the steady cross section results.
|
63
|
+
round_to : int, optional
|
64
|
+
Number of decimal places to round the results to (default is 2).
|
65
|
+
|
66
|
+
Returns:
|
67
|
+
-------
|
68
|
+
pd.DataFrame
|
69
|
+
DataFrame containing the steady cross section results for the specified variable.
|
70
|
+
"""
|
71
|
+
XS_STEADY_OUTPUT_ADDITIONAL = [
|
72
|
+
"Additional Encroachment Station Left",
|
73
|
+
"Additional Encroachment Station Right",
|
74
|
+
"Additional Area Ineffective Total",
|
75
|
+
"Additional Velocity Total",
|
76
|
+
]
|
77
|
+
|
78
|
+
try:
|
79
|
+
with h5py.File(hdf_path, 'r') as hdf_file:
|
80
|
+
# Determine the correct path based on the variable
|
81
|
+
if var in XS_STEADY_OUTPUT_ADDITIONAL:
|
82
|
+
path = f"/Results/Steady/Cross Sections/Additional Output/{var}"
|
83
|
+
else:
|
84
|
+
path = f"/Results/Steady/Cross Sections/{var}"
|
85
|
+
|
86
|
+
# Check if the path exists in the HDF file
|
87
|
+
if path not in hdf_file:
|
88
|
+
return pd.DataFrame()
|
89
|
+
|
90
|
+
# Get the profile names
|
91
|
+
profiles = HdfBase.steady_flow_names(hdf_path)
|
92
|
+
|
93
|
+
# Extract the steady data
|
94
|
+
steady_data = hdf_file[path]
|
95
|
+
|
96
|
+
# Create a DataFrame with profiles as index
|
97
|
+
df = pd.DataFrame(steady_data, index=profiles)
|
98
|
+
|
99
|
+
# Transpose the DataFrame and round values
|
100
|
+
df_t = df.T.copy()
|
101
|
+
for p in profiles:
|
102
|
+
df_t[p] = df_t[p].apply(lambda x: round(x, round_to))
|
103
|
+
|
104
|
+
return df_t
|
105
|
+
except Exception as e:
|
106
|
+
HdfUtils.logger.error(f"Failed to get steady profile cross section output: {str(e)}")
|
107
|
+
return pd.DataFrame()
|
108
|
+
|
109
|
+
@staticmethod
|
110
|
+
@standardize_input(file_type='plan_hdf')
|
111
|
+
def cross_sections_wsel(hdf_path: Path) -> pd.DataFrame:
|
112
|
+
"""
|
113
|
+
Return the water surface elevation information for each 1D Cross Section.
|
114
|
+
|
115
|
+
Parameters:
|
116
|
+
----------
|
117
|
+
hdf_path : Path
|
118
|
+
Path to the HEC-RAS plan HDF file.
|
119
|
+
|
120
|
+
Returns:
|
121
|
+
-------
|
122
|
+
pd.DataFrame
|
123
|
+
A DataFrame containing the water surface elevations for each cross section and event.
|
124
|
+
"""
|
125
|
+
return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Water Surface")
|
126
|
+
|
127
|
+
@staticmethod
|
128
|
+
@standardize_input(file_type='plan_hdf')
|
129
|
+
def cross_sections_flow(hdf_path: Path) -> pd.DataFrame:
|
130
|
+
"""
|
131
|
+
Return the Flow information for each 1D Cross Section.
|
132
|
+
|
133
|
+
Parameters:
|
134
|
+
----------
|
135
|
+
hdf_path : Path
|
136
|
+
Path to the HEC-RAS plan HDF file.
|
137
|
+
|
138
|
+
Returns:
|
139
|
+
-------
|
140
|
+
pd.DataFrame
|
141
|
+
A DataFrame containing the flow for each cross section and event.
|
142
|
+
"""
|
143
|
+
return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Flow")
|
144
|
+
|
145
|
+
@staticmethod
|
146
|
+
@standardize_input(file_type='plan_hdf')
|
147
|
+
def cross_sections_energy_grade(hdf_path: Path) -> pd.DataFrame:
|
148
|
+
"""
|
149
|
+
Return the energy grade information for each 1D Cross Section.
|
150
|
+
|
151
|
+
Parameters:
|
152
|
+
----------
|
153
|
+
hdf_path : Path
|
154
|
+
Path to the HEC-RAS plan HDF file.
|
155
|
+
|
156
|
+
Returns:
|
157
|
+
-------
|
158
|
+
pd.DataFrame
|
159
|
+
A DataFrame containing the energy grade for each cross section and event.
|
160
|
+
"""
|
161
|
+
return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Energy Grade")
|
162
|
+
|
163
|
+
@staticmethod
|
164
|
+
@standardize_input(file_type='plan_hdf')
|
165
|
+
def cross_sections_additional_enc_station_left(hdf_path: Path) -> pd.DataFrame:
|
166
|
+
"""
|
167
|
+
Return the left side encroachment information for a floodway plan hdf.
|
168
|
+
|
169
|
+
Parameters:
|
170
|
+
----------
|
171
|
+
hdf_path : Path
|
172
|
+
Path to the HEC-RAS plan HDF file.
|
173
|
+
|
174
|
+
Returns:
|
175
|
+
-------
|
176
|
+
pd.DataFrame
|
177
|
+
A DataFrame containing the cross sections left side encroachment stations.
|
178
|
+
"""
|
179
|
+
return HdfResultsXsec.steady_profile_xs_output(
|
180
|
+
hdf_path, "Encroachment Station Left"
|
181
|
+
)
|
182
|
+
|
183
|
+
@staticmethod
|
184
|
+
@standardize_input(file_type='plan_hdf')
|
185
|
+
def cross_sections_additional_enc_station_right(hdf_path: Path) -> pd.DataFrame:
|
186
|
+
"""
|
187
|
+
Return the right side encroachment information for a floodway plan hdf.
|
188
|
+
|
189
|
+
Parameters:
|
190
|
+
----------
|
191
|
+
hdf_path : Path
|
192
|
+
Path to the HEC-RAS plan HDF file.
|
193
|
+
|
194
|
+
Returns:
|
195
|
+
-------
|
196
|
+
pd.DataFrame
|
197
|
+
A DataFrame containing the cross sections right side encroachment stations.
|
198
|
+
"""
|
199
|
+
return HdfResultsXsec.steady_profile_xs_output(
|
200
|
+
hdf_path, "Encroachment Station Right"
|
201
|
+
)
|
202
|
+
|
203
|
+
@staticmethod
|
204
|
+
@standardize_input(file_type='plan_hdf')
|
205
|
+
def cross_sections_additional_area_total(hdf_path: Path) -> pd.DataFrame:
|
206
|
+
"""
|
207
|
+
Return the 1D cross section area for each profile.
|
208
|
+
|
209
|
+
Parameters:
|
210
|
+
----------
|
211
|
+
hdf_path : Path
|
212
|
+
Path to the HEC-RAS plan HDF file.
|
213
|
+
|
214
|
+
Returns:
|
215
|
+
-------
|
216
|
+
pd.DataFrame
|
217
|
+
A DataFrame containing the wet area inside the cross sections.
|
218
|
+
"""
|
219
|
+
return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Area Ineffective Total")
|
220
|
+
|
221
|
+
@staticmethod
|
222
|
+
@standardize_input(file_type='plan_hdf')
|
223
|
+
def cross_sections_additional_velocity_total(hdf_path: Path) -> pd.DataFrame:
|
224
|
+
"""
|
225
|
+
Return the 1D cross section velocity for each profile.
|
226
|
+
|
227
|
+
Parameters:
|
228
|
+
----------
|
229
|
+
hdf_path : Path
|
230
|
+
Path to the HEC-RAS plan HDF file.
|
231
|
+
|
232
|
+
Returns:
|
233
|
+
-------
|
234
|
+
pd.DataFrame
|
235
|
+
A DataFrame containing the velocity inside the cross sections.
|
236
|
+
"""
|
237
|
+
return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Velocity Total")
|
238
|
+
|
239
|
+
|
240
|
+
@staticmethod
|
241
|
+
@log_call
|
242
|
+
@standardize_input(file_type='plan_hdf')
|
243
|
+
def get_pipe_network_summary(hdf_path: Path) -> pd.DataFrame:
|
244
|
+
"""
|
245
|
+
Extract summary data for pipe networks from the HDF file.
|
246
|
+
|
247
|
+
Args:
|
248
|
+
hdf_path (Path): Path to the HDF file.
|
249
|
+
|
250
|
+
Returns:
|
251
|
+
pd.DataFrame: DataFrame containing pipe network summary data.
|
252
|
+
|
253
|
+
Raises:
|
254
|
+
KeyError: If the required datasets are not found in the HDF file.
|
255
|
+
"""
|
256
|
+
try:
|
257
|
+
with h5py.File(hdf_path, 'r') as hdf:
|
258
|
+
# Extract summary data
|
259
|
+
summary_path = "/Results/Unsteady/Summary/Pipe Network"
|
260
|
+
if summary_path not in hdf:
|
261
|
+
logger.warning("Pipe Network summary data not found in HDF file")
|
262
|
+
return pd.DataFrame()
|
263
|
+
|
264
|
+
summary_data = hdf[summary_path][()]
|
265
|
+
|
266
|
+
# Create DataFrame
|
267
|
+
df = pd.DataFrame(summary_data)
|
268
|
+
|
269
|
+
# Convert column names
|
270
|
+
df.columns = [col.decode('utf-8') if isinstance(col, bytes) else col for col in df.columns]
|
271
|
+
|
272
|
+
# Convert byte string values to regular strings
|
273
|
+
for col in df.columns:
|
274
|
+
if df[col].dtype == object:
|
275
|
+
df[col] = df[col].apply(lambda x: x.decode('utf-8') if isinstance(x, bytes) else x)
|
276
|
+
|
277
|
+
return df
|
278
|
+
|
279
|
+
except KeyError as e:
|
280
|
+
logger.error(f"Required dataset not found in HDF file: {e}")
|
281
|
+
raise
|
282
|
+
except Exception as e:
|
283
|
+
logger.error(f"Error extracting pipe network summary data: {e}")
|
284
|
+
raise
|
285
|
+
|
286
|
+
@staticmethod
|
287
|
+
@log_call
|
288
|
+
@standardize_input(file_type='plan_hdf')
|
289
|
+
def get_pump_station_summary(hdf_path: Path) -> pd.DataFrame:
|
290
|
+
"""
|
291
|
+
Extract summary data for pump stations from the HDF file.
|
292
|
+
|
293
|
+
Args:
|
294
|
+
hdf_path (Path): Path to the HDF file.
|
295
|
+
|
296
|
+
Returns:
|
297
|
+
pd.DataFrame: DataFrame containing pump station summary data.
|
298
|
+
|
299
|
+
Raises:
|
300
|
+
KeyError: If the required datasets are not found in the HDF file.
|
301
|
+
"""
|
302
|
+
try:
|
303
|
+
with h5py.File(hdf_path, 'r') as hdf:
|
304
|
+
# Extract summary data
|
305
|
+
summary_path = "/Results/Unsteady/Summary/Pump Station"
|
306
|
+
if summary_path not in hdf:
|
307
|
+
logger.warning("Pump Station summary data not found in HDF file")
|
308
|
+
return pd.DataFrame()
|
309
|
+
|
310
|
+
summary_data = hdf[summary_path][()]
|
311
|
+
|
312
|
+
# Create DataFrame
|
313
|
+
df = pd.DataFrame(summary_data)
|
314
|
+
|
315
|
+
# Convert column names
|
316
|
+
df.columns = [col.decode('utf-8') if isinstance(col, bytes) else col for col in df.columns]
|
317
|
+
|
318
|
+
# Convert byte string values to regular strings
|
319
|
+
for col in df.columns:
|
320
|
+
if df[col].dtype == object:
|
321
|
+
df[col] = df[col].apply(lambda x: x.decode('utf-8') if isinstance(x, bytes) else x)
|
322
|
+
|
323
|
+
return df
|
324
|
+
|
325
|
+
except KeyError as e:
|
326
|
+
logger.error(f"Required dataset not found in HDF file: {e}")
|
327
|
+
raise
|
328
|
+
except Exception as e:
|
329
|
+
logger.error(f"Error extracting pump station summary data: {e}")
|
330
|
+
raise
|
331
|
+
|
332
|
+
@staticmethod
|
333
|
+
@log_call
|
334
|
+
@standardize_input(file_type='plan_hdf')
|
335
|
+
def get_pipe_network_profile_output(hdf_path: Path) -> pd.DataFrame:
|
336
|
+
"""
|
337
|
+
Extract pipe network profile output data from the HDF file.
|
338
|
+
|
339
|
+
Args:
|
340
|
+
hdf_path (Path): Path to the HDF file.
|
341
|
+
|
342
|
+
Returns:
|
343
|
+
pd.DataFrame: DataFrame containing pipe network profile output data.
|
344
|
+
|
345
|
+
Raises:
|
346
|
+
KeyError: If the required datasets are not found in the HDF file.
|
347
|
+
"""
|
348
|
+
try:
|
349
|
+
with h5py.File(hdf_path, 'r') as hdf:
|
350
|
+
# Extract profile output data
|
351
|
+
profile_path = "/Results/Unsteady/Output/Output Blocks/DSS Profile Output/Unsteady Time Series/Pipe Networks"
|
352
|
+
if profile_path not in hdf:
|
353
|
+
logger.warning("Pipe Network profile output data not found in HDF file")
|
354
|
+
return pd.DataFrame()
|
355
|
+
|
356
|
+
# Initialize an empty list to store data from all pipe networks
|
357
|
+
all_data = []
|
358
|
+
|
359
|
+
# Iterate through all pipe networks
|
360
|
+
for network in hdf[profile_path].keys():
|
361
|
+
network_path = f"{profile_path}/{network}"
|
362
|
+
|
363
|
+
# Extract data for each variable
|
364
|
+
for var in hdf[network_path].keys():
|
365
|
+
data = hdf[f"{network_path}/{var}"][()]
|
366
|
+
|
367
|
+
# Create a DataFrame for this variable
|
368
|
+
df = pd.DataFrame(data)
|
369
|
+
df['Network'] = network
|
370
|
+
df['Variable'] = var
|
371
|
+
|
372
|
+
all_data.append(df)
|
373
|
+
|
374
|
+
# Concatenate all DataFrames
|
375
|
+
result_df = pd.concat(all_data, ignore_index=True)
|
376
|
+
|
377
|
+
# Add time information
|
378
|
+
time = HdfBase._get_unsteady_datetimes(hdf)
|
379
|
+
result_df['Time'] = [time[i] for i in result_df.index]
|
380
|
+
|
381
|
+
return result_df
|
382
|
+
|
383
|
+
except KeyError as e:
|
384
|
+
logger.error(f"Required dataset not found in HDF file: {e}")
|
385
|
+
raise
|
386
|
+
except Exception as e:
|
387
|
+
logger.error(f"Error extracting pipe network profile output data: {e}")
|
388
|
+
raise
|
389
|
+
|
390
|
+
@staticmethod
|
391
|
+
@log_call
|
392
|
+
@standardize_input(file_type='plan_hdf')
|
393
|
+
def get_pump_station_profile_output(hdf_path: Path) -> pd.DataFrame:
|
394
|
+
"""
|
395
|
+
Extract pump station profile output data from the HDF file.
|
396
|
+
|
397
|
+
Args:
|
398
|
+
hdf_path (Path): Path to the HDF file.
|
399
|
+
|
400
|
+
Returns:
|
401
|
+
pd.DataFrame: DataFrame containing pump station profile output data.
|
402
|
+
|
403
|
+
Raises:
|
404
|
+
KeyError: If the required datasets are not found in the HDF file.
|
405
|
+
"""
|
406
|
+
try:
|
407
|
+
with h5py.File(hdf_path, 'r') as hdf:
|
408
|
+
# Extract profile output data
|
409
|
+
profile_path = "/Results/Unsteady/Output/Output Blocks/DSS Profile Output/Unsteady Time Series/Pumping Stations"
|
410
|
+
if profile_path not in hdf:
|
411
|
+
logger.warning("Pump Station profile output data not found in HDF file")
|
412
|
+
return pd.DataFrame()
|
413
|
+
|
414
|
+
# Initialize an empty list to store data from all pump stations
|
415
|
+
all_data = []
|
416
|
+
|
417
|
+
# Iterate through all pump stations
|
418
|
+
for station in hdf[profile_path].keys():
|
419
|
+
station_path = f"{profile_path}/{station}/Structure Variables"
|
420
|
+
|
421
|
+
data = hdf[station_path][()]
|
422
|
+
|
423
|
+
# Create a DataFrame for this pump station
|
424
|
+
df = pd.DataFrame(data, columns=['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on'])
|
425
|
+
df['Station'] = station
|
426
|
+
|
427
|
+
all_data.append(df)
|
428
|
+
|
429
|
+
# Concatenate all DataFrames
|
430
|
+
result_df = pd.concat(all_data, ignore_index=True)
|
431
|
+
|
432
|
+
# Add time information
|
433
|
+
time = HdfBase._get_unsteady_datetimes(hdf)
|
434
|
+
result_df['Time'] = [time[i] for i in result_df.index]
|
435
|
+
|
436
|
+
return result_df
|
437
|
+
|
438
|
+
except KeyError as e:
|
439
|
+
logger.error(f"Required dataset not found in HDF file: {e}")
|
440
|
+
raise
|
441
|
+
except Exception as e:
|
442
|
+
logger.error(f"Error extracting pump station profile output data: {e}")
|
443
|
+
raise
|
@@ -32,6 +32,8 @@ from .HdfResultsXsec import HdfResultsXsec
|
|
32
32
|
from .HdfStruc import HdfStruc
|
33
33
|
from .HdfUtils import HdfUtils
|
34
34
|
from .HdfXsec import HdfXsec
|
35
|
+
from .HdfPump import HdfPump
|
36
|
+
from .HdfPipe import HdfPipe
|
35
37
|
|
36
38
|
# Define __all__ to specify what should be imported when using "from ras_commander import *"
|
37
39
|
__all__ = [
|
@@ -45,6 +47,8 @@ __all__ = [
|
|
45
47
|
"HdfStruc",
|
46
48
|
"HdfUtils",
|
47
49
|
"HdfXsec",
|
50
|
+
"HdfPump",
|
51
|
+
"HdfPipe",
|
48
52
|
"standardize_input",
|
49
53
|
"ras",
|
50
54
|
"init_ras_project",
|
@@ -6,7 +6,9 @@ ras_commander/Decorators.py
|
|
6
6
|
ras_commander/HdfBase.py
|
7
7
|
ras_commander/HdfBndry.py
|
8
8
|
ras_commander/HdfMesh.py
|
9
|
+
ras_commander/HdfPipe.py
|
9
10
|
ras_commander/HdfPlan.py
|
11
|
+
ras_commander/HdfPump.py
|
10
12
|
ras_commander/HdfResultsMesh.py
|
11
13
|
ras_commander/HdfResultsPlan.py
|
12
14
|
ras_commander/HdfResultsXsec.py
|
@@ -1,237 +0,0 @@
|
|
1
|
-
"""
|
2
|
-
Class: HdfResultsXsec
|
3
|
-
|
4
|
-
Attribution: A substantial amount of code in this file is sourced or derived
|
5
|
-
from the https://github.com/fema-ffrd/rashdf library,
|
6
|
-
released under MIT license and Copyright (c) 2024 fema-ffrd
|
7
|
-
|
8
|
-
The file has been forked and modified for use in RAS Commander.
|
9
|
-
"""
|
10
|
-
|
11
|
-
import h5py
|
12
|
-
import numpy as np
|
13
|
-
import pandas as pd
|
14
|
-
from pathlib import Path
|
15
|
-
from typing import Union, Optional, List
|
16
|
-
from .HdfBase import HdfBase
|
17
|
-
from .HdfUtils import HdfUtils
|
18
|
-
from .Decorators import standardize_input, log_call
|
19
|
-
from .LoggingConfig import setup_logging, get_logger
|
20
|
-
import xarray as xr
|
21
|
-
|
22
|
-
logger = get_logger(__name__)
|
23
|
-
|
24
|
-
|
25
|
-
class HdfResultsXsec:
|
26
|
-
"""
|
27
|
-
A class for handling cross-section results from HEC-RAS HDF files.
|
28
|
-
|
29
|
-
This class provides methods to extract and process steady flow simulation results
|
30
|
-
for cross-sections, including water surface elevations, flow rates, energy grades,
|
31
|
-
and additional parameters such as encroachment stations and velocities.
|
32
|
-
|
33
|
-
The class relies on the HdfBase and HdfUtils classes for core HDF file operations
|
34
|
-
and utility functions.
|
35
|
-
|
36
|
-
Attributes:
|
37
|
-
None
|
38
|
-
|
39
|
-
Methods:
|
40
|
-
steady_profile_xs_output: Extract steady profile cross-section output for a specified variable.
|
41
|
-
cross_sections_wsel: Get water surface elevation data for cross-sections.
|
42
|
-
cross_sections_flow: Get flow data for cross-sections.
|
43
|
-
cross_sections_energy_grade: Get energy grade data for cross-sections.
|
44
|
-
cross_sections_additional_enc_station_left: Get left encroachment station data for cross-sections.
|
45
|
-
cross_sections_additional_enc_station_right: Get right encroachment station data for cross-sections.
|
46
|
-
cross_sections_additional_area_total: Get total ineffective area data for cross-sections.
|
47
|
-
cross_sections_additional_velocity_total: Get total velocity data for cross-sections.
|
48
|
-
"""
|
49
|
-
|
50
|
-
@staticmethod
|
51
|
-
@standardize_input(file_type='plan_hdf')
|
52
|
-
def steady_profile_xs_output(hdf_path: Path, var: str, round_to: int = 2) -> pd.DataFrame:
|
53
|
-
"""
|
54
|
-
Create a DataFrame from steady cross section results based on the specified variable.
|
55
|
-
|
56
|
-
Parameters:
|
57
|
-
----------
|
58
|
-
hdf_path : Path
|
59
|
-
Path to the HEC-RAS plan HDF file.
|
60
|
-
var : str
|
61
|
-
The variable to extract from the steady cross section results.
|
62
|
-
round_to : int, optional
|
63
|
-
Number of decimal places to round the results to (default is 2).
|
64
|
-
|
65
|
-
Returns:
|
66
|
-
-------
|
67
|
-
pd.DataFrame
|
68
|
-
DataFrame containing the steady cross section results for the specified variable.
|
69
|
-
"""
|
70
|
-
XS_STEADY_OUTPUT_ADDITIONAL = [
|
71
|
-
"Additional Encroachment Station Left",
|
72
|
-
"Additional Encroachment Station Right",
|
73
|
-
"Additional Area Ineffective Total",
|
74
|
-
"Additional Velocity Total",
|
75
|
-
]
|
76
|
-
|
77
|
-
try:
|
78
|
-
with h5py.File(hdf_path, 'r') as hdf_file:
|
79
|
-
# Determine the correct path based on the variable
|
80
|
-
if var in XS_STEADY_OUTPUT_ADDITIONAL:
|
81
|
-
path = f"/Results/Steady/Cross Sections/Additional Output/{var}"
|
82
|
-
else:
|
83
|
-
path = f"/Results/Steady/Cross Sections/{var}"
|
84
|
-
|
85
|
-
# Check if the path exists in the HDF file
|
86
|
-
if path not in hdf_file:
|
87
|
-
return pd.DataFrame()
|
88
|
-
|
89
|
-
# Get the profile names
|
90
|
-
profiles = HdfBase.steady_flow_names(hdf_path)
|
91
|
-
|
92
|
-
# Extract the steady data
|
93
|
-
steady_data = hdf_file[path]
|
94
|
-
|
95
|
-
# Create a DataFrame with profiles as index
|
96
|
-
df = pd.DataFrame(steady_data, index=profiles)
|
97
|
-
|
98
|
-
# Transpose the DataFrame and round values
|
99
|
-
df_t = df.T.copy()
|
100
|
-
for p in profiles:
|
101
|
-
df_t[p] = df_t[p].apply(lambda x: round(x, round_to))
|
102
|
-
|
103
|
-
return df_t
|
104
|
-
except Exception as e:
|
105
|
-
HdfUtils.logger.error(f"Failed to get steady profile cross section output: {str(e)}")
|
106
|
-
return pd.DataFrame()
|
107
|
-
|
108
|
-
@staticmethod
|
109
|
-
@standardize_input(file_type='plan_hdf')
|
110
|
-
def cross_sections_wsel(hdf_path: Path) -> pd.DataFrame:
|
111
|
-
"""
|
112
|
-
Return the water surface elevation information for each 1D Cross Section.
|
113
|
-
|
114
|
-
Parameters:
|
115
|
-
----------
|
116
|
-
hdf_path : Path
|
117
|
-
Path to the HEC-RAS plan HDF file.
|
118
|
-
|
119
|
-
Returns:
|
120
|
-
-------
|
121
|
-
pd.DataFrame
|
122
|
-
A DataFrame containing the water surface elevations for each cross section and event.
|
123
|
-
"""
|
124
|
-
return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Water Surface")
|
125
|
-
|
126
|
-
@staticmethod
|
127
|
-
@standardize_input(file_type='plan_hdf')
|
128
|
-
def cross_sections_flow(hdf_path: Path) -> pd.DataFrame:
|
129
|
-
"""
|
130
|
-
Return the Flow information for each 1D Cross Section.
|
131
|
-
|
132
|
-
Parameters:
|
133
|
-
----------
|
134
|
-
hdf_path : Path
|
135
|
-
Path to the HEC-RAS plan HDF file.
|
136
|
-
|
137
|
-
Returns:
|
138
|
-
-------
|
139
|
-
pd.DataFrame
|
140
|
-
A DataFrame containing the flow for each cross section and event.
|
141
|
-
"""
|
142
|
-
return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Flow")
|
143
|
-
|
144
|
-
@staticmethod
|
145
|
-
@standardize_input(file_type='plan_hdf')
|
146
|
-
def cross_sections_energy_grade(hdf_path: Path) -> pd.DataFrame:
|
147
|
-
"""
|
148
|
-
Return the energy grade information for each 1D Cross Section.
|
149
|
-
|
150
|
-
Parameters:
|
151
|
-
----------
|
152
|
-
hdf_path : Path
|
153
|
-
Path to the HEC-RAS plan HDF file.
|
154
|
-
|
155
|
-
Returns:
|
156
|
-
-------
|
157
|
-
pd.DataFrame
|
158
|
-
A DataFrame containing the energy grade for each cross section and event.
|
159
|
-
"""
|
160
|
-
return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Energy Grade")
|
161
|
-
|
162
|
-
@staticmethod
|
163
|
-
@standardize_input(file_type='plan_hdf')
|
164
|
-
def cross_sections_additional_enc_station_left(hdf_path: Path) -> pd.DataFrame:
|
165
|
-
"""
|
166
|
-
Return the left side encroachment information for a floodway plan hdf.
|
167
|
-
|
168
|
-
Parameters:
|
169
|
-
----------
|
170
|
-
hdf_path : Path
|
171
|
-
Path to the HEC-RAS plan HDF file.
|
172
|
-
|
173
|
-
Returns:
|
174
|
-
-------
|
175
|
-
pd.DataFrame
|
176
|
-
A DataFrame containing the cross sections left side encroachment stations.
|
177
|
-
"""
|
178
|
-
return HdfResultsXsec.steady_profile_xs_output(
|
179
|
-
hdf_path, "Encroachment Station Left"
|
180
|
-
)
|
181
|
-
|
182
|
-
@staticmethod
|
183
|
-
@standardize_input(file_type='plan_hdf')
|
184
|
-
def cross_sections_additional_enc_station_right(hdf_path: Path) -> pd.DataFrame:
|
185
|
-
"""
|
186
|
-
Return the right side encroachment information for a floodway plan hdf.
|
187
|
-
|
188
|
-
Parameters:
|
189
|
-
----------
|
190
|
-
hdf_path : Path
|
191
|
-
Path to the HEC-RAS plan HDF file.
|
192
|
-
|
193
|
-
Returns:
|
194
|
-
-------
|
195
|
-
pd.DataFrame
|
196
|
-
A DataFrame containing the cross sections right side encroachment stations.
|
197
|
-
"""
|
198
|
-
return HdfResultsXsec.steady_profile_xs_output(
|
199
|
-
hdf_path, "Encroachment Station Right"
|
200
|
-
)
|
201
|
-
|
202
|
-
@staticmethod
|
203
|
-
@standardize_input(file_type='plan_hdf')
|
204
|
-
def cross_sections_additional_area_total(hdf_path: Path) -> pd.DataFrame:
|
205
|
-
"""
|
206
|
-
Return the 1D cross section area for each profile.
|
207
|
-
|
208
|
-
Parameters:
|
209
|
-
----------
|
210
|
-
hdf_path : Path
|
211
|
-
Path to the HEC-RAS plan HDF file.
|
212
|
-
|
213
|
-
Returns:
|
214
|
-
-------
|
215
|
-
pd.DataFrame
|
216
|
-
A DataFrame containing the wet area inside the cross sections.
|
217
|
-
"""
|
218
|
-
return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Area Ineffective Total")
|
219
|
-
|
220
|
-
@staticmethod
|
221
|
-
@standardize_input(file_type='plan_hdf')
|
222
|
-
def cross_sections_additional_velocity_total(hdf_path: Path) -> pd.DataFrame:
|
223
|
-
"""
|
224
|
-
Return the 1D cross section velocity for each profile.
|
225
|
-
|
226
|
-
Parameters:
|
227
|
-
----------
|
228
|
-
hdf_path : Path
|
229
|
-
Path to the HEC-RAS plan HDF file.
|
230
|
-
|
231
|
-
Returns:
|
232
|
-
-------
|
233
|
-
pd.DataFrame
|
234
|
-
A DataFrame containing the velocity inside the cross sections.
|
235
|
-
"""
|
236
|
-
return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Velocity Total")
|
237
|
-
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|