ras-commander 0.44.0__py3-none-any.whl → 0.45.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ras_commander/HdfPipe.py +262 -0
- ras_commander/HdfPump.py +255 -0
- ras_commander/HdfResultsPlan.py +3 -0
- ras_commander/HdfResultsXsec.py +211 -5
- ras_commander/__init__.py +4 -0
- {ras_commander-0.44.0.dist-info → ras_commander-0.45.0.dist-info}/METADATA +1 -1
- {ras_commander-0.44.0.dist-info → ras_commander-0.45.0.dist-info}/RECORD +10 -8
- {ras_commander-0.44.0.dist-info → ras_commander-0.45.0.dist-info}/LICENSE +0 -0
- {ras_commander-0.44.0.dist-info → ras_commander-0.45.0.dist-info}/WHEEL +0 -0
- {ras_commander-0.44.0.dist-info → ras_commander-0.45.0.dist-info}/top_level.txt +0 -0
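The substantive changes in this release are two new modules, HdfPipe.py and HdfPump.py, plus pipe- and pump-related summary and profile-output helpers appended to HdfResultsXsec.py; both new classes are exported from the package root (see the __init__.py diff below). A minimal sketch of the new surface, assuming the @standardize_input decorator accepts a plain file path (the path itself is a placeholder):

    from ras_commander import HdfPipe, HdfPump

    # Placeholder path to a HEC-RAS plan HDF file.
    conduits = HdfPipe.get_pipe_conduits("path/to/project.p01.hdf")
    stations = HdfPump.get_pump_stations("path/to/project.p01.hdf")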
ras_commander/HdfPipe.py
ADDED
@@ -0,0 +1,262 @@
+import h5py
+import numpy as np
+import pandas as pd
+import geopandas as gpd
+import xarray as xr
+from pathlib import Path
+from shapely.geometry import LineString, Point
+from typing import List, Dict, Any, Optional, Union
+from .HdfBase import HdfBase
+from .HdfUtils import HdfUtils
+from .Decorators import standardize_input, log_call
+from .LoggingConfig import get_logger
+
+logger = get_logger(__name__)
+
+class HdfPipe:
+    """
+    A class for handling pipe network related data from HEC-RAS HDF files.
+    """
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pipe_conduits(hdf_path: Path) -> gpd.GeoDataFrame:
+        """
+        Extract pipe conduit data from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            gpd.GeoDataFrame: GeoDataFrame containing pipe conduit data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract pipe conduit data
+                polyline_info = hdf['/Geometry/Pipe Conduits/Polyline Info'][()]
+                polyline_points = hdf['/Geometry/Pipe Conduits/Polyline Points'][()]
+                terrain_profiles_info = hdf['/Geometry/Pipe Conduits/Terrain Profiles Info'][()]
+
+                # Create geometries
+                geometries = []
+                for start, count, _, _ in polyline_info:
+                    points = polyline_points[start:start+count]
+                    geometries.append(LineString(points))
+
+                # Create GeoDataFrame
+                gdf = gpd.GeoDataFrame(geometry=geometries)
+                gdf['conduit_id'] = range(len(gdf))
+                gdf['terrain_profile_start'] = terrain_profiles_info[:, 0]
+                gdf['terrain_profile_count'] = terrain_profiles_info[:, 1]
+
+                # Set CRS if available
+                crs = HdfUtils.projection(hdf_path)
+                if crs:
+                    gdf.set_crs(crs, inplace=True)
+
+                return gdf
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pipe conduit data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pipe_nodes(hdf_path: Path) -> gpd.GeoDataFrame:
+        """
+        Extract pipe node data from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            gpd.GeoDataFrame: GeoDataFrame containing pipe node data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract pipe node data
+                points = hdf['/Geometry/Pipe Nodes/Points'][()]
+                attributes = hdf['/Geometry/Pipe Nodes/Attributes'][()]
+
+                # Create geometries
+                geometries = [Point(x, y) for x, y in points]
+
+                # Create GeoDataFrame
+                gdf = gpd.GeoDataFrame(geometry=geometries)
+                gdf['node_id'] = range(len(gdf))
+
+                # Add attributes
+                for name in attributes.dtype.names:
+                    gdf[name] = attributes[name]
+
+                # Set CRS if available
+                crs = HdfUtils.projection(hdf_path)
+                if crs:
+                    gdf.set_crs(crs, inplace=True)
+
+                return gdf
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pipe node data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pipe_network_timeseries(hdf_path: Path, variable: str) -> xr.DataArray:
+        """
+        Extract timeseries data for a specific variable in the pipe network.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+            variable (str): Variable to extract (e.g., "Cell Courant", "Cell Water Surface").
+
+        Returns:
+            xr.DataArray: DataArray containing the timeseries data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+            ValueError: If an invalid variable is specified.
+        """
+        valid_variables = [
+            "Cell Courant", "Cell Water Surface", "Face Flow", "Face Velocity",
+            "Face Water Surface", "Pipes/Pipe Flow DS", "Pipes/Pipe Flow US",
+            "Pipes/Vel DS", "Pipes/Vel US", "Nodes/Depth", "Nodes/Drop Inlet Flow",
+            "Nodes/Water Surface"
+        ]
+
+        if variable not in valid_variables:
+            raise ValueError(f"Invalid variable. Must be one of: {', '.join(valid_variables)}")
+
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract timeseries data
+                data_path = f"/Results/Unsteady/Output/Output Blocks/DSS Hydrograph Output/Unsteady Time Series/Pipe Networks/Davis/{variable}"
+                data = hdf[data_path][()]
+
+                # Extract time information
+                time = HdfBase._get_unsteady_datetimes(hdf)
+
+                # Create DataArray
+                da = xr.DataArray(
+                    data=data,
+                    dims=['time', 'location'],
+                    coords={'time': time, 'location': range(data.shape[1])},
+                    name=variable
+                )
+
+                # Add attributes
+                da.attrs['units'] = hdf[data_path].attrs.get('Units', b'').decode('utf-8')
+                da.attrs['variable'] = variable
+
+                return da
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pipe network timeseries data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pipe_network_summary(hdf_path: Path) -> pd.DataFrame:
+        """
+        Extract summary data for pipe networks from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            pd.DataFrame: DataFrame containing pipe network summary data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract summary data
+                summary_path = "/Results/Unsteady/Summary/Pipe Network"
+                if summary_path not in hdf:
+                    logger.warning("Pipe Network summary data not found in HDF file")
+                    return pd.DataFrame()
+
+                summary_data = hdf[summary_path][()]
+
+                # Create DataFrame
+                df = pd.DataFrame(summary_data)
+
+                # Convert column names
+                df.columns = [col.decode('utf-8') for col in df.columns]
+
+                return df
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pipe network summary data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pipe_profile(hdf_path: Path, conduit_id: int) -> pd.DataFrame:
+        """
+        Extract the profile data for a specific pipe conduit.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+            conduit_id (int): ID of the conduit to extract profile for.
+
+        Returns:
+            pd.DataFrame: DataFrame containing the pipe profile data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+            IndexError: If the specified conduit_id is out of range.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Get conduit info
+                terrain_profiles_info = hdf['/Geometry/Pipe Conduits/Terrain Profiles Info'][()]
+
+                if conduit_id >= len(terrain_profiles_info):
+                    raise IndexError(f"conduit_id {conduit_id} is out of range")
+
+                start, count = terrain_profiles_info[conduit_id]
+
+                # Extract profile data
+                profile_values = hdf['/Geometry/Pipe Conduits/Terrain Profiles Values'][start:start+count]
+
+                # Create DataFrame
+                df = pd.DataFrame(profile_values, columns=['Station', 'Elevation'])
+
+                return df
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except IndexError as e:
+            logger.error(f"Invalid conduit_id: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pipe profile data: {e}")
+            raise
+
+
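A usage sketch for the HdfPipe class added above; the plan HDF path and conduit_id are placeholders. Note that get_pipe_network_timeseries reads from a results path that hardcodes a pipe network named "Davis", so it only resolves for models whose pipe network carries that name:

    from pathlib import Path
    from ras_commander import HdfPipe

    plan_hdf = Path("path/to/project.p01.hdf")  # placeholder plan HDF file

    conduits = HdfPipe.get_pipe_conduits(plan_hdf)              # GeoDataFrame of LineStrings
    nodes = HdfPipe.get_pipe_nodes(plan_hdf)                    # GeoDataFrame of Points
    profile = HdfPipe.get_pipe_profile(plan_hdf, conduit_id=0)  # Station/Elevation table

    # Only variables from the method's whitelist are accepted.
    ws = HdfPipe.get_pipe_network_timeseries(plan_hdf, variable="Cell Water Surface")
    print(ws.dims, ws.attrs["units"])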
ras_commander/HdfPump.py
ADDED
@@ -0,0 +1,256 @@
+import h5py
+import numpy as np
+import pandas as pd
+import geopandas as gpd
+import xarray as xr
+from pathlib import Path
+from shapely.geometry import Point
+from typing import List, Dict, Any, Optional, Union
+from .HdfBase import HdfBase  # required by the timeseries and operation helpers below
+from .HdfUtils import HdfUtils
+from .Decorators import standardize_input, log_call
+from .LoggingConfig import get_logger
+
+logger = get_logger(__name__)
+
+class HdfPump:
+    """
+    A class for handling pump station related data from HEC-RAS HDF files.
+    """
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pump_stations(hdf_path: Path) -> gpd.GeoDataFrame:
+        """
+        Extract pump station data from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            gpd.GeoDataFrame: GeoDataFrame containing pump station data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract pump station data
+                attributes = hdf['/Geometry/Pump Stations/Attributes'][()]
+                points = hdf['/Geometry/Pump Stations/Points'][()]
+
+                # Create geometries
+                geometries = [Point(x, y) for x, y in points]
+
+                # Create GeoDataFrame
+                gdf = gpd.GeoDataFrame(geometry=geometries)
+                gdf['station_id'] = range(len(gdf))
+
+                # Add attributes
+                for name in attributes.dtype.names:
+                    gdf[name] = attributes[name]
+
+                # Set CRS if available
+                crs = HdfUtils.projection(hdf_path)
+                if crs:
+                    gdf.set_crs(crs, inplace=True)
+
+                return gdf
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pump station data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pump_groups(hdf_path: Path) -> pd.DataFrame:
+        """
+        Extract pump group data from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            pd.DataFrame: DataFrame containing pump group data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract pump group data
+                attributes = hdf['/Geometry/Pump Stations/Pump Groups/Attributes'][()]
+                efficiency_curves_info = hdf['/Geometry/Pump Stations/Pump Groups/Efficiency Curves Info'][()]
+                efficiency_curves_values = hdf['/Geometry/Pump Stations/Pump Groups/Efficiency Curves Values'][()]
+
+                # Create DataFrame
+                df = pd.DataFrame(attributes)
+
+                # Add efficiency curve data
+                df['efficiency_curve_start'] = efficiency_curves_info[:, 0]
+                df['efficiency_curve_count'] = efficiency_curves_info[:, 1]
+
+                # Process efficiency curves
+                def get_efficiency_curve(start, count):
+                    return efficiency_curves_values[start:start+count].tolist()
+
+                df['efficiency_curve'] = df.apply(lambda row: get_efficiency_curve(row['efficiency_curve_start'], row['efficiency_curve_count']), axis=1)
+
+                return df
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pump group data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pump_station_timeseries(hdf_path: Path, pump_station: str) -> xr.DataArray:
+        """
+        Extract timeseries data for a specific pump station.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+            pump_station (str): Name of the pump station.
+
+        Returns:
+            xr.DataArray: DataArray containing the timeseries data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+            ValueError: If the specified pump station is not found.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Check if the pump station exists
+                pumping_stations_path = "/Results/Unsteady/Output/Output Blocks/DSS Hydrograph Output/Unsteady Time Series/Pumping Stations"
+                if pump_station not in hdf[pumping_stations_path]:
+                    raise ValueError(f"Pump station '{pump_station}' not found in HDF file")
+
+                # Extract timeseries data
+                data_path = f"{pumping_stations_path}/{pump_station}/Structure Variables"
+                data = hdf[data_path][()]
+
+                # Extract time information
+                time = HdfBase._get_unsteady_datetimes(hdf)
+
+                # Create DataArray
+                da = xr.DataArray(
+                    data=data,
+                    dims=['time', 'variable'],
+                    coords={'time': time, 'variable': ['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on']},
+                    name=pump_station
+                )
+
+                # Add attributes
+                da.attrs['units'] = hdf[data_path].attrs.get('Variable_Unit', b'').decode('utf-8')
+                da.attrs['pump_station'] = pump_station
+
+                return da
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except ValueError as e:
+            logger.error(str(e))
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pump station timeseries data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pump_station_summary(hdf_path: Path) -> pd.DataFrame:
+        """
+        Extract summary data for pump stations from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            pd.DataFrame: DataFrame containing pump station summary data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract summary data
+                summary_path = "/Results/Unsteady/Summary/Pump Station"
+                if summary_path not in hdf:
+                    logger.warning("Pump Station summary data not found in HDF file")
+                    return pd.DataFrame()
+
+                summary_data = hdf[summary_path][()]
+
+                # Create DataFrame
+                df = pd.DataFrame(summary_data)
+
+                # Convert column names
+                df.columns = [col.decode('utf-8') for col in df.columns]
+
+                return df
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pump station summary data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pump_operation_data(hdf_path: Path, pump_station: str) -> pd.DataFrame:
+        """
+        Extract pump operation data for a specific pump station.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+            pump_station (str): Name of the pump station.
+
+        Returns:
+            pd.DataFrame: DataFrame containing pump operation data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+            ValueError: If the specified pump station is not found.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Check if the pump station exists
+                pump_stations_path = "/Results/Unsteady/Output/Output Blocks/DSS Profile Output/Unsteady Time Series/Pumping Stations"
+                if pump_station not in hdf[pump_stations_path]:
+                    raise ValueError(f"Pump station '{pump_station}' not found in HDF file")
+
+                # Extract pump operation data
+                data_path = f"{pump_stations_path}/{pump_station}/Structure Variables"
+                data = hdf[data_path][()]
+
+                # Extract time information
+                time = HdfBase._get_unsteady_datetimes(hdf)
+
+                # Create DataFrame
+                df = pd.DataFrame(data, columns=['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on'])
+                df['Time'] = time
+
+                return df
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except ValueError as e:
+            logger.error(str(e))
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pump operation data: {e}")
+            raise
ras_commander/HdfResultsPlan.py
CHANGED
ras_commander/HdfResultsXsec.py
CHANGED
@@ -8,20 +8,21 @@ released under MIT license and Copyright (c) 2024 fema-ffrd
 The file has been forked and modified for use in RAS Commander.
 """
 
+from pathlib import Path
+from typing import Union, Optional, List
+
 import h5py
 import numpy as np
 import pandas as pd
-
-
+import xarray as xr
+
 from .HdfBase import HdfBase
 from .HdfUtils import HdfUtils
 from .Decorators import standardize_input, log_call
-from .LoggingConfig import
-import xarray as xr
+from .LoggingConfig import get_logger
 
 logger = get_logger(__name__)
 
-
 class HdfResultsXsec:
     """
     A class for handling cross-section results from HEC-RAS HDF files.
@@ -235,3 +236,208 @@ class HdfResultsXsec:
         """
         return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Velocity Total")
 
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pipe_network_summary(hdf_path: Path) -> pd.DataFrame:
+        """
+        Extract summary data for pipe networks from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            pd.DataFrame: DataFrame containing pipe network summary data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract summary data
+                summary_path = "/Results/Unsteady/Summary/Pipe Network"
+                if summary_path not in hdf:
+                    logger.warning("Pipe Network summary data not found in HDF file")
+                    return pd.DataFrame()
+
+                summary_data = hdf[summary_path][()]
+
+                # Create DataFrame
+                df = pd.DataFrame(summary_data)
+
+                # Convert column names
+                df.columns = [col.decode('utf-8') if isinstance(col, bytes) else col for col in df.columns]
+
+                # Convert byte string values to regular strings
+                for col in df.columns:
+                    if df[col].dtype == object:
+                        df[col] = df[col].apply(lambda x: x.decode('utf-8') if isinstance(x, bytes) else x)
+
+                return df
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pipe network summary data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pump_station_summary(hdf_path: Path) -> pd.DataFrame:
+        """
+        Extract summary data for pump stations from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            pd.DataFrame: DataFrame containing pump station summary data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract summary data
+                summary_path = "/Results/Unsteady/Summary/Pump Station"
+                if summary_path not in hdf:
+                    logger.warning("Pump Station summary data not found in HDF file")
+                    return pd.DataFrame()
+
+                summary_data = hdf[summary_path][()]
+
+                # Create DataFrame
+                df = pd.DataFrame(summary_data)
+
+                # Convert column names
+                df.columns = [col.decode('utf-8') if isinstance(col, bytes) else col for col in df.columns]
+
+                # Convert byte string values to regular strings
+                for col in df.columns:
+                    if df[col].dtype == object:
+                        df[col] = df[col].apply(lambda x: x.decode('utf-8') if isinstance(x, bytes) else x)
+
+                return df
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pump station summary data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pipe_network_profile_output(hdf_path: Path) -> pd.DataFrame:
+        """
+        Extract pipe network profile output data from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            pd.DataFrame: DataFrame containing pipe network profile output data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract profile output data
+                profile_path = "/Results/Unsteady/Output/Output Blocks/DSS Profile Output/Unsteady Time Series/Pipe Networks"
+                if profile_path not in hdf:
+                    logger.warning("Pipe Network profile output data not found in HDF file")
+                    return pd.DataFrame()
+
+                # Initialize an empty list to store data from all pipe networks
+                all_data = []
+
+                # Iterate through all pipe networks
+                for network in hdf[profile_path].keys():
+                    network_path = f"{profile_path}/{network}"
+
+                    # Extract data for each variable
+                    for var in hdf[network_path].keys():
+                        data = hdf[f"{network_path}/{var}"][()]
+
+                        # Create a DataFrame for this variable
+                        df = pd.DataFrame(data)
+                        df['Network'] = network
+                        df['Variable'] = var
+
+                        all_data.append(df)
+
+                # Concatenate all DataFrames
+                result_df = pd.concat(all_data, ignore_index=True)
+
+                # Add time information
+                time = HdfBase._get_unsteady_datetimes(hdf)
+                result_df['Time'] = [time[i] for i in result_df.index]
+
+                return result_df
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pipe network profile output data: {e}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_pump_station_profile_output(hdf_path: Path) -> pd.DataFrame:
+        """
+        Extract pump station profile output data from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            pd.DataFrame: DataFrame containing pump station profile output data.
+
+        Raises:
+            KeyError: If the required datasets are not found in the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf:
+                # Extract profile output data
+                profile_path = "/Results/Unsteady/Output/Output Blocks/DSS Profile Output/Unsteady Time Series/Pumping Stations"
+                if profile_path not in hdf:
+                    logger.warning("Pump Station profile output data not found in HDF file")
+                    return pd.DataFrame()
+
+                # Initialize an empty list to store data from all pump stations
+                all_data = []
+
+                # Iterate through all pump stations
+                for station in hdf[profile_path].keys():
+                    station_path = f"{profile_path}/{station}/Structure Variables"
+
+                    data = hdf[station_path][()]
+
+                    # Create a DataFrame for this pump station
+                    df = pd.DataFrame(data, columns=['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on'])
+                    df['Station'] = station
+
+                    all_data.append(df)
+
+                # Concatenate all DataFrames
+                result_df = pd.concat(all_data, ignore_index=True)
+
+                # Add time information
+                time = HdfBase._get_unsteady_datetimes(hdf)
+                result_df['Time'] = [time[i] for i in result_df.index]
+
+                return result_df
+
+        except KeyError as e:
+            logger.error(f"Required dataset not found in HDF file: {e}")
+            raise
+        except Exception as e:
+            logger.error(f"Error extracting pump station profile output data: {e}")
+            raise
ras_commander/__init__.py
CHANGED
@@ -32,6 +32,8 @@ from .HdfResultsXsec import HdfResultsXsec
 from .HdfStruc import HdfStruc
 from .HdfUtils import HdfUtils
 from .HdfXsec import HdfXsec
+from .HdfPump import HdfPump
+from .HdfPipe import HdfPipe
 
 # Define __all__ to specify what should be imported when using "from ras_commander import *"
 __all__ = [
@@ -45,6 +47,8 @@ __all__ = [
     "HdfStruc",
     "HdfUtils",
     "HdfXsec",
+    "HdfPump",
+    "HdfPipe",
     "standardize_input",
     "ras",
     "init_ras_project",
{ras_commander-0.44.0.dist-info → ras_commander-0.45.0.dist-info}/RECORD
CHANGED
@@ -2,10 +2,12 @@ ras_commander/Decorators.py,sha256=i5AEQbe7JeI8Y3O_dQ5OO4Ab0KO5SiZTiysFBGxqTRU,4
 ras_commander/HdfBase.py,sha256=HV5ccV9QH2lz4ZRYqK2d7_S833cTSUcostzxxSPb4O4,7129
 ras_commander/HdfBndry.py,sha256=LWaDMHeo0V_VOpx7D9q7W_0WvWsD6NQThiSLYPY8ApE,20761
 ras_commander/HdfMesh.py,sha256=JKWn6S-FcdFs3fZQMQ8mkIEIFFxUdVjWb5r7jAh3ujw,12668
+ras_commander/HdfPipe.py,sha256=agGhMNeANi92eS62CEoaPDimS0I0yGoAoMv1c8YAIk4,9432
 ras_commander/HdfPlan.py,sha256=nt1Q03cPNX_yVX0mOUqeWGPicqtusUnuBSTcBw8n0Vg,6967
+ras_commander/HdfPump.py,sha256=8mBfFg0epaq74txDyUfBPkWqzvRQCb8OojEgDGCridM,9416
 ras_commander/HdfResultsMesh.py,sha256=Ke6HSpdKBd-TUa0QIuMaxhyj--lfuQGlMDlDuk2qrd8,28064
-ras_commander/HdfResultsPlan.py,sha256
-ras_commander/HdfResultsXsec.py,sha256=
+ras_commander/HdfResultsPlan.py,sha256=-4tTMLiSkuOs3pKn7sA_1RoGmBxaG8jMUJ9qvVlc2Q0,17299
+ras_commander/HdfResultsXsec.py,sha256=4BHIX-VTfkh1YwIUJDZpnnz_-wGctOJO2XVCTsiUXDs,16393
 ras_commander/HdfStruc.py,sha256=NNTSwUQmLMLn8SPyrLIKXaUaqKavTos_TDDB6Mja7-4,5595
 ras_commander/HdfUtils.py,sha256=dUU9QkNO-FhaCjD_TEOv_FNNrZI3TUOQ6QjSiXrJGSc,18403
 ras_commander/HdfXsec.py,sha256=ccJ-GAVKwS2Z-k6CCWAYvw974xCBTTF-5Hh1CnikVG4,11562
@@ -18,9 +20,9 @@ ras_commander/RasPlan.py,sha256=_0EjxQnamd9FhzzKlYNqJ3chv2bvxfk0DXEwfHntXFo,4028
 ras_commander/RasPrj.py,sha256=ePLcPNcKVMgijvJKkJIWwQslQi2D6zq2UTW-8XCFmto,36159
 ras_commander/RasUnsteady.py,sha256=oBVC9QFQMrSx8LY2Cb1CjUyCQEUoIEnEuVJsATYiVYs,4649
 ras_commander/RasUtils.py,sha256=NBMxTHWHoTH2MJzqJ0y1_00fgKSS1GnNuEikwZ3Pqzs,34153
-ras_commander/__init__.py,sha256=
-ras_commander-0.
-ras_commander-0.
-ras_commander-0.
-ras_commander-0.
-ras_commander-0.
+ras_commander/__init__.py,sha256=8sg6T5I_uAqGe2pInrMnBrhS8NZbfnF7Pp7uwzQOqIo,1722
+ras_commander-0.45.0.dist-info/LICENSE,sha256=_pbd6qHnlsz1iQ-ozDW_49r86BZT6CRwO2iBtw0iN6M,457
+ras_commander-0.45.0.dist-info/METADATA,sha256=Y7H1Fq4aG4C2ujRl6VkjxBLOaNZUzX1X7gNd5UJDU7s,15935
+ras_commander-0.45.0.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ras_commander-0.45.0.dist-info/top_level.txt,sha256=i76S7eKLFC8doKcXDl3aiOr9RwT06G8adI6YuKbQDaA,14
+ras_commander-0.45.0.dist-info/RECORD,,
{ras_commander-0.44.0.dist-info → ras_commander-0.45.0.dist-info}/LICENSE
File without changes
{ras_commander-0.44.0.dist-info → ras_commander-0.45.0.dist-info}/WHEEL
File without changes
{ras_commander-0.44.0.dist-info → ras_commander-0.45.0.dist-info}/top_level.txt
File without changes