ras-commander 0.43.0__py3-none-any.whl → 0.45.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ras_commander/HdfBase.py +9 -1
- ras_commander/HdfBndry.py +9 -0
- ras_commander/HdfMesh.py +9 -0
- ras_commander/HdfPipe.py +262 -0
- ras_commander/HdfPlan.py +10 -0
- ras_commander/HdfPump.py +255 -0
- ras_commander/HdfResultsMesh.py +178 -164
- ras_commander/HdfResultsPlan.py +15 -6
- ras_commander/HdfResultsXsec.py +221 -5
- ras_commander/HdfStruc.py +9 -0
- ras_commander/HdfUtils.py +25 -19
- ras_commander/HdfXsec.py +10 -0
- ras_commander/__init__.py +4 -0
- {ras_commander-0.43.0.dist-info → ras_commander-0.45.0.dist-info}/METADATA +9 -2
- ras_commander-0.45.0.dist-info/RECORD +28 -0
- ras_commander-0.43.0.dist-info/RECORD +0 -26
- {ras_commander-0.43.0.dist-info → ras_commander-0.45.0.dist-info}/LICENSE +0 -0
- {ras_commander-0.43.0.dist-info → ras_commander-0.45.0.dist-info}/WHEEL +0 -0
- {ras_commander-0.43.0.dist-info → ras_commander-0.45.0.dist-info}/top_level.txt +0 -0
ras_commander/HdfBase.py
CHANGED
@@ -1,3 +1,12 @@
|
|
1
|
+
"""
|
2
|
+
Class: HdfBase
|
3
|
+
|
4
|
+
Attribution: A substantial amount of code in this file is sourced or derived
|
5
|
+
from the https://github.com/fema-ffrd/rashdf library,
|
6
|
+
released under MIT license and Copyright (c) 2024 fema-ffrd
|
7
|
+
|
8
|
+
The file has been forked and modified for use in RAS Commander.
|
9
|
+
"""
|
1
10
|
import re
|
2
11
|
from datetime import datetime, timedelta
|
3
12
|
import h5py
|
@@ -7,7 +16,6 @@ import xarray as xr # Added import for xarray
|
|
7
16
|
from typing import List, Tuple, Union, Optional, Dict
|
8
17
|
from pathlib import Path
|
9
18
|
import logging
|
10
|
-
import dask.array as da
|
11
19
|
|
12
20
|
from .HdfUtils import HdfUtils
|
13
21
|
from .Decorators import standardize_input, log_call
|
ras_commander/HdfBndry.py
CHANGED
@@ -1,3 +1,12 @@
|
|
1
|
+
"""
|
2
|
+
Class: HdfBndry
|
3
|
+
|
4
|
+
Attribution: A substantial amount of code in this file is sourced or derived
|
5
|
+
from the https://github.com/fema-ffrd/rashdf library,
|
6
|
+
released under MIT license and Copyright (c) 2024 fema-ffrd
|
7
|
+
|
8
|
+
The file has been forked and modified for use in RAS Commander.
|
9
|
+
"""
|
1
10
|
from pathlib import Path
|
2
11
|
from typing import Dict, List, Optional, Union, Any
|
3
12
|
import h5py
|
ras_commander/HdfMesh.py
CHANGED
@@ -1,3 +1,12 @@
|
|
1
|
+
"""
|
2
|
+
Class: HdfMesh
|
3
|
+
|
4
|
+
Attribution: A substantial amount of code in this file is sourced or derived
|
5
|
+
from the https://github.com/fema-ffrd/rashdf library,
|
6
|
+
released under MIT license and Copyright (c) 2024 fema-ffrd
|
7
|
+
|
8
|
+
The file has been forked and modified for use in RAS Commander.
|
9
|
+
"""
|
1
10
|
from pathlib import Path
|
2
11
|
import h5py
|
3
12
|
import numpy as np
|
ras_commander/HdfPipe.py
ADDED
@@ -0,0 +1,262 @@
|
|
1
|
+
"""
Class: HdfPipe

Utilities for extracting pipe-network geometry and unsteady results data
from HEC-RAS HDF files.
"""
import h5py
import numpy as np
import pandas as pd
import geopandas as gpd
import xarray as xr
from pathlib import Path
from shapely.geometry import LineString, Point
from typing import List, Dict, Any, Optional, Union
from .HdfBase import HdfBase
from .HdfUtils import HdfUtils
from .Decorators import standardize_input, log_call
from .LoggingConfig import get_logger

logger = get_logger(__name__)

class HdfPipe:
    """
    A class for handling pipe network related data from HEC-RAS HDF files.

    All methods are static; pass the HDF file path (standardized by the
    ``standardize_input`` decorator) as the first argument.
    """

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pipe_conduits(hdf_path: Path) -> gpd.GeoDataFrame:
        """
        Extract pipe conduit data from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            gpd.GeoDataFrame: GeoDataFrame with one LineString per conduit plus
                'conduit_id', 'terrain_profile_start' and 'terrain_profile_count'
                columns.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                polyline_info = hdf['/Geometry/Pipe Conduits/Polyline Info'][()]
                polyline_points = hdf['/Geometry/Pipe Conduits/Polyline Points'][()]
                terrain_profiles_info = hdf['/Geometry/Pipe Conduits/Terrain Profiles Info'][()]

                # Each info row holds (point start index, point count, ...);
                # index explicitly so extra columns are tolerated.
                geometries = []
                for row in polyline_info:
                    start, count = int(row[0]), int(row[1])
                    geometries.append(LineString(polyline_points[start:start + count]))

                gdf = gpd.GeoDataFrame(geometry=geometries)
                gdf['conduit_id'] = range(len(gdf))
                gdf['terrain_profile_start'] = terrain_profiles_info[:, 0]
                gdf['terrain_profile_count'] = terrain_profiles_info[:, 1]

                # Set CRS if the geometry file declares a projection.
                crs = HdfUtils.projection(hdf_path)
                if crs:
                    gdf.set_crs(crs, inplace=True)

                return gdf

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pipe conduit data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pipe_nodes(hdf_path: Path) -> gpd.GeoDataFrame:
        """
        Extract pipe node data from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            gpd.GeoDataFrame: GeoDataFrame with one Point per node, a 'node_id'
                column, and one column per attribute field in the HDF dataset.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                points = hdf['/Geometry/Pipe Nodes/Points'][()]
                attributes = hdf['/Geometry/Pipe Nodes/Attributes'][()]

                geometries = [Point(x, y) for x, y in points]

                gdf = gpd.GeoDataFrame(geometry=geometries)
                gdf['node_id'] = range(len(gdf))

                # Copy every field of the structured attribute array; values
                # are kept as stored (byte strings are not decoded here).
                for name in attributes.dtype.names:
                    gdf[name] = attributes[name]

                crs = HdfUtils.projection(hdf_path)
                if crs:
                    gdf.set_crs(crs, inplace=True)

                return gdf

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pipe node data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pipe_network_timeseries(hdf_path: Path, variable: str,
                                    pipe_network: str = "Davis") -> xr.DataArray:
        """
        Extract timeseries data for a specific variable in a pipe network.

        Args:
            hdf_path (Path): Path to the HDF file.
            variable (str): Variable to extract (e.g., "Cell Courant", "Cell Water Surface").
            pipe_network (str): Name of the pipe network group in the results
                output. Defaults to "Davis" for backward compatibility.

        Returns:
            xr.DataArray: DataArray with dims ('time', 'location') containing
                the timeseries data, with 'units' and 'variable' attributes.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
            ValueError: If an invalid variable is specified.
        """
        valid_variables = [
            "Cell Courant", "Cell Water Surface", "Face Flow", "Face Velocity",
            "Face Water Surface", "Pipes/Pipe Flow DS", "Pipes/Pipe Flow US",
            "Pipes/Vel DS", "Pipes/Vel US", "Nodes/Depth", "Nodes/Drop Inlet Flow",
            "Nodes/Water Surface"
        ]

        if variable not in valid_variables:
            raise ValueError(f"Invalid variable. Must be one of: {', '.join(valid_variables)}")

        try:
            with h5py.File(hdf_path, 'r') as hdf:
                data_path = (
                    "/Results/Unsteady/Output/Output Blocks/DSS Hydrograph Output/"
                    f"Unsteady Time Series/Pipe Networks/{pipe_network}/{variable}"
                )
                data = hdf[data_path][()]

                # Time axis shared by all unsteady output datasets.
                time = HdfBase._get_unsteady_datetimes(hdf)

                da = xr.DataArray(
                    data=data,
                    dims=['time', 'location'],
                    coords={'time': time, 'location': range(data.shape[1])},
                    name=variable
                )

                # h5py may return the attribute as bytes or str depending on
                # how it was written; decode only when necessary.
                units = hdf[data_path].attrs.get('Units', b'')
                if isinstance(units, bytes):
                    units = units.decode('utf-8')
                da.attrs['units'] = units
                da.attrs['variable'] = variable

                return da

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pipe network timeseries data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pipe_network_summary(hdf_path: Path) -> pd.DataFrame:
        """
        Extract summary data for pipe networks from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            pd.DataFrame: DataFrame containing pipe network summary data, or an
                empty DataFrame when no summary output is present.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                summary_path = "/Results/Unsteady/Summary/Pipe Network"
                if summary_path not in hdf:
                    logger.warning("Pipe Network summary data not found in HDF file")
                    return pd.DataFrame()

                summary_data = hdf[summary_path][()]

                df = pd.DataFrame(summary_data)

                # pd.DataFrame on a structured array yields str column names;
                # only decode names that actually arrived as bytes.
                df.columns = [
                    col.decode('utf-8') if isinstance(col, (bytes, bytearray)) else col
                    for col in df.columns
                ]

                return df

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pipe network summary data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pipe_profile(hdf_path: Path, conduit_id: int) -> pd.DataFrame:
        """
        Extract the terrain profile data for a specific pipe conduit.

        Args:
            hdf_path (Path): Path to the HDF file.
            conduit_id (int): ID of the conduit to extract the profile for.

        Returns:
            pd.DataFrame: DataFrame with 'Station' and 'Elevation' columns.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
            IndexError: If the specified conduit_id is out of range.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                terrain_profiles_info = hdf['/Geometry/Pipe Conduits/Terrain Profiles Info'][()]

                if conduit_id >= len(terrain_profiles_info):
                    raise IndexError(f"conduit_id {conduit_id} is out of range")

                # Row layout: (value start index, value count, ...); index
                # explicitly so extra columns are tolerated.
                info_row = terrain_profiles_info[conduit_id]
                start, count = int(info_row[0]), int(info_row[1])

                profile_values = hdf['/Geometry/Pipe Conduits/Terrain Profiles Values'][start:start + count]

                return pd.DataFrame(profile_values, columns=['Station', 'Elevation'])

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except IndexError as e:
            logger.error(f"Invalid conduit_id: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pipe profile data: {e}")
            raise
ras_commander/HdfPlan.py
CHANGED
@@ -1,3 +1,13 @@
|
|
1
|
+
"""
|
2
|
+
Class: HdfPlan
|
3
|
+
|
4
|
+
Attribution: A substantial amount of code in this file is sourced or derived
|
5
|
+
from the https://github.com/fema-ffrd/rashdf library,
|
6
|
+
released under MIT license and Copyright (c) 2024 fema-ffrd
|
7
|
+
|
8
|
+
The file has been forked and modified for use in RAS Commander.
|
9
|
+
"""
|
10
|
+
|
1
11
|
import h5py
|
2
12
|
import pandas as pd
|
3
13
|
from datetime import datetime
|
ras_commander/HdfPump.py
ADDED
@@ -0,0 +1,255 @@
|
|
1
|
+
"""
Class: HdfPump

Utilities for extracting pump-station geometry and unsteady results data
from HEC-RAS HDF files.
"""
import h5py
import numpy as np
import pandas as pd
import geopandas as gpd
import xarray as xr
from pathlib import Path
from shapely.geometry import Point
from typing import List, Dict, Any, Optional, Union
from .HdfBase import HdfBase  # needed for unsteady datetime extraction
from .HdfUtils import HdfUtils
from .Decorators import standardize_input, log_call
from .LoggingConfig import get_logger

logger = get_logger(__name__)

class HdfPump:
    """
    A class for handling pump station related data from HEC-RAS HDF files.

    All methods are static; pass the HDF file path (standardized by the
    ``standardize_input`` decorator) as the first argument.
    """

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pump_stations(hdf_path: Path) -> gpd.GeoDataFrame:
        """
        Extract pump station data from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            gpd.GeoDataFrame: GeoDataFrame with one Point per pump station, a
                'station_id' column, and one column per attribute field.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                attributes = hdf['/Geometry/Pump Stations/Attributes'][()]
                points = hdf['/Geometry/Pump Stations/Points'][()]

                geometries = [Point(x, y) for x, y in points]

                gdf = gpd.GeoDataFrame(geometry=geometries)
                gdf['station_id'] = range(len(gdf))

                # Copy every field of the structured attribute array; values
                # are kept as stored (byte strings are not decoded here).
                for name in attributes.dtype.names:
                    gdf[name] = attributes[name]

                # Set CRS if the geometry file declares a projection.
                crs = HdfUtils.projection(hdf_path)
                if crs:
                    gdf.set_crs(crs, inplace=True)

                return gdf

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pump station data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pump_groups(hdf_path: Path) -> pd.DataFrame:
        """
        Extract pump group data from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            pd.DataFrame: DataFrame of pump group attributes plus
                'efficiency_curve_start', 'efficiency_curve_count' and an
                'efficiency_curve' column holding each group's curve values.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                attributes = hdf['/Geometry/Pump Stations/Pump Groups/Attributes'][()]
                efficiency_curves_info = hdf['/Geometry/Pump Stations/Pump Groups/Efficiency Curves Info'][()]
                efficiency_curves_values = hdf['/Geometry/Pump Stations/Pump Groups/Efficiency Curves Values'][()]

                df = pd.DataFrame(attributes)

                # Each info row is (start index, count) into the values array.
                df['efficiency_curve_start'] = efficiency_curves_info[:, 0]
                df['efficiency_curve_count'] = efficiency_curves_info[:, 1]

                def get_efficiency_curve(start, count):
                    # Slice this group's portion of the flat values array.
                    return efficiency_curves_values[start:start + count].tolist()

                df['efficiency_curve'] = df.apply(
                    lambda row: get_efficiency_curve(row['efficiency_curve_start'],
                                                     row['efficiency_curve_count']),
                    axis=1
                )

                return df

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pump group data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pump_station_timeseries(hdf_path: Path, pump_station: str) -> xr.DataArray:
        """
        Extract timeseries data for a specific pump station.

        Args:
            hdf_path (Path): Path to the HDF file.
            pump_station (str): Name of the pump station.

        Returns:
            xr.DataArray: DataArray with dims ('time', 'variable') where the
                variables are Flow, Stage HW, Stage TW, Pump Station, Pumps on.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
            ValueError: If the specified pump station is not found.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                pumping_stations_path = "/Results/Unsteady/Output/Output Blocks/DSS Hydrograph Output/Unsteady Time Series/Pumping Stations"
                if pump_station not in hdf[pumping_stations_path]:
                    raise ValueError(f"Pump station '{pump_station}' not found in HDF file")

                data_path = f"{pumping_stations_path}/{pump_station}/Structure Variables"
                data = hdf[data_path][()]

                # Time axis shared by all unsteady output datasets.
                time = HdfBase._get_unsteady_datetimes(hdf)

                da = xr.DataArray(
                    data=data,
                    dims=['time', 'variable'],
                    coords={'time': time,
                            'variable': ['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on']},
                    name=pump_station
                )

                # h5py may return the attribute as bytes or str depending on
                # how it was written; decode only when necessary.
                units = hdf[data_path].attrs.get('Variable_Unit', b'')
                if isinstance(units, bytes):
                    units = units.decode('utf-8')
                da.attrs['units'] = units
                da.attrs['pump_station'] = pump_station

                return da

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except ValueError as e:
            logger.error(str(e))
            raise
        except Exception as e:
            logger.error(f"Error extracting pump station timeseries data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pump_station_summary(hdf_path: Path) -> pd.DataFrame:
        """
        Extract summary data for pump stations from the HDF file.

        Args:
            hdf_path (Path): Path to the HDF file.

        Returns:
            pd.DataFrame: DataFrame containing pump station summary data, or an
                empty DataFrame when no summary output is present.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                summary_path = "/Results/Unsteady/Summary/Pump Station"
                if summary_path not in hdf:
                    logger.warning("Pump Station summary data not found in HDF file")
                    return pd.DataFrame()

                summary_data = hdf[summary_path][()]

                df = pd.DataFrame(summary_data)

                # pd.DataFrame on a structured array yields str column names;
                # only decode names that actually arrived as bytes.
                df.columns = [
                    col.decode('utf-8') if isinstance(col, (bytes, bytearray)) else col
                    for col in df.columns
                ]

                return df

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except Exception as e:
            logger.error(f"Error extracting pump station summary data: {e}")
            raise

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_pump_operation_data(hdf_path: Path, pump_station: str) -> pd.DataFrame:
        """
        Extract pump operation data for a specific pump station.

        Args:
            hdf_path (Path): Path to the HDF file.
            pump_station (str): Name of the pump station.

        Returns:
            pd.DataFrame: DataFrame with 'Flow', 'Stage HW', 'Stage TW',
                'Pump Station', 'Pumps on' and 'Time' columns.

        Raises:
            KeyError: If the required datasets are not found in the HDF file.
            ValueError: If the specified pump station is not found.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf:
                pump_stations_path = "/Results/Unsteady/Output/Output Blocks/DSS Profile Output/Unsteady Time Series/Pumping Stations"
                if pump_station not in hdf[pump_stations_path]:
                    raise ValueError(f"Pump station '{pump_station}' not found in HDF file")

                data_path = f"{pump_stations_path}/{pump_station}/Structure Variables"
                data = hdf[data_path][()]

                # Time axis shared by all unsteady output datasets.
                time = HdfBase._get_unsteady_datetimes(hdf)

                df = pd.DataFrame(data, columns=['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on'])
                df['Time'] = time

                return df

        except KeyError as e:
            logger.error(f"Required dataset not found in HDF file: {e}")
            raise
        except ValueError as e:
            logger.error(str(e))
            raise
        except Exception as e:
            logger.error(f"Error extracting pump operation data: {e}")
            raise