ras-commander 0.48.0__py3-none-any.whl → 0.50.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registries.
ras_commander/HdfPlan.py CHANGED
@@ -6,6 +6,43 @@ from the https://github.com/fema-ffrd/rashdf library,
  released under MIT license and Copyright (c) 2024 fema-ffrd

  The file has been forked and modified for use in RAS Commander.
+
+ -----
+
+ All of the methods in this class are static and are designed to be used without instantiation.
+
+ List of Functions in HdfPlan:
+ - get_simulation_start_time()
+ - get_simulation_end_time()
+ - get_unsteady_datetimes()
+ - get_plan_info_attrs()
+ - get_plan_parameters()
+ - get_meteorology_precip_attrs()
+ - get_geom_attrs()
+
+
+ REVISIONS NEEDED:
+
+ Use get_ prefix for functions that return data.
+ Since we are extracting plan data, we should use get_plan_...
+ BUT, we will never set results data, so we should use results_
+
+ We need to shorten names where possible.
+
+ List of Revised Functions in HdfPlan:
+ - get_plan_start_time()
+ - get_plan_end_time()
+ - get_plan_timestamps_list()
+ - get_plan_information()
+ - get_plan_parameters()
+ - get_plan_met_precip()
+ - get_geometry_information()
+
+
+
+
+
+
  """

  import h5py
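The renamed functions listed above are all static, so they can be called directly on the class. A minimal usage sketch of the 0.50.0 names shown in this hunk, assuming HdfPlan is importable from the package root and using an illustrative plan HDF path:

    from pathlib import Path
    from ras_commander import HdfPlan

    plan_hdf = Path("project.p01.hdf")  # illustrative path to a plan HDF from a completed HEC-RAS run

    start = HdfPlan.get_plan_start_time(plan_hdf)         # datetime
    end = HdfPlan.get_plan_end_time(plan_hdf)             # datetime
    stamps = HdfPlan.get_plan_timestamps_list(plan_hdf)   # List[datetime] of output times
    info = HdfPlan.get_plan_information(plan_hdf)         # dict of Plan Information attributes

    print(f"Simulation window: {start} -> {end} ({len(stamps)} output timestamps)")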
@@ -24,64 +61,53 @@ logger = get_logger(__name__)

  class HdfPlan:
  """
- A class for handling operations on HEC-RAS plan HDF files.
-
- This class provides methods for extracting and analyzing data from HEC-RAS plan HDF files,
- including simulation times, plan information, and geometry attributes.
+ A class for handling HEC-RAS plan HDF files.

- Methods in this class use the @standardize_input decorator to handle different input types
- (e.g., plan number, file path) and the @log_call decorator for logging method calls.
+ Provides static methods for extracting data from HEC-RAS plan HDF files including
+ simulation times, plan information, and geometry attributes. All methods use
+ @standardize_input for handling different input types and @log_call for logging.

- Attributes:
- None
-
- Methods:
- get_simulation_start_time: Get the simulation start time.
- get_simulation_end_time: Get the simulation end time.
- get_unsteady_datetimes: Get a list of unsteady datetimes.
- get_plan_info_attrs: Get plan information attributes.
- get_plan_param_attrs: Get plan parameter attributes.
- get_meteorology_precip_attrs: Get precipitation attributes.
- get_geom_attrs: Get geometry attributes.
+ Note: This code is partially derived from the rashdf library (https://github.com/fema-ffrd/rashdf)
+ under MIT license.
  """

  @staticmethod
  @log_call
  @standardize_input(file_type='plan_hdf')
- def get_simulation_start_time(hdf_path: Path) -> datetime:
+ def get_plan_start_time(hdf_path: Path) -> datetime:
  """
- Get the simulation start time from the plan file.
+ Get the plan start time from the plan file.

  Args:
  hdf_path (Path): Path to the HEC-RAS plan HDF file.

  Returns:
- datetime: The simulation start time.
+ datetime: The plan start time in UTC format.

  Raises:
- ValueError: If there's an error reading the simulation start time.
+ ValueError: If there's an error reading the plan start time.
  """
  try:
  with h5py.File(hdf_path, 'r') as hdf_file:
- return HdfBase._get_simulation_start_time(hdf_file)
+ return HdfBase.get_simulation_start_time(hdf_file)
  except Exception as e:
- raise ValueError(f"Failed to get simulation start time: {str(e)}")
+ raise ValueError(f"Failed to get plan start time: {str(e)}")

  @staticmethod
  @log_call
  @standardize_input(file_type='plan_hdf')
- def get_simulation_end_time(hdf_path: Path) -> datetime:
+ def get_plan_end_time(hdf_path: Path) -> datetime:
  """
- Get the simulation end time from the plan file.
+ Get the plan end time from the plan file.

  Args:
  hdf_path (Path): Path to the HEC-RAS plan HDF file.

  Returns:
- datetime: The simulation end time.
+ datetime: The plan end time.

  Raises:
- ValueError: If there's an error reading the simulation end time.
+ ValueError: If there's an error reading the plan end time.
  """
  try:
  with h5py.File(hdf_path, 'r') as hdf_file:
@@ -89,57 +115,70 @@ class HdfPlan:
  if plan_info is None:
  raise ValueError("Plan Information not found in HDF file")
  time_str = plan_info.attrs.get('Simulation End Time')
- return datetime.strptime(time_str.decode('utf-8'), "%d%b%Y %H:%M:%S")
+ return HdfUtils.parse_ras_datetime(time_str.decode('utf-8'))
  except Exception as e:
- raise ValueError(f"Failed to get simulation end time: {str(e)}")
+ raise ValueError(f"Failed to get plan end time: {str(e)}")

  @staticmethod
  @log_call
  @standardize_input(file_type='plan_hdf')
- def get_unsteady_datetimes(hdf_path: Path) -> List[datetime]:
+ def get_plan_timestamps_list(hdf_path: Path) -> List[datetime]:
  """
- Get the list of unsteady datetimes from the HDF file.
+ Get the list of output timestamps from the plan simulation.

  Args:
  hdf_path (Path): Path to the HEC-RAS plan HDF file.

  Returns:
- List[datetime]: A list of datetime objects representing the unsteady timestamps.
+ List[datetime]: Chronological list of simulation output timestamps in UTC.

  Raises:
- ValueError: If there's an error retrieving the unsteady datetimes.
+ ValueError: If there's an error retrieving the plan timestamps.
  """
  try:
  with h5py.File(hdf_path, 'r') as hdf_file:
- return HdfBase._get_unsteady_datetimes(hdf_file)
+ return HdfBase.get_unsteady_timestamps(hdf_file)
  except Exception as e:
- raise ValueError(f"Failed to get unsteady datetimes: {str(e)}")
+ raise ValueError(f"Failed to get plan timestamps: {str(e)}")

  @staticmethod
  @log_call
  @standardize_input(file_type='plan_hdf')
- def get_plan_info_attrs(hdf_path: Path) -> Dict:
+ def get_plan_information(hdf_path: Path) -> Dict:
  """
- Get plan information attributes from a HEC-RAS HDF plan file.
+ Get plan information from a HEC-RAS HDF plan file.

  Args:
  hdf_path (Path): Path to the HEC-RAS plan HDF file.

  Returns:
- Dict: A dictionary containing the plan information attributes.
+ Dict: Plan information including simulation times, flow regime,
+ computation settings, etc.

  Raises:
- ValueError: If there's an error retrieving the plan information attributes.
+ ValueError: If there's an error retrieving the plan information.
  """
  try:
- return HdfUtils.get_attrs(hdf_path, "Plan Data/Plan Information")
+ with h5py.File(hdf_path, 'r') as hdf_file:
+ plan_info_path = "Plan Data/Plan Information"
+ if plan_info_path not in hdf_file:
+ raise ValueError(f"Plan Information not found in {hdf_path}")
+
+ attrs = {}
+ for key in hdf_file[plan_info_path].attrs.keys():
+ value = hdf_file[plan_info_path].attrs[key]
+ if isinstance(value, bytes):
+ value = HdfUtils.convert_ras_string(value)
+ attrs[key] = value
+
+ return attrs
  except Exception as e:
  raise ValueError(f"Failed to get plan information attributes: {str(e)}")

  @staticmethod
  @log_call
  @standardize_input(file_type='plan_hdf')
- def get_plan_param_attrs(hdf_path: Path) -> Dict:
+ def get_plan_parameters(hdf_path: Path) -> Dict:
  """
  Get plan parameter attributes from a HEC-RAS HDF plan file.

@@ -153,14 +192,26 @@ class HdfPlan:
  ValueError: If there's an error retrieving the plan parameter attributes.
  """
  try:
- return HdfUtils.get_attrs(hdf_path, "Plan Data/Plan Parameters")
+ with h5py.File(hdf_path, 'r') as hdf_file:
+ plan_params_path = "Plan Data/Plan Parameters"
+ if plan_params_path not in hdf_file:
+ raise ValueError(f"Plan Parameters not found in {hdf_path}")
+
+ attrs = {}
+ for key in hdf_file[plan_params_path].attrs.keys():
+ value = hdf_file[plan_params_path].attrs[key]
+ if isinstance(value, bytes):
+ value = HdfUtils.convert_ras_string(value)
+ attrs[key] = value
+
+ return attrs
  except Exception as e:
  raise ValueError(f"Failed to get plan parameter attributes: {str(e)}")

  @staticmethod
  @log_call
  @standardize_input(file_type='plan_hdf')
- def get_meteorology_precip_attrs(hdf_path: Path) -> Dict:
+ def get_plan_met_precip(hdf_path: Path) -> Dict:
  """
  Get precipitation attributes from a HEC-RAS HDF plan file.

@@ -168,38 +219,73 @@ class HdfPlan:
  hdf_path (Path): Path to the HEC-RAS plan HDF file.

  Returns:
- Dict: A dictionary containing the precipitation attributes.
-
- Raises:
- ValueError: If there's an error retrieving the precipitation attributes.
+ Dict: Precipitation attributes including method, time series data,
+ and spatial distribution if available. Returns empty dict if
+ no precipitation data exists.
  """
  try:
- return HdfUtils.get_attrs(hdf_path, "Event Conditions/Meteorology/Precipitation")
+ with h5py.File(hdf_path, 'r') as hdf_file:
+ precip_path = "Event Conditions/Meteorology/Precipitation"
+ if precip_path not in hdf_file:
+ logger.error(f"Precipitation data not found in {hdf_path}")
+ return {}
+
+ attrs = {}
+ for key in hdf_file[precip_path].attrs.keys():
+ value = hdf_file[precip_path].attrs[key]
+ if isinstance(value, bytes):
+ value = HdfUtils.convert_ras_string(value)
+ attrs[key] = value
+
+ return attrs
  except Exception as e:
- raise ValueError(f"Failed to get precipitation attributes: {str(e)}")
-
+ logger.error(f"Failed to get precipitation attributes: {str(e)}")
+ return {}
+
  @staticmethod
  @log_call
  @standardize_input(file_type='plan_hdf')
- def get_geom_attrs(hdf_path: Path) -> Dict:
+ def get_geometry_information(hdf_path: Path) -> pd.DataFrame:
  """
- Get geometry attributes from a HEC-RAS HDF plan file.
+ Get root level geometry attributes from the HDF plan file.

  Args:
  hdf_path (Path): Path to the HEC-RAS plan HDF file.

  Returns:
- Dict: A dictionary containing the geometry attributes.
+ pd.DataFrame: DataFrame with geometry attributes including Creation Date/Time,
+ Version, Units, and Projection information.

  Raises:
- ValueError: If there's an error retrieving the geometry attributes.
+ ValueError: If Geometry group is missing or there's an error reading attributes.
  """
+ print(f"Getting geometry attributes from {hdf_path}")
  try:
- return HdfUtils.get_attrs(hdf_path, "Geometry")
+ with h5py.File(hdf_path, 'r') as hdf_file:
+ geom_attrs_path = "Geometry"
+ print(f"Checking for Geometry group in {hdf_path}")
+ if geom_attrs_path not in hdf_file:
+ raise ValueError(f"Geometry group not found in {hdf_path}")
+
+ attrs = {}
+ geom_group = hdf_file[geom_attrs_path]
+ print("Getting root level geometry attributes")
+ # Get root level geometry attributes only
+ for key, value in geom_group.attrs.items():
+ if isinstance(value, bytes):
+ try:
+ value = HdfUtils.convert_ras_string(value)
+ except UnicodeDecodeError:
+ logger.warning(f"Failed to decode byte string for root attribute {key}")
+ continue
+ attrs[key] = value
+
+ print("Successfully extracted root level geometry attributes")
+ return pd.DataFrame.from_dict(attrs, orient='index', columns=['Value'])
+
+ except (OSError, RuntimeError) as e:
+ raise ValueError(f"Failed to read HDF file {hdf_path}: {str(e)}")
  except Exception as e:
  raise ValueError(f"Failed to get geometry attributes: {str(e)}")


-
-
-
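The refactored getters above (plan information, plan parameters, precipitation) and get_geometry_information all follow the same pattern: open the file, verify the group exists, then copy its HDF5 attributes into a dict while decoding byte strings. A standalone sketch of that pattern in plain h5py; HdfUtils.convert_ras_string is the library helper used in the diff, and the plain .decode() below is only a stand-in for it:

    import h5py

    def read_group_attrs(hdf_path, group_path):
        """Return a dict of a group's HDF5 attributes with byte strings decoded."""
        with h5py.File(hdf_path, "r") as hdf_file:
            if group_path not in hdf_file:
                raise ValueError(f"{group_path} not found in {hdf_path}")
            attrs = {}
            for key, value in hdf_file[group_path].attrs.items():
                if isinstance(value, bytes):
                    value = value.decode("utf-8", errors="replace")  # stand-in for HdfUtils.convert_ras_string
                attrs[key] = value
            return attrs

    # e.g. read_group_attrs("project.p01.hdf", "Plan Data/Plan Information")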
ras_commander/HdfPlot.py ADDED
@@ -0,0 +1,104 @@
+ """
+ Class: HdfPlot
+
+ A collection of static methods for plotting general HDF data from HEC-RAS models.
+ """
+
+ import matplotlib.pyplot as plt
+ import pandas as pd
+ import geopandas as gpd
+ from typing import Optional, Union, Tuple
+ from .Decorators import log_call, standardize_input
+ from .HdfUtils import HdfUtils
+
+ class HdfPlot:
+ """
+ A class containing static methods for plotting general HDF data from HEC-RAS models.
+
+ This class provides plotting functionality for HDF data, focusing on
+ geometric elements like cell polygons and time series data.
+ """
+
+ @staticmethod
+ @log_call
+ def plot_mesh_cells(
+ cell_polygons_df: pd.DataFrame, ## THIS IS A GEODATAFRAME - NEED TO EDIT BOTH ARGUMENT AND USAGE
+ projection: str,
+ title: str = '2D Flow Area Mesh Cells',
+ figsize: Tuple[int, int] = (12, 8)
+ ) -> Optional[gpd.GeoDataFrame]:
+ """
+ Plots the mesh cells from the provided DataFrame and returns the GeoDataFrame.
+
+ Args:
+ cell_polygons_df (pd.DataFrame): DataFrame containing cell polygons.
+ projection (str): The coordinate reference system to assign to the GeoDataFrame.
+ title (str, optional): Plot title. Defaults to '2D Flow Area Mesh Cells'.
+ figsize (Tuple[int, int], optional): Figure size. Defaults to (12, 8).
+
+ Returns:
+ Optional[gpd.GeoDataFrame]: GeoDataFrame containing the mesh cells, or None if no cells found.
+ """
+ if cell_polygons_df.empty:
+ print("No Cell Polygons found.")
+ return None
+
+ # Convert any datetime columns to strings using HdfUtils
+ cell_polygons_df = HdfUtils.convert_df_datetimes_to_str(cell_polygons_df)
+
+ cell_polygons_gdf = gpd.GeoDataFrame(cell_polygons_df, crs=projection)
+
+ print("Cell Polygons CRS:", cell_polygons_gdf.crs)
+ display(cell_polygons_gdf.head())
+
+ fig, ax = plt.subplots(figsize=figsize)
+ cell_polygons_gdf.plot(ax=ax, edgecolor='blue', facecolor='none')
+ ax.set_xlabel('X Coordinate')
+ ax.set_ylabel('Y Coordinate')
+ ax.set_title(title)
+ ax.grid(True)
+ plt.tight_layout()
+ plt.show()
+
+ return cell_polygons_gdf
+
+ @staticmethod
+ @log_call
+ def plot_time_series(
+ df: pd.DataFrame,
+ x_col: str,
+ y_col: str,
+ title: str = None,
+ figsize: Tuple[int, int] = (12, 6)
+ ) -> None:
+ """
+ Plots time series data from HDF results.
+
+ Args:
+ df (pd.DataFrame): DataFrame containing the time series data
+ x_col (str): Name of the column containing x-axis data (usually time)
+ y_col (str): Name of the column containing y-axis data
+ title (str, optional): Plot title. Defaults to None.
+ figsize (Tuple[int, int], optional): Figure size. Defaults to (12, 6).
+ """
+ # Convert any datetime columns to strings
+ df = HdfUtils.convert_df_datetimes_to_str(df)
+
+ fig, ax = plt.subplots(figsize=figsize)
+ df.plot(x=x_col, y=y_col, ax=ax)
+
+ if title:
+ ax.set_title(title)
+ ax.grid(True)
+ plt.tight_layout()
+ plt.show()
+
+
+
+
+
+
+
+
+
+
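As the inline comment in plot_mesh_cells notes, the cell_polygons_df argument is effectively a GeoDataFrame, and the method calls display(), so it assumes an IPython/Jupyter session. An illustrative call with a hand-built GeoDataFrame standing in for real mesh-cell polygons, assuming HdfPlot is exported from the package root like the other Hdf* classes:

    import geopandas as gpd
    from shapely.geometry import Polygon
    from ras_commander import HdfPlot

    # Two toy square cells in place of polygons extracted from a plan/geometry HDF
    cells = gpd.GeoDataFrame(
        {"cell_id": [0, 1]},
        geometry=[Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]),
                  Polygon([(1, 0), (2, 0), (2, 1), (1, 1)])],
    )
    HdfPlot.plot_mesh_cells(cells, projection="EPSG:4326", title="Two example cells")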
ras_commander/HdfPump.py CHANGED
@@ -1,3 +1,19 @@
+ """
+ Class: HdfPump
+
+ All of the methods in this class are static and are designed to be used without instantiation.
+
+ List of Functions in HdfPump:
+ - get_pump_stations()
+ - get_pump_groups()
+ - get_pump_station_timeseries()
+ - get_pump_station_summary()
+ - get_pump_operation_timeseries()
+
+
+ """
+
+
  import h5py
  import numpy as np
  import pandas as pd
@@ -16,6 +32,14 @@ logger = get_logger(__name__)
  class HdfPump:
  """
  A class for handling pump station related data from HEC-RAS HDF files.
+
+ This class provides static methods to extract and process pump station data, including:
+ - Pump station locations and attributes
+ - Pump group configurations and efficiency curves
+ - Time series results for pump operations
+ - Summary statistics for pump stations
+
+ All methods are static and designed to work with HEC-RAS HDF files containing pump data.
  """

  @staticmethod
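A minimal usage sketch for the pump-station getters listed above; the plan HDF path is illustrative and assumes the model actually contains pump stations:

    from pathlib import Path
    from ras_commander import HdfPump

    plan_hdf = Path("project.p01.hdf")  # illustrative plan HDF containing pump stations

    stations = HdfPump.get_pump_stations(plan_hdf)  # GeoDataFrame: point geometry + station_id
    groups = HdfPump.get_pump_groups(plan_hdf)      # DataFrame with efficiency curve columns
    print(stations.head())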
@@ -26,13 +50,17 @@ class HdfPump:
  Extract pump station data from the HDF file.

  Args:
- hdf_path (Path): Path to the HDF file.
+ hdf_path (Path): Path to the HEC-RAS HDF file.

  Returns:
- gpd.GeoDataFrame: GeoDataFrame containing pump station data.
+ gpd.GeoDataFrame: GeoDataFrame containing pump station data with columns:
+ - geometry: Point geometry of pump station location
+ - station_id: Unique identifier for each pump station
+ - Additional attributes from the HDF file

  Raises:
- KeyError: If the required datasets are not found in the HDF file.
+ KeyError: If pump station datasets are not found in the HDF file.
+ Exception: If there are errors processing the pump station data.
  """
  try:
  with h5py.File(hdf_path, 'r') as hdf:
@@ -57,7 +85,7 @@ class HdfPump:
  gdf[col] = attr_df[col]

  # Set CRS if available
- crs = HdfUtils.projection(hdf_path)
+ crs = HdfBase.get_projection(hdf_path)
  if crs:
  gdf.set_crs(crs, inplace=True)

@@ -78,13 +106,18 @@ class HdfPump:
  Extract pump group data from the HDF file.

  Args:
- hdf_path (Path): Path to the HDF file.
+ hdf_path (Path): Path to the HEC-RAS HDF file.

  Returns:
- pd.DataFrame: DataFrame containing pump group data.
+ pd.DataFrame: DataFrame containing pump group data with columns:
+ - efficiency_curve_start: Starting index of efficiency curve data
+ - efficiency_curve_count: Number of points in efficiency curve
+ - efficiency_curve: List of efficiency curve values
+ - Additional attributes from the HDF file

  Raises:
- KeyError: If the required datasets are not found in the HDF file.
+ KeyError: If pump group datasets are not found in the HDF file.
+ Exception: If there are errors processing the pump group data.
  """
  try:
  with h5py.File(hdf_path, 'r') as hdf:
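The efficiency_curve column described above holds a list of values per pump group, so a single row can be inspected or plotted directly. A short sketch using the column names from the docstring; how the stored values map to flow/efficiency pairs is not documented in this hunk, so they are plotted as-is:

    import matplotlib.pyplot as plt
    from pathlib import Path
    from ras_commander import HdfPump

    groups = HdfPump.get_pump_groups(Path("project.p01.hdf"))  # illustrative path, as above
    curve = groups.loc[0, "efficiency_curve"]  # stored curve values for the first pump group
    plt.plot(curve)
    plt.title("Pump group 0 - efficiency curve values as stored")
    plt.show()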
@@ -123,18 +156,23 @@ class HdfPump:
  @standardize_input(file_type='plan_hdf')
  def get_pump_station_timeseries(hdf_path: Path, pump_station: str) -> xr.DataArray:
  """
- Extract timeseries data for a specific pump station.
+ Extract timeseries results data for a specific pump station.

  Args:
- hdf_path (Path): Path to the HDF file.
- pump_station (str): Name of the pump station.
+ hdf_path (Path): Path to the HEC-RAS HDF file.
+ pump_station (str): Name or identifier of the pump station.

  Returns:
- xr.DataArray: DataArray containing the timeseries data.
+ xr.DataArray: DataArray containing the timeseries data with dimensions:
+ - time: Timestamps of simulation
+ - variable: Variables including ['Flow', 'Stage HW', 'Stage TW',
+ 'Pump Station', 'Pumps on']
+ Attributes include units and pump station name.

  Raises:
- KeyError: If the required datasets are not found in the HDF file.
- ValueError: If the specified pump station is not found.
+ KeyError: If required datasets are not found in the HDF file.
+ ValueError: If the specified pump station name is not found.
+ Exception: If there are errors processing the timeseries data.
  """
  try:
  with h5py.File(hdf_path, 'r') as hdf:
@@ -147,8 +185,8 @@ class HdfPump:
  data_path = f"{pumping_stations_path}/{pump_station}/Structure Variables"
  data = hdf[data_path][()]

- # Extract time information
- time = HdfBase._get_unsteady_datetimes(hdf)
+ # Extract time information - Updated to use new method name
+ time = HdfBase.get_unsteady_timestamps(hdf)

  # Create DataArray
  da = xr.DataArray(
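The returned DataArray is labeled along a 'variable' dimension, so individual series can be pulled out with .sel(). A short sketch using the variable names from the docstring above; the station name and path are illustrative:

    from pathlib import Path
    from ras_commander import HdfPump

    plan_hdf = Path("project.p01.hdf")
    da = HdfPump.get_pump_station_timeseries(plan_hdf, "Pump Station 1")  # illustrative station name
    flow = da.sel(variable="Flow")             # flow series for this station
    print(float(flow.max()))                   # peak pumped flow over the run
    flow_df = flow.to_dataframe(name="Flow")   # convert to pandas for further analysis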
@@ -180,16 +218,19 @@ class HdfPump:
  @standardize_input(file_type='plan_hdf')
  def get_pump_station_summary(hdf_path: Path) -> pd.DataFrame:
  """
- Extract summary data for pump stations from the HDF file.
+ Extract summary statistics and performance data for all pump stations.

  Args:
- hdf_path (Path): Path to the HDF file.
+ hdf_path (Path): Path to the HEC-RAS HDF file.

  Returns:
- pd.DataFrame: DataFrame containing pump station summary data.
+ pd.DataFrame: DataFrame containing pump station summary data including
+ operational statistics and performance metrics. Returns empty DataFrame
+ if no summary data is found.

  Raises:
- KeyError: If the required datasets are not found in the HDF file.
+ KeyError: If the summary dataset is not found in the HDF file.
+ Exception: If there are errors processing the summary data.
  """
  try:
  with h5py.File(hdf_path, 'r') as hdf:
@@ -219,20 +260,27 @@ class HdfPump:
  @staticmethod
  @log_call
  @standardize_input(file_type='plan_hdf')
- def get_pump_operation_data(hdf_path: Path, pump_station: str) -> pd.DataFrame:
+ def get_pump_operation_timeseries(hdf_path: Path, pump_station: str) -> pd.DataFrame:
  """
- Extract pump operation data for a specific pump station.
+ Extract detailed pump operation results data for a specific pump station.

  Args:
- hdf_path (Path): Path to the HDF file.
- pump_station (str): Name of the pump station.
+ hdf_path (Path): Path to the HEC-RAS HDF file.
+ pump_station (str): Name or identifier of the pump station.

  Returns:
- pd.DataFrame: DataFrame containing pump operation data.
+ pd.DataFrame: DataFrame containing pump operation data with columns:
+ - Time: Simulation timestamps
+ - Flow: Pump flow rate
+ - Stage HW: Headwater stage
+ - Stage TW: Tailwater stage
+ - Pump Station: Station identifier
+ - Pumps on: Number of active pumps

  Raises:
- KeyError: If the required datasets are not found in the HDF file.
- ValueError: If the specified pump station is not found.
+ KeyError: If required datasets are not found in the HDF file.
+ ValueError: If the specified pump station name is not found.
+ Exception: If there are errors processing the operation data.
  """
  try:
  with h5py.File(hdf_path, 'r') as hdf:
@@ -245,8 +293,8 @@ class HdfPump:
  data_path = f"{pump_stations_path}/{pump_station}/Structure Variables"
  data = hdf[data_path][()]

- # Extract time information
- time = HdfBase._get_unsteady_datetimes(hdf)
+ # Extract time information - Updated to use new method name
+ time = HdfBase.get_unsteady_timestamps(hdf)

  # Create DataFrame and decode byte strings
  df = pd.DataFrame(data, columns=['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on'])
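The final step above builds a DataFrame and then decodes byte strings; the decoding itself falls outside this hunk. A generic sketch of that step, offered as a stand-in rather than the library's exact implementation:

    import pandas as pd

    def decode_byte_columns(df: pd.DataFrame) -> pd.DataFrame:
        """Decode bytes values in object columns to str, leaving other values untouched."""
        out = df.copy()
        for col in out.columns:
            if out[col].dtype == object:
                out[col] = out[col].map(
                    lambda v: v.decode("utf-8", errors="replace") if isinstance(v, bytes) else v
                )
        return out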