ras-commander 0.42.0__py3-none-any.whl → 0.43.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,392 @@
+ from typing import Dict, List, Union, Optional
+ from pathlib import Path
+ import h5py
+ import pandas as pd
+ import xarray as xr
+ from .Decorators import standardize_input, log_call
+ from .HdfBase import HdfBase
+ from .HdfResultsXsec import HdfResultsXsec
+ from .LoggingConfig import get_logger
+ import dask.array as da
+ from datetime import datetime
+ import numpy as np
+
+ logger = get_logger(__name__)
+
+
+ class HdfResultsPlan:
+     """
+     A class for handling HEC-RAS plan HDF file results related to unsteady flow and reference line/point outputs.
+
+     This class provides methods for extracting and analyzing data from HEC-RAS plan HDF files,
+     focusing on unsteady flow results, volume accounting, and reference line/point time series outputs.
+
+     Methods in this class use the @standardize_input decorator to handle different input types
+     (e.g., plan number, file path) and the @log_call decorator for logging method calls.
+
+     Attributes:
+         None
+
+     Note:
+         This class is designed to work with HEC-RAS plan HDF files and requires the HdfBase class
+         for some of its operations.
+     """
+
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def get_results_unsteady_attrs(hdf_path: Path) -> Dict:
+         """
+         Get unsteady attributes from a HEC-RAS HDF plan file.
+
+         Args:
+             hdf_path (Path): Path to the HEC-RAS plan HDF file.
+
+         Returns:
+             Dict: A dictionary containing the unsteady attributes.
+
+         Raises:
+             FileNotFoundError: If the specified HDF file is not found.
+             KeyError: If the "Results/Unsteady" group is not found in the HDF file.
+         """
+         try:
+             with h5py.File(hdf_path, 'r') as hdf_file:
+                 if "Results/Unsteady" not in hdf_file:
+                     raise KeyError("Results/Unsteady group not found in the HDF file.")
+                 return dict(hdf_file["Results/Unsteady"].attrs)
+         except FileNotFoundError:
+             raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+         except Exception as e:
+             raise RuntimeError(f"Error reading unsteady attributes: {str(e)}")
+
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def get_results_unsteady_summary_attrs(hdf_path: Path) -> Dict:
+         """
+         Get results unsteady summary attributes from a HEC-RAS HDF plan file.
+
+         Args:
+             hdf_path (Path): Path to the HEC-RAS plan HDF file.
+
+         Returns:
+             Dict: A dictionary containing the results unsteady summary attributes.
+
+         Raises:
+             FileNotFoundError: If the specified HDF file is not found.
+             KeyError: If the "Results/Unsteady/Summary" group is not found in the HDF file.
+         """
+         try:
+             with h5py.File(hdf_path, 'r') as hdf_file:
+                 if "Results/Unsteady/Summary" not in hdf_file:
+                     raise KeyError("Results/Unsteady/Summary group not found in the HDF file.")
+                 return dict(hdf_file["Results/Unsteady/Summary"].attrs)
+         except FileNotFoundError:
+             raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+         except Exception as e:
+             raise RuntimeError(f"Error reading unsteady summary attributes: {str(e)}")
+
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def get_results_volume_accounting_attrs(hdf_path: Path) -> Dict:
+         """
+         Get volume accounting attributes from a HEC-RAS HDF plan file.
+
+         Args:
+             hdf_path (Path): Path to the HEC-RAS plan HDF file.
+
+         Returns:
+             Dict: A dictionary containing the volume accounting attributes.
+
+         Raises:
+             FileNotFoundError: If the specified HDF file is not found.
+             KeyError: If the "Results/Unsteady/Summary/Volume Accounting" group is not found in the HDF file.
+         """
+         try:
+             with h5py.File(hdf_path, 'r') as hdf_file:
+                 if "Results/Unsteady/Summary/Volume Accounting" not in hdf_file:
+                     raise KeyError("Results/Unsteady/Summary/Volume Accounting group not found in the HDF file.")
+                 return dict(hdf_file["Results/Unsteady/Summary/Volume Accounting"].attrs)
+         except FileNotFoundError:
+             raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+         except Exception as e:
+             raise RuntimeError(f"Error reading volume accounting attributes: {str(e)}")
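+
+     # Example usage of the three attribute getters above (editor's sketch; the plan
+     # HDF path is hypothetical, not part of the package):
+     #
+     #     plan_hdf = Path("Muncie.p01.hdf")
+     #     unsteady_attrs = HdfResultsPlan.get_results_unsteady_attrs(plan_hdf)
+     #     summary_attrs = HdfResultsPlan.get_results_unsteady_summary_attrs(plan_hdf)
+     #     volume_attrs = HdfResultsPlan.get_results_volume_accounting_attrs(plan_hdf)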
+
+     @staticmethod
+     @standardize_input(file_type='plan_hdf')
+     def get_runtime_data(hdf_path: Path) -> Optional[pd.DataFrame]:
+         """
+         Extract runtime and compute time data from a single HDF file.
+
+         Args:
+             hdf_path (Path): The full path to the HDF file.
+
+         Returns:
+             Optional[pd.DataFrame]: DataFrame containing runtime and compute time data, or None if data extraction fails.
+         """
+         if hdf_path is None:
+             logger.error(f"Could not find HDF file for input")
+             return None
+
+         with h5py.File(hdf_path, 'r') as hdf_file:
+             logger.info(f"Extracting Plan Information from: {Path(hdf_file.filename).name}")
+             plan_info = hdf_file.get('/Plan Data/Plan Information')
+             if plan_info is None:
+                 logger.warning("Group '/Plan Data/Plan Information' not found.")
+                 return None
+
+             plan_name = plan_info.attrs.get('Plan Name', 'Unknown')
+             plan_name = plan_name.decode('utf-8') if isinstance(plan_name, bytes) else plan_name
+             logger.info(f"Plan Name: {plan_name}")
+
+             start_time_str = plan_info.attrs.get('Simulation Start Time', 'Unknown')
+             end_time_str = plan_info.attrs.get('Simulation End Time', 'Unknown')
+             start_time_str = start_time_str.decode('utf-8') if isinstance(start_time_str, bytes) else start_time_str
+             end_time_str = end_time_str.decode('utf-8') if isinstance(end_time_str, bytes) else end_time_str
+
+             start_time = datetime.strptime(start_time_str, "%d%b%Y %H:%M:%S")
+             end_time = datetime.strptime(end_time_str, "%d%b%Y %H:%M:%S")
+             simulation_duration = end_time - start_time
+             simulation_hours = simulation_duration.total_seconds() / 3600
+
+             logger.info(f"Simulation Start Time: {start_time_str}")
+             logger.info(f"Simulation End Time: {end_time_str}")
+             logger.info(f"Simulation Duration (hours): {simulation_hours}")
+
+             compute_processes = hdf_file.get('/Results/Summary/Compute Processes')
+             if compute_processes is None:
+                 logger.warning("Dataset '/Results/Summary/Compute Processes' not found.")
+                 return None
+
+             process_names = [name.decode('utf-8') for name in compute_processes['Process'][:]]
+             filenames = [filename.decode('utf-8') for filename in compute_processes['Filename'][:]]
+             completion_times = compute_processes['Compute Time (ms)'][:]
+
+             compute_processes_df = pd.DataFrame({
+                 'Process': process_names,
+                 'Filename': filenames,
+                 'Compute Time (ms)': completion_times,
+                 'Compute Time (s)': completion_times / 1000,
+                 'Compute Time (hours)': completion_times / (1000 * 3600)
+             })
+
+             logger.debug("Compute processes DataFrame:")
+             logger.debug(compute_processes_df)
+
+             compute_processes_summary = {
+                 'Plan Name': [plan_name],
+                 'File Name': [Path(hdf_file.filename).name],
+                 'Simulation Start Time': [start_time_str],
+                 'Simulation End Time': [end_time_str],
+                 'Simulation Duration (s)': [simulation_duration.total_seconds()],
+                 'Simulation Time (hr)': [simulation_hours],
+                 'Completing Geometry (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Completing Geometry']['Compute Time (hours)'].values[0] if 'Completing Geometry' in compute_processes_df['Process'].values else 'N/A'],
+                 'Preprocessing Geometry (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Preprocessing Geometry']['Compute Time (hours)'].values[0] if 'Preprocessing Geometry' in compute_processes_df['Process'].values else 'N/A'],
+                 'Completing Event Conditions (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Completing Event Conditions']['Compute Time (hours)'].values[0] if 'Completing Event Conditions' in compute_processes_df['Process'].values else 'N/A'],
+                 'Unsteady Flow Computations (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Unsteady Flow Computations']['Compute Time (hours)'].values[0] if 'Unsteady Flow Computations' in compute_processes_df['Process'].values else 'N/A'],
+                 'Complete Process (hr)': [compute_processes_df['Compute Time (hours)'].sum()]
+             }
+
+             compute_processes_summary['Unsteady Flow Speed (hr/hr)'] = [simulation_hours / compute_processes_summary['Unsteady Flow Computations (hr)'][0] if compute_processes_summary['Unsteady Flow Computations (hr)'][0] != 'N/A' else 'N/A']
+             compute_processes_summary['Complete Process Speed (hr/hr)'] = [simulation_hours / compute_processes_summary['Complete Process (hr)'][0] if compute_processes_summary['Complete Process (hr)'][0] != 'N/A' else 'N/A']
+
+             compute_summary_df = pd.DataFrame(compute_processes_summary)
+             logger.debug("Compute summary DataFrame:")
+             logger.debug(compute_summary_df)
+
+             return compute_summary_df
+
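+     # Example usage of get_runtime_data (editor's sketch; the plan HDF path is hypothetical):
+     #
+     #     runtime_df = HdfResultsPlan.get_runtime_data(Path("Muncie.p01.hdf"))
+     #     if runtime_df is not None:
+     #         print(runtime_df[["Plan Name", "Unsteady Flow Computations (hr)",
+     #                           "Unsteady Flow Speed (hr/hr)"]])
+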
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def reference_timeseries_output(hdf_path: Path, reftype: str = "lines") -> xr.Dataset:
+         """
+         Get timeseries output for reference lines or points.
+
+         Args:
+             hdf_path (Path): Path to the HDF file.
+             reftype (str): Type of reference, either "lines" or "points" (default "lines").
+
+         Returns:
+             xr.Dataset: Dataset containing the timeseries output for reference lines or points.
+
+         Raises:
+             FileNotFoundError: If the specified HDF file is not found.
+             ValueError: If an invalid reftype is provided.
+         """
+         try:
+             with h5py.File(hdf_path, 'r') as hdf_file:
+                 return HdfResultsPlan._reference_timeseries_output(hdf_file, reftype)
+         except FileNotFoundError:
+             raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+         except ValueError as ve:
+             raise ValueError(f"Invalid reftype: {str(ve)}")
+         except Exception as e:
+             raise RuntimeError(f"Error getting reference timeseries output: {str(e)}")
+
+     @staticmethod
+     def _reference_timeseries_output(hdf_file: h5py.File, reftype: str = "lines") -> xr.Dataset:
+         """
+         Private method to return timeseries output data for reference lines or points from a HEC-RAS HDF plan file.
+
+         Parameters
+         ----------
+         hdf_file : h5py.File
+             Open HDF file object.
+         reftype : str, optional
+             The type of reference data to retrieve. Must be either "lines" or "points".
+             (default: "lines")
+
+         Returns
+         -------
+         xr.Dataset
+             An xarray Dataset with reference line or point timeseries data.
+             Returns an empty Dataset if the reference output data is not found.
+
+         Raises
+         ------
+         ValueError
+             If reftype is not "lines" or "points".
+         """
+         if reftype == "lines":
+             output_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/Reference Lines"
+             abbrev = "refln"
+         elif reftype == "points":
+             output_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/Reference Points"
+             abbrev = "refpt"
+         else:
+             raise ValueError('reftype must be either "lines" or "points".')
+
+         try:
+             reference_group = hdf_file[output_path]
+         except KeyError:
+             logger.error(f"Could not find HDF group at path '{output_path}'. "
+                          f"The Plan HDF file may not contain reference {reftype[:-1]} output data.")
+             return xr.Dataset()
+
+         # Reference feature names are stored as "name|mesh area" strings
+         reference_names = reference_group["Name"][:]
+         names = []
+         mesh_areas = []
+         for s in reference_names:
+             name, mesh_area = s.decode("utf-8").split("|")
+             names.append(name)
+             mesh_areas.append(mesh_area)
+
+         times = HdfBase._get_unsteady_datetimes(hdf_file)
+
+         das = {}
+         for var in ["Flow", "Velocity", "Water Surface"]:
+             group = reference_group.get(var)
+             if group is None:
+                 continue
+             try:
+                 # Lazily wrap the HDF dataset with dask where possible
+                 values = da.from_array(group, chunks=group.chunks)
+             except ImportError:
+                 values = group[:]
+             units = group.attrs["Units"].decode("utf-8")
+             # Assemble a labeled DataArray indexed by time and reference feature id
+             # (named data_array so the dask.array alias "da" is not shadowed)
+             data_array = xr.DataArray(
+                 values,
+                 name=var,
+                 dims=["time", f"{abbrev}_id"],
+                 coords={
+                     "time": times,
+                     f"{abbrev}_id": range(values.shape[1]),
+                     f"{abbrev}_name": (f"{abbrev}_id", names),
+                     "mesh_name": (f"{abbrev}_id", mesh_areas),
+                 },
+                 attrs={"units": units, "hdf_path": f"{output_path}/{var}"},
+             )
+             das[var] = data_array
+         return xr.Dataset(das)
+
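+     # Example usage of the reference line output (editor's sketch; the plan HDF path is hypothetical):
+     #
+     #     ds = HdfResultsPlan.reference_timeseries_output(Path("Muncie.p01.hdf"), reftype="lines")
+     #     flow = ds["Flow"]                              # dims: (time, refln_id)
+     #     peak_flow = float(flow.isel(refln_id=0).max())  # peak flow at the first reference line
+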
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def reference_lines_timeseries_output(hdf_path: Path) -> xr.Dataset:
+         """
+         Get timeseries output for reference lines.
+
+         Args:
+             hdf_path (Path): Path to the HDF file.
+
+         Returns:
+             xr.Dataset: Dataset containing the timeseries output for reference lines.
+
+         Raises:
+             FileNotFoundError: If the specified HDF file is not found.
+         """
+         return HdfResultsPlan.reference_timeseries_output(hdf_path, reftype="lines")
+
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def reference_points_timeseries_output(hdf_path: Path) -> xr.Dataset:
+         """
+         Get timeseries output for reference points.
+
+         Args:
+             hdf_path (Path): Path to the HDF file.
+
+         Returns:
+             xr.Dataset: Dataset containing the timeseries output for reference points.
+
+         Raises:
+             FileNotFoundError: If the specified HDF file is not found.
+         """
+         return HdfResultsPlan.reference_timeseries_output(hdf_path, reftype="points")
+
+     @staticmethod
+     @log_call
+     @standardize_input(file_type='plan_hdf')
+     def reference_summary_output(hdf_path: Path, reftype: str = "lines") -> pd.DataFrame:
+         """
+         Get summary output for reference lines or points.
+
+         Args:
+             hdf_path (Path): Path to the HDF file.
+             reftype (str): Type of reference, either "lines" or "points" (default "lines").
+
+         Returns:
+             pd.DataFrame: DataFrame containing the summary output for reference lines or points.
+
+         Raises:
+             ValueError: If an invalid reftype is provided.
+         """
+         if not hdf_path.exists():
+             logger.error(f"HDF file not found: {hdf_path}")
+             return pd.DataFrame()  # Return an empty DataFrame if the path doesn't exist
+
+         try:
+             # Get the timeseries output
+             ds = HdfResultsPlan.reference_timeseries_output(hdf_path, reftype)
+
+             # The timeseries dataset is indexed by refln_id (lines) or refpt_id (points)
+             id_dim = "refln_id" if reftype == "lines" else "refpt_id"
+             if id_dim not in ds.dims:
+                 logger.error(f"No '{id_dim}' dimension found in the dataset.")
+                 return pd.DataFrame()  # Return an empty DataFrame if the id dimension is missing
+
+             # Calculate summary statistics (min/max/mean over time) for each reference feature
+             variables = list(ds.data_vars)
+             summary = ds.to_dataframe().reset_index().groupby(id_dim)[variables].agg(['min', 'max', 'mean'])
+
+             # Flatten column names
+             summary.columns = ['_'.join(col).strip() for col in summary.columns.values]
+
+             # Reset index to make the reference feature id a column
+             summary = summary.reset_index()
+
+             return summary
+         except ValueError as ve:
+             logger.error(f"Invalid reftype: {str(ve)}")
+             return pd.DataFrame()  # Return an empty DataFrame on ValueError
+         except Exception as e:
+             logger.error(f"Error in reference_summary_output: {str(e)}")
+             return pd.DataFrame()  # Return an empty DataFrame on general error
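
The methods above make up the unsteady-results side of this release. A minimal usage sketch follows; the plan HDF file name is hypothetical, and it assumes HdfResultsPlan is importable from the installed ras_commander package:

    from pathlib import Path
    from ras_commander import HdfResultsPlan

    plan_hdf = Path("Muncie.p01.hdf")  # hypothetical plan HDF file
    runtime_df = HdfResultsPlan.get_runtime_data(plan_hdf)                      # compute-time summary table
    ref_lines_ds = HdfResultsPlan.reference_lines_timeseries_output(plan_hdf)   # xarray Dataset of reference line output
    ref_summary_df = HdfResultsPlan.reference_summary_output(plan_hdf, reftype="lines")
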
@@ -0,0 +1,227 @@
+ import h5py
+ import numpy as np
+ import pandas as pd
+ from pathlib import Path
+ from typing import Union, Optional, List
+ from .HdfBase import HdfBase
+ from .HdfUtils import HdfUtils
+ from .Decorators import standardize_input, log_call
+ from .LoggingConfig import setup_logging, get_logger
+ import xarray as xr
+
+ logger = get_logger(__name__)
+
+
+ class HdfResultsXsec:
+     """
+     A class for handling cross-section results from HEC-RAS HDF files.
+
+     This class provides methods to extract and process steady flow simulation results
+     for cross-sections, including water surface elevations, flow rates, energy grades,
+     and additional parameters such as encroachment stations and velocities.
+
+     The class relies on the HdfBase and HdfUtils classes for core HDF file operations
+     and utility functions.
+
+     Attributes:
+         None
+
+     Methods:
+         steady_profile_xs_output: Extract steady profile cross-section output for a specified variable.
+         cross_sections_wsel: Get water surface elevation data for cross-sections.
+         cross_sections_flow: Get flow data for cross-sections.
+         cross_sections_energy_grade: Get energy grade data for cross-sections.
+         cross_sections_additional_enc_station_left: Get left encroachment station data for cross-sections.
+         cross_sections_additional_enc_station_right: Get right encroachment station data for cross-sections.
+         cross_sections_additional_area_total: Get total ineffective area data for cross-sections.
+         cross_sections_additional_velocity_total: Get total velocity data for cross-sections.
+     """
+
+     @staticmethod
+     @standardize_input(file_type='plan_hdf')
+     def steady_profile_xs_output(hdf_path: Path, var: str, round_to: int = 2) -> pd.DataFrame:
+         """
+         Create a DataFrame from steady cross section results based on the specified variable.
+
+         Parameters
+         ----------
+         hdf_path : Path
+             Path to the HEC-RAS plan HDF file.
+         var : str
+             The variable to extract from the steady cross section results.
+         round_to : int, optional
+             Number of decimal places to round the results to (default is 2).
+
+         Returns
+         -------
+         pd.DataFrame
+             DataFrame containing the steady cross section results for the specified variable.
+         """
+         XS_STEADY_OUTPUT_ADDITIONAL = [
+             "Additional Encroachment Station Left",
+             "Additional Encroachment Station Right",
+             "Additional Area Ineffective Total",
+             "Additional Velocity Total",
+         ]
+
+         try:
+             with h5py.File(hdf_path, 'r') as hdf_file:
+                 # Determine the correct path based on the variable
+                 if var in XS_STEADY_OUTPUT_ADDITIONAL:
+                     path = f"/Results/Steady/Cross Sections/Additional Output/{var}"
+                 else:
+                     path = f"/Results/Steady/Cross Sections/{var}"
+
+                 # Check if the path exists in the HDF file
+                 if path not in hdf_file:
+                     return pd.DataFrame()
+
+                 # Get the profile names
+                 profiles = HdfBase.steady_flow_names(hdf_path)
+
+                 # Extract the steady data
+                 steady_data = hdf_file[path]
+
+                 # Create a DataFrame with profiles as index
+                 df = pd.DataFrame(steady_data, index=profiles)
+
+                 # Transpose the DataFrame and round values
+                 df_t = df.T.copy()
+                 for p in profiles:
+                     df_t[p] = df_t[p].apply(lambda x: round(x, round_to))
+
+                 return df_t
+         except Exception as e:
+             logger.error(f"Failed to get steady profile cross section output: {str(e)}")
+             return pd.DataFrame()
+
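+     # Example usage of steady_profile_xs_output (editor's sketch; the plan HDF path is hypothetical):
+     #
+     #     wsel_df = HdfResultsXsec.steady_profile_xs_output(Path("Muncie.p01.hdf"), "Water Surface")
+     #     flow_df = HdfResultsXsec.steady_profile_xs_output(Path("Muncie.p01.hdf"), "Flow")
+     #     # Rows are cross sections, columns are steady flow profiles
+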
+     @staticmethod
+     @standardize_input(file_type='plan_hdf')
+     def cross_sections_wsel(hdf_path: Path) -> pd.DataFrame:
+         """
+         Return the water surface elevation information for each 1D Cross Section.
+
+         Parameters
+         ----------
+         hdf_path : Path
+             Path to the HEC-RAS plan HDF file.
+
+         Returns
+         -------
+         pd.DataFrame
+             A DataFrame containing the water surface elevations for each cross section and event.
+         """
+         return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Water Surface")
+
+     @staticmethod
+     @standardize_input(file_type='plan_hdf')
+     def cross_sections_flow(hdf_path: Path) -> pd.DataFrame:
+         """
+         Return the Flow information for each 1D Cross Section.
+
+         Parameters
+         ----------
+         hdf_path : Path
+             Path to the HEC-RAS plan HDF file.
+
+         Returns
+         -------
+         pd.DataFrame
+             A DataFrame containing the flow for each cross section and event.
+         """
+         return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Flow")
+
+     @staticmethod
+     @standardize_input(file_type='plan_hdf')
+     def cross_sections_energy_grade(hdf_path: Path) -> pd.DataFrame:
+         """
+         Return the energy grade information for each 1D Cross Section.
+
+         Parameters
+         ----------
+         hdf_path : Path
+             Path to the HEC-RAS plan HDF file.
+
+         Returns
+         -------
+         pd.DataFrame
+             A DataFrame containing the energy grade for each cross section and event.
+         """
+         return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Energy Grade")
+
+     @staticmethod
+     @standardize_input(file_type='plan_hdf')
+     def cross_sections_additional_enc_station_left(hdf_path: Path) -> pd.DataFrame:
+         """
+         Return the left side encroachment information for a floodway plan hdf.
+
+         Parameters
+         ----------
+         hdf_path : Path
+             Path to the HEC-RAS plan HDF file.
+
+         Returns
+         -------
+         pd.DataFrame
+             A DataFrame containing the cross sections left side encroachment stations.
+         """
+         return HdfResultsXsec.steady_profile_xs_output(
+             hdf_path, "Encroachment Station Left"
+         )
+
+     @staticmethod
+     @standardize_input(file_type='plan_hdf')
+     def cross_sections_additional_enc_station_right(hdf_path: Path) -> pd.DataFrame:
+         """
+         Return the right side encroachment information for a floodway plan hdf.
+
+         Parameters
+         ----------
+         hdf_path : Path
+             Path to the HEC-RAS plan HDF file.
+
+         Returns
+         -------
+         pd.DataFrame
+             A DataFrame containing the cross sections right side encroachment stations.
+         """
+         return HdfResultsXsec.steady_profile_xs_output(
+             hdf_path, "Encroachment Station Right"
+         )
+
+     @staticmethod
+     @standardize_input(file_type='plan_hdf')
+     def cross_sections_additional_area_total(hdf_path: Path) -> pd.DataFrame:
+         """
+         Return the 1D cross section area for each profile.
+
+         Parameters
+         ----------
+         hdf_path : Path
+             Path to the HEC-RAS plan HDF file.
+
+         Returns
+         -------
+         pd.DataFrame
+             A DataFrame containing the wet area inside the cross sections.
+         """
+         return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Area Ineffective Total")
+
+     @staticmethod
+     @standardize_input(file_type='plan_hdf')
+     def cross_sections_additional_velocity_total(hdf_path: Path) -> pd.DataFrame:
+         """
+         Return the 1D cross section velocity for each profile.
+
+         Parameters
+         ----------
+         hdf_path : Path
+             Path to the HEC-RAS plan HDF file.
+
+         Returns
+         -------
+         pd.DataFrame
+             A DataFrame containing the velocity inside the cross sections.
+         """
+         return HdfResultsXsec.steady_profile_xs_output(hdf_path, "Velocity Total")
+
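
The HdfResultsXsec methods above wrap steady_profile_xs_output with preset variable names, so a floodway review typically needs only a few calls. A minimal sketch, assuming a floodway plan HDF named Floodway.p02.hdf (hypothetical) and that HdfResultsXsec is importable from the installed ras_commander package:

    from pathlib import Path
    from ras_commander import HdfResultsXsec

    plan_hdf = Path("Floodway.p02.hdf")  # hypothetical floodway plan HDF file
    wsel = HdfResultsXsec.cross_sections_wsel(plan_hdf)        # water surface elevation by profile
    enc_left = HdfResultsXsec.cross_sections_additional_enc_station_left(plan_hdf)
    enc_right = HdfResultsXsec.cross_sections_additional_enc_station_right(plan_hdf)
    # Each DataFrame has one row per cross section and one column per steady flow profile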