ras-commander 0.61.0__py3-none-any.whl → 0.65.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,365 +1,381 @@
1
- """
2
- HdfResultsPlan: A module for extracting and analyzing HEC-RAS plan HDF file results.
3
-
4
- Attribution:
5
- Substantial code sourced/derived from https://github.com/fema-ffrd/rashdf
6
- Copyright (c) 2024 fema-ffrd, MIT license
7
-
8
- Description:
9
- Provides static methods for extracting unsteady flow results, volume accounting,
10
- and reference data from HEC-RAS plan HDF files.
11
-
12
- Available Functions:
13
- - get_unsteady_info: Extract unsteady attributes
14
- - get_unsteady_summary: Extract unsteady summary data
15
- - get_volume_accounting: Extract volume accounting data
16
- - get_runtime_data: Extract runtime and compute time data
17
-
18
- Note:
19
- All methods are static and designed to be used without class instantiation.
20
- """
21
-
22
- from typing import Dict, List, Union, Optional
23
- from pathlib import Path
24
- import h5py
25
- import pandas as pd
26
- import xarray as xr
27
- from .Decorators import standardize_input, log_call
28
- from .HdfUtils import HdfUtils
29
- from .HdfResultsXsec import HdfResultsXsec
30
- from .LoggingConfig import get_logger
31
- import numpy as np
32
- from datetime import datetime
33
- from .RasPrj import ras
34
-
35
- logger = get_logger(__name__)
36
-
37
-
38
- class HdfResultsPlan:
39
- """
40
- Handles extraction of results data from HEC-RAS plan HDF files.
41
-
42
- This class provides static methods for accessing and analyzing:
43
- - Unsteady flow results
44
- - Volume accounting data
45
- - Runtime statistics
46
- - Reference line/point time series outputs
47
-
48
- All methods use:
49
- - @standardize_input decorator for consistent file path handling
50
- - @log_call decorator for operation logging
51
- - HdfUtils class for common HDF operations
52
-
53
- Note:
54
- No instantiation required - all methods are static.
55
- """
56
-
57
- @staticmethod
58
- @log_call
59
- @standardize_input(file_type='plan_hdf')
60
- def get_unsteady_info(hdf_path: Path) -> pd.DataFrame:
61
- """
62
- Get unsteady attributes from a HEC-RAS HDF plan file.
63
-
64
- Args:
65
- hdf_path (Path): Path to the HEC-RAS plan HDF file.
66
- ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
67
-
68
- Returns:
69
- pd.DataFrame: A DataFrame containing the unsteady attributes.
70
-
71
- Raises:
72
- FileNotFoundError: If the specified HDF file is not found.
73
- KeyError: If the "Results/Unsteady" group is not found in the HDF file.
74
- """
75
- try:
76
- with h5py.File(hdf_path, 'r') as hdf_file:
77
- if "Results/Unsteady" not in hdf_file:
78
- raise KeyError("Results/Unsteady group not found in the HDF file.")
79
-
80
- # Create dictionary from attributes
81
- attrs_dict = dict(hdf_file["Results/Unsteady"].attrs)
82
-
83
- # Create DataFrame with a single row index
84
- return pd.DataFrame(attrs_dict, index=[0])
85
-
86
- except FileNotFoundError:
87
- raise FileNotFoundError(f"HDF file not found: {hdf_path}")
88
- except Exception as e:
89
- raise RuntimeError(f"Error reading unsteady attributes: {str(e)}")
90
-
91
- @staticmethod
92
- @log_call
93
- @standardize_input(file_type='plan_hdf')
94
- def get_unsteady_summary(hdf_path: Path) -> pd.DataFrame:
95
- """
96
- Get results unsteady summary attributes from a HEC-RAS HDF plan file.
97
-
98
- Args:
99
- hdf_path (Path): Path to the HEC-RAS plan HDF file.
100
- ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
101
-
102
- Returns:
103
- pd.DataFrame: A DataFrame containing the results unsteady summary attributes.
104
-
105
- Raises:
106
- FileNotFoundError: If the specified HDF file is not found.
107
- KeyError: If the "Results/Unsteady/Summary" group is not found in the HDF file.
108
- """
109
- try:
110
- with h5py.File(hdf_path, 'r') as hdf_file:
111
- if "Results/Unsteady/Summary" not in hdf_file:
112
- raise KeyError("Results/Unsteady/Summary group not found in the HDF file.")
113
-
114
- # Create dictionary from attributes
115
- attrs_dict = dict(hdf_file["Results/Unsteady/Summary"].attrs)
116
-
117
- # Create DataFrame with a single row index
118
- return pd.DataFrame(attrs_dict, index=[0])
119
-
120
- except FileNotFoundError:
121
- raise FileNotFoundError(f"HDF file not found: {hdf_path}")
122
- except Exception as e:
123
- raise RuntimeError(f"Error reading unsteady summary attributes: {str(e)}")
124
-
125
- @staticmethod
126
- @log_call
127
- @standardize_input(file_type='plan_hdf')
128
- def get_volume_accounting(hdf_path: Path) -> Optional[pd.DataFrame]:
129
- """
130
- Get volume accounting attributes from a HEC-RAS HDF plan file.
131
-
132
- Args:
133
- hdf_path (Path): Path to the HEC-RAS plan HDF file.
134
- ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
135
-
136
- Returns:
137
- Optional[pd.DataFrame]: DataFrame containing the volume accounting attributes,
138
- or None if the group is not found.
139
-
140
- Raises:
141
- FileNotFoundError: If the specified HDF file is not found.
142
- """
143
- try:
144
- with h5py.File(hdf_path, 'r') as hdf_file:
145
- if "Results/Unsteady/Summary/Volume Accounting" not in hdf_file:
146
- return None
147
-
148
- # Get attributes and convert to DataFrame
149
- attrs_dict = dict(hdf_file["Results/Unsteady/Summary/Volume Accounting"].attrs)
150
- return pd.DataFrame(attrs_dict, index=[0])
151
-
152
- except FileNotFoundError:
153
- raise FileNotFoundError(f"HDF file not found: {hdf_path}")
154
- except Exception as e:
155
- raise RuntimeError(f"Error reading volume accounting attributes: {str(e)}")
156
-
157
- @staticmethod
158
- @standardize_input(file_type='plan_hdf')
159
- def get_runtime_data(hdf_path: Path) -> Optional[pd.DataFrame]:
160
- """
161
- Extract detailed runtime and computational performance metrics from HDF file.
162
-
163
- Args:
164
- hdf_path (Path): Path to HEC-RAS plan HDF file
165
- ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
166
-
167
- Returns:
168
- Optional[pd.DataFrame]: DataFrame containing runtime statistics or None if data cannot be extracted
169
-
170
- Notes:
171
- - Times are reported in multiple units (ms, s, hours)
172
- - Compute speeds are calculated as simulation-time/compute-time ratios
173
- - Process times include: geometry, preprocessing, event conditions,
174
- and unsteady flow computations
175
- """
176
- try:
177
- if hdf_path is None:
178
- logger.error(f"Could not find HDF file for input")
179
- return None
180
-
181
- with h5py.File(hdf_path, 'r') as hdf_file:
182
- logger.info(f"Extracting Plan Information from: {Path(hdf_file.filename).name}")
183
- plan_info = hdf_file.get('/Plan Data/Plan Information')
184
- if plan_info is None:
185
- logger.warning("Group '/Plan Data/Plan Information' not found.")
186
- return None
187
-
188
- # Extract plan information
189
- plan_name = HdfUtils.convert_ras_string(plan_info.attrs.get('Plan Name', 'Unknown'))
190
- start_time_str = HdfUtils.convert_ras_string(plan_info.attrs.get('Simulation Start Time', 'Unknown'))
191
- end_time_str = HdfUtils.convert_ras_string(plan_info.attrs.get('Simulation End Time', 'Unknown'))
192
-
193
- try:
194
- # Check if times are already datetime objects
195
- if isinstance(start_time_str, datetime):
196
- start_time = start_time_str
197
- else:
198
- start_time = datetime.strptime(start_time_str, "%d%b%Y %H:%M:%S")
199
-
200
- if isinstance(end_time_str, datetime):
201
- end_time = end_time_str
202
- else:
203
- end_time = datetime.strptime(end_time_str, "%d%b%Y %H:%M:%S")
204
-
205
- simulation_duration = end_time - start_time
206
- simulation_hours = simulation_duration.total_seconds() / 3600
207
- except ValueError as e:
208
- logger.error(f"Error parsing simulation times: {e}")
209
- return None
210
-
211
- logger.info(f"Plan Name: {plan_name}")
212
- logger.info(f"Simulation Duration (hours): {simulation_hours}")
213
-
214
- # Extract compute processes data
215
- compute_processes = hdf_file.get('/Results/Summary/Compute Processes')
216
- if compute_processes is None:
217
- logger.warning("Dataset '/Results/Summary/Compute Processes' not found.")
218
- return None
219
-
220
- # Process compute times
221
- process_names = [HdfUtils.convert_ras_string(name) for name in compute_processes['Process'][:]]
222
- filenames = [HdfUtils.convert_ras_string(filename) for filename in compute_processes['Filename'][:]]
223
- completion_times = compute_processes['Compute Time (ms)'][:]
224
-
225
- compute_processes_df = pd.DataFrame({
226
- 'Process': process_names,
227
- 'Filename': filenames,
228
- 'Compute Time (ms)': completion_times,
229
- 'Compute Time (s)': completion_times / 1000,
230
- 'Compute Time (hours)': completion_times / (1000 * 3600)
231
- })
232
-
233
- # Create summary DataFrame
234
- compute_processes_summary = {
235
- 'Plan Name': [plan_name],
236
- 'File Name': [Path(hdf_file.filename).name],
237
- 'Simulation Start Time': [start_time_str],
238
- 'Simulation End Time': [end_time_str],
239
- 'Simulation Duration (s)': [simulation_duration.total_seconds()],
240
- 'Simulation Time (hr)': [simulation_hours]
241
- }
242
-
243
- # Add process-specific times
244
- process_types = {
245
- 'Completing Geometry': 'Completing Geometry (hr)',
246
- 'Preprocessing Geometry': 'Preprocessing Geometry (hr)',
247
- 'Completing Event Conditions': 'Completing Event Conditions (hr)',
248
- 'Unsteady Flow Computations': 'Unsteady Flow Computations (hr)'
249
- }
250
-
251
- for process, column in process_types.items():
252
- time_value = compute_processes_df[
253
- compute_processes_df['Process'] == process
254
- ]['Compute Time (hours)'].values[0] if process in process_names else 'N/A'
255
- compute_processes_summary[column] = [time_value]
256
-
257
- # Add total process time
258
- total_time = compute_processes_df['Compute Time (hours)'].sum()
259
- compute_processes_summary['Complete Process (hr)'] = [total_time]
260
-
261
- # Calculate speeds
262
- if compute_processes_summary['Unsteady Flow Computations (hr)'][0] != 'N/A':
263
- compute_processes_summary['Unsteady Flow Speed (hr/hr)'] = [
264
- simulation_hours / compute_processes_summary['Unsteady Flow Computations (hr)'][0]
265
- ]
266
- else:
267
- compute_processes_summary['Unsteady Flow Speed (hr/hr)'] = ['N/A']
268
-
269
- compute_processes_summary['Complete Process Speed (hr/hr)'] = [
270
- simulation_hours / total_time
271
- ]
272
-
273
- return pd.DataFrame(compute_processes_summary)
274
-
275
- except Exception as e:
276
- logger.error(f"Error in get_runtime_data: {str(e)}")
277
- return None
278
-
279
- @staticmethod
280
- @log_call
281
- @standardize_input(file_type='plan_hdf')
282
- def get_reference_timeseries(hdf_path: Path, reftype: str) -> pd.DataFrame:
283
- """
284
- Get reference line or point timeseries output from HDF file.
285
-
286
- Args:
287
- hdf_path (Path): Path to HEC-RAS plan HDF file
288
- reftype (str): Type of reference data ('lines' or 'points')
289
- ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
290
-
291
- Returns:
292
- pd.DataFrame: DataFrame containing reference timeseries data
293
- """
294
- try:
295
- with h5py.File(hdf_path, 'r') as hdf_file:
296
- base_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series"
297
- ref_path = f"{base_path}/Reference {reftype.capitalize()}"
298
-
299
- if ref_path not in hdf_file:
300
- logger.warning(f"Reference {reftype} data not found in HDF file")
301
- return pd.DataFrame()
302
-
303
- ref_group = hdf_file[ref_path]
304
- time_data = hdf_file[f"{base_path}/Time"][:]
305
-
306
- dfs = []
307
- for ref_name in ref_group.keys():
308
- ref_data = ref_group[ref_name][:]
309
- df = pd.DataFrame(ref_data, columns=[ref_name])
310
- df['Time'] = time_data
311
- dfs.append(df)
312
-
313
- if not dfs:
314
- return pd.DataFrame()
315
-
316
- return pd.concat(dfs, axis=1)
317
-
318
- except Exception as e:
319
- logger.error(f"Error reading reference {reftype} timeseries: {str(e)}")
320
- return pd.DataFrame()
321
-
322
- @staticmethod
323
- @log_call
324
- @standardize_input(file_type='plan_hdf')
325
- def get_reference_summary(hdf_path: Path, reftype: str) -> pd.DataFrame:
326
- """
327
- Get reference line or point summary output from HDF file.
328
-
329
- Args:
330
- hdf_path (Path): Path to HEC-RAS plan HDF file
331
- reftype (str): Type of reference data ('lines' or 'points')
332
- ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
333
-
334
- Returns:
335
- pd.DataFrame: DataFrame containing reference summary data
336
- """
337
- try:
338
- with h5py.File(hdf_path, 'r') as hdf_file:
339
- base_path = "Results/Unsteady/Output/Output Blocks/Base Output/Summary Output"
340
- ref_path = f"{base_path}/Reference {reftype.capitalize()}"
341
-
342
- if ref_path not in hdf_file:
343
- logger.warning(f"Reference {reftype} summary data not found in HDF file")
344
- return pd.DataFrame()
345
-
346
- ref_group = hdf_file[ref_path]
347
- dfs = []
348
-
349
- for ref_name in ref_group.keys():
350
- ref_data = ref_group[ref_name][:]
351
- if ref_data.ndim == 2:
352
- df = pd.DataFrame(ref_data.T, columns=['Value', 'Time'])
353
- else:
354
- df = pd.DataFrame({'Value': ref_data})
355
- df['Reference'] = ref_name
356
- dfs.append(df)
357
-
358
- if not dfs:
359
- return pd.DataFrame()
360
-
361
- return pd.concat(dfs, ignore_index=True)
362
-
363
- except Exception as e:
364
- logger.error(f"Error reading reference {reftype} summary: {str(e)}")
1
+ """
2
+ HdfResultsPlan: A module for extracting and analyzing HEC-RAS plan HDF file results.
3
+
4
+ Attribution:
5
+ Substantial code sourced/derived from https://github.com/fema-ffrd/rashdf
6
+ Copyright (c) 2024 fema-ffrd, MIT license
7
+
8
+ Description:
9
+ Provides static methods for extracting unsteady flow results, volume accounting,
10
+ and reference data from HEC-RAS plan HDF files.
11
+
12
+ Available Functions:
13
+ - get_unsteady_info: Extract unsteady attributes
14
+ - get_unsteady_summary: Extract unsteady summary data
15
+ - get_volume_accounting: Extract volume accounting data
16
+ - get_runtime_data: Extract runtime and compute time data
17
+
18
+ Note:
19
+ All methods are static and designed to be used without class instantiation.
20
+ """
21
+
22
+ from typing import Dict, List, Union, Optional
23
+ from pathlib import Path
24
+ import h5py
25
+ import pandas as pd
26
+ import xarray as xr
27
+ from .Decorators import standardize_input, log_call
28
+ from .HdfUtils import HdfUtils
29
+ from .HdfResultsXsec import HdfResultsXsec
30
+ from .LoggingConfig import get_logger
31
+ import numpy as np
32
+ from datetime import datetime
33
+ from .RasPrj import ras
34
+
35
+ logger = get_logger(__name__)
36
+
37
+
38
class HdfResultsPlan:
    """
    Handles extraction of results data from HEC-RAS plan HDF files.

    This class provides static methods for accessing and analyzing:
    - Unsteady flow results
    - Volume accounting data
    - Runtime statistics
    - Reference line/point time series outputs

    All methods use:
    - @standardize_input decorator for consistent file path handling
    - @log_call decorator for operation logging
    - HdfUtils class for common HDF operations

    Note:
        No instantiation required - all methods are static.
    """

    @staticmethod
    def _attrs_to_dataframe(hdf_group) -> pd.DataFrame:
        """
        Convert an HDF group's attributes into a single-row DataFrame.

        Byte-string attribute values are decoded to UTF-8 so the DataFrame
        holds native Python strings; all other values pass through unchanged.
        Shared by get_unsteady_info, get_unsteady_summary, and
        get_volume_accounting, which previously duplicated this loop inline.

        Args:
            hdf_group: Open h5py Group whose attributes are read.

        Returns:
            pd.DataFrame: Single-row DataFrame of the decoded attributes.
        """
        attrs_dict = {
            key: value.decode('utf-8') if isinstance(value, bytes) else value
            for key, value in hdf_group.attrs.items()
        }
        return pd.DataFrame(attrs_dict, index=[0])

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_unsteady_info(hdf_path: Path) -> pd.DataFrame:
        """
        Get unsteady attributes from a HEC-RAS HDF plan file.

        Args:
            hdf_path (Path): Path to the HEC-RAS plan HDF file.
            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.

        Returns:
            pd.DataFrame: A DataFrame containing the decoded unsteady attributes.

        Raises:
            FileNotFoundError: If the specified HDF file is not found.
            RuntimeError: If the "Results/Unsteady" group is missing or any
                other error occurs while reading (the internal KeyError is
                wrapped in the RuntimeError message).
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                if "Results/Unsteady" not in hdf_file:
                    raise KeyError("Results/Unsteady group not found in the HDF file.")
                return HdfResultsPlan._attrs_to_dataframe(hdf_file["Results/Unsteady"])

        except FileNotFoundError:
            raise FileNotFoundError(f"HDF file not found: {hdf_path}")
        except Exception as e:
            raise RuntimeError(f"Error reading unsteady attributes: {str(e)}")

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_unsteady_summary(hdf_path: Path) -> pd.DataFrame:
        """
        Get results unsteady summary attributes from a HEC-RAS HDF plan file.

        Args:
            hdf_path (Path): Path to the HEC-RAS plan HDF file.
            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.

        Returns:
            pd.DataFrame: A DataFrame containing the decoded results unsteady
                summary attributes.

        Raises:
            FileNotFoundError: If the specified HDF file is not found.
            RuntimeError: If the "Results/Unsteady/Summary" group is missing or
                any other error occurs while reading (the internal KeyError is
                wrapped in the RuntimeError message).
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                if "Results/Unsteady/Summary" not in hdf_file:
                    raise KeyError("Results/Unsteady/Summary group not found in the HDF file.")
                return HdfResultsPlan._attrs_to_dataframe(hdf_file["Results/Unsteady/Summary"])

        except FileNotFoundError:
            raise FileNotFoundError(f"HDF file not found: {hdf_path}")
        except Exception as e:
            raise RuntimeError(f"Error reading unsteady summary attributes: {str(e)}")

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_volume_accounting(hdf_path: Path) -> Optional[pd.DataFrame]:
        """
        Get volume accounting attributes from a HEC-RAS HDF plan file.

        Args:
            hdf_path (Path): Path to the HEC-RAS plan HDF file.
            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.

        Returns:
            Optional[pd.DataFrame]: DataFrame containing the decoded volume
                accounting attributes, or None if the group is not found.

        Raises:
            FileNotFoundError: If the specified HDF file is not found.
            RuntimeError: If any other error occurs while reading.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                # Unlike the unsteady readers, a missing group is an expected
                # condition here (plan may not have volume accounting output).
                if "Results/Unsteady/Summary/Volume Accounting" not in hdf_file:
                    return None
                return HdfResultsPlan._attrs_to_dataframe(
                    hdf_file["Results/Unsteady/Summary/Volume Accounting"]
                )

        except FileNotFoundError:
            raise FileNotFoundError(f"HDF file not found: {hdf_path}")
        except Exception as e:
            raise RuntimeError(f"Error reading volume accounting attributes: {str(e)}")

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_runtime_data(hdf_path: Path) -> Optional[pd.DataFrame]:
        """
        Extract detailed runtime and computational performance metrics from HDF file.

        Args:
            hdf_path (Path): Path to HEC-RAS plan HDF file
            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.

        Returns:
            Optional[pd.DataFrame]: Single-row DataFrame of runtime statistics,
                or None if the required groups/datasets are absent or the
                simulation times cannot be parsed.

        Notes:
            - Times are reported in multiple units (ms, s, hours)
            - Compute speeds are calculated as simulation-time/compute-time ratios
            - Process times include: geometry, preprocessing, event conditions,
              and unsteady flow computations
            - Missing individual processes are reported with the string
              sentinel 'N/A' (kept for backward compatibility)
        """
        try:
            if hdf_path is None:
                logger.error("Could not find HDF file for input")
                return None

            with h5py.File(hdf_path, 'r') as hdf_file:
                logger.info(f"Extracting Plan Information from: {Path(hdf_file.filename).name}")
                plan_info = hdf_file.get('/Plan Data/Plan Information')
                if plan_info is None:
                    logger.warning("Group '/Plan Data/Plan Information' not found.")
                    return None

                # Plan identification and simulation window
                plan_name = HdfUtils.convert_ras_string(plan_info.attrs.get('Plan Name', 'Unknown'))
                start_time_str = HdfUtils.convert_ras_string(plan_info.attrs.get('Simulation Start Time', 'Unknown'))
                end_time_str = HdfUtils.convert_ras_string(plan_info.attrs.get('Simulation End Time', 'Unknown'))

                try:
                    # convert_ras_string may already yield datetime objects;
                    # otherwise parse the HEC-RAS "ddMONyyyy HH:MM:SS" format.
                    if isinstance(start_time_str, datetime):
                        start_time = start_time_str
                    else:
                        start_time = datetime.strptime(start_time_str, "%d%b%Y %H:%M:%S")

                    if isinstance(end_time_str, datetime):
                        end_time = end_time_str
                    else:
                        end_time = datetime.strptime(end_time_str, "%d%b%Y %H:%M:%S")

                    simulation_duration = end_time - start_time
                    simulation_hours = simulation_duration.total_seconds() / 3600
                except ValueError as e:
                    logger.error(f"Error parsing simulation times: {e}")
                    return None

                logger.info(f"Plan Name: {plan_name}")
                logger.info(f"Simulation Duration (hours): {simulation_hours}")

                compute_processes = hdf_file.get('/Results/Summary/Compute Processes')
                if compute_processes is None:
                    logger.warning("Dataset '/Results/Summary/Compute Processes' not found.")
                    return None

                # Per-process compute times, expanded into several units
                process_names = [HdfUtils.convert_ras_string(name) for name in compute_processes['Process'][:]]
                filenames = [HdfUtils.convert_ras_string(filename) for filename in compute_processes['Filename'][:]]
                completion_times = compute_processes['Compute Time (ms)'][:]

                compute_processes_df = pd.DataFrame({
                    'Process': process_names,
                    'Filename': filenames,
                    'Compute Time (ms)': completion_times,
                    'Compute Time (s)': completion_times / 1000,
                    'Compute Time (hours)': completion_times / (1000 * 3600)
                })

                # Single-row summary (dict of one-element lists)
                compute_processes_summary = {
                    'Plan Name': [plan_name],
                    'File Name': [Path(hdf_file.filename).name],
                    'Simulation Start Time': [start_time_str],
                    'Simulation End Time': [end_time_str],
                    'Simulation Duration (s)': [simulation_duration.total_seconds()],
                    'Simulation Time (hr)': [simulation_hours]
                }

                # Map known HEC-RAS process names to summary column labels
                process_types = {
                    'Completing Geometry': 'Completing Geometry (hr)',
                    'Preprocessing Geometry': 'Preprocessing Geometry (hr)',
                    'Completing Event Conditions': 'Completing Event Conditions (hr)',
                    'Unsteady Flow Computations': 'Unsteady Flow Computations (hr)'
                }

                for process, column in process_types.items():
                    if process in process_names:
                        time_value = compute_processes_df.loc[
                            compute_processes_df['Process'] == process,
                            'Compute Time (hours)'
                        ].values[0]
                    else:
                        time_value = 'N/A'  # sentinel kept for backward compatibility
                    compute_processes_summary[column] = [time_value]

                # Total of all listed processes
                total_time = compute_processes_df['Compute Time (hours)'].sum()
                compute_processes_summary['Complete Process (hr)'] = [total_time]

                # Speed ratios: hours of simulated time per hour of compute time
                unsteady_hr = compute_processes_summary['Unsteady Flow Computations (hr)'][0]
                if unsteady_hr != 'N/A':
                    compute_processes_summary['Unsteady Flow Speed (hr/hr)'] = [
                        simulation_hours / unsteady_hr
                    ]
                else:
                    compute_processes_summary['Unsteady Flow Speed (hr/hr)'] = ['N/A']

                compute_processes_summary['Complete Process Speed (hr/hr)'] = [
                    simulation_hours / total_time
                ]

                return pd.DataFrame(compute_processes_summary)

        except Exception as e:
            logger.error(f"Error in get_runtime_data: {str(e)}")
            return None

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_reference_timeseries(hdf_path: Path, reftype: str) -> pd.DataFrame:
        """
        Get reference line or point timeseries output from HDF file.

        Args:
            hdf_path (Path): Path to HEC-RAS plan HDF file
            reftype (str): Type of reference data ('lines' or 'points')
            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.

        Returns:
            pd.DataFrame: One column per reference feature plus a single
                'Time' column; empty DataFrame if the data is absent or an
                error occurs.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                base_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series"
                ref_path = f"{base_path}/Reference {reftype.capitalize()}"

                if ref_path not in hdf_file:
                    logger.warning(f"Reference {reftype} data not found in HDF file")
                    return pd.DataFrame()

                ref_group = hdf_file[ref_path]
                time_data = hdf_file[f"{base_path}/Time"][:]

                dfs = [
                    pd.DataFrame(ref_group[ref_name][:], columns=[ref_name])
                    for ref_name in ref_group.keys()
                ]
                if not dfs:
                    return pd.DataFrame()

                # Concatenate the per-reference columns, then attach the shared
                # time axis once. (Previously each per-reference frame carried
                # its own 'Time' column, so the result held N duplicates.)
                result = pd.concat(dfs, axis=1)
                result['Time'] = time_data
                return result

        except Exception as e:
            logger.error(f"Error reading reference {reftype} timeseries: {str(e)}")
            return pd.DataFrame()

    @staticmethod
    @log_call
    @standardize_input(file_type='plan_hdf')
    def get_reference_summary(hdf_path: Path, reftype: str) -> pd.DataFrame:
        """
        Get reference line or point summary output from HDF file.

        Args:
            hdf_path (Path): Path to HEC-RAS plan HDF file
            reftype (str): Type of reference data ('lines' or 'points')
            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.

        Returns:
            pd.DataFrame: Long-format DataFrame with a 'Value' column (plus
                'Time' for 2D datasets) and a 'Reference' name column; empty
                DataFrame if the data is absent or an error occurs.
        """
        try:
            with h5py.File(hdf_path, 'r') as hdf_file:
                base_path = "Results/Unsteady/Output/Output Blocks/Base Output/Summary Output"
                ref_path = f"{base_path}/Reference {reftype.capitalize()}"

                if ref_path not in hdf_file:
                    logger.warning(f"Reference {reftype} summary data not found in HDF file")
                    return pd.DataFrame()

                ref_group = hdf_file[ref_path]
                dfs = []

                for ref_name in ref_group.keys():
                    ref_data = ref_group[ref_name][:]
                    if ref_data.ndim == 2:
                        # assumes row 0 holds values and row 1 timestamps — TODO confirm
                        df = pd.DataFrame(ref_data.T, columns=['Value', 'Time'])
                    else:
                        df = pd.DataFrame({'Value': ref_data})
                    df['Reference'] = ref_name
                    dfs.append(df)

                if not dfs:
                    return pd.DataFrame()

                return pd.concat(dfs, ignore_index=True)

        except Exception as e:
            logger.error(f"Error reading reference {reftype} summary: {str(e)}")
            return pd.DataFrame()