ras-commander 0.42.0-py3-none-any.whl → 0.44.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ras_commander/Decorators.py +111 -0
- ras_commander/HdfBase.py +197 -0
- ras_commander/HdfBndry.py +505 -0
- ras_commander/HdfMesh.py +308 -0
- ras_commander/HdfPlan.py +200 -0
- ras_commander/HdfResultsMesh.py +662 -0
- ras_commander/HdfResultsPlan.py +398 -0
- ras_commander/HdfResultsXsec.py +237 -0
- ras_commander/HdfStruc.py +147 -0
- ras_commander/HdfUtils.py +467 -0
- ras_commander/HdfXsec.py +282 -0
- ras_commander/RasCmdr.py +2 -1
- ras_commander/RasExamples.py +49 -116
- ras_commander/RasGeo.py +2 -2
- ras_commander/RasGpt.py +6 -129
- ras_commander/RasPlan.py +2 -2
- ras_commander/RasPrj.py +55 -9
- ras_commander/RasUnsteady.py +2 -1
- ras_commander/RasUtils.py +198 -73
- ras_commander/__init__.py +31 -9
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/METADATA +9 -2
- ras_commander-0.44.0.dist-info/RECORD +26 -0
- ras_commander/RasHdf.py +0 -1619
- ras_commander-0.42.0.dist-info/RECORD +0 -16
- /ras_commander/{logging_config.py → LoggingConfig.py} +0 -0
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/LICENSE +0 -0
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/WHEEL +0 -0
- {ras_commander-0.42.0.dist-info → ras_commander-0.44.0.dist-info}/top_level.txt +0 -0
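
The headline change is structural: the monolithic ras_commander/RasHdf.py (1,619 lines, removed above) is split into the focused Hdf* modules added above, and logging_config.py is renamed to LoggingConfig.py. As a rough migration sketch — module roles are inferred from the file names, and the exact names re-exported by the expanded __init__.py are not shown in this excerpt, so importing from the submodules directly is the conservative path:

    # 0.42.0 (removed in 0.44.0): a single module carried all HDF functionality
    # from ras_commander import RasHdf

    # 0.44.0: import the focused classes from their submodules; this works
    # regardless of what the updated __init__.py re-exports at package root.
    from ras_commander.HdfStruc import HdfStruc    # structure geometry and attributes
    from ras_commander.HdfUtils import HdfUtils    # general HDF helpers
    from ras_commander.HdfMesh import HdfMesh      # mesh geometry (role inferred from name)

The hunks below show two of the new modules in full.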
ras_commander/HdfStruc.py
@@ -0,0 +1,147 @@
+"""
+Class: HdfStruc
+
+Attribution: A substantial amount of code in this file is sourced or derived
+from the https://github.com/fema-ffrd/rashdf library,
+released under MIT license and Copyright (c) 2024 fema-ffrd
+
+The file has been forked and modified for use in RAS Commander.
+"""
+from typing import Dict, Any, List, Union
+from pathlib import Path
+import h5py
+import numpy as np
+import pandas as pd
+from geopandas import GeoDataFrame
+from shapely.geometry import LineString, MultiLineString, Polygon, MultiPolygon, Point, GeometryCollection
+from .HdfUtils import HdfUtils
+from .HdfXsec import HdfXsec
+from .HdfBase import HdfBase
+from .Decorators import standardize_input, log_call
+from .LoggingConfig import setup_logging, get_logger
+
+logger = get_logger(__name__)
+
+class HdfStruc:
+    """
+    HEC-RAS HDF Structures class for handling operations related to structures in HDF files.
+
+    This class provides methods for extracting and analyzing data about structures
+    from HEC-RAS HDF files. It includes functionality to retrieve structure geometries
+    and attributes.
+
+    Methods in this class use the @standardize_input decorator to handle different
+    input types (file path, etc.) and the @log_call decorator for logging method calls.
+
+    Attributes:
+        GEOM_STRUCTURES_PATH (str): Constant for the HDF path to structures data.
+
+    Note: This class contains static methods and does not require instantiation.
+    """
+
+    GEOM_STRUCTURES_PATH = "Geometry/Structures"
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='geom_hdf')
+    def structures(hdf_path: Path, datetime_to_str: bool = False) -> GeoDataFrame:
+        """
+        Return the model structures.
+
+        This method extracts structure data from the HDF file, including geometry
+        and attributes, and returns it as a GeoDataFrame.
+
+        Parameters
+        ----------
+        hdf_path : Path
+            Path to the HEC-RAS geometry HDF file.
+        datetime_to_str : bool, optional
+            If True, convert datetime objects to strings. Default is False.
+
+        Returns
+        -------
+        GeoDataFrame
+            A GeoDataFrame containing the structures, with columns for attributes
+            and geometry.
+
+        Raises
+        ------
+        Exception
+            If there's an error reading the structures data from the HDF file.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                # Check if the structures path exists in the HDF file
+                if HdfStruc.GEOM_STRUCTURES_PATH not in hdf_file:
+                    logger.info(f"No structures found in the geometry file: {hdf_path}")
+                    return GeoDataFrame()
+
+                struct_data = hdf_file[HdfStruc.GEOM_STRUCTURES_PATH]
+                v_conv_val = np.vectorize(HdfUtils._convert_ras_hdf_value)
+                sd_attrs = struct_data["Attributes"][()]
+
+                # Create a dictionary to store structure data
+                struct_dict = {"struct_id": range(sd_attrs.shape[0])}
+                struct_dict.update(
+                    {name: v_conv_val(sd_attrs[name]) for name in sd_attrs.dtype.names}
+                )
+
+                # Get structure geometries
+                geoms = HdfXsec._get_polylines(
+                    hdf_path,
+                    HdfStruc.GEOM_STRUCTURES_PATH,
+                    info_name="Centerline Info",
+                    parts_name="Centerline Parts",
+                    points_name="Centerline Points"
+                )
+
+                # Create GeoDataFrame
+                struct_gdf = GeoDataFrame(
+                    struct_dict,
+                    geometry=geoms,
+                    crs=HdfUtils.projection(hdf_path),
+                )
+
+                # Convert datetime to string if requested
+                if datetime_to_str:
+                    struct_gdf["Last Edited"] = struct_gdf["Last Edited"].apply(
+                        lambda x: pd.Timestamp.isoformat(x) if pd.notnull(x) else None
+                    )
+
+                return struct_gdf
+        except Exception as e:
+            logger.error(f"Error reading structures: {str(e)}")
+            raise
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='geom_hdf')
+    def get_geom_structures_attrs(hdf_path: Path) -> Dict[str, Any]:
+        """
+        Return geometry structures attributes from a HEC-RAS HDF file.
+
+        This method extracts attributes related to geometry structures from the HDF file.
+
+        Parameters
+        ----------
+        hdf_path : Path
+            Path to the HEC-RAS geometry HDF file.
+
+        Returns
+        -------
+        Dict[str, Any]
+            A dictionary containing the geometry structures attributes.
+
+        Notes
+        -----
+        If no structures are found in the geometry file, an empty dictionary is returned.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                if HdfStruc.GEOM_STRUCTURES_PATH not in hdf_file:
+                    logger.info(f"No structures found in the geometry file: {hdf_path}")
+                    return {}
+                return HdfUtils.get_attrs(hdf_file, HdfStruc.GEOM_STRUCTURES_PATH)
+        except Exception as e:
+            logger.error(f"Error reading geometry structures attributes: {str(e)}")
+            return {}
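
For orientation, a minimal usage sketch of the class above. The geometry file name is hypothetical; per the docstrings, @standardize_input(file_type='geom_hdf') accepts a plain file path:

    from pathlib import Path
    from ras_commander.HdfStruc import HdfStruc

    geom_hdf = Path("Muncie.g01.hdf")  # hypothetical HEC-RAS geometry HDF file

    # GeoDataFrame with one row per structure: a struct_id column, one column
    # per attribute field, and centerline geometry; comes back empty if the
    # file has no Geometry/Structures group.
    structures_gdf = HdfStruc.structures(geom_hdf, datetime_to_str=True)
    print(structures_gdf.head())

    # Attributes of the Geometry/Structures group as a plain dict ({} if absent)
    attrs = HdfStruc.get_geom_structures_attrs(geom_hdf)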
ras_commander/HdfUtils.py
@@ -0,0 +1,467 @@
+"""
+Class: HdfUtils
+
+Attribution: A substantial amount of code in this file is sourced or derived
+from the https://github.com/fema-ffrd/rashdf library,
+released under MIT license and Copyright (c) 2024 fema-ffrd
+
+The file has been forked and modified for use in RAS Commander.
+"""
+import logging
+from pathlib import Path
+import h5py
+import numpy as np
+import pandas as pd
+from datetime import datetime, timedelta
+from typing import Union, Optional, Dict, List, Tuple, Any
+from scipy.spatial import KDTree
+import re
+
+from .Decorators import standardize_input, log_call
+from .LoggingConfig import setup_logging, get_logger
+
+logger = get_logger(__name__)
+
+class HdfUtils:
+    """
+    Utility class for working with HEC-RAS HDF files.
+
+    This class provides general utility functions for HDF file operations,
+    including attribute extraction, data conversion, and common HDF queries.
+    It also includes spatial operations and helper methods for working with
+    HEC-RAS specific data structures.
+
+    Note:
+    - Use this class for general HDF utility functions that are not specific to plan or geometry files.
+    - All methods in this class are static and can be called without instantiating the class.
+    """
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_hdf_filename(hdf_input: Union[str, Path, h5py.File], ras_object=None) -> Optional[Path]:
+        """
+        Get the HDF filename from various input types.
+
+        Args:
+            hdf_input (Union[str, Path, h5py.File]): The plan number, full path to the HDF file, or an open HDF file object.
+            ras_object (RasPrj, optional): The RAS project object. If None, uses the global ras instance.
+
+        Returns:
+            Optional[Path]: Path to the HDF file, or None if not found.
+        """
+        if isinstance(hdf_input, h5py.File):
+            return Path(hdf_input.filename)
+
+        if isinstance(hdf_input, str):
+            hdf_input = Path(hdf_input)
+
+        if isinstance(hdf_input, Path) and hdf_input.is_file():
+            return hdf_input
+
+        if ras_object is None:
+            logger.critical("RAS object is not provided. It is required when hdf_input is not a direct file path.")
+            return None
+
+        plan_info = ras_object.plan_df[ras_object.plan_df['plan_number'] == str(hdf_input)]
+        if plan_info.empty:
+            logger.critical(f"No HDF file found for plan number {hdf_input}")
+            return None
+
+        hdf_filename = plan_info.iloc[0]['HDF_Results_Path']
+        if hdf_filename is None:
+            logger.critical(f"HDF_Results_Path is None for plan number {hdf_input}")
+            return None
+
+        hdf_path = Path(hdf_filename)
+        if not hdf_path.is_file():
+            logger.critical(f"HDF file not found: {hdf_path}")
+            return None
+
+        return hdf_path
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_root_attrs(hdf_path: Path) -> dict:
+        """
+        Return attributes at root level of HEC-RAS HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            dict: Dictionary filled with HEC-RAS HDF root attributes.
+        """
+        with h5py.File(hdf_path, 'r') as hdf_file:
+            return HdfUtils.get_attrs(hdf_file, "/")
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_attrs(hdf_path: Path, attr_path: str) -> dict:
+        """
+        Get attributes from a HEC-RAS HDF file for a given attribute path.
+
+        Args:
+            hdf_path (Path): The path to the HDF file.
+            attr_path (str): The path to the attributes within the HDF file.
+
+        Returns:
+            dict: A dictionary of attributes.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                attr_object = hdf_file.get(attr_path)
+                if attr_object is None:
+                    logger.warning(f"Attribute path '{attr_path}' not found in HDF file.")
+                    return {}
+                return HdfUtils._hdf5_attrs_to_dict(attr_object.attrs)
+        except Exception as e:
+            logger.error(f"Error getting attributes from '{attr_path}': {str(e)}")
+            return {}
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_hdf_paths_with_properties(hdf_path: Path) -> pd.DataFrame:
+        """
+        Get all paths in the HDF file with their properties.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            pd.DataFrame: DataFrame containing paths and their properties.
+        """
+        def get_item_properties(item):
+            return {
+                'name': item.name,
+                'type': type(item).__name__,
+                'shape': item.shape if hasattr(item, 'shape') else None,
+                'dtype': item.dtype if hasattr(item, 'dtype') else None
+            }
+
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                items = []
+                hdf_file.visititems(lambda name, item: items.append(get_item_properties(item)))
+
+                return pd.DataFrame(items)
+        except Exception as e:
+            logger.error(f"Error reading HDF file: {e}")
+            return pd.DataFrame()
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_group_attributes_as_df(hdf_path: Path, group_path: str) -> pd.DataFrame:
+        """
+        Get attributes of a group in the HDF file as a DataFrame.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+            group_path (str): Path to the group within the HDF file.
+
+        Returns:
+            pd.DataFrame: DataFrame containing the group's attributes.
+        """
+        with h5py.File(hdf_path, 'r') as hdf_file:
+            group = hdf_file[group_path]
+            attributes = {key: group.attrs[key] for key in group.attrs.keys()}
+            return pd.DataFrame([attributes])
+
+
+    @staticmethod
+    def convert_ras_hdf_string(value: Union[str, bytes]) -> Union[bool, datetime, List[datetime], timedelta, str]:
+        """
+        Convert a string value from an HEC-RAS HDF file into a Python object.
+
+        Args:
+            value (Union[str, bytes]): The value to convert.
+
+        Returns:
+            Union[bool, datetime, List[datetime], timedelta, str]: The converted value.
+        """
+        return HdfUtils._convert_ras_hdf_string(value)
+
+    @staticmethod
+    def df_datetimes_to_str(df: pd.DataFrame) -> pd.DataFrame:
+        """
+        Convert any datetime64 columns in a DataFrame to strings.
+
+        Args:
+            df (pd.DataFrame): The DataFrame to convert.
+
+        Returns:
+            pd.DataFrame: The DataFrame with datetime columns converted to strings.
+        """
+        for col in df.select_dtypes(include=['datetime64']).columns:
+            df[col] = df[col].dt.strftime('%Y-%m-%d %H:%M:%S')
+        return df
+
+    @staticmethod
+    def perform_kdtree_query(
+        reference_points: np.ndarray,
+        query_points: np.ndarray,
+        max_distance: float = 2.0
+    ) -> np.ndarray:
+        """
+        Performs a KDTree query between two datasets and returns indices with distances exceeding max_distance set to -1.
+
+        Args:
+            reference_points (np.ndarray): The reference dataset for KDTree.
+            query_points (np.ndarray): The query dataset to search against KDTree of reference_points.
+            max_distance (float, optional): The maximum distance threshold. Indices with distances greater than this are set to -1. Defaults to 2.0.
+
+        Returns:
+            np.ndarray: Array of indices from reference_points that are nearest to each point in query_points.
+                Indices with distances > max_distance are set to -1.
+
+        Example:
+            >>> ref_points = np.array([[0, 0], [1, 1], [2, 2]])
+            >>> query_points = np.array([[0.5, 0.5], [3, 3]])
+            >>> result = HdfUtils.perform_kdtree_query(ref_points, query_points)
+            >>> print(result)
+            array([ 0, -1])
+        """
+        dist, snap = KDTree(reference_points).query(query_points, distance_upper_bound=max_distance)
+        snap[dist > max_distance] = -1
+        return snap
+
+    @staticmethod
+    def find_nearest_neighbors(points: np.ndarray, max_distance: float = 2.0) -> np.ndarray:
+        """
+        Creates a self KDTree for dataset points and finds nearest neighbors excluding self,
+        with distances above max_distance set to -1.
+
+        Args:
+            points (np.ndarray): The dataset to build the KDTree from and query against itself.
+            max_distance (float, optional): The maximum distance threshold. Indices with distances
+                greater than max_distance are set to -1. Defaults to 2.0.
+
+        Returns:
+            np.ndarray: Array of indices representing the nearest neighbor in points for each point in points.
+                Indices with distances > max_distance or self-matches are set to -1.
+
+        Example:
+            >>> points = np.array([[0, 0], [1, 1], [2, 2], [10, 10]])
+            >>> result = HdfUtils.find_nearest_neighbors(points)
+            >>> print(result)
+            array([1, 0, 1, -1])
+        """
+        dist, snap = KDTree(points).query(points, k=2, distance_upper_bound=max_distance)
+        snap[dist > max_distance] = -1
+
+        snp = pd.DataFrame(snap, index=np.arange(len(snap)))
+        snp = snp.replace(-1, np.nan)
+        snp.loc[snp[0] == snp.index, 0] = np.nan
+        snp.loc[snp[1] == snp.index, 1] = np.nan
+        filled = snp[0].fillna(snp[1])
+        snapped = filled.fillna(-1).astype(np.int64).to_numpy()
+        return snapped
+
+    @staticmethod
+    def _convert_ras_hdf_string(value: Union[str, bytes]) -> Union[bool, datetime, List[datetime], timedelta, str]:
+        """
+        Private method to convert a string value from an HEC-RAS HDF file into a Python object.
+
+        Args:
+            value (Union[str, bytes]): The value to convert.
+
+        Returns:
+            Union[bool, datetime, List[datetime], timedelta, str]: The converted value.
+        """
+        if isinstance(value, bytes):
+            s = value.decode("utf-8")
+        else:
+            s = value
+
+        if s == "True":
+            return True
+        elif s == "False":
+            return False
+
+        ras_datetime_format1_re = r"\d{2}\w{3}\d{4} \d{2}:\d{2}:\d{2}"
+        ras_datetime_format2_re = r"\d{2}\w{3}\d{4} \d{2}\d{2}"
+        ras_duration_format_re = r"\d{2}:\d{2}:\d{2}"
+
+        if re.match(rf"^{ras_datetime_format1_re}", s):
+            if re.match(rf"^{ras_datetime_format1_re} to {ras_datetime_format1_re}$", s):
+                split = s.split(" to ")
+                return [
+                    HdfBase._parse_ras_datetime(split[0]),
+                    HdfBase._parse_ras_datetime(split[1]),
+                ]
+            return HdfBase._parse_ras_datetime(s)
+        elif re.match(rf"^{ras_datetime_format2_re}", s):
+            if re.match(rf"^{ras_datetime_format2_re} to {ras_datetime_format2_re}$", s):
+                split = s.split(" to ")
+                return [
+                    HdfBase._parse_ras_simulation_window_datetime(split[0]),
+                    HdfBase._parse_ras_simulation_window_datetime(split[1]),
+                ]
+            return HdfBase._parse_ras_simulation_window_datetime(s)
+        elif re.match(rf"^{ras_duration_format_re}$", s):
+            return HdfBase._parse_duration(s)
+        return s
+
+    @staticmethod
+    def _convert_ras_hdf_value(value: Any) -> Union[None, bool, str, List[str], int, float, List[int], List[float]]:
+        """
+        Convert a value from a HEC-RAS HDF file into a Python object.
+
+        Args:
+            value (Any): The value to convert.
+
+        Returns:
+            Union[None, bool, str, List[str], int, float, List[int], List[float]]: The converted value.
+        """
+        if isinstance(value, np.floating) and np.isnan(value):
+            return None
+        elif isinstance(value, (bytes, np.bytes_)):
+            return value.decode('utf-8')
+        elif isinstance(value, np.integer):
+            return int(value)
+        elif isinstance(value, np.floating):
+            return float(value)
+        elif isinstance(value, (int, float)):
+            return value
+        elif isinstance(value, (list, tuple, np.ndarray)):
+            if len(value) > 1:
+                return [HdfUtils._convert_ras_hdf_value(v) for v in value]
+            else:
+                return HdfUtils._convert_ras_hdf_value(value[0])
+        else:
+            return str(value)
+
+    @staticmethod
+    def _parse_ras_datetime_ms(datetime_str: str) -> datetime:
+        """
+        Private method to parse a datetime string with milliseconds from a RAS file.
+
+        Args:
+            datetime_str (str): The datetime string to parse.
+
+        Returns:
+            datetime: The parsed datetime object.
+        """
+        milliseconds = int(datetime_str[-3:])
+        microseconds = milliseconds * 1000
+        parsed_dt = HdfBase._parse_ras_datetime(datetime_str[:-4]).replace(microsecond=microseconds)
+        return parsed_dt
+
+    @staticmethod
+    def _ras_timesteps_to_datetimes(timesteps: np.ndarray, start_time: datetime, time_unit: str = "days", round_to: str = "100ms") -> pd.DatetimeIndex:
+        """
+        Convert RAS timesteps to datetime objects.
+
+        Args:
+            timesteps (np.ndarray): Array of timesteps.
+            start_time (datetime): Start time of the simulation.
+            time_unit (str): Unit of the timesteps. Default is "days".
+            round_to (str): Frequency string to round the times to. Default is "100ms" (100 milliseconds).
+
+        Returns:
+            pd.DatetimeIndex: DatetimeIndex of converted and rounded datetimes.
+        """
+        if time_unit == "days":
+            datetimes = start_time + pd.to_timedelta(timesteps, unit='D')
+        elif time_unit == "hours":
+            datetimes = start_time + pd.to_timedelta(timesteps, unit='H')
+        else:
+            raise ValueError(f"Unsupported time unit: {time_unit}")
+
+        return pd.DatetimeIndex(datetimes).round(round_to)
+
+    @staticmethod
+    def _hdf5_attrs_to_dict(attrs: Union[h5py.AttributeManager, Dict], prefix: Optional[str] = None) -> Dict:
+        """
+        Private method to convert HDF5 attributes to a Python dictionary.
+
+        Args:
+            attrs (Union[h5py.AttributeManager, Dict]): The attributes to convert.
+            prefix (Optional[str]): A prefix to add to the attribute keys.
+
+        Returns:
+            Dict: A dictionary of converted attributes.
+        """
+        result = {}
+        for key, value in attrs.items():
+            if prefix:
+                key = f"{prefix}/{key}"
+            if isinstance(value, (np.ndarray, list)):
+                result[key] = [HdfUtils._convert_ras_hdf_value(v) for v in value]
+            else:
+                result[key] = HdfUtils._convert_ras_hdf_value(value)
+        return result
+
+    @staticmethod
+    def parse_run_time_window(window: str) -> Tuple[datetime, datetime]:
+        """
+        Parse a run time window string into a tuple of datetime objects.
+
+        Args:
+            window (str): The run time window string to be parsed.
+
+        Returns:
+            Tuple[datetime, datetime]: A tuple containing two datetime objects representing the start and end of the run
+                time window.
+        """
+        split = window.split(" to ")
+        begin = HdfBase._parse_ras_datetime(split[0])
+        end = HdfBase._parse_ras_datetime(split[1])
+        return begin, end
+
+
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def get_2d_flow_area_names_and_counts(hdf_path: Path) -> List[Tuple[str, int]]:
+        """
+        Get the names and cell counts of 2D flow areas from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            List[Tuple[str, int]]: A list of tuples containing the name and cell count of each 2D flow area.
+
+        Raises:
+            ValueError: If there's an error reading the HDF file or accessing the required data.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                flow_area_2d_path = "Geometry/2D Flow Areas"
+                if flow_area_2d_path not in hdf_file:
+                    return []
+
+                attributes = hdf_file[f"{flow_area_2d_path}/Attributes"][()]
+                names = [HdfUtils._convert_ras_hdf_string(name) for name in attributes["Name"]]
+
+                cell_info = hdf_file[f"{flow_area_2d_path}/Cell Info"][()]
+                cell_counts = [info[1] for info in cell_info]
+
+                return list(zip(names, cell_counts))
+        except Exception as e:
+            logger.error(f"Error reading 2D flow area names and counts from {hdf_path}: {str(e)}")
+            raise ValueError(f"Failed to get 2D flow area names and counts: {str(e)}")
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def projection(hdf_path: Path) -> Optional[str]:
+        """
+        Get the projection information from the HDF file.
+
+        Args:
+            hdf_path (Path): Path to the HDF file.
+
+        Returns:
+            Optional[str]: The projection information as a string, or None if not found.
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                proj_wkt = hdf_file.attrs.get("Projection")
+                if proj_wkt is None:
+                    return None
+                if isinstance(proj_wkt, bytes) or isinstance(proj_wkt, np.bytes_):
+                    proj_wkt = proj_wkt.decode("utf-8")
+                return proj_wkt
+        except Exception as e:
+            logger.error(f"Error reading projection from {hdf_path}: {str(e)}")
+            return None
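
The two KDTree helpers above are the only methods in this module that need no HDF file, so they are easy to exercise directly. A short sketch with illustrative values, chosen so the results are unambiguous under the default 2.0 distance bound:

    import numpy as np
    from ras_commander.HdfUtils import HdfUtils

    # Snap each query point to its nearest reference point; query points with
    # no reference within max_distance (default 2.0) come back as -1.
    ref = np.array([[0.0, 0.0], [10.0, 10.0]])
    qry = np.array([[0.5, 0.0], [5.0, 5.0]])
    print(HdfUtils.perform_kdtree_query(ref, qry))  # [ 0 -1]

    # Nearest neighbor within a single dataset, excluding self-matches;
    # the isolated point (10, 10) has no neighbor within 2.0 and maps to -1.
    pts = np.array([[0.0, 0.0], [0.5, 0.0], [10.0, 10.0]])
    print(HdfUtils.find_nearest_neighbors(pts))  # [ 1  0 -1]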