ras-commander 0.48.0-py3-none-any.whl → 0.50.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ras_commander/Decorators.py +18 -1
- ras_commander/HdfBase.py +307 -197
- ras_commander/HdfBndry.py +94 -287
- ras_commander/HdfFluvialPluvial.py +256 -273
- ras_commander/HdfInfiltration.py +410 -0
- ras_commander/HdfMesh.py +222 -114
- ras_commander/HdfPipe.py +127 -175
- ras_commander/HdfPlan.py +144 -58
- ras_commander/HdfPlot.py +104 -0
- ras_commander/HdfPump.py +76 -28
- ras_commander/HdfResultsMesh.py +190 -183
- ras_commander/HdfResultsPlan.py +76 -220
- ras_commander/HdfResultsPlot.py +182 -0
- ras_commander/HdfResultsXsec.py +185 -145
- ras_commander/HdfStruc.py +65 -35
- ras_commander/HdfUtils.py +435 -518
- ras_commander/HdfXsec.py +137 -127
- ras_commander/LoggingConfig.py +13 -3
- ras_commander/RasCmdr.py +13 -0
- ras_commander/RasExamples.py +14 -0
- ras_commander/RasGeo.py +11 -0
- ras_commander/RasGpt.py +8 -0
- ras_commander/RasMapper.py +105 -0
- ras_commander/RasPlan.py +30 -0
- ras_commander/RasPrj.py +34 -0
- ras_commander/RasToGo.py +16 -0
- ras_commander/RasUnsteady.py +15 -0
- ras_commander/RasUtils.py +31 -0
- ras_commander/__init__.py +10 -0
- {ras_commander-0.48.0.dist-info → ras_commander-0.50.0.dist-info}/METADATA +77 -9
- ras_commander-0.50.0.dist-info/RECORD +34 -0
- ras_commander-0.48.0.dist-info/RECORD +0 -30
- {ras_commander-0.48.0.dist-info → ras_commander-0.50.0.dist-info}/LICENSE +0 -0
- {ras_commander-0.48.0.dist-info → ras_commander-0.50.0.dist-info}/WHEEL +0 -0
- {ras_commander-0.48.0.dist-info → ras_commander-0.50.0.dist-info}/top_level.txt +0 -0
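Several modules are new in 0.50.0 (for example HdfInfiltration.py, HdfPlot.py, HdfResultsPlot.py and RasMapper.py are added outright, with no removed lines). A quick, standard-library-only way to confirm which wheel is actually installed before relying on the new modules; this is an illustrative sketch, not part of the package:

from importlib.metadata import version

# Query the installed distribution; expect "0.50.0" after upgrading
print(version("ras-commander"))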
ras_commander/HdfResultsXsec.py
CHANGED
@@ -1,11 +1,30 @@
 """
 Class: HdfResultsXsec

-
-
-released under MIT license and Copyright (c) 2024 fema-ffrd
+Contains methods for extracting 1D results data from HDF files.
+This includes cross section timeseries, structures and reference line/point timeseries as these are all 1D elements.

-
+-----
+
+All of the methods in this class are static and are designed to be used without instantiation.
+
+List of Functions in HdfResultsXsec:
+- get_xsec_timeseries(): Extract cross-section timeseries data including water surface, velocity, and flow
+- get_ref_lines_timeseries(): Get timeseries output for reference lines
+- get_ref_points_timeseries(): Get timeseries output for reference points
+
+TO BE IMPLEMENTED:
+DSS Hydrograph Extraction for 1D and 2D Structures.
+
+Planned functions:
+- get_bridge_timeseries(): Extract timeseries data for bridge structures
+- get_inline_structures_timeseries(): Extract timeseries data for inline structures
+
+Notes:
+- All functions use the get_ prefix to indicate they return data
+- Results data functions use results_ prefix to indicate they handle results data
+- All functions include proper error handling and logging
+- Functions return xarray Datasets for efficient handling of multi-dimensional data
 """

 from pathlib import Path
@@ -25,152 +44,23 @@ logger = get_logger(__name__)

 class HdfResultsXsec:
     """
-    A class for
-
-    This class provides methods to extract and process steady flow simulation results
-    for cross-sections, including water surface elevations, flow rates, energy grades,
-    and additional parameters such as encroachment stations and velocities.
+    A static class for extracting and processing 1D results data from HEC-RAS HDF files.

-
-    and
+    This class provides methods to extract and process unsteady flow simulation results
+    for cross-sections, reference lines, and reference points. All methods are static
+    and designed to be used without class instantiation.

-
-
+    The class handles:
+    - Cross-section timeseries (water surface, velocity, flow)
+    - Reference line timeseries
+    - Reference point timeseries

-
+    Dependencies:
+    - HdfBase: Core HDF file operations
+    - HdfUtils: Utility functions for HDF processing
     """


-
-
-
-
-
-
-
-
-
-
-    @staticmethod
-    @log_call
-    @standardize_input(file_type='plan_hdf')
-    def get_pump_station_profile_output(hdf_path: Path) -> pd.DataFrame:
-        """
-        Extract pump station profile output data from the HDF file.
-
-        Args:
-            hdf_path (Path): Path to the HDF file.
-
-        Returns:
-            pd.DataFrame: DataFrame containing pump station profile output data.
-
-        Raises:
-            KeyError: If the required datasets are not found in the HDF file.
-        """
-        try:
-            with h5py.File(hdf_path, 'r') as hdf:
-                # Extract profile output data
-                profile_path = "/Results/Unsteady/Output/Output Blocks/DSS Profile Output/Unsteady Time Series/Pumping Stations"
-                if profile_path not in hdf:
-                    logger.warning("Pump Station profile output data not found in HDF file")
-                    return pd.DataFrame()
-
-                # Initialize an empty list to store data from all pump stations
-                all_data = []
-
-                # Iterate through all pump stations
-                for station in hdf[profile_path].keys():
-                    station_path = f"{profile_path}/{station}/Structure Variables"
-
-                    data = hdf[station_path][()]
-
-                    # Create a DataFrame for this pump station
-                    df = pd.DataFrame(data, columns=['Flow', 'Stage HW', 'Stage TW', 'Pump Station', 'Pumps on'])
-                    df['Station'] = station
-
-                    all_data.append(df)
-
-                # Concatenate all DataFrames
-                result_df = pd.concat(all_data, ignore_index=True)
-
-                # Add time information
-                time = HdfBase._get_unsteady_datetimes(hdf)
-                result_df['Time'] = [time[i] for i in result_df.index]
-
-                return result_df
-
-        except KeyError as e:
-            logger.error(f"Required dataset not found in HDF file: {e}")
-            raise
-        except Exception as e:
-            logger.error(f"Error extracting pump station profile output data: {e}")
-            raise
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    @staticmethod
-    @log_call
-    @standardize_input(file_type='plan_hdf')
-    def get_pump_station_summary(hdf_path: Path) -> pd.DataFrame:
-        """
-        Extract summary data for pump stations from the HDF file.
-
-        Args:
-            hdf_path (Path): Path to the HDF file.
-
-        Returns:
-            pd.DataFrame: DataFrame containing pump station summary data.
-
-        Raises:
-            KeyError: If the required datasets are not found in the HDF file.
-        """
-        try:
-            with h5py.File(hdf_path, 'r') as hdf:
-                # Extract summary data
-                summary_path = "/Results/Unsteady/Summary/Pump Station"
-                if summary_path not in hdf:
-                    logger.warning("Pump Station summary data not found in HDF file")
-                    return pd.DataFrame()
-
-                summary_data = hdf[summary_path][()]
-
-                # Create DataFrame
-                df = pd.DataFrame(summary_data)
-
-                # Convert column names
-                df.columns = [col.decode('utf-8') if isinstance(col, bytes) else col for col in df.columns]
-
-                # Convert byte string values to regular strings
-                for col in df.columns:
-                    if df[col].dtype == object:
-                        df[col] = df[col].apply(lambda x: x.decode('utf-8') if isinstance(x, bytes) else x)
-
-                return df
-
-        except KeyError as e:
-            logger.error(f"Required dataset not found in HDF file: {e}")
-            raise
-        except Exception as e:
-            logger.error(f"Error extracting pump station summary data: {e}")
-            raise
-
-
-
-
     # Tested functions from AWS webinar where the code was developed
     # Need to add examples

@@ -178,7 +68,7 @@ class HdfResultsXsec:
     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def
+    def get_xsec_timeseries(hdf_path: Path) -> xr.Dataset:
         """
         Extract Water Surface, Velocity Total, Velocity Channel, Flow Lateral, and Flow data from HEC-RAS HDF file.
         Includes Cross Section Only and Cross Section Attributes as coordinates in the xarray.Dataset.
@@ -270,3 +160,153 @@ class HdfResultsXsec:
             logger.error(f"Error extracting cross section results: {e}")
             raise

+
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_ref_lines_timeseries(hdf_path: Path) -> xr.Dataset:
+        """
+        Extract timeseries output data for reference lines from HEC-RAS HDF file.
+
+        Parameters:
+        -----------
+        hdf_path : Path
+            Path to the HEC-RAS results HDF file
+
+        Returns:
+        --------
+        xr.Dataset
+            Dataset containing flow, velocity, and water surface data for reference lines.
+            Returns empty dataset if reference line data not found.
+
+        Raises:
+        -------
+        FileNotFoundError
+            If the specified HDF file is not found
+        KeyError
+            If required datasets are missing from the HDF file
+        """
+        return HdfResultsXsec._reference_timeseries_output(hdf_path, reftype="lines")
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_ref_points_timeseries(hdf_path: Path) -> xr.Dataset:
+        """
+        Extract timeseries output data for reference points from HEC-RAS HDF file.
+
+        This method extracts flow, velocity, and water surface elevation data for all
+        reference points defined in the model. Reference points are user-defined locations
+        where detailed output is desired.
+
+        Parameters:
+        -----------
+        hdf_path : Path
+            Path to the HEC-RAS results HDF file
+
+        Returns:
+        --------
+        xr.Dataset
+            Dataset containing the following variables for each reference point:
+            - Flow [cfs or m³/s]
+            - Velocity [ft/s or m/s]
+            - Water Surface [ft or m]
+
+            The dataset includes coordinates:
+            - time: Simulation timesteps
+            - refpt_id: Unique identifier for each reference point
+            - refpt_name: Name of each reference point
+            - mesh_name: Associated 2D mesh area name
+
+            Returns empty dataset if reference point data not found.
+
+        Raises:
+        -------
+        FileNotFoundError
+            If the specified HDF file is not found
+        KeyError
+            If required datasets are missing from the HDF file
+
+        Examples:
+        --------
+        >>> ds = HdfResultsXsec.get_ref_points_timeseries("path/to/plan.hdf")
+        >>> # Get water surface timeseries for first reference point
+        >>> ws = ds['Water Surface'].isel(refpt_id=0)
+        >>> # Get all data for a specific reference point by name
+        >>> point_data = ds.sel(refpt_name='Point1')
+        """
+        return HdfResultsXsec._reference_timeseries_output(hdf_path, reftype="points")
+
+
+    @staticmethod
+    def _reference_timeseries_output(hdf_file: h5py.File, reftype: str = "lines") -> xr.Dataset:
+        """
+        Internal method to return timeseries output data for reference lines or points from a HEC-RAS HDF plan file.
+
+        Parameters
+        ----------
+        hdf_file : h5py.File
+            Open HDF file object.
+        reftype : str, optional
+            The type of reference data to retrieve. Must be either "lines" or "points".
+            (default: "lines")
+
+        Returns
+        -------
+        xr.Dataset
+            An xarray Dataset with reference line or point timeseries data.
+            Returns an empty Dataset if the reference output data is not found.
+
+        Raises
+        ------
+        ValueError
+            If reftype is not "lines" or "points".
+        """
+        if reftype == "lines":
+            output_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/Reference Lines"
+            abbrev = "refln"
+        elif reftype == "points":
+            output_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/Reference Points"
+            abbrev = "refpt"
+        else:
+            raise ValueError('reftype must be either "lines" or "points".')
+
+        try:
+            reference_group = hdf_file[output_path]
+        except KeyError:
+            logger.error(f"Could not find HDF group at path '{output_path}'. "
+                         f"The Plan HDF file may not contain reference {reftype[:-1]} output data.")
+            return xr.Dataset()
+
+        reference_names = reference_group["Name"][:]
+        names = []
+        mesh_areas = []
+        for s in reference_names:
+            name, mesh_area = s.decode("utf-8").split("|")
+            names.append(name)
+            mesh_areas.append(mesh_area)
+
+        times = HdfBase.get_unsteady_timestamps(hdf_file)
+
+        das = {}
+        for var in ["Flow", "Velocity", "Water Surface"]:
+            group = reference_group.get(var)
+            if group is None:
+                continue
+            values = group[:]
+            units = group.attrs["Units"].decode("utf-8")
+            da = xr.DataArray(
+                values,
+                name=var,
+                dims=["time", f"{abbrev}_id"],
+                coords={
+                    "time": times,
+                    f"{abbrev}_id": range(values.shape[1]),
+                    f"{abbrev}_name": (f"{abbrev}_id", names),
+                    "mesh_name": (f"{abbrev}_id", mesh_areas),
+                },
+                attrs={"units": units, "hdf_path": f"{output_path}/{var}"},
+            )
+            das[var] = da
+        return xr.Dataset(das)
ras_commander/HdfStruc.py
CHANGED
@@ -6,6 +6,14 @@ from the https://github.com/fema-ffrd/rashdf library,
 released under MIT license and Copyright (c) 2024 fema-ffrd

 The file has been forked and modified for use in RAS Commander.
+
+-----
+
+All of the methods in this class are static and are designed to be used without instantiation.
+
+List of Functions in HdfStruc:
+- get_structures()
+- get_geom_structures_attrs()
 """
 from typing import Dict, Any, List, Union
 from pathlib import Path
@@ -24,68 +32,83 @@ logger = get_logger(__name__)

 class HdfStruc:
     """
-
-
-    This class provides methods for extracting and analyzing
-    from HEC-RAS HDF files.
-
-
-
-
-
-
-
-
-    Note: This class contains static methods and does not require instantiation.
+    Handles 2D structure geometry data extraction from HEC-RAS HDF files.
+
+    This class provides static methods for extracting and analyzing structure geometries
+    and their attributes from HEC-RAS geometry HDF files. All methods are designed to work
+    without class instantiation.
+
+    Notes
+    -----
+    - 1D Structure data should be accessed via the HdfResultsXsec class
+    - All methods use @standardize_input for consistent file handling
+    - All methods use @log_call for operation logging
+    - Returns GeoDataFrames with both geometric and attribute data
     """

     @staticmethod
     @log_call
     @standardize_input(file_type='geom_hdf')
-    def
+    def get_structures(hdf_path: Path, datetime_to_str: bool = False) -> GeoDataFrame:
         """
-        Extracts structure data from a HEC-RAS geometry HDF5 file
-
-        This function excludes Property Tables, Pier and Abutment Data/Attributes, and Gate Groups.
-        It includes Table Info, Centerlines as LineStrings, Structures Attributes, Bridge Coefficient Attributes,
-        and Profile Data (as a list of station and elevation values for each structure).
+        Extracts structure data from a HEC-RAS geometry HDF5 file.

         Parameters
         ----------
         hdf_path : Path
-            Path to the HEC-RAS geometry HDF5 file
+            Path to the HEC-RAS geometry HDF5 file
         datetime_to_str : bool, optional
-
+            If True, converts datetime objects to ISO format strings, by default False

         Returns
         -------
         GeoDataFrame
-
+            Structure data with columns:
+            - Structure ID: unique identifier
+            - Geometry: LineString of structure centerline
+            - Various attribute columns from the HDF file
+            - Profile_Data: list of station/elevation dictionaries
+            - Bridge coefficient attributes (if present)
+            - Table info attributes (if present)
+
+        Notes
+        -----
+        - Group-level attributes are stored in GeoDataFrame.attrs['group_attributes']
+        - Invalid geometries are dropped with warning
+        - All byte strings are decoded to UTF-8
+        - CRS is preserved from the source file
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf:
                 if "Geometry/Structures" not in hdf:
                     logger.info(f"No structures found in: {hdf_path}")
                     return GeoDataFrame()
-
+
                 def get_dataset_df(path: str) -> pd.DataFrame:
                     """
-
+                    Converts an HDF5 dataset to a pandas DataFrame.

                     Parameters
                     ----------
                     path : str
-
+                        Dataset path within the HDF5 file

                     Returns
                     -------
                     pd.DataFrame
-                        DataFrame
+                        DataFrame containing the dataset values.
+                        - For compound datasets, column names match field names
+                        - For simple datasets, generic column names (Value_0, Value_1, etc.)
+                        - Empty DataFrame if dataset not found
+
+                    Notes
+                    -----
+                    Automatically decodes byte strings to UTF-8 with error handling.
                     """
                     if path not in hdf:
                         logger.warning(f"Dataset not found: {path}")
                         return pd.DataFrame()
-
+
                     data = hdf[path][()]

                     if data.dtype.names:
@@ -100,6 +123,7 @@ class HdfStruc:
                         return pd.DataFrame(data, columns=[f'Value_{i}' for i in range(data.shape[1])])

                 # Extract relevant datasets
+                group_attrs = HdfBase.get_attrs(hdf, "Geometry/Structures")
                 struct_attrs = get_dataset_df("Geometry/Structures/Attributes")
                 bridge_coef = get_dataset_df("Geometry/Structures/Bridge Coefficient Attributes")
                 table_info = get_dataset_df("Geometry/Structures/Table Info")
@@ -135,7 +159,7 @@ class HdfStruc:
                 struct_gdf = GeoDataFrame(
                     struct_attrs,
                     geometry=geoms,
-                    crs=
+                    crs=HdfBase.get_projection(hdf_path)
                 )

                 # Drop entries with invalid geometries
@@ -205,6 +229,12 @@ class HdfStruc:

                 # Final GeoDataFrame
                 logger.info("Successfully extracted structures GeoDataFrame.")
+
+                # Add group attributes to the GeoDataFrame's attrs['group_attributes']
+                struct_gdf.attrs['group_attributes'] = group_attrs
+
+                logger.info("Successfully extracted structures GeoDataFrame with attributes.")
+
                 return struct_gdf

         except Exception as e:
@@ -216,30 +246,30 @@ class HdfStruc:
     @standardize_input(file_type='geom_hdf')
     def get_geom_structures_attrs(hdf_path: Path) -> Dict[str, Any]:
         """
-
-
-        This method extracts attributes related to geometry structures from the HDF file.
+        Extracts structure attributes from a HEC-RAS geometry HDF file.

         Parameters
         ----------
         hdf_path : Path
-            Path to the HEC-RAS geometry HDF file
+            Path to the HEC-RAS geometry HDF file

         Returns
         -------
         Dict[str, Any]
-
+            Dictionary of structure attributes from the Geometry/Structures group.
+            Returns empty dict if no structures are found.

         Notes
        -----
-
+        Attributes are extracted from the HDF5 group 'Geometry/Structures'.
+        All byte strings in attributes are automatically decoded to UTF-8.
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
                 if "Geometry/Structures" not in hdf_file:
                     logger.info(f"No structures found in the geometry file: {hdf_path}")
                     return {}
-                return HdfUtils.
+                return HdfUtils.hdf5_attrs_to_dict(hdf_file["Geometry/Structures"].attrs)
         except Exception as e:
             logger.error(f"Error reading geometry structures attributes: {str(e)}")
             return {}