ras-commander 0.45.0__py3-none-any.whl → 0.46.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- ras_commander/HdfFluvialPluvial.py +317 -0
- ras_commander/HdfMesh.py +62 -15
- ras_commander/HdfPipe.py +587 -78
- ras_commander/HdfPlan.py +5 -0
- ras_commander/HdfPump.py +25 -11
- ras_commander/HdfResultsMesh.py +135 -62
- ras_commander/HdfResultsXsec.py +126 -297
- ras_commander/HdfStruc.py +148 -50
- ras_commander/HdfUtils.py +51 -0
- ras_commander/HdfXsec.py +467 -136
- ras_commander/RasPlan.py +298 -45
- ras_commander/RasToGo.py +21 -0
- ras_commander/RasUnsteady.py +615 -14
- ras_commander/__init__.py +3 -1
- {ras_commander-0.45.0.dist-info → ras_commander-0.46.0.dist-info}/METADATA +1 -1
- ras_commander-0.46.0.dist-info/RECORD +30 -0
- {ras_commander-0.45.0.dist-info → ras_commander-0.46.0.dist-info}/WHEEL +1 -1
- ras_commander-0.45.0.dist-info/RECORD +0 -28
- {ras_commander-0.45.0.dist-info → ras_commander-0.46.0.dist-info}/LICENSE +0 -0
- {ras_commander-0.45.0.dist-info → ras_commander-0.46.0.dist-info}/top_level.txt +0 -0
ras_commander/HdfStruc.py
CHANGED
```diff
@@ -38,79 +38,177 @@ class HdfStruc:
 
     Note: This class contains static methods and does not require instantiation.
     """
-
-    GEOM_STRUCTURES_PATH = "Geometry/Structures"
-
+
     @staticmethod
     @log_call
     @standardize_input(file_type='geom_hdf')
     def structures(hdf_path: Path, datetime_to_str: bool = False) -> GeoDataFrame:
         """
-
+        Extracts structure data from a HEC-RAS geometry HDF5 file and returns it as a GeoDataFrame.
 
-        This
-
+        This function excludes Property Tables, Pier and Abutment Data/Attributes, and Gate Groups.
+        It includes Table Info, Centerlines as LineStrings, Structures Attributes, Bridge Coefficient Attributes,
+        and Profile Data (as a list of station and elevation values for each structure).
 
         Parameters
         ----------
         hdf_path : Path
-            Path to the HEC-RAS geometry
+            Path to the HEC-RAS geometry HDF5 file.
         datetime_to_str : bool, optional
-
+            Convert datetime objects to strings, by default False.
 
         Returns
        -------
         GeoDataFrame
-            A GeoDataFrame containing
-            and geometry.
-
-        Raises
-        ------
-        Exception
-            If there's an error reading the structures data from the HDF file.
+            A GeoDataFrame containing all relevant structure data with geometries and attributes.
         """
         try:
-            with h5py.File(hdf_path, 'r') as
-
-
-                    logger.info(f"No structures found in the geometry file: {hdf_path}")
+            with h5py.File(hdf_path, 'r') as hdf:
+                if "Geometry/Structures" not in hdf:
+                    logger.info(f"No structures found in: {hdf_path}")
                     return GeoDataFrame()
+
+                def get_dataset_df(path: str) -> pd.DataFrame:
+                    """
+                    Helper function to convert an HDF5 dataset to a pandas DataFrame.
+
+                    Parameters
+                    ----------
+                    path : str
+                        The path to the dataset within the HDF5 file.
+
+                    Returns
+                    -------
+                    pd.DataFrame
+                        DataFrame representation of the dataset.
+                    """
+                    if path not in hdf:
+                        logger.warning(f"Dataset not found: {path}")
+                        return pd.DataFrame()
+
+                    data = hdf[path][()]
+
+                    if data.dtype.names:
+                        df = pd.DataFrame(data)
+                        # Decode byte strings to UTF-8
+                        for col in df.columns:
+                            if df[col].dtype.kind in {'S', 'a'}:  # Byte strings
+                                df[col] = df[col].str.decode('utf-8', errors='ignore')
+                        return df
+                    else:
+                        # If no named fields, assign generic column names
+                        return pd.DataFrame(data, columns=[f'Value_{i}' for i in range(data.shape[1])])
+
+                # Extract relevant datasets
+                struct_attrs = get_dataset_df("Geometry/Structures/Attributes")
+                bridge_coef = get_dataset_df("Geometry/Structures/Bridge Coefficient Attributes")
+                table_info = get_dataset_df("Geometry/Structures/Table Info")
+                profile_data = get_dataset_df("Geometry/Structures/Profile Data")
+
+                # Assign 'Structure ID' based on index (starting from 1)
+                struct_attrs.reset_index(drop=True, inplace=True)
+                struct_attrs['Structure ID'] = range(1, len(struct_attrs) + 1)
+                logger.debug(f"Assigned Structure IDs: {struct_attrs['Structure ID'].tolist()}")
+
+                # Check if 'Structure ID' was successfully assigned
+                if 'Structure ID' not in struct_attrs.columns:
+                    logger.error("'Structure ID' column could not be assigned to Structures/Attributes.")
+                    return GeoDataFrame()
+
+                # Get centerline geometry
+                centerline_info = hdf["Geometry/Structures/Centerline Info"][()]
+                centerline_points = hdf["Geometry/Structures/Centerline Points"][()]
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-                HdfStruc.GEOM_STRUCTURES_PATH,
-                info_name="Centerline Info",
-                parts_name="Centerline Parts",
-                points_name="Centerline Points"
-            )
-
-            # Create GeoDataFrame
+                # Create LineString geometries for each structure
+                geoms = []
+                for i in range(len(centerline_info)):
+                    start_idx = centerline_info[i][0]  # Point Starting Index
+                    point_count = centerline_info[i][1]  # Point Count
+                    points = centerline_points[start_idx:start_idx + point_count]
+                    if len(points) >= 2:
+                        geoms.append(LineString(points))
+                    else:
+                        logger.warning(f"Insufficient points for LineString in structure index {i}.")
+                        geoms.append(None)
+
+                # Create base GeoDataFrame with Structures Attributes and geometries
                 struct_gdf = GeoDataFrame(
-
+                    struct_attrs,
                     geometry=geoms,
-                crs=HdfUtils.projection(hdf_path)
+                    crs=HdfUtils.projection(hdf_path)
                 )
-
-            #
-
-
+
+                # Drop entries with invalid geometries
+                initial_count = len(struct_gdf)
+                struct_gdf = struct_gdf.dropna(subset=['geometry']).reset_index(drop=True)
+                final_count = len(struct_gdf)
+                if final_count < initial_count:
+                    logger.warning(f"Dropped {initial_count - final_count} structures due to invalid geometries.")
+
+                # Merge Bridge Coefficient Attributes on 'Structure ID'
+                if not bridge_coef.empty and 'Structure ID' in bridge_coef.columns:
+                    struct_gdf = struct_gdf.merge(
+                        bridge_coef,
+                        on='Structure ID',
+                        how='left',
+                        suffixes=('', '_bridge_coef')
                     )
-
+                    logger.debug("Merged Bridge Coefficient Attributes successfully.")
+                else:
+                    logger.warning("Bridge Coefficient Attributes missing or 'Structure ID' not present.")
+
+                # Merge Table Info based on the DataFrame index (one-to-one correspondence)
+                if not table_info.empty:
+                    if len(table_info) != len(struct_gdf):
+                        logger.warning("Table Info count does not match Structures count. Skipping merge.")
+                    else:
+                        struct_gdf = pd.concat([struct_gdf, table_info.reset_index(drop=True)], axis=1)
+                        logger.debug("Merged Table Info successfully.")
+                else:
+                    logger.warning("Table Info dataset is empty or missing.")
+
+                # Process Profile Data based on Table Info
+                if not profile_data.empty and not table_info.empty:
+                    # Assuming 'Centerline Profile (Index)' and 'Centerline Profile (Count)' are in 'Table Info'
+                    if ('Centerline Profile (Index)' in table_info.columns and
+                        'Centerline Profile (Count)' in table_info.columns):
+                        struct_gdf['Profile_Data'] = struct_gdf.apply(
+                            lambda row: [
+                                {'Station': float(profile_data.iloc[i, 0]),
+                                 'Elevation': float(profile_data.iloc[i, 1])}
+                                for i in range(
+                                    int(row['Centerline Profile (Index)']),
+                                    int(row['Centerline Profile (Index)']) + int(row['Centerline Profile (Count)'])
+                                )
+                            ],
+                            axis=1
+                        )
+                        logger.debug("Processed Profile Data successfully.")
+                    else:
+                        logger.warning("Required columns for Profile Data not found in Table Info.")
+                else:
+                    logger.warning("Profile Data dataset is empty or Table Info is missing.")
+
+                # Convert datetime columns to string if requested
+                if datetime_to_str:
+                    datetime_cols = struct_gdf.select_dtypes(include=['datetime64']).columns
+                    for col in datetime_cols:
+                        struct_gdf[col] = struct_gdf[col].dt.isoformat()
+                        logger.debug(f"Converted datetime column '{col}' to string.")
+
+                # Ensure all byte strings are decoded (if any remain)
+                for col in struct_gdf.columns:
+                    if struct_gdf[col].dtype == object:
+                        struct_gdf[col] = struct_gdf[col].apply(
+                            lambda x: x.decode('utf-8', errors='ignore') if isinstance(x, bytes) else x
+                        )
+
+                # Final GeoDataFrame
+                logger.info("Successfully extracted structures GeoDataFrame.")
                 return struct_gdf
+
         except Exception as e:
-            logger.error(f"Error reading structures: {str(e)}")
+            logger.error(f"Error reading structures from {hdf_path}: {str(e)}")
             raise
 
     @staticmethod
@@ -138,10 +236,10 @@ class HdfStruc:
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
-                if
+                if "Geometry/Structures" not in hdf_file:
                     logger.info(f"No structures found in the geometry file: {hdf_path}")
                     return {}
-                return HdfUtils.get_attrs(hdf_file,
+                return HdfUtils.get_attrs(hdf_file, "Geometry/Structures")
         except Exception as e:
             logger.error(f"Error reading geometry structures attributes: {str(e)}")
             return {}
```
ras_commander/HdfUtils.py
CHANGED
```diff
@@ -17,6 +17,7 @@ from typing import Union, Optional, Dict, List, Tuple, Any
 from scipy.spatial import KDTree
 import re
 
+
 from .Decorators import standardize_input, log_call
 from .LoggingConfig import setup_logging, get_logger
 
@@ -465,3 +466,53 @@ class HdfUtils:
         except Exception as e:
             logger.error(f"Error reading projection from {hdf_path}: {str(e)}")
             return None
+
+    def print_attrs(name, obj):
+        """
+        Print attributes of an HDF5 object.
+        """
+        if obj.attrs:
+            print("")
+            print(f" Attributes for {name}:")
+            for key, val in obj.attrs.items():
+                print(f" {key}: {val}")
+        else:
+            print(f" No attributes for {name}.")
+
+    @staticmethod
+    @standardize_input(file_type='plan_hdf')
+    def explore_hdf5(file_path: Path, group_path: str = '/') -> None:
+        """
+        Recursively explore and print the structure of an HDF5 file.
+
+        :param file_path: Path to the HDF5 file
+        :param group_path: Current group path to explore
+        """
+        def recurse(name, obj, indent=0):
+            spacer = " " * indent
+            if isinstance(obj, h5py.Group):
+                print(f"{spacer}Group: {name}")
+                HdfUtils.print_attrs(name, obj)
+                for key in obj:
+                    recurse(f"{name}/{key}", obj[key], indent+1)
+            elif isinstance(obj, h5py.Dataset):
+                print(f"{spacer}Dataset: {name}")
+                print(f"{spacer} Shape: {obj.shape}")
+                print(f"{spacer} Dtype: {obj.dtype}")
+                HdfUtils.print_attrs(name, obj)
+            else:
+                print(f"{spacer}Unknown object: {name}")
+
+        try:
+            with h5py.File(file_path, 'r') as hdf_file:
+                if group_path in hdf_file:
+                    print("")
+                    print(f"Exploring group: {group_path}\n")
+                    group = hdf_file[group_path]
+                    for key in group:
+                        print("")
+                        recurse(f"{group_path}/{key}", group[key], indent=1)
+                else:
+                    print(f"Group path '{group_path}' not found in the HDF5 file.")
+        except Exception as e:
+            print(f"Error exploring HDF5 file: {e}")
```