rashdf-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rashdf/__init__.py +5 -0
- rashdf/base.py +78 -0
- rashdf/geom.py +416 -0
- rashdf/plan.py +78 -0
- rashdf/utils.py +215 -0
- rashdf-0.1.0.dist-info/LICENSE +21 -0
- rashdf-0.1.0.dist-info/METADATA +102 -0
- rashdf-0.1.0.dist-info/RECORD +10 -0
- rashdf-0.1.0.dist-info/WHEEL +5 -0
- rashdf-0.1.0.dist-info/top_level.txt +1 -0
rashdf/__init__.py
ADDED
rashdf/base.py
ADDED
@@ -0,0 +1,78 @@
import h5py
from .utils import hdf5_attrs_to_dict
from typing import Dict


class RasHdf(h5py.File):
    """Base class for reading RAS HDF files."""

    def __init__(self, name: str, **kwargs):
        """Open a HEC-RAS HDF file.

        Parameters
        ----------
        name : str
            The path to the RAS HDF file.
        kwargs : dict
            Additional keyword arguments to pass to h5py.File
        """
        super().__init__(name, mode="r", **kwargs)

    @classmethod
    def open_uri(
        cls, uri: str, fsspec_kwargs: dict = {}, h5py_kwargs: dict = {}
    ) -> "RasHdf":
        """Open a HEC-RAS HDF file from a URI.

        Parameters
        ----------
        uri : str
            The URI of the RAS HDF file. Note this should be a path
            recognized by fsspec, such as an S3 path or a Google Cloud
            Storage path. See https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.open
        fsspec_kwargs : dict
            Additional keyword arguments to pass to fsspec.open
        h5py_kwargs : dict
            Additional keyword arguments to pass to h5py.File

        Returns
        -------
        RasHdf
            The RAS HDF file opened from the URI.

        Examples
        --------
        >>> results_hdf = RasHdf.open_uri("s3://my-bucket/results.hdf")
        """
        import fsspec

        remote_file = fsspec.open(uri, mode="rb", **fsspec_kwargs)
        return cls(remote_file.open(), **h5py_kwargs)

    def get_attrs(self, attr_path: str) -> Dict:
        """Convert attributes from a HEC-RAS HDF file into a Python dictionary for a given attribute path.

        Parameters
        ----------
        attr_path (str): The path within the HEC-RAS HDF file where the desired attributes are located (Ex. "Plan Data/Plan Parameters").

        Returns
        -------
        plan_attrs (dict): Dictionary filled with attributes at given path, if attributes exist at that path.
        """
        attr_object = self.get(attr_path)

        if attr_object:
            return hdf5_attrs_to_dict(attr_object.attrs)

        return {}

    def get_root_attrs(self):
        """Returns attributes at root level of HEC-RAS HDF file.

        Returns
        -------
        dict
            Dictionary filled with HEC-RAS HDF root attributes.
        """
        return self.get_attrs("/")
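As a quick orientation to the base class above, here is a minimal usage sketch. The local path reuses the Muncie example from the project README, the S3 URI comes from the `open_uri` docstring example, and remote access assumes `fsspec` plus a suitable driver (e.g. `s3fs`) are installed:

```python
from rashdf.base import RasHdf

# Local file: RasHdf is an h5py.File opened read-only.
with RasHdf("path/to/rasmodel/Muncie.p04.hdf") as hdf:
    root_attrs = hdf.get_root_attrs()                          # attributes at "/"
    plan_params = hdf.get_attrs("Plan Data/Plan Parameters")   # {} if the path is absent

# Remote file via fsspec (bucket name is illustrative, from the docstring example).
results_hdf = RasHdf.open_uri("s3://my-bucket/results.hdf")
```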
rashdf/geom.py
ADDED
@@ -0,0 +1,416 @@
from .base import RasHdf
from .utils import convert_ras_hdf_string, get_first_hdf_group, hdf5_attrs_to_dict

import numpy as np
from geopandas import GeoDataFrame
from pyproj import CRS
from shapely import (
    Polygon,
    Point,
    LineString,
    MultiLineString,
    MultiPolygon,
    polygonize,
)

from typing import List, Optional


class RasGeomHdf(RasHdf):
    GEOM_PATH = "Geometry"
    GEOM_STRUCTURES_PATH = f"{GEOM_PATH}/Structures"
    FLOW_AREA_2D_PATH = f"{GEOM_PATH}/2D Flow Areas"

    def __init__(self, name: str, **kwargs):
        super().__init__(name, **kwargs)

    def projection(self) -> Optional[CRS]:
        """Return the projection of the RAS geometry as a
        pyproj.CRS object.

        Returns
        -------
        CRS
            The projection of the RAS geometry.
        """
        proj_wkt = self.attrs.get("Projection")
        if proj_wkt is None:
            return None
        if isinstance(proj_wkt, bytes) or isinstance(proj_wkt, np.bytes_):
            proj_wkt = proj_wkt.decode("utf-8")
        return CRS.from_wkt(proj_wkt)

    def mesh_area_names(self) -> List[str]:
        """Return a list of the 2D mesh area names of
        the RAS geometry.

        Returns
        -------
        list
            A list of the 2D mesh area names (str) within the RAS geometry if 2D areas exist.
        """
        if "/Geometry/2D Flow Areas" not in self:
            return list()
        return list(
            [
                convert_ras_hdf_string(n)
                for n in self["/Geometry/2D Flow Areas/Attributes"][()]["Name"]
            ]
        )

    def mesh_areas(self) -> GeoDataFrame:
        """Return 2D flow area perimeter polygons.

        Returns
        -------
        GeoDataFrame
            A GeoDataFrame containing the 2D flow area perimeter polygons if 2D areas exist.
        """
        mesh_area_names = self.mesh_area_names()
        if not mesh_area_names:
            return GeoDataFrame()
        mesh_area_polygons = [
            Polygon(self[f"/Geometry/2D Flow Areas/{n}/Perimeter"][()])
            for n in mesh_area_names
        ]
        return GeoDataFrame(
            {"mesh_name": mesh_area_names, "geometry": mesh_area_polygons},
            geometry="geometry",
            crs=self.projection(),
        )

    def mesh_cell_polygons(self) -> GeoDataFrame:
        """Return the 2D flow mesh cell polygons.

        Returns
        -------
        GeoDataFrame
            A GeoDataFrame containing the 2D flow mesh cell polygons if 2D areas exist.
        """
        mesh_area_names = self.mesh_area_names()
        if not mesh_area_names:
            return GeoDataFrame()

        face_gdf = self.mesh_cell_faces()

        cell_dict = {"mesh_name": [], "cell_id": [], "geometry": []}
        for i, mesh_name in enumerate(mesh_area_names):
            cell_cnt = self["/Geometry/2D Flow Areas/Cell Info"][()][i][1]
            cell_ids = list(range(cell_cnt))
            cell_face_info = self[
                f"/Geometry/2D Flow Areas/{mesh_name}/Cells Face and Orientation Info"
            ][()]
            cell_face_values = self[
                f"/Geometry/2D Flow Areas/{mesh_name}/Cells Face and Orientation Values"
            ][()][:, 0]
            face_id_lists = list(
                np.vectorize(
                    lambda cell_id: str(
                        cell_face_values[
                            cell_face_info[cell_id][0] : cell_face_info[cell_id][0]
                            + cell_face_info[cell_id][1]
                        ]
                    )
                )(cell_ids)
            )
            mesh_faces = (
                face_gdf[face_gdf.mesh_name == mesh_name][["face_id", "geometry"]]
                .set_index("face_id")
                .to_numpy()
            )
            cell_dict["mesh_name"] += [mesh_name] * cell_cnt
            cell_dict["cell_id"] += cell_ids
            cell_dict["geometry"] += list(
                np.vectorize(
                    lambda face_id_list: polygonize(
                        np.ravel(
                            mesh_faces[
                                np.array(face_id_list.strip("[]").split()).astype(int)
                            ]
                        )
                    ).geoms[0]
                )(face_id_lists)
            )
        return GeoDataFrame(cell_dict, geometry="geometry", crs=self.projection())

    def mesh_cell_points(self) -> GeoDataFrame:
        """Return the 2D flow mesh cell points.

        Returns
        -------
        GeoDataFrame
            A GeoDataFrame containing the 2D flow mesh cell points if 2D areas exist.
        """
        mesh_area_names = self.mesh_area_names()
        if not mesh_area_names:
            return GeoDataFrame()
        pnt_dict = {"mesh_name": [], "cell_id": [], "geometry": []}
        for i, mesh_name in enumerate(mesh_area_names):
            starting_row, count = self["/Geometry/2D Flow Areas/Cell Info"][()][i]
            cell_pnt_coords = self["/Geometry/2D Flow Areas/Cell Points"][()][
                starting_row : starting_row + count
            ]
            pnt_dict["mesh_name"] += [mesh_name] * cell_pnt_coords.shape[0]
            pnt_dict["cell_id"] += range(count)
            pnt_dict["geometry"] += list(
                np.vectorize(lambda coords: Point(coords), signature="(n)->()")(
                    cell_pnt_coords
                )
            )
        return GeoDataFrame(pnt_dict, geometry="geometry", crs=self.projection())

    def mesh_cell_faces(self) -> GeoDataFrame:
        """Return the 2D flow mesh cell faces.

        Returns
        -------
        GeoDataFrame
            A GeoDataFrame containing the 2D flow mesh cell faces if 2D areas exist.
        """
        mesh_area_names = self.mesh_area_names()
        if not mesh_area_names:
            return GeoDataFrame()
        face_dict = {"mesh_name": [], "face_id": [], "geometry": []}
        for mesh_name in mesh_area_names:
            facepoints_index = self[
                f"/Geometry/2D Flow Areas/{mesh_name}/Faces FacePoint Indexes"
            ][()]
            facepoints_coordinates = self[
                f"/Geometry/2D Flow Areas/{mesh_name}/FacePoints Coordinate"
            ][()]
            faces_perimeter_info = self[
                f"/Geometry/2D Flow Areas/{mesh_name}/Faces Perimeter Info"
            ][()]
            faces_perimeter_values = self[
                f"/Geometry/2D Flow Areas/{mesh_name}/Faces Perimeter Values"
            ][()]
            face_id = -1
            for pnt_a_index, pnt_b_index in facepoints_index:
                face_id += 1
                face_dict["mesh_name"].append(mesh_name)
                face_dict["face_id"].append(face_id)
                coordinates = list()
                coordinates.append(facepoints_coordinates[pnt_a_index])
                starting_row, count = faces_perimeter_info[face_id]
                if count > 0:
                    coordinates += list(
                        faces_perimeter_values[starting_row : starting_row + count]
                    )
                coordinates.append(facepoints_coordinates[pnt_b_index])
                face_dict["geometry"].append(LineString(coordinates))
        return GeoDataFrame(face_dict, geometry="geometry", crs=self.projection())

    def get_geom_attrs(self):
        """Returns base geometry attributes from a HEC-RAS HDF file.

        Returns
        -------
        dict
            Dictionary filled with base geometry attributes.
        """
        return self.get_attrs(self.GEOM_PATH)

    def get_geom_structures_attrs(self):
        """Returns geometry structures attributes from a HEC-RAS HDF file.

        Returns
        -------
        dict
            Dictionary filled with geometry structures attributes.
        """
        return self.get_attrs(self.GEOM_STRUCTURES_PATH)

    def get_geom_2d_flow_area_attrs(self):
        """Returns geometry 2d flow area attributes from a HEC-RAS HDF file.

        Returns
        -------
        dict
            Dictionary filled with geometry 2d flow area attributes.
        """
        try:
            d2_flow_area = get_first_hdf_group(self.get(self.FLOW_AREA_2D_PATH))
        except AttributeError:
            raise AttributeError(
                f"Unable to get 2D Flow Area; {self.FLOW_AREA_2D_PATH} group not found in HDF5 file."
            )

        d2_flow_area_attrs = hdf5_attrs_to_dict(d2_flow_area.attrs)

        return d2_flow_area_attrs

    def bc_lines(self) -> GeoDataFrame:
        """Return the 2D mesh area boundary condition lines.

        Returns
        -------
        GeoDataFrame
            A GeoDataFrame containing the 2D mesh area boundary condition lines if they exist.
        """
        if "/Geometry/Boundary Condition Lines" not in self:
            return GeoDataFrame()
        bc_line_data = self["/Geometry/Boundary Condition Lines"]
        bc_line_ids = range(bc_line_data["Attributes"][()].shape[0])
        v_conv_str = np.vectorize(convert_ras_hdf_string)
        names = v_conv_str(bc_line_data["Attributes"][()]["Name"])
        mesh_names = v_conv_str(bc_line_data["Attributes"][()]["SA-2D"])
        types = v_conv_str(bc_line_data["Attributes"][()]["Type"])
        geoms = list()
        for pnt_start, pnt_cnt, part_start, part_cnt in bc_line_data["Polyline Info"][
            ()
        ]:
            points = bc_line_data["Polyline Points"][()][
                pnt_start : pnt_start + pnt_cnt
            ]
            if part_cnt == 1:
                geoms.append(LineString(points))
            else:
                parts = bc_line_data["Polyline Parts"][()][
                    part_start : part_start + part_cnt
                ]
                geoms.append(
                    MultiLineString(
                        list(
                            points[part_pnt_start : part_pnt_start + part_pnt_cnt]
                            for part_pnt_start, part_pnt_cnt in parts
                        )
                    )
                )
        return GeoDataFrame(
            {
                "bc_line_id": bc_line_ids,
                "name": names,
                "mesh_name": mesh_names,
                "type": types,
                "geometry": geoms,
            },
            geometry="geometry",
            crs=self.projection(),
        )

    def breaklines(self) -> GeoDataFrame:
        """Return the 2D mesh area breaklines.

        Returns
        -------
        GeoDataFrame
            A GeoDataFrame containing the 2D mesh area breaklines if they exist.
        """
        if "/Geometry/2D Flow Area Break Lines" not in self:
            return GeoDataFrame()
        bl_line_data = self["/Geometry/2D Flow Area Break Lines"]
        bl_line_ids = range(bl_line_data["Attributes"][()].shape[0])
        names = np.vectorize(convert_ras_hdf_string)(
            bl_line_data["Attributes"][()]["Name"]
        )
        geoms = list()
        for pnt_start, pnt_cnt, part_start, part_cnt in bl_line_data["Polyline Info"][
            ()
        ]:
            points = bl_line_data["Polyline Points"][()][
                pnt_start : pnt_start + pnt_cnt
            ]
            if part_cnt == 1:
                geoms.append(LineString(points))
            else:
                parts = bl_line_data["Polyline Parts"][()][
                    part_start : part_start + part_cnt
                ]
                geoms.append(
                    MultiLineString(
                        list(
                            points[part_pnt_start : part_pnt_start + part_pnt_cnt]
                            for part_pnt_start, part_pnt_cnt in parts
                        )
                    )
                )
        return GeoDataFrame(
            {"bl_id": bl_line_ids, "name": names, "geometry": geoms},
            geometry="geometry",
            crs=self.projection(),
        )

    def refinement_regions(self) -> GeoDataFrame:
        """Return the 2D mesh area refinement regions.

        Returns
        -------
        GeoDataFrame
            A GeoDataFrame containing the 2D mesh area refinement regions if they exist.
        """
        if "/Geometry/2D Flow Area Refinement Regions" not in self:
            return GeoDataFrame()
        rr_data = self["/Geometry/2D Flow Area Refinement Regions"]
        rr_ids = range(rr_data["Attributes"][()].shape[0])
        names = np.vectorize(convert_ras_hdf_string)(rr_data["Attributes"][()]["Name"])
        geoms = list()
        for pnt_start, pnt_cnt, part_start, part_cnt in rr_data["Polygon Info"][()]:
            points = rr_data["Polygon Points"][()][pnt_start : pnt_start + pnt_cnt]
            if part_cnt == 1:
                geoms.append(Polygon(points))
            else:
                parts = rr_data["Polygon Parts"][()][part_start : part_start + part_cnt]
                geoms.append(
                    MultiPolygon(
                        list(
                            points[part_pnt_start : part_pnt_start + part_pnt_cnt]
                            for part_pnt_start, part_pnt_cnt in parts
                        )
                    )
                )
        return GeoDataFrame(
            {"rr_id": rr_ids, "name": names, "geometry": geoms},
            geometry="geometry",
            crs=self.projection(),
        )

    def connections(self) -> GeoDataFrame:
        raise NotImplementedError

    def ic_points(self) -> GeoDataFrame:
        raise NotImplementedError

    def reference_lines(self) -> GeoDataFrame:
        raise NotImplementedError

    def reference_points(self) -> GeoDataFrame:
        raise NotImplementedError

    def structures(self) -> GeoDataFrame:
        raise NotImplementedError

    def pump_stations(self) -> GeoDataFrame:
        raise NotImplementedError

    def mannings_calibration_regions(self) -> GeoDataFrame:
        raise NotImplementedError

    def classification_polygons(self) -> GeoDataFrame:
        raise NotImplementedError

    def terrain_modifications(self) -> GeoDataFrame:
        raise NotImplementedError

    def cross_sections(self) -> GeoDataFrame:
        raise NotImplementedError

    def river_reaches(self) -> GeoDataFrame:
        raise NotImplementedError

    def flowpaths(self) -> GeoDataFrame:
        raise NotImplementedError

    def bank_points(self) -> GeoDataFrame:
        raise NotImplementedError

    def bank_lines(self) -> GeoDataFrame:
        raise NotImplementedError

    def ineffective_areas(self) -> GeoDataFrame:
        raise NotImplementedError

    def ineffective_points(self) -> GeoDataFrame:
        raise NotImplementedError

    def blocked_obstructions(self) -> GeoDataFrame:
        raise NotImplementedError
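A minimal usage sketch for `RasGeomHdf` above, reusing the Muncie example path from the project README; the GeoJSON output name is hypothetical:

```python
from rashdf import RasGeomHdf

with RasGeomHdf("path/to/rasmodel/Muncie.g04.hdf") as geom_hdf:
    crs = geom_hdf.projection()         # pyproj.CRS parsed from the root "Projection" attribute
    names = geom_hdf.mesh_area_names()  # 2D flow area names, e.g. ["2D Interior Area"]
    areas = geom_hdf.mesh_areas()       # perimeter polygons as a GeoDataFrame
    faces = geom_hdf.mesh_cell_faces()  # cell face LineStrings as a GeoDataFrame
    bc = geom_hdf.bc_lines()            # boundary condition lines; empty GeoDataFrame if none

# The GeoDataFrames are fully materialized, so they remain usable after the file is closed.
areas.to_file("mesh-areas.geojson", driver="GeoJSON")
```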
rashdf/plan.py
ADDED
@@ -0,0 +1,78 @@
from .geom import RasGeomHdf
from typing import Dict
from geopandas import GeoDataFrame


class RasPlanHdf(RasGeomHdf):
    PLAN_INFO_PATH = "Plan Data/Plan Information"
    PLAN_PARAMS_PATH = "Plan Data/Plan Parameters"
    PRECIP_PATH = "Event Conditions/Meteorology/Precipitation"
    RESULTS_UNSTEADY_PATH = "Results/Unsteady"
    RESULTS_UNSTEADY_SUMMARY_PATH = f"{RESULTS_UNSTEADY_PATH}/Summary"
    VOLUME_ACCOUNTING_PATH = f"{RESULTS_UNSTEADY_PATH}/Volume Accounting"

    def __init__(self, name: str, **kwargs):
        super().__init__(name, **kwargs)

    def get_plan_info_attrs(self) -> Dict:
        """Returns plan information attributes from a HEC-RAS HDF plan file.

        Returns
        -------
        dict
            Dictionary filled with plan information attributes.
        """
        return self.get_attrs(self.PLAN_INFO_PATH)

    def get_plan_param_attrs(self) -> Dict:
        """Returns plan parameter attributes from a HEC-RAS HDF plan file.

        Returns
        -------
        dict
            Dictionary filled with plan parameter attributes.
        """
        return self.get_attrs(self.PLAN_PARAMS_PATH)

    def get_meteorology_precip_attrs(self) -> Dict:
        """Returns precipitation attributes from a HEC-RAS HDF plan file.

        Returns
        -------
        dict
            Dictionary filled with precipitation attributes.
        """
        return self.get_attrs(self.PRECIP_PATH)

    def get_results_unsteady_attrs(self) -> Dict:
        """Returns unsteady attributes from a HEC-RAS HDF plan file.

        Returns
        -------
        dict
            Dictionary filled with unsteady attributes.
        """
        return self.get_attrs(self.RESULTS_UNSTEADY_PATH)

    def get_results_unsteady_summary_attrs(self) -> Dict:
        """Returns results unsteady summary attributes from a HEC-RAS HDF plan file.

        Returns
        -------
        dict
            Dictionary filled with results summary attributes.
        """
        return self.get_attrs(self.RESULTS_UNSTEADY_SUMMARY_PATH)

    def get_results_volume_accounting_attrs(self) -> Dict:
        """Returns volume accounting attributes from a HEC-RAS HDF plan file.

        Returns
        -------
        dict
            Dictionary filled with volume accounting attributes.
        """
        return self.get_attrs(self.VOLUME_ACCOUNTING_PATH)

    def enroachment_points(self) -> GeoDataFrame:
        raise NotImplementedError
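A short sketch of pulling attribute dictionaries out of a plan file with `RasPlanHdf` above (the plan file path reuses the README example); the comments note the HDF group each getter reads:

```python
from rashdf import RasPlanHdf

with RasPlanHdf("path/to/rasmodel/Muncie.p04.hdf") as plan_hdf:
    plan_info = plan_hdf.get_plan_info_attrs()                # "Plan Data/Plan Information"
    plan_params = plan_hdf.get_plan_param_attrs()             # "Plan Data/Plan Parameters"
    precip = plan_hdf.get_meteorology_precip_attrs()          # "Event Conditions/Meteorology/Precipitation"
    summary = plan_hdf.get_results_unsteady_summary_attrs()   # "Results/Unsteady/Summary"

# Because RasPlanHdf subclasses RasGeomHdf, the geometry accessors (mesh_areas,
# bc_lines, etc.) are available on plan files as well.
```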
rashdf/utils.py
ADDED
@@ -0,0 +1,215 @@
import numpy as np
import h5py
from typing import Any, List, Tuple, Union, Optional

from datetime import datetime, timedelta
import re


def parse_ras_datetime(datetime_str: str) -> datetime:
    """Parse a datetime string from a RAS file into a datetime object.

    Parameters
    ----------
    datetime_str (str): The datetime string to be parsed. The string should be in the format "ddMMMyyyy HH:mm:ss".

    Returns
    -------
    datetime: A datetime object representing the parsed datetime.
    """
    format = "%d%b%Y %H:%M:%S"
    return datetime.strptime(datetime_str, format)


def parse_ras_simulation_window_datetime(datetime_str) -> datetime:
    """
    Parse a datetime string from a RAS simulation window into a datetime object.

    Parameters
    ----------
    datetime_str: The datetime string to be parsed. The string should be in the format "ddMMMyyyy HHmm".

    Returns
    -------
    datetime: A datetime object representing the parsed datetime.
    """
    format = "%d%b%Y %H%M"
    return datetime.strptime(datetime_str, format)


def parse_run_time_window(window: str) -> Tuple[datetime, datetime]:
    """
    Parse a run time window string into a tuple of datetime objects.

    Parameters
    ----------
    window (str): The run time window string to be parsed.

    Returns
    -------
    Tuple[datetime, datetime]: A tuple containing two datetime objects representing the start and end of the run
    time window.
    """
    split = window.split(" to ")
    begin = parse_ras_datetime(split[0])
    end = parse_ras_datetime(split[1])
    return begin, end


def parse_duration(duration_str: str) -> timedelta:
    """
    Parse a duration string into a timedelta object.

    Parameters
    ----------
    duration_str (str): The duration string to be parsed. The string should be in the format "HH:MM:SS".

    Returns
    -------
    timedelta: A timedelta object representing the parsed duration.
    """
    hours, minutes, seconds = map(int, duration_str.split(":"))
    duration = timedelta(hours=hours, minutes=minutes, seconds=seconds)
    return duration


def convert_ras_hdf_string(value: str) -> Union[bool, str, List[str]]:
    """Convert a string value from an HEC-RAS HDF file into a Python object.

    This function handles several specific string formats:
    - "True" and "False" are converted to boolean values.
    - Strings matching the format "ddMMMyyyy HH:mm:ss" or "ddMMMyyyy HHmm" are parsed into datetime objects.
    - Strings matching the format "ddMMMyyyy HH:mm:ss to ddMMMyyyy HH:mm:ss" or "ddMMMyyyy HHmm to ddMMMyyyy HHmm"
    are parsed into a list of two datetime objects.

    Parameters
    ----------
    value (str): The string value to be converted.

    Returns
    -------
    The converted value, which could be a boolean, a datetime string, a list of datetime strings, or the original
    string if no other conditions are met.
    """
    ras_datetime_format1_re = r"\d{2}\w{3}\d{4} \d{2}:\d{2}:\d{2}"
    ras_datetime_format2_re = r"\d{2}\w{3}\d{4} \d{2}\d{2}"
    s = value.decode("utf-8")
    if s == "True":
        return True
    elif s == "False":
        return False
    elif re.match(rf"^{ras_datetime_format1_re}", s):
        if re.match(rf"^{ras_datetime_format1_re} to {ras_datetime_format1_re}$", s):
            split = s.split(" to ")
            return [
                parse_ras_datetime(split[0]),
                parse_ras_datetime(split[1]),
            ]
        return parse_ras_datetime(s)
    elif re.match(rf"^{ras_datetime_format2_re}", s):
        if re.match(rf"^{ras_datetime_format2_re} to {ras_datetime_format2_re}$", s):
            split = s.split(" to ")
            return [
                parse_ras_simulation_window_datetime(split[0]),
                parse_ras_simulation_window_datetime(split[1]),
            ]
        return parse_ras_simulation_window_datetime(s)
    return s


def convert_ras_hdf_value(
    value: Any,
) -> Union[None, bool, str, List[str], int, float, List[int], List[float]]:
    """Convert a value from a HEC-RAS HDF file into a Python object.

    This function handles several specific types:
    - NaN values are converted to None.
    - Byte strings are converted using the `convert_ras_hdf_string` function.
    - NumPy integer or float types are converted to Python int or float.
    - Regular ints and floats are left as they are.
    - Lists, tuples, and NumPy arrays are recursively processed.
    - All other types are converted to string.

    Parameters
    ----------
    value (Any): The value to be converted.

    Returns
    -------
    The converted value, which could be None, a boolean, a string, a list of strings, an integer, a float, a list
    of integers, a list of floats, or the original value as a string if no other conditions are met.
    """
    # TODO (?): handle "8-bit bitfield" values in 2D Flow Area groups

    # Check for NaN (np.nan)
    if isinstance(value, np.floating) and np.isnan(value):
        return None

    # Check for byte strings
    elif isinstance(value, bytes) or isinstance(value, np.bytes_):
        return convert_ras_hdf_string(value)

    # Check for NumPy integer or float types
    elif isinstance(value, np.integer):
        return int(value)
    elif isinstance(value, np.floating):
        return float(value)

    # Leave regular ints and floats as they are
    elif isinstance(value, (int, float)):
        return value

    elif isinstance(value, (list, tuple, np.ndarray)):
        if len(value) > 1:
            return [convert_ras_hdf_value(v) for v in value]
        else:
            return convert_ras_hdf_value(value[0])

    # Convert all other types to string
    else:
        return str(value)


def hdf5_attrs_to_dict(attrs: dict, prefix: str = None) -> dict:
    """
    Convert a dictionary of attributes from an HDF5 file into a Python dictionary.

    Parameters:
    ----------
    attrs (dict): The attributes to be converted.
    prefix (str, optional): An optional prefix to prepend to the keys.

    Returns:
    ----------
    dict: A dictionary with the converted attributes.
    """
    results = {}
    for k, v in attrs.items():
        value = convert_ras_hdf_value(v)
        if prefix:
            key = f"{prefix}:{k}"
        else:
            key = k
        results[key] = value
    return results


def get_first_hdf_group(parent_group: h5py.Group) -> Optional[h5py.Group]:
    """
    Get the first HDF5 group from a parent group.

    This function iterates over the items in the parent group and returns the first item that is an instance of
    h5py.Group. If no such item is found, it returns None.

    Parameters:
    ----------
    parent_group (h5py.Group): The parent group to search in.

    Returns:
    ----------
    Optional[h5py.Group]: The first HDF5 group in the parent group, or None if no group is found.
    """
    for _, item in parent_group.items():
        if isinstance(item, h5py.Group):
            return item
    return None
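To make the conversion rules above concrete, here is a small sketch with made-up inputs in the formats these helpers expect:

```python
import numpy as np
from rashdf.utils import convert_ras_hdf_value, parse_duration

convert_ras_hdf_value(b"True")                           # -> True
convert_ras_hdf_value(b"01JAN2024 12:00:00")             # -> datetime.datetime(2024, 1, 1, 12, 0)
convert_ras_hdf_value(np.float64(np.nan))                # -> None
convert_ras_hdf_value(np.int32(6))                       # -> 6
convert_ras_hdf_value(np.array([406025.0, 1805015.2]))   # -> [406025.0, 1805015.2]
parse_duration("00:00:23")                               # -> datetime.timedelta(seconds=23)
```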
rashdf-0.1.0.dist-info/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 fema-ffrd

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
rashdf-0.1.0.dist-info/METADATA
ADDED
@@ -0,0 +1,102 @@
Metadata-Version: 2.1
Name: rashdf
Version: 0.1.0
Summary: Read data from HEC-RAS HDF files.
Project-URL: repository, https://github.com/fema-ffrd/rashdf
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: h5py
Requires-Dist: geopandas
Provides-Extra: dev
Requires-Dist: pre-commit ; extra == 'dev'
Requires-Dist: ruff ; extra == 'dev'
Requires-Dist: pytest ; extra == 'dev'

# rashdf
[](https://github.com/fema-ffrd/rashdf/actions/workflows/continuous-integration.yml)
[](https://github.com/fema-ffrd/rashdf/actions/workflows/release.yml)
[](https://badge.fury.io/py/rashdf)

Read data from [HEC-RAS](https://www.hec.usace.army.mil/software/hec-ras/) [HDF](https://github.com/HDFGroup/hdf5) files.

*Pronunciation: `raz·aitch·dee·eff`*

## Install
```bash
$ pip install rashdf
```

## Usage
`RasGeomHdf` and `RasPlanHdf` are extensions of
[h5py.File](https://docs.h5py.org/en/stable/high/file.html#h5py.File). They contain
methods to export HEC-RAS model geometry as
[GeoDataFrame](https://geopandas.org/en/stable/docs/reference/geodataframe.html)
objects.
```python
>>> from rashdf import RasGeomHdf
>>> geom_hdf = RasGeomHdf("path/to/rasmodel/Muncie.g04.hdf")
>>> mesh_cells = geom_hdf.mesh_cell_polygons()  # export a GeoDataFrame
>>> mesh_cells
             mesh_name  cell_id                                           geometry
0     2D Interior Area        0  POLYGON ((406025.000 1805015.237, 406025.000 1...
1     2D Interior Area        1  POLYGON ((406075.000 1805018.545, 406075.000 1...
2     2D Interior Area        2  POLYGON ((406075.000 1804975.000, 406075.000 1...
3     2D Interior Area        3  POLYGON ((406125.000 1804975.000, 406125.000 1...
4     2D Interior Area        4  POLYGON ((406175.000 1804975.000, 406175.000 1...
...                ...      ...                                                ...
5386  2D Interior Area     5386  POLYGON ((409163.402 1802463.621, 409175.000 1...
5387  2D Interior Area     5387  POLYGON ((409160.953 1802374.120, 409125.000 1...
5388  2D Interior Area     5388  POLYGON ((409163.402 1802463.621, 409161.906 1...
5389  2D Interior Area     5389  POLYGON ((409112.480 1802410.114, 409112.046 1...
5390  2D Interior Area     5390  POLYGON ((409112.480 1802410.114, 409063.039 1...
>>> mesh_cells.to_file("mucie-mesh-cell-polygons.shp")
```

Also, methods to extract certain HDF group attributes as dictionaries:
```python
>>> from rashdf import RasPlanHdf
>>> with RasPlanHdf("path/to/rasmodel/Muncie.p04.hdf") as plan_hdf:
...     results_unsteady_summary = plan_hdf.get_results_unsteady_summary_attrs()
>>> results_unsteady_summary
{'Computation Time DSS': '00:00:00', 'Computation Time Total': '00:00:23', 'Maximum WSEL Error': 0.0099277812987566, 'Maximum number of cores': 6, 'Run Time Window': [datetime.datetime(2024, 3, 27, 9, 31, 52), datetime.datetime(2024, 3, 27, 9, 32, 15)], 'Solution': 'Unsteady Finished Successfully', 'Time Solution Went Unstable': None, 'Time Stamp Solution Went Unstable': 'Not Applicable'}
```

## Documentation
Coming soon.

## Developer Setup
Create a virtual environment in the project directory:
```
$ python -m venv venv-rashdf
```

Activate the virtual environment:
```
$ source ./venv-rashdf/bin/activate
(venv-rashdf) $
```

Install dev dependencies:
```
(venv-rashdf) $ pip install ".[dev]"
```

Install git hook scripts (used for automatic linting/formatting):
```
(venv-rashdf) $ pre-commit install
```

With the virtual environment activated, run the tests:
```
(venv-rashdf) $ pytest
```
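One capability the Usage section above does not show is remote access: the `RasHdf.open_uri` classmethod defined in `rashdf/base.py` opens the same files over fsspec. A brief sketch, with a hypothetical S3 location and assuming an fsspec driver such as `s3fs` is installed:

```python
from rashdf import RasGeomHdf

# Hypothetical object-storage location; any fsspec-recognized URI works.
geom_hdf = RasGeomHdf.open_uri("s3://my-bucket/rasmodel/Muncie.g04.hdf")
mesh_areas = geom_hdf.mesh_areas()  # 2D flow area perimeters as a GeoDataFrame
```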
rashdf-0.1.0.dist-info/RECORD
ADDED
@@ -0,0 +1,10 @@
rashdf/__init__.py,sha256=r_UaNbJve1yWg-2r7qFiZVMdYiq4m5LNoRLJKL4HYYk,133
rashdf/base.py,sha256=yTihpXoSm-S6kb1BGdru4UPIR3_-mDFrcEaQfXd8ujY,2384
rashdf/geom.py,sha256=74VKwlgElFc167hTk2BbyFmKbbzi3o9SyW9tkH23XDM,14846
rashdf/plan.py,sha256=GU8BlRBN27R-letDhF_fOK1xAaJsL4CSYEyBCiDBaPs,2472
rashdf/utils.py,sha256=5jWSjCkPdnwGXwN7ELv9rpiUWtn3cprlZkfxmJYAWf8,7038
rashdf-0.1.0.dist-info/LICENSE,sha256=L_0QaLpQVHPcglVjiaJPnOocwzP8uXevDRjUPr9DL1Y,1065
rashdf-0.1.0.dist-info/METADATA,sha256=dFe6XNXj3PaygLZoSZ8DVM-M10vsrfP2drZZ0PaFBXw,4156
rashdf-0.1.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
rashdf-0.1.0.dist-info/top_level.txt,sha256=1iE403K85UTilumVhhghg2JdH3ABHE-2DmvFpKvi9fA,7
rashdf-0.1.0.dist-info/RECORD,,
rashdf-0.1.0.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
rashdf