rashdf 0.4.0__tar.gz → 0.6.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {rashdf-0.4.0 → rashdf-0.6.0}/PKG-INFO +12 -6
- {rashdf-0.4.0 → rashdf-0.6.0}/README.md +4 -4
- {rashdf-0.4.0 → rashdf-0.6.0}/pyproject.toml +3 -3
- {rashdf-0.4.0 → rashdf-0.6.0}/src/cli.py +49 -6
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf/base.py +4 -1
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf/geom.py +216 -112
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf/plan.py +445 -7
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf/utils.py +32 -2
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf.egg-info/PKG-INFO +12 -6
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf.egg-info/SOURCES.txt +1 -0
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf.egg-info/requires.txt +7 -1
- rashdf-0.6.0/tests/test_base.py +20 -0
- {rashdf-0.4.0 → rashdf-0.6.0}/tests/test_cli.py +55 -1
- {rashdf-0.4.0 → rashdf-0.6.0}/tests/test_geom.py +30 -0
- {rashdf-0.4.0 → rashdf-0.6.0}/tests/test_plan.py +274 -9
- {rashdf-0.4.0 → rashdf-0.6.0}/LICENSE +0 -0
- {rashdf-0.4.0 → rashdf-0.6.0}/setup.cfg +0 -0
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf/__init__.py +0 -0
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf.egg-info/dependency_links.txt +0 -0
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf.egg-info/entry_points.txt +0 -0
- {rashdf-0.4.0 → rashdf-0.6.0}/src/rashdf.egg-info/top_level.txt +0 -0
- {rashdf-0.4.0 → rashdf-0.6.0}/tests/test_utils.py +0 -0
--- rashdf-0.4.0/PKG-INFO
+++ rashdf-0.6.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: rashdf
-Version: 0.4.0
+Version: 0.6.0
 Summary: Read data from HEC-RAS HDF files.
 Project-URL: repository, https://github.com/fema-ffrd/rashdf
 Classifier: Development Status :: 4 - Beta
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3.12
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: h5py
-Requires-Dist: geopandas<0.
+Requires-Dist: geopandas<2.0,>=1.0
 Requires-Dist: pyarrow
 Requires-Dist: xarray
 Provides-Extra: dev
@@ -22,6 +22,12 @@ Requires-Dist: pre-commit; extra == "dev"
 Requires-Dist: ruff; extra == "dev"
 Requires-Dist: pytest; extra == "dev"
 Requires-Dist: pytest-cov; extra == "dev"
+Requires-Dist: fiona; extra == "dev"
+Requires-Dist: kerchunk; extra == "dev"
+Requires-Dist: zarr; extra == "dev"
+Requires-Dist: dask; extra == "dev"
+Requires-Dist: fsspec; extra == "dev"
+Requires-Dist: s3fs; extra == "dev"
 Provides-Extra: docs
 Requires-Dist: sphinx; extra == "docs"
 Requires-Dist: numpydoc; extra == "docs"
@@ -76,8 +82,8 @@ Also, methods to extract certain HDF group attributes as dictionaries:
 ```python
 >>> from rashdf import RasPlanHdf
 >>> with RasPlanHdf("path/to/rasmodel/Muncie.p04.hdf") as plan_hdf:
->>>
->>>
+>>> results_unsteady_summary_attrs = plan_hdf.get_results_unsteady_summary_attrs()
+>>> results_unsteady_summary_attrs
 {'Computation Time DSS': datetime.timedelta(0),
 'Computation Time Total': datetime.timedelta(seconds=23),
 'Maximum WSEL Error': 0.0099277812987566,
@@ -101,9 +107,9 @@ CLI help:
 $ rashdf --help
 ```
 
-Print the output formats supported by Fiona:
+Print the output formats supported by pyorgio:
 ```
-$ rashdf --fiona-drivers
+$ rashdf --pyogrio-drivers
 ```
 
 Help for a specific subcommand:
--- rashdf-0.4.0/README.md
+++ rashdf-0.6.0/README.md
@@ -47,8 +47,8 @@ Also, methods to extract certain HDF group attributes as dictionaries:
 ```python
 >>> from rashdf import RasPlanHdf
 >>> with RasPlanHdf("path/to/rasmodel/Muncie.p04.hdf") as plan_hdf:
->>>
->>>
+>>> results_unsteady_summary_attrs = plan_hdf.get_results_unsteady_summary_attrs()
+>>> results_unsteady_summary_attrs
 {'Computation Time DSS': datetime.timedelta(0),
 'Computation Time Total': datetime.timedelta(seconds=23),
 'Maximum WSEL Error': 0.0099277812987566,
@@ -72,9 +72,9 @@ CLI help:
 $ rashdf --help
 ```
 
-Print the output formats supported by Fiona:
+Print the output formats supported by pyorgio:
 ```
-$ rashdf --fiona-drivers
+$ rashdf --pyogrio-drivers
 ```
 
 Help for a specific subcommand:
--- rashdf-0.4.0/pyproject.toml
+++ rashdf-0.6.0/pyproject.toml
@@ -12,11 +12,11 @@ classifiers = [
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
 ]
-version = "0.4.0"
-dependencies = ["h5py", "geopandas>=0.
+version = "0.6.0"
+dependencies = ["h5py", "geopandas>=1.0,<2.0", "pyarrow", "xarray"]
 
 [project.optional-dependencies]
-dev = ["pre-commit", "ruff", "pytest", "pytest-cov"]
+dev = ["pre-commit", "ruff", "pytest", "pytest-cov", "fiona", "kerchunk", "zarr", "dask", "fsspec", "s3fs"]
 docs = ["sphinx", "numpydoc", "sphinx_rtd_theme"]
 
 [project.urls]
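The geopandas pin moves from the 0.x series to 1.x (`>=1.0,<2.0`), and the `dev` extra grows by the optional stack used for remote and cloud-backed HDF access (`fsspec`, `s3fs`) plus kerchunk/Zarr tooling. A quick environment check, not part of rashdf, that the installed geopandas falls inside the new pin:

```python
# Environment sanity check (not rashdf code): verify the installed geopandas
# satisfies the new ">=1.0,<2.0" constraint from pyproject.toml.
from importlib.metadata import version

gp = version("geopandas")
major = int(gp.split(".")[0])
assert major == 1, f"rashdf 0.6.0 expects geopandas 1.x, found {gp}"
print(f"geopandas {gp} OK")
```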
--- rashdf-0.4.0/src/cli.py
+++ rashdf-0.6.0/src/cli.py
@@ -3,7 +3,6 @@
 from rashdf import RasGeomHdf, RasPlanHdf
 from rashdf.utils import df_datetimes_to_str
 
-import fiona
 from geopandas import GeoDataFrame
 
 import argparse
@@ -23,6 +22,8 @@ COMMANDS = [
     "refinement_regions",
     "bc_lines",
     "breaklines",
+    "reference_lines",
+    "reference_points",
     "structures",
 ]
 
@@ -50,6 +51,20 @@ def docstring_to_help(docstring: Optional[str]) -> str:
     return help_text
 
 
+def pyogrio_supported_drivers() -> List[str]:
+    """Return a list of drivers supported by pyogrio for writing output files.
+
+    Returns
+    -------
+    list
+        A list of drivers supported by pyogrio for writing output files.
+    """
+    import pyogrio
+
+    drivers = pyogrio.list_drivers(write=True)
+    return sorted(drivers)
+
+
 def fiona_supported_drivers() -> List[str]:
     """Return a list of drivers supported by Fiona for writing output files.
 
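`pyogrio.list_drivers()` returns a mapping of GDAL driver names to capability strings; passing `write=True`, as the new helper above does, keeps only writable drivers, and `sorted()` over the mapping yields the names. A small sketch of that call, assuming pyogrio is installed (the exact driver set depends on the local GDAL build):

```python
# Sketch of what pyogrio_supported_drivers() builds on; output depends on the GDAL build.
import pyogrio

drivers = pyogrio.list_drivers(write=True)  # mapping of driver name -> capability, e.g. {"GPKG": "rw", ...}
print(sorted(drivers)[:5])                  # first few writable driver names, alphabetically
```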
@@ -58,18 +73,34 @@ def fiona_supported_drivers() -> List[str]:
     list
         A list of drivers supported by Fiona for writing output files.
     """
+    import fiona
+
     drivers = [d for d, s in fiona.supported_drivers.items() if "w" in s]
-    return drivers
+    return sorted(drivers)
 
 
 def parse_args(args: str) -> argparse.Namespace:
     """Parse command-line arguments."""
     parser = argparse.ArgumentParser(description="Extract data from HEC-RAS HDF files.")
     parser.add_argument(
-        "--fiona-drivers",
+        "--pyogrio-drivers",
         action="store_true",
-        help="List the drivers supported by Fiona for writing output files.",
+        help="List the drivers supported by pyogrio for writing output files.",
     )
+    fiona_installed = False
+    engines = ["pyogrio"]
+    try:
+        import fiona
+
+        fiona_installed = True
+        engines.append("fiona")
+        parser.add_argument(
+            "--fiona-drivers",
+            action="store_true",
+            help="List the drivers supported by Fiona for writing output files.",
+        )
+    except ImportError:
+        pass
     subparsers = parser.add_subparsers(help="Sub-command help")
     for command in COMMANDS:
         f = getattr(RasGeomHdf, command)
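The top-level `import fiona` is now optional: the `--fiona-drivers` flag and the `"fiona"` engine choice are only registered when Fiona can be imported. A minimal sketch, not rashdf code, of checking which write engines the current environment would offer:

```python
# Minimal sketch (not rashdf code) mirroring the optional-import probe above:
# "fiona" is only offered as an --engine choice when it is importable.
engines = ["pyogrio"]
try:
    import fiona  # noqa: F401

    engines.append("fiona")
except ImportError:
    pass

print(engines)  # ['pyogrio'] or ['pyogrio', 'fiona'], depending on the environment
```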
@@ -91,6 +122,13 @@ def parse_args(args: str) -> argparse.Namespace:
         output_group.add_argument(
             "--feather", action="store_true", help="Output as Feather."
         )
+        output_group.add_argument(
+            "--engine",
+            type=str,
+            choices=engines,
+            default="pyogrio",
+            help="Engine for writing output data.",
+        )
         subparser.add_argument(
             "--kwargs",
             type=str,
@@ -105,7 +143,11 @@ def parse_args(args: str) -> argparse.Namespace:
 
 def export(args: argparse.Namespace) -> Optional[str]:
     """Act on parsed arguments to extract data from HEC-RAS HDF files."""
-    if args.fiona_drivers:
+    if args.pyogrio_drivers:
+        for driver in pyogrio_supported_drivers():
+            print(driver)
+        return
+    if hasattr(args, "fiona_drivers") and args.fiona_drivers:
         for driver in fiona_supported_drivers():
             print(driver)
         return
@@ -138,6 +180,7 @@ def export(args: argparse.Namespace) -> Optional[str]:
             ),
         )
         result = gdf.to_json(**kwargs)
+        print("No output file!")
         print(result)
         return result
     elif args.parquet:
@@ -153,7 +196,7 @@ def export(args: argparse.Namespace) -> Optional[str]:
     # convert any datetime columns to string.
    # TODO: besides Geopackage, which of the standard Fiona drivers allow datetime?
     gdf = df_datetimes_to_str(gdf)
-    gdf.to_file(args.output_file, **kwargs)
+    gdf.to_file(args.output_file, engine=args.engine, **kwargs)
 
 
 def main():
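`GeoDataFrame.to_file()` accepts an `engine` argument ("pyogrio" or "fiona"), and the CLI now forwards the user's choice instead of relying on the GeoPandas default. A hedged sketch of the equivalent library call (the HDF and output file names are hypothetical):

```python
# Hedged sketch of the equivalent library call the CLI performs (file names hypothetical).
from rashdf import RasGeomHdf

with RasGeomHdf("Muncie.g05.hdf") as geom_hdf:
    bc_lines = geom_hdf.bc_lines()
    # engine="pyogrio" is the CLI default; "fiona" works when Fiona is installed.
    bc_lines.to_file("muncie.gpkg", layer="bc_lines", engine="pyogrio")
```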
--- rashdf-0.4.0/src/rashdf/base.py
+++ rashdf-0.6.0/src/rashdf/base.py
@@ -19,6 +19,7 @@ class RasHdf(h5py.File):
             Additional keyword arguments to pass to h5py.File
         """
         super().__init__(name, mode="r", **kwargs)
+        self._loc = name
 
     @classmethod
     def open_uri(
@@ -49,7 +50,9 @@ class RasHdf(h5py.File):
         import fsspec
 
         remote_file = fsspec.open(uri, mode="rb", **fsspec_kwargs)
-        return cls(remote_file.open(), **h5py_kwargs)
+        result = cls(remote_file.open(), **h5py_kwargs)
+        result._loc = uri
+        return result
 
     def get_attrs(self, attr_path: str) -> Dict:
         """Convert attributes from a HEC-RAS HDF file into a Python dictionary for a given attribute path.
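`open_uri()` now records the source URI on `_loc` before returning the constructed object, matching what `__init__` does for local paths. A hedged usage sketch (bucket and key are hypothetical; remote access relies on the optional `fsspec`/`s3fs` packages added to the `dev` extra):

```python
# Hedged sketch: open a plan HDF file directly from S3 (hypothetical bucket/key).
# Requires the optional fsspec + s3fs packages.
from rashdf import RasPlanHdf

plan_hdf = RasPlanHdf.open_uri("s3://example-bucket/ras/Muncie.p04.hdf")
print(plan_hdf._loc)  # "s3://example-bucket/ras/Muncie.p04.hdf"
```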
--- rashdf-0.4.0/src/rashdf/geom.py
+++ rashdf-0.6.0/src/rashdf/geom.py
@@ -1,12 +1,13 @@
 """HEC-RAS Geometry HDF class."""
 
-from typing import Dict, List, Optional
-
 import numpy as np
 import pandas as pd
 from geopandas import GeoDataFrame
 from pyproj import CRS
 from shapely import (
+    Geometry,
+    Polygon,
+    Point,
     LineString,
     MultiLineString,
     MultiPolygon,
@@ -15,6 +16,9 @@ from shapely import (
     polygonize_full,
 )
 
+from typing import Dict, List, Optional, Union
+
+
 from .base import RasHdf
 from .utils import (
     convert_ras_hdf_string,
@@ -24,12 +28,24 @@ from .utils import (
 )
 
 
+class RasGeomHdfError(Exception):
+    """HEC-RAS Plan HDF error class."""
+
+    pass
+
+
 class RasGeomHdf(RasHdf):
     """HEC-RAS Geometry HDF class."""
 
     GEOM_PATH = "Geometry"
     GEOM_STRUCTURES_PATH = f"{GEOM_PATH}/Structures"
     FLOW_AREA_2D_PATH = f"{GEOM_PATH}/2D Flow Areas"
+    BC_LINES_PATH = f"{GEOM_PATH}/Boundary Condition Lines"
+    BREAKLINES_PATH = f"{GEOM_PATH}/2D Flow Area Break Lines"
+    REFERENCE_LINES_PATH = f"{GEOM_PATH}/Reference Lines"
+    REFERENCE_POINTS_PATH = f"{GEOM_PATH}/Reference Points"
+    CROSS_SECTIONS = f"{GEOM_PATH}/Cross Sections"
+    RIVER_CENTERLINES = f"{GEOM_PATH}/River Centerlines"
 
     def __init__(self, name: str, **kwargs):
         """Open a HEC-RAS Geometry HDF file.
@@ -262,6 +278,38 @@ class RasGeomHdf(RasHdf):
 
         return d2_flow_area_attrs
 
+    def _get_polylines(
+        self,
+        path: str,
+        info_name: str = "Polyline Info",
+        parts_name: str = "Polyline Parts",
+        points_name: str = "Polyline Points",
+    ) -> List[Geometry]:
+        polyline_info_path = f"{path}/{info_name}"
+        polyline_parts_path = f"{path}/{parts_name}"
+        polyline_points_path = f"{path}/{points_name}"
+
+        polyline_info = self[polyline_info_path][()]
+        polyline_parts = self[polyline_parts_path][()]
+        polyline_points = self[polyline_points_path][()]
+
+        geoms = []
+        for pnt_start, pnt_cnt, part_start, part_cnt in polyline_info:
+            points = polyline_points[pnt_start : pnt_start + pnt_cnt]
+            if part_cnt == 1:
+                geoms.append(LineString(points))
+            else:
+                parts = polyline_parts[part_start : part_start + part_cnt]
+                geoms.append(
+                    MultiLineString(
+                        list(
+                            points[part_pnt_start : part_pnt_start + part_pnt_cnt]
+                            for part_pnt_start, part_pnt_cnt in parts
+                        )
+                    )
+                )
+        return geoms
+
     def bc_lines(self) -> GeoDataFrame:
         """Return 2D mesh area boundary condition lines.
 
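The new `_get_polylines()` helper centralizes the packed polyline decoding that was previously repeated in `bc_lines()`, `breaklines()`, `structures()`, `cross_sections()`, and the river centerline reader: each "Polyline Info" row stores (point start, point count, part start, part count); single-part rows become LineStrings, and multi-part rows become MultiLineStrings assembled from "Polyline Parts" offsets into the polyline's slice of "Polyline Points". A small illustration with invented arrays (not real HEC-RAS data):

```python
# Illustration with invented arrays (not real HEC-RAS data) of the packed
# polyline layout that _get_polylines() decodes.
import numpy as np
from shapely import LineString, MultiLineString

points = np.array(
    [[0, 0], [1, 0], [2, 1], [5, 5], [6, 5], [7, 6], [8, 6]], dtype=float
)
parts = np.array([[0, 2], [2, 2]])  # (part point start, part point count), relative to the polyline's points
info = np.array(
    [
        [0, 3, 0, 1],  # polyline 0: 3 points, 1 part  -> LineString
        [3, 4, 0, 2],  # polyline 1: 4 points, 2 parts -> MultiLineString
    ]
)

geoms = []
for pnt_start, pnt_cnt, part_start, part_cnt in info:
    pts = points[pnt_start : pnt_start + pnt_cnt]
    if part_cnt == 1:
        geoms.append(LineString(pts))
    else:
        part_rows = parts[part_start : part_start + part_cnt]
        geoms.append(MultiLineString([pts[s : s + c] for s, c in part_rows]))

print([g.geom_type for g in geoms])  # ['LineString', 'MultiLineString']
```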
@@ -270,35 +318,15 @@ class RasGeomHdf(RasHdf):
         GeoDataFrame
             A GeoDataFrame containing the 2D mesh area boundary condition lines if they exist.
         """
-        if
+        if self.BC_LINES_PATH not in self:
             return GeoDataFrame()
-        bc_line_data = self[
+        bc_line_data = self[self.BC_LINES_PATH]
         bc_line_ids = range(bc_line_data["Attributes"][()].shape[0])
         v_conv_str = np.vectorize(convert_ras_hdf_string)
         names = v_conv_str(bc_line_data["Attributes"][()]["Name"])
         mesh_names = v_conv_str(bc_line_data["Attributes"][()]["SA-2D"])
         types = v_conv_str(bc_line_data["Attributes"][()]["Type"])
-        geoms =
-        for pnt_start, pnt_cnt, part_start, part_cnt in bc_line_data["Polyline Info"][
-            ()
-        ]:
-            points = bc_line_data["Polyline Points"][()][
-                pnt_start : pnt_start + pnt_cnt
-            ]
-            if part_cnt == 1:
-                geoms.append(LineString(points))
-            else:
-                parts = bc_line_data["Polyline Parts"][()][
-                    part_start : part_start + part_cnt
-                ]
-                geoms.append(
-                    MultiLineString(
-                        list(
-                            points[part_pnt_start : part_pnt_start + part_pnt_cnt]
-                            for part_pnt_start, part_pnt_cnt in parts
-                        )
-                    )
-                )
+        geoms = self._get_polylines(self.BC_LINES_PATH)
         return GeoDataFrame(
             {
                 "bc_line_id": bc_line_ids,
@@ -319,34 +347,14 @@ class RasGeomHdf(RasHdf):
         GeoDataFrame
             A GeoDataFrame containing the 2D mesh area breaklines if they exist.
         """
-        if
+        if self.BREAKLINES_PATH not in self:
             return GeoDataFrame()
-        bl_line_data = self[
+        bl_line_data = self[self.BREAKLINES_PATH]
         bl_line_ids = range(bl_line_data["Attributes"][()].shape[0])
         names = np.vectorize(convert_ras_hdf_string)(
             bl_line_data["Attributes"][()]["Name"]
         )
-        geoms =
-        for pnt_start, pnt_cnt, part_start, part_cnt in bl_line_data["Polyline Info"][
-            ()
-        ]:
-            points = bl_line_data["Polyline Points"][()][
-                pnt_start : pnt_start + pnt_cnt
-            ]
-            if part_cnt == 1:
-                geoms.append(LineString(points))
-            else:
-                parts = bl_line_data["Polyline Parts"][()][
-                    part_start : part_start + part_cnt
-                ]
-                geoms.append(
-                    MultiLineString(
-                        list(
-                            points[part_pnt_start : part_pnt_start + part_pnt_cnt]
-                            for part_pnt_start, part_pnt_cnt in parts
-                        )
-                    )
-                )
+        geoms = self._get_polylines(self.BREAKLINES_PATH)
         return GeoDataFrame(
             {"bl_id": bl_line_ids, "name": names, "geometry": geoms},
             geometry="geometry",
@@ -400,36 +408,21 @@ class RasGeomHdf(RasHdf):
         GeoDataFrame
             A GeoDataFrame containing the model structures if they exist.
         """
-        if
+        if self.GEOM_STRUCTURES_PATH not in self:
             return GeoDataFrame()
-        struct_data = self[
+        struct_data = self[self.GEOM_STRUCTURES_PATH]
         v_conv_val = np.vectorize(convert_ras_hdf_value)
         sd_attrs = struct_data["Attributes"][()]
         struct_dict = {"struct_id": range(sd_attrs.shape[0])}
         struct_dict.update(
             {name: v_conv_val(sd_attrs[name]) for name in sd_attrs.dtype.names}
         )
-        geoms =
-        for pnt_start, pnt_cnt, part_start, part_cnt in struct_data["Centerline Info"][
-            ()
-        ]:
-            points = struct_data["Centerline Points"][()][
-                pnt_start : pnt_start + pnt_cnt
-            ]
-            if part_cnt == 1:
-                geoms.append(LineString(points))
-            else:
-                parts = struct_data["Centerline Parts"][()][
-                    part_start : part_start + part_cnt
-                ]
-                geoms.append(
-                    MultiLineString(
-                        list(
-                            points[part_pnt_start : part_pnt_start + part_pnt_cnt]
-                            for part_pnt_start, part_pnt_cnt in parts
-                        )
-                    )
-                )
+        geoms = self._get_polylines(
+            self.GEOM_STRUCTURES_PATH,
+            info_name="Centerline Info",
+            parts_name="Centerline Parts",
+            points_name="Centerline Points",
+        )
         struct_gdf = GeoDataFrame(
             struct_dict,
             geometry=geoms,
@@ -447,11 +440,153 @@ class RasGeomHdf(RasHdf):
     def ic_points(self) -> GeoDataFrame:  # noqa D102
         raise NotImplementedError
 
-    def reference_lines(self) -> GeoDataFrame:  # noqa D102
-        raise NotImplementedError
+    def _reference_lines_points_names(
+        self, reftype: str = "lines", mesh_name: Optional[str] = None
+    ) -> Union[Dict[str, List[str]], List[str]]:
+        """Return reference line names.
 
-    def reference_points(self) -> GeoDataFrame:  # noqa D102
-        raise NotImplementedError
+        If a mesh name is provided, return a list of the reference line names for that mesh area.
+        If no mesh name is provided, return a dictionary of mesh names and their reference line names.
+
+        Parameters
+        ----------
+        mesh_name : str, optional
+            The name of the mesh area for which to return reference line names.
+
+        Returns
+        -------
+        Union[Dict[str, List[str]], List[str]]
+            A dictionary of mesh names and their reference line names if mesh_name is None.
+            A list of reference line names for the specified mesh area if mesh_name is not None.
+        """
+        if reftype == "lines":
+            path = self.REFERENCE_LINES_PATH
+            sa_2d_field = "SA-2D"
+        elif reftype == "points":
+            path = self.REFERENCE_POINTS_PATH
+            sa_2d_field = "SA/2D"
+        else:
+            raise RasGeomHdfError(
+                f"Invalid reference type: {reftype} -- must be 'lines' or 'points'."
+            )
+        attributes_path = f"{path}/Attributes"
+        if mesh_name is None and attributes_path not in self:
+            return {m: [] for m in self.mesh_area_names()}
+        if mesh_name is not None and attributes_path not in self:
+            return []
+        attributes = self[attributes_path][()]
+        v_conv_str = np.vectorize(convert_ras_hdf_string)
+        names = np.vectorize(convert_ras_hdf_string)(attributes["Name"])
+        if mesh_name is not None:
+            return names[v_conv_str(attributes[sa_2d_field]) == mesh_name].tolist()
+        mesh_names = np.vectorize(convert_ras_hdf_string)(attributes[sa_2d_field])
+        return {m: names[mesh_names == m].tolist() for m in np.unique(mesh_names)}
+
+    def reference_lines_names(
+        self, mesh_name: Optional[str] = None
+    ) -> Union[Dict[str, List[str]], List[str]]:
+        """Return reference line names.
+
+        If a mesh name is provided, return a list of the reference line names for that mesh area.
+        If no mesh name is provided, return a dictionary of mesh names and their reference line names.
+
+        Parameters
+        ----------
+        mesh_name : str, optional
+            The name of the mesh area for which to return reference line names.
+
+        Returns
+        -------
+        Union[Dict[str, List[str]], List[str]]
+            A dictionary of mesh names and their reference line names if mesh_name is None.
+            A list of reference line names for the specified mesh area if mesh_name is not None.
+        """
+        return self._reference_lines_points_names("lines", mesh_name)
+
+    def reference_points_names(
+        self, mesh_name: Optional[str] = None
+    ) -> Union[Dict[str, List[str]], List[str]]:
+        """Return reference point names.
+
+        If a mesh name is provided, return a list of the reference point names for that mesh area.
+        If no mesh name is provided, return a dictionary of mesh names and their reference point names.
+
+        Parameters
+        ----------
+        mesh_name : str, optional
+            The name of the mesh area for which to return reference point names.
+
+        Returns
+        -------
+        Union[Dict[str, List[str]], List[str]]
+            A dictionary of mesh names and their reference point names if mesh_name is None.
+            A list of reference point names for the specified mesh area if mesh_name is not None.
+        """
+        return self._reference_lines_points_names("points", mesh_name)
+
+    def reference_lines(self) -> GeoDataFrame:
+        """Return the reference lines geometry and attributes.
+
+        Returns
+        -------
+        GeoDataFrame
+            A GeoDataFrame containing the reference lines if they exist.
+        """
+        attributes_path = f"{self.REFERENCE_LINES_PATH}/Attributes"
+        if attributes_path not in self:
+            return GeoDataFrame()
+        attributes = self[attributes_path][()]
+        refline_ids = range(attributes.shape[0])
+        v_conv_str = np.vectorize(convert_ras_hdf_string)
+        names = v_conv_str(attributes["Name"])
+        mesh_names = v_conv_str(attributes["SA-2D"])
+        try:
+            types = v_conv_str(attributes["Type"])
+        except ValueError:
+            # "Type" field doesn't exist -- observed in some RAS HDF files
+            types = np.array([""] * attributes.shape[0])
+        geoms = self._get_polylines(self.REFERENCE_LINES_PATH)
+        return GeoDataFrame(
+            {
+                "refln_id": refline_ids,
+                "refln_name": names,
+                "mesh_name": mesh_names,
+                "type": types,
+                "geometry": geoms,
+            },
+            geometry="geometry",
+            crs=self.projection(),
+        )
+
+    def reference_points(self) -> GeoDataFrame:
+        """Return the reference points geometry and attributes.
+
+        Returns
+        -------
+        GeoDataFrame
+            A GeoDataFrame containing the reference points if they exist.
+        """
+        attributes_path = f"{self.REFERENCE_POINTS_PATH}/Attributes"
+        if attributes_path not in self:
+            return GeoDataFrame()
+        ref_points_group = self[self.REFERENCE_POINTS_PATH]
+        attributes = ref_points_group["Attributes"][:]
+        v_conv_str = np.vectorize(convert_ras_hdf_string)
+        names = v_conv_str(attributes["Name"])
+        mesh_names = v_conv_str(attributes["SA/2D"])
+        cell_id = attributes["Cell Index"]
+        points = ref_points_group["Points"][()]
+        return GeoDataFrame(
+            {
+                "refpt_id": range(attributes.shape[0]),
+                "refpt_name": names,
+                "mesh_name": mesh_names,
+                "cell_id": cell_id,
+                "geometry": list(map(Point, points)),
+            },
+            geometry="geometry",
+            crs=self.projection(),
+        )
 
     def pump_stations(self) -> GeoDataFrame:  # noqa D102
         raise NotImplementedError
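A hedged usage sketch of the new reference line and point accessors (the geometry HDF file name and mesh area name are hypothetical):

```python
# Hedged usage sketch; file name and mesh area name are hypothetical.
from rashdf import RasGeomHdf

with RasGeomHdf("Muncie.g05.hdf") as geom_hdf:
    # {"mesh area name": ["Ref Line 1", ...], ...}, or a list for a single mesh area
    print(geom_hdf.reference_lines_names())
    print(geom_hdf.reference_points_names(mesh_name="2D Interior Area"))

    ref_lines = geom_hdf.reference_lines()    # refln_id, refln_name, mesh_name, type, geometry
    ref_points = geom_hdf.reference_points()  # refpt_id, refpt_name, mesh_name, cell_id, geometry
```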
@@ -473,33 +608,17 @@ class RasGeomHdf(RasHdf):
         GeoDataFrame
             A GeoDataFrame containing the model 1D cross sections if they exist.
         """
-        if
+        if self.CROSS_SECTIONS not in self:
             return GeoDataFrame()
 
-        xs_data = self[
+        xs_data = self[self.CROSS_SECTIONS]
         v_conv_val = np.vectorize(convert_ras_hdf_value)
         xs_attrs = xs_data["Attributes"][()]
         xs_dict = {"xs_id": range(xs_attrs.shape[0])}
         xs_dict.update(
             {name: v_conv_val(xs_attrs[name]) for name in xs_attrs.dtype.names}
         )
-        geoms =
-        for pnt_start, pnt_cnt, part_start, part_cnt in xs_data["Polyline Info"][()]:
-            points = xs_data["Polyline Points"][()][pnt_start : pnt_start + pnt_cnt]
-            if part_cnt == 1:
-                geoms.append(LineString(points))
-            else:
-                parts = xs_data["Polyline Parts"][()][
-                    part_start : part_start + part_cnt
-                ]
-                geoms.append(
-                    MultiLineString(
-                        list(
-                            points[part_pnt_start : part_pnt_start + part_pnt_cnt]
-                            for part_pnt_start, part_pnt_cnt in parts
-                        )
-                    )
-                )
+        geoms = self._get_polylines(self.CROSS_SECTIONS)
         xs_gdf = GeoDataFrame(
             xs_dict,
             geometry=geoms,
@@ -519,10 +638,10 @@ class RasGeomHdf(RasHdf):
         GeoDataFrame
             A GeoDataFrame containing the model 1D river reach lines if they exist.
         """
-        if
+        if self.RIVER_CENTERLINES not in self:
             return GeoDataFrame()
 
-        river_data = self[
+        river_data = self[self.RIVER_CENTERLINES]
         v_conv_val = np.vectorize(convert_ras_hdf_value)
         river_attrs = river_data["Attributes"][()]
         river_dict = {"river_id": range(river_attrs.shape[0])}
@@ -530,22 +649,7 @@ class RasGeomHdf(RasHdf):
             {name: v_conv_val(river_attrs[name]) for name in river_attrs.dtype.names}
         )
         geoms = list()
-        for pnt_start, pnt_cnt, part_start, part_cnt in river_data["Polyline Info"][()]:
-            points = river_data["Polyline Points"][()][pnt_start : pnt_start + pnt_cnt]
-            if part_cnt == 1:
-                geoms.append(LineString(points))
-            else:
-                parts = river_data["Polyline Parts"][()][
-                    part_start : part_start + part_cnt
-                ]
-                geoms.append(
-                    MultiLineString(
-                        list(
-                            points[part_pnt_start : part_pnt_start + part_pnt_cnt]
-                            for part_pnt_start, part_pnt_cnt in parts
-                        )
-                    )
-                )
+        geoms = self._get_polylines(self.RIVER_CENTERLINES)
         river_gdf = GeoDataFrame(
             river_dict,
             geometry=geoms,