rashdf 0.1.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rashdf/__init__.py +3 -0
- rashdf/base.py +45 -0
- rashdf/geom.py +202 -0
- rashdf/plan.py +9 -0
- rashdf/utils.py +170 -0
- rashdf-0.1.0b1.dist-info/LICENSE +21 -0
- rashdf-0.1.0b1.dist-info/METADATA +41 -0
- rashdf-0.1.0b1.dist-info/RECORD +10 -0
- rashdf-0.1.0b1.dist-info/WHEEL +5 -0
- rashdf-0.1.0b1.dist-info/top_level.txt +1 -0
rashdf/__init__.py
ADDED
rashdf/base.py
ADDED
@@ -0,0 +1,45 @@
+import h5py
+
+
+class RasHdf(h5py.File):
+    """Base class for reading RAS HDF files."""
+
+    def __init__(self, name: str, **kwargs):
+        """Open a HEC-RAS HDF file.
+
+        Parameters
+        ----------
+        name : str
+            The path to the RAS HDF file.
+        kwargs : dict
+            Additional keyword arguments to pass to h5py.File
+        """
+        super().__init__(name, mode="r", **kwargs)
+
+    @classmethod
+    def open_uri(cls, uri: str, fsspec_kwargs: dict = {}, h5py_kwargs: dict = {}) -> 'RasHdf':
+        """Open a HEC-RAS HDF file from a URI.
+
+        Parameters
+        ----------
+        uri : str
+            The URI of the RAS HDF file. Note this should be a path
+            recognized by fsspec, such as an S3 path or a Google Cloud
+            Storage path. See https://filesystem-spec.readthedocs.io/en/latest/api.html#fsspec.open
+        fsspec_kwargs : dict
+            Additional keyword arguments to pass to fsspec.open
+        h5py_kwargs : dict
+            Additional keyword arguments to pass to h5py.File
+
+        Returns
+        -------
+        RasHdf
+            The RAS HDF file opened from the URI.
+
+        Examples
+        --------
+        >>> results_hdf = RasHdf.open_uri("s3://my-bucket/results.hdf")
+        """
+        import fsspec
+        remote_file = fsspec.open(uri, mode="rb", **fsspec_kwargs)
+        return cls(remote_file.open(), **h5py_kwargs)
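The `open_uri` classmethod above imports `fsspec` at call time, and neither `fsspec` nor a storage backend such as `s3fs` appears in the wheel's `Requires-Dist` metadata below, so remote access presumably requires installing those separately. A minimal usage sketch, assuming a hypothetical local filename alongside the S3 URI from the docstring example:

```python
from rashdf.base import RasHdf

# Local file: RasHdf subclasses h5py.File (opened read-only), so the usual
# h5py API (keys, groups, attrs) is available. The filename is a placeholder.
with RasHdf("my_project.p01.hdf") as local_hdf:
    print(list(local_hdf.keys()))  # e.g. a top-level "Geometry" group

# Remote file via fsspec, mirroring the docstring example above.
results_hdf = RasHdf.open_uri("s3://my-bucket/results.hdf")
print(list(results_hdf.keys()))
```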
rashdf/geom.py
ADDED
@@ -0,0 +1,202 @@
+from .base import RasHdf
+from .utils import convert_ras_hdf_string
+
+import numpy as np
+import pandas as pd
+from geopandas import GeoDataFrame
+from pyproj import CRS
+from shapely import Polygon, Point, LineString, polygonize
+
+from typing import Optional
+
+
+class RasGeomHdf(RasHdf):
+
+    def projection(self) -> Optional[CRS]:
+        """Return the projection of the RAS geometry as a
+        pyproj.CRS object.
+
+        Returns
+        -------
+        CRS
+            The projection of the RAS geometry.
+        """
+        proj_wkt = self.attrs.get("Projection")
+        if proj_wkt is None:
+            return None
+        if type(proj_wkt) == bytes or type(proj_wkt) == np.bytes_:
+            proj_wkt = proj_wkt.decode("utf-8")
+        return CRS.from_wkt(proj_wkt)
+
+    def mesh_area_names(self) -> list:
+        """Return a list of the 2D mesh area names of
+        the RAS geometry.
+
+        Returns
+        -------
+        list
+            A list of the 2D mesh area names (str) within the RAS geometry if 2D areas exist.
+        """
+        if "/Geometry/2D Flow Areas" not in self:
+            return list()
+        return list([convert_ras_hdf_string(n) for n in self["/Geometry/2D Flow Areas/Attributes"][()]["Name"]])
+
+    def mesh_areas(self) -> GeoDataFrame:
+        """Return 2D flow area perimeter polygons.
+
+        Returns
+        -------
+        GeoDataFrame
+            A GeoDataFrame containing the 2D flow area perimeter polygons if 2D areas exist.
+        """
+        mesh_area_names = self.mesh_area_names()
+        if not mesh_area_names:
+            return GeoDataFrame()
+        mesh_area_polygons = [Polygon(self[f"/Geometry/2D Flow Areas/{n}/Perimeter"][()]) for n in mesh_area_names]
+        return GeoDataFrame({"mesh_name" : mesh_area_names, "geometry" : mesh_area_polygons}, geometry="geometry", crs=self.projection())
+
+    def mesh_cell_polygons(self) -> GeoDataFrame:
+        """Return the 2D flow mesh cell polygons.
+
+        Returns
+        -------
+        GeoDataFrame
+            A GeoDataFrame containing the 2D flow mesh cell polygons if 2D areas exist.
+        """
+        mesh_area_names = self.mesh_area_names()
+        if not mesh_area_names:
+            return GeoDataFrame()
+
+        face_gdf = self.mesh_cell_faces()
+
+        cell_dict = {"mesh_name":[], "cell_id":[], "geometry":[]}
+        for i, mesh_name in enumerate(mesh_area_names):
+            cell_cnt = self["/Geometry/2D Flow Areas/Cell Info"][()][i][1]
+            cell_ids = list(range(cell_cnt))
+            cell_face_info = self[f"/Geometry/2D Flow Areas/{mesh_name}/Cells Face and Orientation Info"][()]
+            cell_face_values = self[f"/Geometry/2D Flow Areas/{mesh_name}/Cells Face and Orientation Values"][()][:,0]
+            face_id_lists = list(
+                np.vectorize(
+                    lambda cell_id: str(cell_face_values[cell_face_info[cell_id][0]:cell_face_info[cell_id][0]+cell_face_info[cell_id][1]])
+                )(cell_ids)
+            )
+            mesh_faces = face_gdf[face_gdf.mesh_name == mesh_name][["face_id", "geometry"]].set_index("face_id").to_numpy()
+            cell_dict["mesh_name"] += [mesh_name]*cell_cnt
+            cell_dict["cell_id"] += cell_ids
+            cell_dict["geometry"] += list(
+                np.vectorize(
+                    lambda face_id_list: polygonize(np.ravel(mesh_faces[np.array(face_id_list.strip("[]").split()).astype(int)])).geoms[0]
+                )(face_id_lists)
+            )
+        return GeoDataFrame(cell_dict, geometry="geometry", crs=self.projection())
+
+    def mesh_cell_points(self) -> GeoDataFrame:
+        """Return the 2D flow mesh cell points.
+
+        Returns
+        -------
+        GeoDataFrame
+            A GeoDataFrame containing the 2D flow mesh cell points if 2D areas exist.
+        """
+        mesh_area_names = self.mesh_area_names()
+        if not mesh_area_names:
+            return GeoDataFrame()
+        pnt_dict = {"mesh_name":[], "cell_id":[], "geometry":[]}
+        for i, mesh_name in enumerate(mesh_area_names):
+            starting_row, count = self["/Geometry/2D Flow Areas/Cell Info"][()][i]
+            cell_pnt_coords = self["/Geometry/2D Flow Areas/Cell Points"][()][starting_row:starting_row+count]
+            pnt_dict["mesh_name"] += [mesh_name]*cell_pnt_coords.shape[0]
+            pnt_dict["cell_id"] += range(count)
+            pnt_dict["geometry"] += list(np.vectorize(lambda coords: Point(coords), signature="(n)->()")(cell_pnt_coords))
+        return GeoDataFrame(pnt_dict, geometry="geometry", crs=self.projection())
+
+    def mesh_cell_faces(self) -> GeoDataFrame:
+        """Return the 2D flow mesh cell faces.
+
+        Returns
+        -------
+        GeoDataFrame
+            A GeoDataFrame containing the 2D flow mesh cell faces if 2D areas exist.
+        """
+        mesh_area_names = self.mesh_area_names()
+        if not mesh_area_names:
+            return GeoDataFrame()
+        face_dict = {"mesh_name":[], "face_id":[], "geometry":[]}
+        for mesh_name in mesh_area_names:
+            facepoints_index = self[f"/Geometry/2D Flow Areas/{mesh_name}/Faces FacePoint Indexes"][()]
+            facepoints_coordinates = self[f"/Geometry/2D Flow Areas/{mesh_name}/FacePoints Coordinate"][()]
+            faces_perimeter_info = self[f"/Geometry/2D Flow Areas/{mesh_name}/Faces Perimeter Info"][()]
+            faces_perimeter_values = self[f"/Geometry/2D Flow Areas/{mesh_name}/Faces Perimeter Values"][()]
+            face_id = -1
+            for pnt_a_index, pnt_b_index in facepoints_index:
+                face_id+=1
+                face_dict["mesh_name"].append(mesh_name)
+                face_dict["face_id"].append(face_id)
+                coordinates = list()
+                coordinates.append(facepoints_coordinates[pnt_a_index])
+                starting_row, count = faces_perimeter_info[face_id]
+                if count > 0:
+                    coordinates += list(faces_perimeter_values[starting_row:starting_row+count])
+                coordinates.append(facepoints_coordinates[pnt_b_index])
+                face_dict["geometry"].append(LineString(coordinates))
+        return GeoDataFrame(face_dict, geometry="geometry", crs=self.projection())
+
+    def bc_lines(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def breaklines(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def refinement_regions(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def connections(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def ic_points(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def reference_lines(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def reference_points(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def structures(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def pump_stations(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def mannings_calibration_regions(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def classification_polygons(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def terrain_modifications(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def cross_sections(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def river_reaches(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def flowpaths(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def bank_points(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def bank_lines(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def ineffective_areas(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def ineffective_points(self) -> GeoDataFrame:
+        raise NotImplementedError
+
+    def blocked_obstructions(self) -> GeoDataFrame:
+        raise NotImplementedError
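A minimal sketch of how the implemented mesh accessors above might be used, assuming a hypothetical geometry HDF path; the export step relies on standard geopandas I/O (`to_file`), not on anything provided by this package:

```python
from rashdf.geom import RasGeomHdf

# Hypothetical HEC-RAS geometry HDF file; replace with a real path.
with RasGeomHdf("my_project.g01.hdf") as geom_hdf:
    crs = geom_hdf.projection()            # pyproj.CRS, or None if no "Projection" attribute
    names = geom_hdf.mesh_area_names()     # 2D flow area names; empty list if none exist
    perimeters = geom_hdf.mesh_areas()     # GeoDataFrame of perimeter polygons
    cells = geom_hdf.mesh_cell_polygons()  # GeoDataFrame of mesh cell polygons

print(crs, names)
# Standard geopandas export; requires a vector driver (fiona or pyogrio).
cells.to_file("mesh_cells.gpkg", layer="cells", driver="GPKG")
```

Note that `mesh_cell_polygons` calls `mesh_cell_faces` internally, so building cell polygons also reads the per-mesh face datasets.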
rashdf/plan.py
ADDED
rashdf/utils.py
ADDED
@@ -0,0 +1,170 @@
+import numpy as np
+
+from typing import Any, List, Tuple, Union
+
+from datetime import datetime, timedelta
+import re
+
+
+def parse_ras_datetime(datetime_str: str) -> datetime:
+    """Parse a datetime string from a RAS file into a datetime object.
+
+    Parameters
+    ----------
+    datetime_str (str): The datetime string to be parsed. The string should be in the format "ddMMMyyyy HHmm".
+
+    Returns
+    -------
+    datetime: A datetime object representing the parsed datetime.
+    """
+    format = "%d%b%Y %H:%M:%S"
+    return datetime.strptime(datetime_str, format)
+
+
+def parse_ras_simulation_window_datetime(datetime_str) -> datetime:
+    """
+    Parse a datetime string from a RAS simulation window into a datetime object.
+
+    Parameters
+    ----------
+    datetime_str: The datetime string to be parsed.
+
+    Returns
+    -------
+    datetime: A datetime object representing the parsed datetime.
+    """
+    format = "%d%b%Y %H%M"
+    return datetime.strptime(datetime_str, format)
+
+
+def parse_run_time_window(window: str) -> Tuple[datetime, datetime]:
+    """
+    Parse a run time window string into a tuple of datetime objects.
+
+    Parameters
+    ----------
+    window (str): The run time window string to be parsed.
+
+    Returns
+    -------
+    Tuple[datetime, datetime]: A tuple containing two datetime objects representing the start and end of the run
+    time window.
+    """
+    split = window.split(" to ")
+    begin = parse_ras_datetime(split[0])
+    end = parse_ras_datetime(split[1])
+    return begin, end
+
+
+def parse_duration(duration_str: str) -> timedelta:
+    """
+    Parse a duration string into a timedelta object.
+
+    Parameters
+    ----------
+    duration_str (str): The duration string to be parsed. The string should be in the format "HH:MM:SS".
+
+    Returns
+    -------
+    timedelta: A timedelta object representing the parsed duration.
+    """
+    hours, minutes, seconds = map(int, duration_str.split(":"))
+    duration = timedelta(hours=hours, minutes=minutes, seconds=seconds)
+    return duration
+
+
+def convert_ras_hdf_string(value: str) -> Union[bool, str, List[str]]:
+    """Convert a string value from an HEC-RAS HDF file into a Python object.
+
+    This function handles several specific string formats:
+    - "True" and "False" are converted to boolean values.
+    - Strings matching the format "ddMMMyyyy HH:mm:ss" or "ddMMMyyyy HHmm" are parsed into datetime objects.
+    - Strings matching the format "ddMMMyyyy HH:mm:ss to ddMMMyyyy HH:mm:ss" or "ddMMMyyyy HHmm to ddMMMyyyy HHmm"
+    are parsed into a list of two datetime objects.
+
+    Parameters
+    ----------
+    value (str): The string value to be converted.
+
+    Returns
+    -------
+    The converted value, which could be a boolean, a datetime string, a list of datetime strings, or the original
+    string if no other conditions are met.
+    """
+    ras_datetime_format1_re = r"\d{2}\w{3}\d{4} \d{2}:\d{2}:\d{2}"
+    ras_datetime_format2_re = r"\d{2}\w{3}\d{4} \d{2}\d{2}"
+    s = value.decode("utf-8")
+    if s == "True":
+        return True
+    elif s == "False":
+        return False
+    elif re.match(rf"^{ras_datetime_format1_re}", s):
+        if re.match(rf"^{ras_datetime_format1_re} to {ras_datetime_format1_re}$", s):
+            split = s.split(" to ")
+            return [
+                parse_ras_datetime(split[0]),
+                parse_ras_datetime(split[1]),
+            ]
+        return parse_ras_datetime(s)
+    elif re.match(rf"^{ras_datetime_format2_re}", s):
+        if re.match(rf"^{ras_datetime_format2_re} to {ras_datetime_format2_re}$", s):
+            split = s.split(" to ")
+            return [
+                parse_ras_simulation_window_datetime(split[0]),
+                parse_ras_simulation_window_datetime(split[1]),
+            ]
+        return parse_ras_simulation_window_datetime(s)
+    return s
+
+
+def convert_ras_hdf_value(
+    value: Any,
+) -> Union[None, bool, str, List[str], int, float, List[int], List[float]]:
+    """Convert a value from a HEC-RAS HDF file into a Python object.
+
+    This function handles several specific types:
+    - NaN values are converted to None.
+    - Byte strings are converted using the `convert_ras_hdf_string` function.
+    - NumPy integer or float types are converted to Python int or float.
+    - Regular ints and floats are left as they are.
+    - Lists, tuples, and NumPy arrays are recursively processed.
+    - All other types are converted to string.
+
+    Parameters
+    ----------
+    value (Any): The value to be converted.
+
+    Returns
+    -------
+    The converted value, which could be None, a boolean, a string, a list of strings, an integer, a float, a list
+    of integers, a list of floats, or the original value as a string if no other conditions are met.
+    """
+    # TODO (?): handle "8-bit bitfield" values in 2D Flow Area groups
+
+    # Check for NaN (np.nan)
+    if isinstance(value, np.floating) and np.isnan(value):
+        return None
+
+    # Check for byte strings
+    elif isinstance(value, bytes) or isinstance(value, np.bytes_):
+        return convert_ras_hdf_string(value)
+
+    # Check for NumPy integer or float types
+    elif isinstance(value, np.integer):
+        return int(value)
+    elif isinstance(value, np.floating):
+        return float(value)
+
+    # Leave regular ints and floats as they are
+    elif isinstance(value, (int, float)):
+        return value
+
+    elif isinstance(value, (list, tuple, np.ndarray)):
+        if len(value) > 1:
+            return [convert_ras_hdf_value(v) for v in value]
+        else:
+            return convert_ras_hdf_value(value[0])
+
+    # Convert all other types to string
+    else:
+        return str(value)
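A brief sketch of the conversion helpers above, using hand-constructed values of the kinds they are written to handle (the specific strings and numbers are illustrative, not taken from a real file):

```python
import numpy as np

from rashdf.utils import convert_ras_hdf_value

# Scalar NumPy types are unwrapped to plain Python types; NaN becomes None.
assert convert_ras_hdf_value(np.int32(7)) == 7
assert convert_ras_hdf_value(np.float64(3.5)) == 3.5
assert convert_ras_hdf_value(np.float64("nan")) is None

# Byte strings go through convert_ras_hdf_string: booleans, RAS-style
# datetimes, and "... to ..." simulation windows are recognized.
assert convert_ras_hdf_value(b"True") is True
start, end = convert_ras_hdf_value(b"01JAN2024 0000 to 02JAN2024 0000")
print(start, end)  # two datetime objects for the window

# Single-element sequences collapse to a scalar; longer ones become lists.
assert convert_ras_hdf_value(np.array([2.0])) == 2.0
assert convert_ras_hdf_value(np.array([1, 2, 3])) == [1, 2, 3]
```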
rashdf-0.1.0b1.dist-info/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 fema-ffrd
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
rashdf-0.1.0b1.dist-info/METADATA
ADDED
@@ -0,0 +1,41 @@
+Metadata-Version: 2.1
+Name: rashdf
+Version: 0.1.0b1
+Summary: Read data from HEC-RAS HDF files.
+Project-URL: repository, https://github.com/fema-ffrd/rashdf
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: h5py
+Requires-Dist: geopandas
+Provides-Extra: dev
+Requires-Dist: pytest ; extra == 'dev'
+
+# rashdf
+Read data from HEC-RAS HDF files.
+
+## Setup
+Create a virtual environment in the project directory:
+```
+$ python -m venv venv-rashdf
+```
+
+Activate the virtual environment:
+```
+$ source ./venv/bin/activate
+(venv-rashdf) $
+```
+
+With the virtual environment activated, run the tests:
+```
+(venv-rashdf) $ pytest
+```
+
+
rashdf-0.1.0b1.dist-info/RECORD
ADDED
@@ -0,0 +1,10 @@
+rashdf/__init__.py,sha256=BC8nMo8C99kkSp_yJOtfRr9fZKaelZ5SSxHDGgu7j9E,83
+rashdf/base.py,sha256=_mmY1s6z_CtPpJJMyV7m_k-E9yb1QPpxW1oEdGifRZ4,1421
+rashdf/geom.py,sha256=m7JmFFGo4Sw2PcLTdxSaHWH4X4GRcpQvuz_tHswGpTA,7909
+rashdf/plan.py,sha256=ASY3FmBV2UfxG9_WmEhPH2-C6rL-ULIFZbnRVd-kiOI,182
+rashdf/utils.py,sha256=L1BLTilxAvb-TcYY02PWwfeTJoskT8EUEaRb64pvCFg,5715
+rashdf-0.1.0b1.dist-info/LICENSE,sha256=L_0QaLpQVHPcglVjiaJPnOocwzP8uXevDRjUPr9DL1Y,1065
+rashdf-0.1.0b1.dist-info/METADATA,sha256=2-D-23ddlBBxzW0Bfs0r_psw_3oMcO0DAUk8vHPSbXw,1034
+rashdf-0.1.0b1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+rashdf-0.1.0b1.dist-info/top_level.txt,sha256=1iE403K85UTilumVhhghg2JdH3ABHE-2DmvFpKvi9fA,7
+rashdf-0.1.0b1.dist-info/RECORD,,
rashdf-0.1.0b1.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
+rashdf