rashdf 0.1.1__tar.gz → 0.2.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: rashdf
3
- Version: 0.1.1
3
+ Version: 0.2.1
4
4
  Summary: Read data from HEC-RAS HDF files.
5
5
  Project-URL: repository, https://github.com/fema-ffrd/rashdf
6
6
  Classifier: Development Status :: 4 - Beta
@@ -15,6 +15,7 @@ Description-Content-Type: text/markdown
15
15
  License-File: LICENSE
16
16
  Requires-Dist: h5py
17
17
  Requires-Dist: geopandas
18
+ Requires-Dist: pyarrow
18
19
  Provides-Extra: dev
19
20
  Requires-Dist: pre-commit; extra == "dev"
20
21
  Requires-Dist: ruff; extra == "dev"
@@ -77,6 +78,48 @@ datetime.datetime(2024, 3, 27, 9, 32, 15)],
77
78
  'Time Stamp Solution Went Unstable': 'Not Applicable'}
78
79
  ```
79
80
 
81
+ ## CLI
82
+ The `rashdf` command-line interface allows export directly to a variety of formats, enabled
83
+ by GeoPandas.
84
+ ```
85
+ $ rashdf <sub-command> <hdf-file> [<output-path>] [<options>]
86
+ ```
87
+
88
+ CLI help:
89
+ ```
90
+ $ rashdf --help
91
+ ```
92
+
93
+ Print the output formats supported by Fiona:
94
+ ```
95
+ $ rashdf --fiona-drivers
96
+ ```
97
+
98
+ Help for a specific subcommand:
99
+ ```
100
+ $ rashdf mesh_cell_polygons --help
101
+ ```
102
+
103
+ Example: export mesh cell faces to an ESRI Shapefile
104
+ ```
105
+ $ rashdf mesh_cell_faces BigRiver.g01.hdf big-river-mesh-cell-faces.shp
106
+ ```
107
+
108
+ Example: export mesh cell points to GeoParquet
109
+ ```
110
+ $ rashdf mesh_cell_points LittleCreek.g01.hdf --parquet little-creek-mesh-cell-points.parquet
111
+ ```
112
+
113
+ Example: export breaklines to OGC GeoPackage and reproject to a different CRS
114
+ ```
115
+ $ rashdf breaklines Whitemarsh.p01.hdf whitemarsh-breaklines.gpkg --to-crs EPSG:4326
116
+ ```
117
+
118
+ Example: write structures GeoJSON to `stdout`:
119
+ ```
120
+ $ rashdf structures Potomac.p01.hdf
121
+ ```
122
+
80
123
  ## Documentation
81
124
  Coming soon.
82
125
 
@@ -55,6 +55,48 @@ datetime.datetime(2024, 3, 27, 9, 32, 15)],
55
55
  'Time Stamp Solution Went Unstable': 'Not Applicable'}
56
56
  ```
57
57
 
58
+ ## CLI
59
+ The `rashdf` command-line interface allows export directly to a variety of formats, enabled
60
+ by GeoPandas.
61
+ ```
62
+ $ rashdf <sub-command> <hdf-file> [<output-path>] [<options>]
63
+ ```
64
+
65
+ CLI help:
66
+ ```
67
+ $ rashdf --help
68
+ ```
69
+
70
+ Print the output formats supported by Fiona:
71
+ ```
72
+ $ rashdf --fiona-drivers
73
+ ```
74
+
75
+ Help for a specific subcommand:
76
+ ```
77
+ $ rashdf mesh_cell_polygons --help
78
+ ```
79
+
80
+ Example: export mesh cell faces to an ESRI Shapefile
81
+ ```
82
+ $ rashdf mesh_cell_faces BigRiver.g01.hdf big-river-mesh-cell-faces.shp
83
+ ```
84
+
85
+ Example: export mesh cell points to GeoParquet
86
+ ```
87
+ $ rashdf mesh_cell_points LittleCreek.g01.hdf --parquet little-creek-mesh-cell-points.parquet
88
+ ```
89
+
90
+ Example: export breaklines to OGC GeoPackage and reproject to a different CRS
91
+ ```
92
+ $ rashdf breaklines Whitemarsh.p01.hdf whitemarsh-breaklines.gpkg --to-crs EPSG:4326
93
+ ```
94
+
95
+ Example: write structures GeoJSON to `stdout`:
96
+ ```
97
+ $ rashdf structures Potomac.p01.hdf
98
+ ```
99
+
58
100
  ## Documentation
59
101
  Coming soon.
60
102
 
@@ -12,8 +12,8 @@ classifiers = [
12
12
  "Programming Language :: Python :: 3.11",
13
13
  "Programming Language :: Python :: 3.12",
14
14
  ]
15
- version = "0.1.1"
16
- dependencies = ["h5py", "geopandas"]
15
+ version = "0.2.1"
16
+ dependencies = ["h5py", "geopandas", "pyarrow"]
17
17
 
18
18
  [project.optional-dependencies]
19
19
  dev = ["pre-commit", "ruff", "pytest"]
@@ -21,6 +21,9 @@ dev = ["pre-commit", "ruff", "pytest"]
21
21
  [project.urls]
22
22
  repository = "https://github.com/fema-ffrd/rashdf"
23
23
 
24
+ [project.scripts]
25
+ rashdf = "cli:main"
26
+
24
27
  [tool.pytest.ini_options]
25
28
  pythonpath = "src"
26
29
  testpaths = "tests"
@@ -0,0 +1,155 @@
1
+ from rashdf import RasGeomHdf
2
+ from rashdf.utils import df_datetimes_to_str
3
+
4
+ import fiona
5
+ from geopandas import GeoDataFrame
6
+
7
+ import argparse
8
+ from ast import literal_eval
9
+ from pathlib import Path
10
+ import sys
11
+ from typing import List, Optional
12
+ import warnings
13
+
14
+
15
+ COMMANDS = [
16
+ "mesh_areas",
17
+ "mesh_cell_points",
18
+ "mesh_cell_polygons",
19
+ "mesh_cell_faces",
20
+ "refinement_regions",
21
+ "bc_lines",
22
+ "breaklines",
23
+ "structures",
24
+ ]
25
+
26
+
27
+ def docstring_to_help(docstring: Optional[str]) -> str:
28
+ """Extract the first line of a docstring to use as help text for the rashdf CLI.
29
+
30
+ Note that this function replaces 'Return' with 'Export' in the help text.
31
+
32
+ Parameters
33
+ ----------
34
+ docstring : Optional[str]
35
+ The docstring to extract the first line from.
36
+
37
+ Returns
38
+ -------
39
+ str
40
+ The first line of the docstring with 'Return' replaced by 'Export'.
41
+ If the docstring is None, an empty string is returned.
42
+ """
43
+ if docstring is None:
44
+ return ""
45
+ help_text = docstring.split("\n")[0]
46
+ help_text = help_text.replace("Return", "Export")
47
+ return help_text
48
+
49
+
50
+ def fiona_supported_drivers() -> List[str]:
51
+ """Return a list of drivers supported by Fiona for writing output files.
52
+
53
+ Returns
54
+ -------
55
+ list
56
+ A list of drivers supported by Fiona for writing output files.
57
+ """
58
+ drivers = [d for d, s in fiona.supported_drivers.items() if "w" in s]
59
+ return drivers
60
+
61
+
62
+ def parse_args(args: str) -> argparse.Namespace:
63
+ parser = argparse.ArgumentParser(description="Extract data from HEC-RAS HDF files.")
64
+ parser.add_argument(
65
+ "--fiona-drivers",
66
+ action="store_true",
67
+ help="List the drivers supported by Fiona for writing output files.",
68
+ )
69
+ subparsers = parser.add_subparsers(help="Sub-command help")
70
+ for command in COMMANDS:
71
+ f = getattr(RasGeomHdf, command)
72
+ subparser = subparsers.add_parser(
73
+ command, description=docstring_to_help(f.__doc__)
74
+ )
75
+ subparser.set_defaults(func=command)
76
+ subparser.add_argument("hdf_file", type=str, help="Path to HEC-RAS HDF file.")
77
+ subparser.add_argument(
78
+ "output_file", type=str, help="Path to output file.", nargs="?"
79
+ )
80
+ subparser.add_argument(
81
+ "--to-crs", type=str, help='Output CRS. (e.g., "EPSG:4326")'
82
+ )
83
+ output_group = subparser.add_mutually_exclusive_group()
84
+ output_group.add_argument(
85
+ "--parquet", action="store_true", help="Output as Parquet."
86
+ )
87
+ output_group.add_argument(
88
+ "--feather", action="store_true", help="Output as Feather."
89
+ )
90
+ subparser.add_argument(
91
+ "--kwargs",
92
+ type=str,
93
+ help=(
94
+ "Keyword arguments as a Python dictionary literal"
95
+ " passed to the corresponding GeoPandas output method."
96
+ ),
97
+ )
98
+ args = parser.parse_args(args)
99
+ return args
100
+
101
+
102
+ def export(args: argparse.Namespace) -> Optional[str]:
103
+ if args.fiona_drivers:
104
+ for driver in fiona_supported_drivers():
105
+ print(driver)
106
+ return
107
+ if "://" in args.hdf_file:
108
+ geom_hdf = RasGeomHdf.open_uri(args.hdf_file)
109
+ else:
110
+ geom_hdf = RasGeomHdf(args.hdf_file)
111
+ func = getattr(geom_hdf, args.func)
112
+ gdf: GeoDataFrame = func()
113
+ kwargs = literal_eval(args.kwargs) if args.kwargs else {}
114
+ if args.to_crs:
115
+ gdf = gdf.to_crs(args.to_crs)
116
+ if not args.output_file:
117
+ # convert any datetime columns to strings
118
+ gdf = df_datetimes_to_str(gdf)
119
+ with warnings.catch_warnings():
120
+ # Squash warnings about converting the CRS to OGC URN format.
121
+ # Likely to come up since USACE's Albers projection is a custom CRS.
122
+ # A warning written to stdout might cause issues with downstream processing.
123
+ warnings.filterwarnings(
124
+ "ignore",
125
+ (
126
+ "GeoDataFrame's CRS is not representable in URN OGC format."
127
+ " Resulting JSON will contain no CRS information."
128
+ ),
129
+ )
130
+ result = gdf.to_json(**kwargs)
131
+ print(result)
132
+ return result
133
+ elif args.parquet:
134
+ gdf.to_parquet(args.output_file, **kwargs)
135
+ return
136
+ elif args.feather:
137
+ gdf.to_feather(args.output_file, **kwargs)
138
+ return
139
+ output_file_path = Path(args.output_file)
140
+ output_file_ext = output_file_path.suffix
141
+ if output_file_ext not in [".gpkg"]:
142
+ # unless the user specifies a format that supports datetime,
143
+ # convert any datetime columns to string
144
+ # TODO: besides Geopackage, which of the standard Fiona formats allow datetime?
145
+ gdf = df_datetimes_to_str(gdf)
146
+ gdf.to_file(args.output_file, **kwargs)
147
+
148
+
149
+ def main():
150
+ args = parse_args(sys.argv[1:])
151
+ export(args)
152
+
153
+
154
+ if __name__ == "__main__":
155
+ main()
@@ -1,7 +1,13 @@
1
1
  from .base import RasHdf
2
- from .utils import convert_ras_hdf_string, get_first_hdf_group, hdf5_attrs_to_dict
2
+ from .utils import (
3
+ convert_ras_hdf_string,
4
+ get_first_hdf_group,
5
+ hdf5_attrs_to_dict,
6
+ convert_ras_hdf_value,
7
+ )
3
8
 
4
9
  import numpy as np
10
+ import pandas as pd
5
11
  from geopandas import GeoDataFrame
6
12
  from pyproj import CRS
7
13
  from shapely import (
@@ -80,7 +86,7 @@ class RasGeomHdf(RasHdf):
80
86
  )
81
87
 
82
88
  def mesh_cell_polygons(self) -> GeoDataFrame:
83
- """Return the 2D flow mesh cell polygons.
89
+ """Return 2D flow mesh cell polygons.
84
90
 
85
91
  Returns
86
92
  -------
@@ -134,7 +140,7 @@ class RasGeomHdf(RasHdf):
134
140
  return GeoDataFrame(cell_dict, geometry="geometry", crs=self.projection())
135
141
 
136
142
  def mesh_cell_points(self) -> GeoDataFrame:
137
- """Return the 2D flow mesh cell points.
143
+ """Return 2D flow mesh cell points.
138
144
 
139
145
  Returns
140
146
  -------
@@ -160,7 +166,7 @@ class RasGeomHdf(RasHdf):
160
166
  return GeoDataFrame(pnt_dict, geometry="geometry", crs=self.projection())
161
167
 
162
168
  def mesh_cell_faces(self) -> GeoDataFrame:
163
- """Return the 2D flow mesh cell faces.
169
+ """Return 2D flow mesh cell faces.
164
170
 
165
171
  Returns
166
172
  -------
@@ -240,7 +246,7 @@ class RasGeomHdf(RasHdf):
240
246
  return d2_flow_area_attrs
241
247
 
242
248
  def bc_lines(self) -> GeoDataFrame:
243
- """Return the 2D mesh area boundary condition lines.
249
+ """Return 2D mesh area boundary condition lines.
244
250
 
245
251
  Returns
246
252
  -------
@@ -289,7 +295,7 @@ class RasGeomHdf(RasHdf):
289
295
  )
290
296
 
291
297
  def breaklines(self) -> GeoDataFrame:
292
- """Return the 2D mesh area breaklines.
298
+ """Return 2D mesh area breaklines.
293
299
 
294
300
  Returns
295
301
  -------
@@ -331,7 +337,7 @@ class RasGeomHdf(RasHdf):
331
337
  )
332
338
 
333
339
  def refinement_regions(self) -> GeoDataFrame:
334
- """Return the 2D mesh area refinement regions.
340
+ """Return 2D mesh area refinement regions.
335
341
 
336
342
  Returns
337
343
  -------
@@ -364,6 +370,55 @@ class RasGeomHdf(RasHdf):
364
370
  crs=self.projection(),
365
371
  )
366
372
 
373
+ def structures(self, datetime_to_str: bool = False) -> GeoDataFrame:
374
+ """Return the model structures.
375
+
376
+ Returns
377
+ -------
378
+ GeoDataFrame
379
+ A GeoDataFrame containing the model structures if they exist.
380
+ """
381
+ if "/Geometry/Structures" not in self:
382
+ return GeoDataFrame()
383
+ struct_data = self["/Geometry/Structures"]
384
+ v_conv_val = np.vectorize(convert_ras_hdf_value)
385
+ sd_attrs = struct_data["Attributes"][()]
386
+ struct_dict = {"struct_id": range(sd_attrs.shape[0])}
387
+ struct_dict.update(
388
+ {name: v_conv_val(sd_attrs[name]) for name in sd_attrs.dtype.names}
389
+ )
390
+ geoms = list()
391
+ for pnt_start, pnt_cnt, part_start, part_cnt in struct_data["Centerline Info"][
392
+ ()
393
+ ]:
394
+ points = struct_data["Centerline Points"][()][
395
+ pnt_start : pnt_start + pnt_cnt
396
+ ]
397
+ if part_cnt == 1:
398
+ geoms.append(LineString(points))
399
+ else:
400
+ parts = struct_data["Centerline Parts"][()][
401
+ part_start : part_start + part_cnt
402
+ ]
403
+ geoms.append(
404
+ MultiLineString(
405
+ list(
406
+ points[part_pnt_start : part_pnt_start + part_pnt_cnt]
407
+ for part_pnt_start, part_pnt_cnt in parts
408
+ )
409
+ )
410
+ )
411
+ struct_gdf = GeoDataFrame(
412
+ struct_dict,
413
+ geometry=geoms,
414
+ crs=self.projection(),
415
+ )
416
+ if datetime_to_str:
417
+ struct_gdf["Last Edited"] = struct_gdf["Last Edited"].apply(
418
+ lambda x: pd.Timestamp.isoformat(x)
419
+ )
420
+ return struct_gdf
421
+
367
422
  def connections(self) -> GeoDataFrame:
368
423
  raise NotImplementedError
369
424
 
@@ -376,9 +431,6 @@ class RasGeomHdf(RasHdf):
376
431
  def reference_points(self) -> GeoDataFrame:
377
432
  raise NotImplementedError
378
433
 
379
- def structures(self) -> GeoDataFrame:
380
- raise NotImplementedError
381
-
382
434
  def pump_stations(self) -> GeoDataFrame:
383
435
  raise NotImplementedError
384
436
 
@@ -1,29 +1,41 @@
1
- import numpy as np
2
1
  import h5py
3
- from typing import Any, List, Tuple, Union, Optional
2
+ import numpy as np
3
+ import pandas as pd
4
4
 
5
5
  from datetime import datetime, timedelta
6
6
  import re
7
+ from typing import Any, List, Tuple, Union, Optional
7
8
 
8
9
 
9
10
  def parse_ras_datetime(datetime_str: str) -> datetime:
10
- """Parse a datetime string from a RAS file into a datetime object.
11
+ """
12
+ Parse a datetime string from a RAS file into a datetime object. If the datetime has
13
+ a time of 2400, then it is converted to midnight of the next day.
11
14
 
12
15
  Parameters
13
16
  ----------
14
- datetime_str (str): The datetime string to be parsed. The string should be in the format "ddMMMyyyy HHmm".
17
+ datetime_str (str): The datetime string to be parsed. The string should be in the format "ddMMMyyyy HH:mm:ss".
15
18
 
16
19
  Returns
17
20
  -------
18
21
  datetime: A datetime object representing the parsed datetime.
19
22
  """
20
23
  format = "%d%b%Y %H:%M:%S"
21
- return datetime.strptime(datetime_str, format)
24
+
25
+ if datetime_str.endswith("24:00:00"):
26
+ datetime_str = datetime_str.replace("24:00:00", "00:00:00")
27
+ parsed_dt = datetime.strptime(datetime_str, format)
28
+ parsed_dt += timedelta(days=1)
29
+ else:
30
+ parsed_dt = datetime.strptime(datetime_str, format)
31
+
32
+ return parsed_dt
22
33
 
23
34
 
24
35
  def parse_ras_simulation_window_datetime(datetime_str) -> datetime:
25
36
  """
26
- Parse a datetime string from a RAS simulation window into a datetime object.
37
+ Parse a datetime string from a RAS simulation window into a datetime object. If the datetime has a
38
+ time of 2400, then it is converted to midnight of the next day.
27
39
 
28
40
  Parameters
29
41
  ----------
@@ -34,7 +46,15 @@ def parse_ras_simulation_window_datetime(datetime_str) -> datetime:
34
46
  datetime: A datetime object representing the parsed datetime.
35
47
  """
36
48
  format = "%d%b%Y %H%M"
37
- return datetime.strptime(datetime_str, format)
49
+
50
+ if datetime_str.endswith("2400"):
51
+ datetime_str = datetime_str.replace("2400", "0000")
52
+ parsed_dt = datetime.strptime(datetime_str, format)
53
+ parsed_dt += timedelta(days=1)
54
+ else:
55
+ parsed_dt = datetime.strptime(datetime_str, format)
56
+
57
+ return parsed_dt
38
58
 
39
59
 
40
60
  def parse_run_time_window(window: str) -> Tuple[datetime, datetime]:
@@ -221,3 +241,24 @@ def get_first_hdf_group(parent_group: h5py.Group) -> Optional[h5py.Group]:
221
241
  if isinstance(item, h5py.Group):
222
242
  return item
223
243
  return None
244
+
245
+
246
+ def df_datetimes_to_str(df: pd.DataFrame) -> pd.DataFrame:
247
+ """Convert any datetime64 columns in a DataFrame to strings.
248
+
249
+ Parameters
250
+ ----------
251
+ df : DataFrame
252
+ The DataFrame to convert.
253
+
254
+ Returns
255
+ -------
256
+ DataFrame
257
+ The DataFrame with any datetime64 columns converted to strings.
258
+ """
259
+ df_result = df.copy()
260
+ for col in df.select_dtypes(include=["datetime64"]).columns:
261
+ df_result[col] = df[col].apply(
262
+ lambda x: pd.Timestamp(x).isoformat() if pd.notnull(x) else None
263
+ )
264
+ return df_result
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: rashdf
3
- Version: 0.1.1
3
+ Version: 0.2.1
4
4
  Summary: Read data from HEC-RAS HDF files.
5
5
  Project-URL: repository, https://github.com/fema-ffrd/rashdf
6
6
  Classifier: Development Status :: 4 - Beta
@@ -15,6 +15,7 @@ Description-Content-Type: text/markdown
15
15
  License-File: LICENSE
16
16
  Requires-Dist: h5py
17
17
  Requires-Dist: geopandas
18
+ Requires-Dist: pyarrow
18
19
  Provides-Extra: dev
19
20
  Requires-Dist: pre-commit; extra == "dev"
20
21
  Requires-Dist: ruff; extra == "dev"
@@ -77,6 +78,48 @@ datetime.datetime(2024, 3, 27, 9, 32, 15)],
77
78
  'Time Stamp Solution Went Unstable': 'Not Applicable'}
78
79
  ```
79
80
 
81
+ ## CLI
82
+ The `rashdf` command-line interface allows export directly to a variety of formats, enabled
83
+ by GeoPandas.
84
+ ```
85
+ $ rashdf <sub-command> <hdf-file> [<output-path>] [<options>]
86
+ ```
87
+
88
+ CLI help:
89
+ ```
90
+ $ rashdf --help
91
+ ```
92
+
93
+ Print the output formats supported by Fiona:
94
+ ```
95
+ $ rashdf --fiona-drivers
96
+ ```
97
+
98
+ Help for a specific subcommand:
99
+ ```
100
+ $ rashdf mesh_cell_polygons --help
101
+ ```
102
+
103
+ Example: export mesh cell faces to an ESRI Shapefile
104
+ ```
105
+ $ rashdf mesh_cell_faces BigRiver.g01.hdf big-river-mesh-cell-faces.shp
106
+ ```
107
+
108
+ Example: export mesh cell points to GeoParquet
109
+ ```
110
+ $ rashdf mesh_cell_points LittleCreek.g01.hdf --parquet little-creek-mesh-cell-points.parquet
111
+ ```
112
+
113
+ Example: export breaklines to OGC GeoPackage and reproject to a different CRS
114
+ ```
115
+ $ rashdf breaklines Whitemarsh.p01.hdf whitemarsh-breaklines.gpkg --to-crs EPSG:4326
116
+ ```
117
+
118
+ Example: write structures GeoJSON to `stdout`:
119
+ ```
120
+ $ rashdf structures Potomac.p01.hdf
121
+ ```
122
+
80
123
  ## Documentation
81
124
  Coming soon.
82
125
 
@@ -1,6 +1,7 @@
1
1
  LICENSE
2
2
  README.md
3
3
  pyproject.toml
4
+ src/cli.py
4
5
  src/rashdf/__init__.py
5
6
  src/rashdf/base.py
6
7
  src/rashdf/geom.py
@@ -9,8 +10,10 @@ src/rashdf/utils.py
9
10
  src/rashdf.egg-info/PKG-INFO
10
11
  src/rashdf.egg-info/SOURCES.txt
11
12
  src/rashdf.egg-info/dependency_links.txt
13
+ src/rashdf.egg-info/entry_points.txt
12
14
  src/rashdf.egg-info/requires.txt
13
15
  src/rashdf.egg-info/top_level.txt
16
+ tests/test_cli.py
14
17
  tests/test_geom.py
15
18
  tests/test_plan.py
16
19
  tests/test_utils.py
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ rashdf = cli:main
@@ -1,5 +1,6 @@
1
1
  h5py
2
2
  geopandas
3
+ pyarrow
3
4
 
4
5
  [dev]
5
6
  pre-commit
@@ -0,0 +1,108 @@
1
+ from src.cli import parse_args, export, docstring_to_help, fiona_supported_drivers
2
+
3
+ import geopandas as gpd
4
+ from pyproj import CRS
5
+
6
+ import json
7
+ from pathlib import Path
8
+
9
+ TEST_DATA = Path("./tests/data")
10
+ MUNCIE_G05 = TEST_DATA / "ras/Muncie.g05.hdf"
11
+
12
+
13
+ def test_docstring_to_help():
14
+ docstring = """This is a test docstring.
15
+ This is not part of the help message.
16
+ """
17
+ assert docstring_to_help(docstring) == "This is a test docstring."
18
+
19
+ docstring = """Return the something or other.
20
+ Blah blah blah."""
21
+ assert docstring_to_help(docstring) == "Export the something or other."
22
+
23
+ docstring = None
24
+ assert docstring_to_help(docstring) == ""
25
+
26
+
27
+ def test_fiona_supported_drivers():
28
+ drivers = fiona_supported_drivers()
29
+ assert "ESRI Shapefile" in drivers
30
+ assert "GeoJSON" in drivers
31
+ assert "GPKG" in drivers
32
+
33
+
34
+ def test_parse_args():
35
+ args = parse_args(["structures", "test.hdf"])
36
+ assert args.func == "structures"
37
+ assert args.hdf_file == "test.hdf"
38
+ assert args.output_file is None
39
+ assert args.to_crs is None
40
+ assert not args.parquet
41
+ assert not args.feather
42
+ assert args.kwargs is None
43
+
44
+ args = parse_args(["mesh_areas", "test.hdf", "test.json"])
45
+ assert args.func == "mesh_areas"
46
+ assert args.hdf_file == "test.hdf"
47
+ assert args.output_file == "test.json"
48
+ assert args.to_crs is None
49
+ assert not args.parquet
50
+ assert not args.feather
51
+ assert args.kwargs is None
52
+
53
+ args = parse_args(
54
+ [
55
+ "mesh_areas",
56
+ "test.hdf",
57
+ "test.json",
58
+ "--to-crs",
59
+ "EPSG:4326",
60
+ "--parquet",
61
+ "--kwargs",
62
+ '{"compression": "gzip"}',
63
+ ]
64
+ )
65
+ assert args.func == "mesh_areas"
66
+ assert args.hdf_file == "test.hdf"
67
+ assert args.output_file == "test.json"
68
+ assert args.to_crs == "EPSG:4326"
69
+ assert args.parquet
70
+ assert not args.feather
71
+ assert args.kwargs == '{"compression": "gzip"}'
72
+
73
+ args = parse_args(["--fiona-drivers"])
74
+ assert args.fiona_drivers
75
+
76
+
77
+ def test_export(tmp_path: Path):
78
+ args = parse_args(["structures", str(MUNCIE_G05)])
79
+ exported = json.loads(export(args))
80
+ gdf = gpd.GeoDataFrame.from_features(exported)
81
+ assert len(gdf) == 3
82
+ assert gdf["Last Edited"].to_list() == [
83
+ "2024-04-15T15:21:34",
84
+ "2024-04-15T15:21:48",
85
+ "2024-04-15T15:26:15",
86
+ ]
87
+
88
+ test_json_path = tmp_path / "test.json"
89
+ args = parse_args(["mesh_areas", str(MUNCIE_G05), str(test_json_path)])
90
+ export(args)
91
+ gdf = gpd.read_file(test_json_path)
92
+ assert len(gdf) == 2
93
+
94
+ test_parquet_path = tmp_path / "test.parquet"
95
+ args = parse_args(
96
+ [
97
+ "mesh_cell_points",
98
+ str(MUNCIE_G05),
99
+ str(test_parquet_path),
100
+ "--parquet",
101
+ "--to-crs",
102
+ "EPSG:4326",
103
+ ]
104
+ )
105
+ export(args)
106
+ gdf = gpd.read_parquet(test_parquet_path)
107
+ assert len(gdf) == 5790
108
+ assert gdf.crs == CRS.from_epsg(4326)
@@ -98,3 +98,9 @@ def test_get_geom_2d_flow_area_attrs(tmp_path):
98
98
  )
99
99
  ras_hdf = RasGeomHdf(test_hdf)
100
100
  assert ras_hdf.get_geom_2d_flow_area_attrs() == TEST_ATTRS
101
+
102
+
103
+ def test_structs():
104
+ structs_json = TEST_JSON / "structures.json"
105
+ with RasGeomHdf(MUNCIE_G05) as ghdf:
106
+ assert _gdf_matches_json(ghdf.structures(datetime_to_str=True), structs_json)
@@ -0,0 +1,66 @@
1
+ from src.rashdf import utils
2
+
3
+ import numpy as np
4
+ import pandas as pd
5
+ import pytest
6
+
7
+ from datetime import datetime, timedelta
8
+
9
+
10
+ def test_convert_ras_hdf_value():
11
+ assert utils.convert_ras_hdf_value(b"True") is True
12
+ assert utils.convert_ras_hdf_value(b"False") is False
13
+ assert utils.convert_ras_hdf_value(np.float32(1.23)) == pytest.approx(1.23)
14
+ assert utils.convert_ras_hdf_value(np.int32(123)) == 123
15
+ assert utils.convert_ras_hdf_value(b"15Mar2024 16:39:01") == datetime(
16
+ 2024, 3, 15, 16, 39, 1
17
+ )
18
+ assert utils.convert_ras_hdf_value(b"15Mar2024 24:00:00") == datetime(
19
+ 2024, 3, 16, 0, 0, 0
20
+ )
21
+ assert utils.convert_ras_hdf_value(b"15Mar2024 16:39:01 to 16Mar2024 16:39:01") == [
22
+ datetime(2024, 3, 15, 16, 39, 1),
23
+ datetime(2024, 3, 16, 16, 39, 1),
24
+ ]
25
+ assert utils.convert_ras_hdf_value(b"18Mar2024 24:00:00 to 19Mar2024 24:00:00") == [
26
+ datetime(2024, 3, 19, 0, 0, 0),
27
+ datetime(2024, 3, 20, 0, 0, 0),
28
+ ]
29
+ assert utils.convert_ras_hdf_value(b"01:23:45") == timedelta(
30
+ hours=1, minutes=23, seconds=45
31
+ )
32
+ assert utils.convert_ras_hdf_value(b"15Mar2024 2400") == datetime(
33
+ 2024, 3, 16, 0, 0, 0
34
+ )
35
+ assert utils.convert_ras_hdf_value(b"15Mar2024 2315") == datetime(
36
+ 2024, 3, 15, 23, 15, 0
37
+ )
38
+
39
+ assert utils.convert_ras_hdf_value(b"15Mar2024 1639 to 16Mar2024 1639") == [
40
+ datetime(2024, 3, 15, 16, 39, 0),
41
+ datetime(2024, 3, 16, 16, 39, 0),
42
+ ]
43
+ assert utils.convert_ras_hdf_value(b"18Mar2024 2400 to 19Mar2024 2400") == [
44
+ datetime(2024, 3, 19, 0, 0, 0),
45
+ datetime(2024, 3, 20, 0, 0, 0),
46
+ ]
47
+
48
+
49
+ def test_df_datetimes_to_str():
50
+ df = pd.DataFrame(
51
+ {
52
+ "datetime": [
53
+ datetime(2024, 3, 15, 16, 39, 1),
54
+ datetime(2024, 3, 16, 16, 39, 1),
55
+ ],
56
+ "asdf": [
57
+ 0.123,
58
+ 0.456,
59
+ ],
60
+ }
61
+ )
62
+ assert df["datetime"].dtype.name == "datetime64[ns]"
63
+ df = utils.df_datetimes_to_str(df)
64
+ assert df["datetime"].dtype.name == "object"
65
+ assert df["datetime"].tolist() == ["2024-03-15T16:39:01", "2024-03-16T16:39:01"]
66
+ assert df["asdf"].tolist() == [0.123, 0.456]
@@ -1,23 +0,0 @@
1
- from src.rashdf import utils
2
-
3
- import numpy as np
4
- import pytest
5
-
6
- from datetime import datetime, timedelta
7
-
8
-
9
- def test_convert_ras_hdf_value():
10
- assert utils.convert_ras_hdf_value(b"True") is True
11
- assert utils.convert_ras_hdf_value(b"False") is False
12
- assert utils.convert_ras_hdf_value(np.float32(1.23)) == pytest.approx(1.23)
13
- assert utils.convert_ras_hdf_value(np.int32(123)) == 123
14
- assert utils.convert_ras_hdf_value(b"15Mar2024 16:39:01") == datetime(
15
- 2024, 3, 15, 16, 39, 1
16
- )
17
- assert utils.convert_ras_hdf_value(b"15Mar2024 16:39:01 to 16Mar2024 16:39:01") == [
18
- datetime(2024, 3, 15, 16, 39, 1),
19
- datetime(2024, 3, 16, 16, 39, 1),
20
- ]
21
- assert utils.convert_ras_hdf_value(b"01:23:45") == timedelta(
22
- hours=1, minutes=23, seconds=45
23
- )
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes