rashdf 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rashdf/base.py +4 -1
- rashdf/plan.py +308 -8
- rashdf/utils.py +32 -2
- {rashdf-0.5.0.dist-info → rashdf-0.7.0.dist-info}/METADATA +20 -11
- rashdf-0.7.0.dist-info/RECORD +12 -0
- {rashdf-0.5.0.dist-info → rashdf-0.7.0.dist-info}/WHEEL +1 -1
- rashdf-0.5.0.dist-info/RECORD +0 -12
- {rashdf-0.5.0.dist-info → rashdf-0.7.0.dist-info}/LICENSE +0 -0
- {rashdf-0.5.0.dist-info → rashdf-0.7.0.dist-info}/entry_points.txt +0 -0
- {rashdf-0.5.0.dist-info → rashdf-0.7.0.dist-info}/top_level.txt +0 -0
rashdf/base.py
CHANGED

@@ -19,6 +19,7 @@ class RasHdf(h5py.File):
             Additional keyword arguments to pass to h5py.File
         """
         super().__init__(name, mode="r", **kwargs)
+        self._loc = name
 
     @classmethod
     def open_uri(
@@ -49,7 +50,9 @@ class RasHdf(h5py.File):
         import fsspec
 
         remote_file = fsspec.open(uri, mode="rb", **fsspec_kwargs)
-
+        result = cls(remote_file.open(), **h5py_kwargs)
+        result._loc = uri
+        return result
 
     def get_attrs(self, attr_path: str) -> Dict:
         """Convert attributes from a HEC-RAS HDF file into a Python dictionary for a given attribute path.
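With this change, `RasHdf` records where it was opened from in a private `_loc` attribute (the path in `__init__`, the URI in `open_uri`); the kerchunk helper added to `plan.py` below uses it to point chunk references back at the source file. A minimal usage sketch, assuming `RasPlanHdf` is exported from the package root and that `open_uri` accepts the `fsspec_kwargs`/`h5py_kwargs` keywords used in its body (the S3 URI is hypothetical):

```python
from rashdf import RasPlanHdf

# fsspec_kwargs are forwarded to fsspec.open(), h5py_kwargs to h5py.File(),
# per the open_uri body shown above.
plan_hdf = RasPlanHdf.open_uri(
    "s3://example-bucket/ExampleProject.p01.hdf",
    fsspec_kwargs={"anon": True},
)
print(plan_hdf._loc)  # "s3://example-bucket/ExampleProject.p01.hdf"
```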
rashdf/plan.py
CHANGED

@@ -5,6 +5,8 @@ from .utils import (
     df_datetimes_to_str,
     ras_timesteps_to_datetimes,
     parse_ras_datetime_ms,
+    deprecated,
+    convert_ras_hdf_value,
 )
 
 from geopandas import GeoDataFrame
@@ -155,6 +157,7 @@ class RasPlanHdf(RasGeomHdf):
     PLAN_INFO_PATH = "Plan Data/Plan Information"
     PLAN_PARAMS_PATH = "Plan Data/Plan Parameters"
     PRECIP_PATH = "Event Conditions/Meteorology/Precipitation"
+    OBS_DATA_PATH = "Event Conditions/Observed Data"
     RESULTS_UNSTEADY_PATH = "Results/Unsteady"
     RESULTS_UNSTEADY_SUMMARY_PATH = f"{RESULTS_UNSTEADY_PATH}/Summary"
     VOLUME_ACCOUNTING_PATH = f"{RESULTS_UNSTEADY_PATH}/Volume Accounting"
@@ -165,6 +168,8 @@ class RasPlanHdf(RasGeomHdf):
     UNSTEADY_TIME_SERIES_PATH = f"{BASE_OUTPUT_PATH}/Unsteady Time Series"
     REFERENCE_LINES_OUTPUT_PATH = f"{UNSTEADY_TIME_SERIES_PATH}/Reference Lines"
     REFERENCE_POINTS_OUTPUT_PATH = f"{UNSTEADY_TIME_SERIES_PATH}/Reference Points"
+    OBS_FLOW_OUTPUT_PATH = f"{OBS_DATA_PATH}/Flow"
+    OBS_STAGE_OUTPUT_PATH = f"{OBS_DATA_PATH}/Stage"
 
     RESULTS_STEADY_PATH = "Results/Steady"
     BASE_STEADY_PATH = f"{RESULTS_STEADY_PATH}/Output/Output Blocks/Base Output"
@@ -585,7 +590,8 @@ class RasPlanHdf(RasGeomHdf):
         Returns
         -------
         DataFrame
-            A DataFrame with columns 'mesh_name', 'cell_id' or 'face_id', a value column,
+            A DataFrame with columns 'mesh_name', 'cell_id' or 'face_id', a value column,
+            and a time column if the value corresponds to a specific time.
         """
         methods_with_times = {
             SummaryOutputVar.MAXIMUM_WATER_SURFACE: self.mesh_max_ws,
@@ -604,6 +610,76 @@ class RasPlanHdf(RasGeomHdf):
             df = other_methods[var]()
         return df
 
+    def _mesh_summary_outputs_df(
+        self,
+        cells_or_faces: str,
+        output_vars: Optional[List[SummaryOutputVar]] = None,
+        round_to: str = "0.1 s",
+    ) -> DataFrame:
+        if cells_or_faces == "cells":
+            feature_id_field = "cell_id"
+        elif cells_or_faces == "faces":
+            feature_id_field = "face_id"
+        else:
+            raise ValueError('cells_or_faces must be either "cells" or "faces".')
+        if output_vars is None:
+            summary_output_vars = self._summary_output_vars(
+                cells_or_faces=cells_or_faces
+            )
+        elif isinstance(output_vars, list):
+            summary_output_vars = []
+            for var in output_vars:
+                if not isinstance(var, SummaryOutputVar):
+                    var = SummaryOutputVar(var)
+                summary_output_vars.append(var)
+        else:
+            raise ValueError(
+                "include_output must be a boolean or a list of SummaryOutputVar values."
+            )
+        df = self.mesh_summary_output(summary_output_vars[0], round_to=round_to)
+        for var in summary_output_vars[1:]:
+            df_var = self.mesh_summary_output(var, round_to=round_to)
+            df = df.merge(df_var, on=["mesh_name", feature_id_field], how="left")
+        return df
+
+    def mesh_cells_summary_output(self, round_to: str = "0.1 s") -> DataFrame:
+        """
+        Return a DataFrame with summary output data for each mesh cell in the model.
+
+        Parameters
+        ----------
+        round_to : str, optional
+            The time unit to round the datetimes to. Default: "0.1 s" (seconds).
+            See Pandas documentation for valid time units:
+            https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html
+
+        Returns
+        -------
+        DataFrame
+            A DataFrame with columns 'mesh_name', 'cell_id', and columns for each
+            summary output variable.
+        """
+        return self._mesh_summary_outputs_df("cells", round_to=round_to)
+
+    def mesh_faces_summary_output(self, round_to: str = "0.1 s") -> DataFrame:
+        """
+        Return a DataFrame with summary output data for each mesh face in the model.
+
+        Parameters
+        ----------
+        round_to : str, optional
+            The time unit to round the datetimes to. Default: "0.1 s" (seconds).
+            See Pandas documentation for valid time units:
+            https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html
+
+        Returns
+        -------
+        DataFrame
+            A DataFrame with columns 'mesh_name', 'face_id', and columns for each
+            summary output variable.
+        """
+        return self._mesh_summary_outputs_df("faces", round_to=round_to)
+
     def _summary_output_vars(
         self, cells_or_faces: Optional[str] = None
     ) -> List[SummaryOutputVar]:
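A short usage sketch for the new summary-output convenience methods; the plan file name and mesh contents are hypothetical, while the method names and `round_to` parameter come straight from the hunk above:

```python
from rashdf import RasPlanHdf

with RasPlanHdf("ExampleProject.p01.hdf") as plan_hdf:
    # One row per (mesh_name, cell_id), with a column per summary output
    # variable, merged together by _mesh_summary_outputs_df.
    cells_df = plan_hdf.mesh_cells_summary_output()
    # Same idea for faces, with coarser rounding of output times.
    faces_df = plan_hdf.mesh_faces_summary_output(round_to="1 min")
```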
@@ -812,7 +888,7 @@ class RasPlanHdf(RasGeomHdf):
         mesh_name: str,
         var: TimeSeriesOutputVar,
     ) -> Tuple[np.ndarray, str]:
-        path =
+        path = self._mesh_timeseries_output_path(mesh_name, var.value)
         group = self.get(path)
         try:
             import dask.array as da
@@ -830,6 +906,7 @@ class RasPlanHdf(RasGeomHdf):
         self,
         mesh_name: str,
         var: Union[str, TimeSeriesOutputVar],
+        truncate: bool = True,
     ) -> xr.DataArray:
         """Return the time series output data for a given variable.
 
@@ -839,6 +916,8 @@ class RasPlanHdf(RasGeomHdf):
             The name of the 2D flow area mesh.
         var : TimeSeriesOutputVar
             The time series output variable to retrieve.
+        truncate : bool, optional
+            If True, truncate the number of cells to the listed cell count.
 
         Returns
         -------
@@ -856,7 +935,10 @@ class RasPlanHdf(RasGeomHdf):
         values, units = self._mesh_timeseries_output_values_units(mesh_name, var)
         if var in TIME_SERIES_OUTPUT_VARS_CELLS:
             cell_count = mesh_names_counts[mesh_name]
-
+            if truncate:
+                values = values[:, :cell_count]
+            else:
+                values = values[:, :]
             id_coord = "cell_id"
         elif var in TIME_SERIES_OUTPUT_VARS_FACES:
             id_coord = "face_id"
@@ -874,24 +956,28 @@
                 "mesh_name": mesh_name,
                 "variable": var.value,
                 "units": units,
+                "hdf_path": self._mesh_timeseries_output_path(mesh_name, var.value),
             },
         )
         return da
 
+    def _mesh_timeseries_output_path(self, mesh_name: str, var_name: str) -> str:
+        return f"{self.UNSTEADY_TIME_SERIES_PATH}/2D Flow Areas/{mesh_name}/{var_name}"
+
     def _mesh_timeseries_outputs(
-        self, mesh_name: str, vars: List[TimeSeriesOutputVar]
+        self, mesh_name: str, vars: List[TimeSeriesOutputVar], truncate: bool = True
     ) -> xr.Dataset:
         datasets = {}
         for var in vars:
             var_path = f"{self.UNSTEADY_TIME_SERIES_PATH}/2D Flow Areas/{mesh_name}/{var.value}"
             if self.get(var_path) is None:
                 continue
-            da = self.mesh_timeseries_output(mesh_name, var)
+            da = self.mesh_timeseries_output(mesh_name, var, truncate=truncate)
             datasets[var.value] = da
         ds = xr.Dataset(datasets, attrs={"mesh_name": mesh_name})
         return ds
 
-    def
+    def mesh_cells_timeseries_output(self, mesh_name: str) -> xr.Dataset:
         """Return the time series output data for cells in a 2D flow area mesh.
 
         Parameters
@@ -907,7 +993,25 @@ class RasPlanHdf(RasGeomHdf):
         ds = self._mesh_timeseries_outputs(mesh_name, TIME_SERIES_OUTPUT_VARS_CELLS)
         return ds
 
-
+    @deprecated
+    def mesh_timeseries_output_cells(self, mesh_name: str) -> xr.Dataset:
+        """Return the time series output data for cells in a 2D flow area mesh.
+
+        Deprecated: use mesh_cells_timeseries_output instead.
+
+        Parameters
+        ----------
+        mesh_name : str
+            The name of the 2D flow area mesh.
+
+        Returns
+        -------
+        xr.Dataset
+            An xarray Dataset with DataArrays for each time series output variable.
+        """
+        return self.mesh_cells_timeseries_output(mesh_name)
+
+    def mesh_faces_timeseries_output(self, mesh_name: str) -> xr.Dataset:
         """Return the time series output data for faces in a 2D flow area mesh.
 
         Parameters
@@ -923,6 +1027,24 @@ class RasPlanHdf(RasGeomHdf):
         ds = self._mesh_timeseries_outputs(mesh_name, TIME_SERIES_OUTPUT_VARS_FACES)
         return ds
 
+    @deprecated
+    def mesh_timeseries_output_faces(self, mesh_name: str) -> xr.Dataset:
+        """Return the time series output data for faces in a 2D flow area mesh.
+
+        Deprecated: use mesh_faces_timeseries_output instead.
+
+        Parameters
+        ----------
+        mesh_name : str
+            The name of the 2D flow area mesh.
+
+        Returns
+        -------
+        xr.Dataset
+            An xarray Dataset with DataArrays for each time series output variable.
+        """
+        return self.mesh_faces_timeseries_output(mesh_name)
+
     def reference_timeseries_output(self, reftype: str = "lines") -> xr.Dataset:
         """Return timeseries output data for reference lines or points from a HEC-RAS HDF plan file.
 
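The old `mesh_timeseries_output_cells`/`mesh_timeseries_output_faces` names are kept as thin wrappers marked with the new `@deprecated` decorator, so existing callers keep working but now get a `DeprecationWarning`. A sketch (file and mesh names are hypothetical):

```python
import warnings

from rashdf import RasPlanHdf

with RasPlanHdf("ExampleProject.p01.hdf") as plan_hdf:
    ds = plan_hdf.mesh_cells_timeseries_output("Mesh1")  # preferred name

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        plan_hdf.mesh_timeseries_output_cells("Mesh1")  # deprecated alias
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
```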
@@ -984,7 +1106,7 @@
                     f"{abbrev}_name": (f"{abbrev}_id", names),
                     "mesh_name": (f"{abbrev}_id", mesh_areas),
                 },
-                attrs={"
+                attrs={"units": units, "hdf_path": f"{output_path}/{var}"},
             )
             das[var] = da
         return xr.Dataset(das)
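Reference line/point DataArrays now carry `units` and the originating `hdf_path` in their attrs; `_zmeta` (added further down) relies on `hdf_path` to locate each variable's chunks in the HDF5 file. A sketch of inspecting those attrs (the file name is hypothetical and the exact variable names depend on the plan output):

```python
from rashdf import RasPlanHdf

with RasPlanHdf("ExampleProject.p01.hdf") as plan_hdf:
    ref_ds = plan_hdf.reference_lines_timeseries_output()
    for name, da in ref_ds.data_vars.items():
        # e.g. "Flow" -> ".../Reference Lines/Flow" plus its units string
        print(name, da.attrs.get("hdf_path"), da.attrs.get("units"))
```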
@@ -999,6 +1121,74 @@
         """
         return self.reference_timeseries_output(reftype="lines")
 
+    def observed_timeseries_input(self, vartype: str = "Flow") -> dict:
+        """Return observed timeseries input data for reference lines and points from a HEC-RAS HDF plan file.
+
+        Parameters
+        ----------
+        vartype : str, optional
+            The type of observed data to retrieve. Must be either "Flow" or "Stage".
+            (default: "Flow")
+
+        Returns
+        -------
+        xr.Dataset
+            An xarray Dataset with observed timeseries input data for both reference lines and reference points.
+        """
+        if vartype == "Flow":
+            output_path = self.OBS_FLOW_OUTPUT_PATH
+        elif vartype == "Stage":
+            output_path = self.OBS_STAGE_OUTPUT_PATH
+        else:
+            raise ValueError('vartype must be either "Flow" or "Stage".')
+
+        observed_group = self.get(output_path)
+        if observed_group is None:
+            raise RasPlanHdfError(
+                f"Could not find HDF group at path '{output_path}'."
+                f" Does the Plan HDF file contain reference {vartype} output data?"
+            )
+        if "Attributes" in observed_group.keys():
+            attr_path = observed_group["Attributes"]
+            attrs_df = pd.DataFrame(attr_path[:]).map(convert_ras_hdf_value)
+
+        das = {}
+        for idx, site in enumerate(observed_group.keys()):
+            if site != "Attributes":
+                # Site Ex: 'Ref Point: Grapevine_Lake_RP'
+                site_path = observed_group[site]
+                site_name = site.split(":")[1][1:]  # Grapevine_Lake_RP
+                ref_type = site.split(":")[0]  # Ref Point
+                if ref_type == "Ref Line":
+                    ref_type = "refln"
+                else:
+                    ref_type = "refpt"
+                df = pd.DataFrame(site_path[:]).map(convert_ras_hdf_value)
+                # rename Date to time
+                df = df.rename(columns={"Date": "time"})
+                # Ensure the Date index is unique
+                df = df.drop_duplicates(subset="time")
+                # Package into an 1D xarray DataArray
+                values = df["Value"].values
+                times = df["time"].values
+                da = xr.DataArray(
+                    values,
+                    name=vartype,
+                    dims=["time"],
+                    coords={
+                        "time": times,
+                    },
+                    attrs={
+                        "hdf_path": f"{output_path}/{site}",
+                    },
+                )
+                # Expand dimensions to add additional coordinates
+                da = da.expand_dims({f"{ref_type}_id": [idx - 1]})
+                da = da.expand_dims({f"{ref_type}_name": [site_name]})
+                das[site_name] = da
+        das = xr.concat([das[site] for site in das.keys()], dim="time")
+        return das
+
     def reference_points_timeseries_output(self) -> xr.Dataset:
         """Return timeseries output data for reference points from a HEC-RAS HDF plan file.
 
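Usage sketch for the new observed-data reader (the plan file name is hypothetical). Note that although the signature is annotated `-> dict`, the docstring and body show it returning an xarray object concatenated from the `Event Conditions/Observed Data/Flow` or `.../Stage` group:

```python
from rashdf import RasPlanHdf

with RasPlanHdf("ExampleProject.p01.hdf") as plan_hdf:
    # Observed hydrographs attached to reference lines/points, if present.
    obs_flow = plan_hdf.observed_timeseries_input(vartype="Flow")
    obs_stage = plan_hdf.observed_timeseries_input(vartype="Stage")
```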
@@ -1317,3 +1507,113 @@
             A DataFrame containing the velocity inside the cross sections
         """
         return self.steady_profile_xs_output(XsSteadyOutputVar.VELOCITY_TOTAL)
+
+    def _zmeta(self, ds: xr.Dataset) -> Dict:
+        """Given a xarray Dataset, return kerchunk-style zarr reference metadata."""
+        from kerchunk.hdf import SingleHdf5ToZarr
+        import zarr
+        import base64
+
+        encoding = {}
+        chunk_meta = {}
+
+        # Loop through each variable / DataArray in the Dataset
+        for var, da in ds.data_vars.items():
+            # The "hdf_path" attribute is the path within the HDF5 file
+            # that the DataArray was read from. This is attribute is inserted
+            # by rashdf (see "mesh_timeseries_output" method).
+            hdf_ds_path = da.attrs["hdf_path"]
+            hdf_ds = self.get(hdf_ds_path)
+            if hdf_ds is None:
+                # If we don't know where in the HDF5 the data came from, we
+                # have to skip it, because we won't be able to generate the
+                # correct metadata for it.
+                continue
+            # Get the filters and storage info for the HDF5 dataset.
+            # Calling private methods from Kerchunk here because
+            # there's not a nice public API for this part. This is hacky
+            # and a bit risky because these private methods are more likely
+            # to change, but short of reimplementing these functions ourselves
+            # it's the best way to get the metadata we need.
+            # TODO: raise an issue in Kerchunk to expose this functionality?
+            filters = SingleHdf5ToZarr._decode_filters(None, hdf_ds)
+            encoding[var] = {"compressor": None, "filters": filters}
+            storage_info = SingleHdf5ToZarr._storage_info(None, hdf_ds)
+            # Generate chunk metadata for the DataArray
+            for key, value in storage_info.items():
+                chunk_number = ".".join([str(k) for k in key])
+                chunk_key = f"{var}/{chunk_number}"
+                chunk_meta[chunk_key] = [str(self._loc), value["offset"], value["size"]]
+        # "Write" the Dataset to a temporary in-memory zarr store (which
+        # is the same a Python dictionary)
+        zarr_tmp = zarr.MemoryStore()
+        # Use compute=False here because we don't _actually_ want to write
+        # the data to the zarr store, we just want to generate the metadata.
+        ds.to_zarr(zarr_tmp, mode="w", compute=False, encoding=encoding)
+        zarr_meta = {"version": 1, "refs": {}}
+        # Loop through the in-memory Zarr store, decode the data to strings,
+        # and add it to the final metadata dictionary.
+        for key, value in zarr_tmp.items():
+            try:
+                value_str = value.decode("utf-8")
+            except UnicodeDecodeError:
+                value_str = "base64:" + base64.b64encode(value).decode("utf-8")
+            zarr_meta["refs"][key] = value_str
+        zarr_meta["refs"].update(chunk_meta)
+        return zarr_meta
+
+    def zmeta_mesh_cells_timeseries_output(self, mesh_name: str) -> Dict:
+        """Return kerchunk-style zarr reference metadata.
+
+        Requires the 'zarr' and 'kerchunk' packages.
+
+        Returns
+        -------
+        dict
+            Dictionary of kerchunk-style zarr reference metadata.
+        """
+        ds = self._mesh_timeseries_outputs(
+            mesh_name, TIME_SERIES_OUTPUT_VARS_CELLS, truncate=False
+        )
+        return self._zmeta(ds)
+
+    def zmeta_mesh_faces_timeseries_output(self, mesh_name: str) -> Dict:
+        """Return kerchunk-style zarr reference metadata.
+
+        Requires the 'zarr' and 'kerchunk' packages.
+
+        Returns
+        -------
+        dict
+            Dictionary of kerchunk-style zarr reference metadata.
+        """
+        ds = self._mesh_timeseries_outputs(
+            mesh_name, TIME_SERIES_OUTPUT_VARS_FACES, truncate=False
+        )
+        return self._zmeta(ds)
+
+    def zmeta_reference_lines_timeseries_output(self) -> Dict:
+        """Return kerchunk-style zarr reference metadata.
+
+        Requires the 'zarr' and 'kerchunk' packages.
+
+        Returns
+        -------
+        dict
+            Dictionary of kerchunk-style zarr reference metadata.
+        """
+        ds = self.reference_lines_timeseries_output()
+        return self._zmeta(ds)
+
+    def zmeta_reference_points_timeseries_output(self) -> Dict:
+        """Return kerchunk-style zarr reference metadata.
+
+        Requires the 'zarr' and 'kerchunk' packages.
+
+        Returns
+        -------
+        dict
+            Dictionary of kerchunk-style zarr reference metadata.
+        """
+        ds = self.reference_points_timeseries_output()
+        return self._zmeta(ds)
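The `zmeta_*` methods build their datasets with `truncate=False` so the chunk references line up with the untruncated on-disk HDF5 datasets, and they record `self._loc` (set in `base.py`) as the reference target. A sketch of consuming the returned references with fsspec's reference filesystem; this consumption pattern is a common kerchunk idiom rather than part of the diff, and the file and mesh names are hypothetical:

```python
import json

import fsspec
import xarray as xr

from rashdf import RasPlanHdf

with RasPlanHdf("ExampleProject.p01.hdf") as plan_hdf:
    refs = plan_hdf.zmeta_mesh_cells_timeseries_output("Mesh1")

# The references are plain JSON-serializable metadata...
with open("mesh1_cells_refs.json", "w") as f:
    json.dump(refs, f)

# ...and can be opened lazily later without re-reading the whole HDF5 file.
fs = fsspec.filesystem("reference", fo=refs)
ds = xr.open_zarr(fs.get_mapper(""), consolidated=False)
```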
rashdf/utils.py
CHANGED

@@ -6,8 +6,8 @@ import pandas as pd
 
 from datetime import datetime, timedelta
 import re
-from typing import Any, List, Tuple, Union, Optional
-
+from typing import Any, Callable, List, Tuple, Union, Optional
+import warnings
 
 
 def parse_ras_datetime_ms(datetime_str: str) -> datetime:
@@ -308,3 +308,33 @@ def ras_timesteps_to_datetimes(
         start_time + pd.Timedelta(timestep, unit=time_unit).round(round_to)
         for timestep in timesteps.astype(np.float64)
     ]
+
+
+def deprecated(func) -> Callable:
+    """
+    Deprecate a function.
+
+    This is a decorator which can be used to mark functions as deprecated.
+    It will result in a warning being emitted when the function is used.
+
+    Parameters
+    ----------
+    func: The function to be deprecated.
+
+    Returns
+    -------
+    The decorated function.
+    """
+
+    def new_func(*args, **kwargs):
+        warnings.warn(
+            f"{func.__name__} is deprecated and will be removed in a future version.",
+            category=DeprecationWarning,
+            stacklevel=2,
+        )
+        return func(*args, **kwargs)
+
+    new_func.__name__ = func.__name__
+    new_func.__doc__ = func.__doc__
+    new_func.__dict__.update(func.__dict__)
+    return new_func
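A quick sketch of how the new decorator behaves; the decorated function here is hypothetical:

```python
import warnings

from rashdf.utils import deprecated


@deprecated
def old_name(x: int) -> int:
    """Return x unchanged."""
    return x


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    old_name(1)

assert any(issubclass(w.category, DeprecationWarning) for w in caught)
print(old_name.__name__)  # "old_name" -- __name__, __doc__, __dict__ are copied
```

Copying `__name__`, `__doc__`, and `__dict__` by hand mirrors what `functools.wraps` would normally do; the diff simply does it explicitly.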
{rashdf-0.5.0.dist-info → rashdf-0.7.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: rashdf
-Version: 0.
+Version: 0.7.0
 Summary: Read data from HEC-RAS HDF files.
 Project-URL: repository, https://github.com/fema-ffrd/rashdf
 Classifier: Development Status :: 4 - Beta
@@ -14,19 +14,24 @@ Classifier: Programming Language :: Python :: 3.12
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: h5py
-Requires-Dist: geopandas
+Requires-Dist: geopandas<2.0,>=1.0
 Requires-Dist: pyarrow
 Requires-Dist: xarray
 Provides-Extra: dev
-Requires-Dist: pre-commit
-Requires-Dist: ruff
-Requires-Dist: pytest
-Requires-Dist: pytest-cov
-Requires-Dist:
+Requires-Dist: pre-commit; extra == "dev"
+Requires-Dist: ruff; extra == "dev"
+Requires-Dist: pytest; extra == "dev"
+Requires-Dist: pytest-cov; extra == "dev"
+Requires-Dist: kerchunk; extra == "dev"
+Requires-Dist: zarr; extra == "dev"
+Requires-Dist: dask; extra == "dev"
+Requires-Dist: fsspec; extra == "dev"
+Requires-Dist: s3fs; extra == "dev"
+Requires-Dist: fiona==1.9.6; extra == "dev"
 Provides-Extra: docs
-Requires-Dist: sphinx
-Requires-Dist: numpydoc
-Requires-Dist:
+Requires-Dist: sphinx; extra == "docs"
+Requires-Dist: numpydoc; extra == "docs"
+Requires-Dist: sphinx_rtd_theme; extra == "docs"
 
 # rashdf
 [](https://github.com/fema-ffrd/rashdf/actions/workflows/continuous-integration.yml)
@@ -140,8 +145,12 @@ $ python -m venv venv-rashdf
 
 Activate the virtual environment:
 ```
-
+# For macOS/Linux
+$ source ./venv-rashdf/bin/activate
 (venv-rashdf) $
+
+# For Windows
+> ./venv-rashdf/Scripts/activate
 ```
 
 Install dev dependencies:
rashdf-0.7.0.dist-info/RECORD
ADDED

@@ -0,0 +1,12 @@
+cli.py,sha256=yItWmCxnYLcuOpJVRpUsfv_NLS9IxLjojZB9GrxfKAU,6571
+rashdf/__init__.py,sha256=XXFtJDgLPCimqAhfsFz_pTWYECJiRT0i-Kb1uflXmVU,156
+rashdf/base.py,sha256=cAQJX1aeBJKb3MJ06ltpbRTUaZX5NkuxpR1J4f7FyTU,2507
+rashdf/geom.py,sha256=2aTfj6mqZGP6rysflQ5L8FeItlYJsknO00sKHo-yaTw,26090
+rashdf/plan.py,sha256=IjOdh95fCsLd7bUmHvjQ4fdoqgMjCh1L4MvKu6o2Lv0,59524
+rashdf/utils.py,sha256=Cba6sULF0m0jg6CQass4bPm2oxTd_avoe1pRQxq082c,10896
+rashdf-0.7.0.dist-info/LICENSE,sha256=L_0QaLpQVHPcglVjiaJPnOocwzP8uXevDRjUPr9DL1Y,1065
+rashdf-0.7.0.dist-info/METADATA,sha256=DAmUtjj2amZAyyoQHbsNEf9pLm5DBLG6JY9xTypwjTc,5986
+rashdf-0.7.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+rashdf-0.7.0.dist-info/entry_points.txt,sha256=LHHMR1lLy4wRyscMuW1RlYDXemtPgqQhNcILz0DtStY,36
+rashdf-0.7.0.dist-info/top_level.txt,sha256=SrmLb6FFTJtM_t6O1v0M0JePshiQJMHr0yYVkHL7ztk,11
+rashdf-0.7.0.dist-info/RECORD,,
rashdf-0.5.0.dist-info/RECORD
DELETED

@@ -1,12 +0,0 @@
-cli.py,sha256=yItWmCxnYLcuOpJVRpUsfv_NLS9IxLjojZB9GrxfKAU,6571
-rashdf/__init__.py,sha256=XXFtJDgLPCimqAhfsFz_pTWYECJiRT0i-Kb1uflXmVU,156
-rashdf/base.py,sha256=lHYVDwFTA1qFI34QYZ55QKcp7b8CeZsmDfESdkYISbg,2432
-rashdf/geom.py,sha256=2aTfj6mqZGP6rysflQ5L8FeItlYJsknO00sKHo-yaTw,26090
-rashdf/plan.py,sha256=ggXzP4Ryx9MxMSHFrkMpFIjYCdIBufWiFPsFx5SFY6c,47426
-rashdf/utils.py,sha256=93arHtIT-iL9dIpbYr7esjrxv1uJabTRJSruyjvr8mw,10168
-rashdf-0.5.0.dist-info/LICENSE,sha256=L_0QaLpQVHPcglVjiaJPnOocwzP8uXevDRjUPr9DL1Y,1065
-rashdf-0.5.0.dist-info/METADATA,sha256=hnF7VT4q5-tBkwMZQvF1VfGArEQ4y1jznn1A4L0PwGs,5729
-rashdf-0.5.0.dist-info/WHEEL,sha256=Z4pYXqR_rTB7OWNDYFOm1qRk0RX6GFP2o8LgvP453Hk,91
-rashdf-0.5.0.dist-info/entry_points.txt,sha256=LHHMR1lLy4wRyscMuW1RlYDXemtPgqQhNcILz0DtStY,36
-rashdf-0.5.0.dist-info/top_level.txt,sha256=SrmLb6FFTJtM_t6O1v0M0JePshiQJMHr0yYVkHL7ztk,11
-rashdf-0.5.0.dist-info/RECORD,,

{rashdf-0.5.0.dist-info → rashdf-0.7.0.dist-info}/LICENSE
File without changes

{rashdf-0.5.0.dist-info → rashdf-0.7.0.dist-info}/entry_points.txt
File without changes

{rashdf-0.5.0.dist-info → rashdf-0.7.0.dist-info}/top_level.txt
File without changes