ras-commander 0.66.0.tar.gz → 0.68.0.tar.gz

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
Files changed (39)
  1. {ras_commander-0.66.0/ras_commander.egg-info → ras_commander-0.68.0}/PKG-INFO +1 -1
  2. ras_commander-0.68.0/ras_commander/Decorators.py +206 -0
  3. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfPlan.py +1 -19
  4. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfResultsMesh.py +26 -26
  5. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/RasPrj.py +79 -29
  6. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/__init__.py +1 -1
  7. {ras_commander-0.66.0 → ras_commander-0.68.0/ras_commander.egg-info}/PKG-INFO +1 -1
  8. {ras_commander-0.66.0 → ras_commander-0.68.0}/setup.py +1 -1
  9. ras_commander-0.66.0/ras_commander/Decorators.py +0 -144
  10. {ras_commander-0.66.0 → ras_commander-0.68.0}/LICENSE +0 -0
  11. {ras_commander-0.66.0 → ras_commander-0.68.0}/README.md +0 -0
  12. {ras_commander-0.66.0 → ras_commander-0.68.0}/pyproject.toml +0 -0
  13. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfBase.py +0 -0
  14. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfBndry.py +0 -0
  15. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfFluvialPluvial.py +0 -0
  16. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfInfiltration.py +0 -0
  17. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfMesh.py +0 -0
  18. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfPipe.py +0 -0
  19. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfPlot.py +0 -0
  20. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfPump.py +0 -0
  21. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfResultsPlan.py +0 -0
  22. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfResultsPlot.py +0 -0
  23. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfResultsXsec.py +0 -0
  24. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfStruc.py +0 -0
  25. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfUtils.py +0 -0
  26. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/HdfXsec.py +0 -0
  27. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/LoggingConfig.py +0 -0
  28. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/RasCmdr.py +0 -0
  29. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/RasExamples.py +0 -0
  30. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/RasGeo.py +0 -0
  31. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/RasMapper.py +0 -0
  32. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/RasPlan.py +0 -0
  33. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/RasUnsteady.py +0 -0
  34. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander/RasUtils.py +0 -0
  35. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander.egg-info/SOURCES.txt +0 -0
  36. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander.egg-info/dependency_links.txt +0 -0
  37. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander.egg-info/requires.txt +0 -0
  38. {ras_commander-0.66.0 → ras_commander-0.68.0}/ras_commander.egg-info/top_level.txt +0 -0
  39. {ras_commander-0.66.0 → ras_commander-0.68.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ras-commander
- Version: 0.66.0
+ Version: 0.68.0
  Summary: A Python library for automating HEC-RAS 6.x operations
  Home-page: https://github.com/gpt-cmdr/ras-commander
  Author: William M. Katzenmeyer, P.E., C.F.M.
@@ -0,0 +1,206 @@
+ from functools import wraps
+ from pathlib import Path
+ from typing import Union
+ import logging
+ import h5py
+ import inspect
+ import pandas as pd
+
+
+ def log_call(func):
+     @wraps(func)
+     def wrapper(*args, **kwargs):
+         logger = logging.getLogger(func.__module__)
+         logger.debug(f"Calling {func.__name__}")
+         result = func(*args, **kwargs)
+         logger.debug(f"Finished {func.__name__}")
+         return result
+     return wrapper
+
+ def standardize_input(file_type: str = 'plan_hdf'):
+     """
+     Decorator to standardize input for HDF file operations.
+
+     This decorator processes various input types and converts them to a Path object
+     pointing to the correct HDF file. It handles the following input types:
+     - h5py.File objects
+     - pathlib.Path objects
+     - Strings (file paths or plan/geom numbers)
+     - Integers (interpreted as plan/geom numbers)
+
+     The decorator also manages RAS object references and logging.
+
+     Args:
+         file_type (str): Specifies whether to look for 'plan_hdf' or 'geom_hdf' files.
+
+     Returns:
+         A decorator that wraps the function to standardize its input to a Path object.
+     """
+     def decorator(func):
+         @wraps(func)
+         def wrapper(*args, **kwargs):
+             logger = logging.getLogger(func.__module__)
+
+             # Check if the function expects an hdf_path parameter
+             sig = inspect.signature(func)
+             param_names = list(sig.parameters.keys())
+
+             # If first parameter is 'hdf_file', pass an h5py object
+             if param_names and param_names[0] == 'hdf_file':
+                 if isinstance(args[0], h5py.File):
+                     return func(*args, **kwargs)
+                 elif isinstance(args[0], (str, Path)):
+                     with h5py.File(args[0], 'r') as hdf:
+                         return func(hdf, *args[1:], **kwargs)
+                 else:
+                     raise ValueError(f"Expected h5py.File or path, got {type(args[0])}")
+
+             # Handle both static method calls and regular function calls
+             if args and isinstance(args[0], type):
+                 # Static method call, remove the class argument
+                 args = args[1:]
+
+             # Get hdf_input from kwargs if provided with hdf_path key, or take first positional arg
+             hdf_input = kwargs.pop('hdf_path', None) if 'hdf_path' in kwargs else (args[0] if args else None)
+
+             # Import ras here to ensure we get the most current instance
+             from .RasPrj import ras as ras
+             ras_object = kwargs.pop('ras_object', None) or (args[1] if len(args) > 1 else None)
+             ras_obj = ras_object or ras
+
+             # If no hdf_input provided, return the function unmodified
+             if hdf_input is None:
+                 return func(*args, **kwargs)
+
+             hdf_path = None
+
+             # Clean and normalize string inputs
+             if isinstance(hdf_input, str):
+                 # Clean the string (remove extra whitespace, normalize path separators)
+                 hdf_input = hdf_input.strip()
+
+                 # Check if it's a raw file path that exists
+                 try:
+                     test_path = Path(hdf_input)
+                     if test_path.is_file():
+                         hdf_path = test_path
+                         logger.info(f"Using HDF file from direct string path: {hdf_path}")
+                 except Exception as e:
+                     logger.debug(f"Error converting string to path: {str(e)}")
+
+             # If a valid path wasn't created from string processing, continue with normal flow
+             if hdf_path is None:
+                 # If hdf_input is already a Path and exists, use it directly
+                 if isinstance(hdf_input, Path) and hdf_input.is_file():
+                     hdf_path = hdf_input
+                     logger.info(f"Using existing Path object HDF file: {hdf_path}")
+                 # If hdf_input is an h5py.File object, use its filename
+                 elif isinstance(hdf_input, h5py.File):
+                     hdf_path = Path(hdf_input.filename)
+                     logger.info(f"Using HDF file from h5py.File object: {hdf_path}")
+                 # Handle Path objects that might not be verified yet
+                 elif isinstance(hdf_input, Path):
+                     if hdf_input.is_file():
+                         hdf_path = hdf_input
+                         logger.info(f"Using verified Path object HDF file: {hdf_path}")
+                 # Handle string inputs that are plan/geom numbers
+                 elif isinstance(hdf_input, str) and (hdf_input.isdigit() or (len(hdf_input) > 1 and hdf_input[0] == 'p' and hdf_input[1:].isdigit())):
+                     try:
+                         ras_obj.check_initialized()
+                     except Exception as e:
+                         raise ValueError(f"RAS object is not initialized: {str(e)}")
+
+                     number_str = hdf_input if hdf_input.isdigit() else hdf_input[1:]
+                     number_int = int(number_str)
+
+                     if file_type == 'plan_hdf':
+                         try:
+                             # Convert plan_number column to integers for comparison
+                             plan_info = ras_obj.plan_df[ras_obj.plan_df['plan_number'].astype(int) == number_int]
+                             if not plan_info.empty:
+                                 # Make sure HDF_Results_Path is a string and not None
+                                 hdf_path_str = plan_info.iloc[0]['HDF_Results_Path']
+                                 if pd.notna(hdf_path_str):
+                                     hdf_path = Path(str(hdf_path_str))
+                         except Exception as e:
+                             logger.warning(f"Error retrieving plan HDF path: {str(e)}")
+
+
+                     elif file_type == 'geom_hdf':
+                         try:
+                             # Convert geometry_number column to integers for comparison
+                             geom_info = ras_obj.plan_df[ras_obj.plan_df['geometry_number'].astype(int) == number_int]
+                             if not geom_info.empty:
+                                 hdf_path_str = ras_obj.geom_df.iloc[0]['hdf_path']
+                                 if pd.notna(hdf_path_str):
+                                     hdf_path = Path(str(hdf_path_str))
+                         except Exception as e:
+                             logger.warning(f"Error retrieving geometry HDF path: {str(e)}")
+                     else:
+                         raise ValueError(f"Invalid file type: {file_type}")
+
+
+
+
+                 # Handle integer inputs (assuming they're plan or geom numbers)
+                 elif isinstance(hdf_input, int):
+                     try:
+                         ras_obj.check_initialized()
+                     except Exception as e:
+                         raise ValueError(f"RAS object is not initialized: {str(e)}")
+
+                     number_int = hdf_input
+
+                     if file_type == 'plan_hdf':
+                         try:
+                             # Convert plan_number column to integers for comparison
+                             plan_info = ras_obj.plan_df[ras_obj.plan_df['plan_number'].astype(int) == number_int]
+                             if not plan_info.empty:
+                                 # Make sure HDF_Results_Path is a string and not None
+                                 hdf_path_str = plan_info.iloc[0]['HDF_Results_Path']
+                                 if pd.notna(hdf_path_str):
+                                     hdf_path = Path(str(hdf_path_str))
+                         except Exception as e:
+                             logger.warning(f"Error retrieving plan HDF path: {str(e)}")
+                     elif file_type == 'geom_hdf':
+                         try:
+                             # Convert geometry_number column to integers for comparison
+                             geom_info = ras_obj.plan_df[ras_obj.plan_df['geometry_number'].astype(int) == number_int]
+                             if not geom_info.empty:
+                                 hdf_path_str = ras_obj.geom_df.iloc[0]['hdf_path']
+                                 if pd.notna(hdf_path_str):
+                                     hdf_path = Path(str(hdf_path_str))
+                         except Exception as e:
+                             logger.warning(f"Error retrieving geometry HDF path: {str(e)}")
+                     else:
+                         raise ValueError(f"Invalid file type: {file_type}")
+
+             # Final verification that the path exists
+             if hdf_path is None or not hdf_path.exists():
+                 error_msg = f"HDF file not found: {hdf_input}"
+                 logger.error(error_msg)
+                 raise FileNotFoundError(error_msg)
+
+             logger.info(f"Final validated HDF file path: {hdf_path}")
+
+             # Now try to validate the HDF file structure (but don't fail if validation fails)
+             try:
+                 with h5py.File(hdf_path, 'r') as test_file:
+                     # Just open to verify it's a valid HDF5 file
+                     logger.debug(f"Successfully opened HDF file for validation: {hdf_path}")
+             except Exception as e:
+                 logger.warning(f"Warning: Could not validate HDF file: {str(e)}")
+                 # Continue anyway, let the function handle detailed validation
+
+             # Pass all original arguments and keywords, replacing hdf_input with standardized hdf_path
+             # If the original input was positional, replace the first argument
+             if args and 'hdf_path' not in kwargs:
+                 new_args = (hdf_path,) + args[1:]
+             else:
+                 new_args = args
+                 kwargs['hdf_path'] = hdf_path
+
+             return func(*new_args, **kwargs)
+
+         return wrapper
+     return decorator
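Note (illustrative; not part of the packaged diff): the decorator above is applied to static methods elsewhere in the library, which accept either an explicit HDF path or a plan number once the project is initialized. A minimal sketch of that calling pattern, using a hypothetical consumer function and an assumed project folder and HEC-RAS version, would be:

    from pathlib import Path
    from ras_commander import init_ras_project
    from ras_commander.Decorators import log_call, standardize_input

    @log_call
    @standardize_input(file_type='plan_hdf')
    def summarize_plan_results(hdf_path: Path):   # hypothetical consumer function
        ...                                       # receives a validated Path to the plan's results HDF

    init_ras_project(r"C:/Projects/MyRASProject", "6.5")   # assumed folder/version; populates the global ras object
    summarize_plan_results("01")                           # plan number, resolved via ras.plan_df['HDF_Results_Path']
    summarize_plan_results(Path("MyRASProject.p01.hdf"))   # explicit path, used directly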
@@ -11,25 +11,7 @@ The file has been forked and modified for use in RAS Commander.
 
  All of the methods in this class are static and are designed to be used without instantiation.
 
- List of Functions in HdfPlan:
- - get_simulation_start_time()
- - get_simulation_end_time()
- - get_unsteady_datetimes()
- - get_plan_info_attrs()
- - get_plan_parameters()
- - get_meteorology_precip_attrs()
- - get_geom_attrs()
-
-
- REVISIONS NEEDED:
 
- Use get_ prefix for functions that return data.
- Since we are extracting plan data, we should use get_plan_...
- BUT, we will never set results data, so we should use results_
-
- We need to shorten names where possible.
-
- List of Revised Functions in HdfPlan:
  - get_plan_start_time()
  - get_plan_end_time()
  - get_plan_timestamps_list()
@@ -283,7 +265,7 @@ class HdfPlan:
 
      @staticmethod
      @log_call
-     @standardize_input(file_type='plan_hdf')
+     @standardize_input(file_type='geom_hdf')
      def get_geometry_information(hdf_path: Path) -> pd.DataFrame:
          """
          Get root level geometry attributes from the HDF plan file.
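Illustrative only (not part of the diff): with the decorator switched to 'geom_hdf', a bare number passed to this method is now resolved through the project's geometry records rather than a plan's results HDF, while an explicit path still bypasses the lookup. Assuming an initialized project:

    from pathlib import Path
    from ras_commander.HdfPlan import HdfPlan

    geom_attrs = HdfPlan.get_geometry_information("01")                          # "01" treated as a geometry number
    geom_attrs = HdfPlan.get_geometry_information(Path("MyRASProject.g01.hdf"))  # assumed geometry HDF path, used directly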
@@ -150,8 +150,8 @@ class HdfResultsMesh:
          Valid variables include:
          "Water Surface", "Face Velocity", "Cell Velocity X"...
          """
-         with h5py.File(hdf_path, 'r') as hdf_file:
-             return HdfResultsMesh._get_mesh_timeseries_output(hdf_file, mesh_name, var, truncate)
+         with h5py.File(hdf_path, 'r') as hdf_path:
+             return HdfResultsMesh._get_mesh_timeseries_output(hdf_path, mesh_name, var, truncate)
 
      @staticmethod
      @log_call
@@ -210,8 +210,8 @@ class HdfResultsMesh:
          - Variable metadata
          """
          try:
-             with h5py.File(hdf_path, 'r') as hdf_file:
-                 return HdfResultsMesh._get_mesh_cells_timeseries_output(hdf_file, mesh_names, var, truncate)
+             with h5py.File(hdf_path, 'r') as hdf_path:
+                 return HdfResultsMesh._get_mesh_cells_timeseries_output(hdf_path, mesh_names, var, truncate)
          except Exception as e:
              logger.error(f"Error in get_mesh_cells_timeseries: {str(e)}")
              raise ValueError(f"Error processing timeseries output data: {e}")
@@ -219,7 +219,7 @@ class HdfResultsMesh:
      @staticmethod
      @log_call
      @standardize_input(file_type='plan_hdf')
-     def get_mesh_last_iter(hdf_path: Path) -> pd.DataFrame:
+     def get_mesh_last_iter(hdf_file: Path) -> pd.DataFrame:
          """
          Get last iteration count for each mesh cell.
 
@@ -229,7 +229,7 @@ class HdfResultsMesh:
          Returns:
              pd.DataFrame: DataFrame containing last iteration counts.
          """
-         return HdfResultsMesh.get_mesh_summary_output(hdf_path, "Cell Last Iteration")
+         return HdfResultsMesh.get_mesh_summary_output(hdf_file, "Cell Last Iteration")
 
 
      @staticmethod
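A note on the parameter renames in these hunks (illustrative; not part of the diff): under the new Decorators.py, the name of the wrapped function's first parameter signals what the decorator supplies. A sketch of the two conventions, with hypothetical function names, assuming an initialized project:

    from pathlib import Path
    from ras_commander.Decorators import standardize_input

    @standardize_input(file_type='plan_hdf')
    def reads_a_path(hdf_path: Path):
        ...        # decorator supplies a validated Path; the function opens the file itself

    @standardize_input(file_type='plan_hdf')
    def reads_a_handle(hdf_file):
        ...        # decorator opens a str/Path input and supplies an open h5py.File object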
@@ -394,7 +394,7 @@ class HdfResultsMesh:
 
 
      @staticmethod
-     def _get_mesh_cells_timeseries_output(hdf_file: h5py.File,
+     def _get_mesh_cells_timeseries_output(hdf_path: h5py.File,
                                            mesh_names: Optional[Union[str, List[str]]] = None,
                                            var: Optional[str] = None,
                                            truncate: bool = False) -> Dict[str, xr.Dataset]:
@@ -402,7 +402,7 @@ class HdfResultsMesh:
          Get mesh cells timeseries output for specified meshes and variables.
 
          Args:
-             hdf_file (h5py.File): Open HDF file object.
+             hdf_path (h5py.File): Open HDF file object.
              mesh_names (Optional[Union[str, List[str]]]): Name(s) of the mesh(es). If None, processes all available meshes.
              var (Optional[str]): Name of the variable to retrieve. If None, retrieves all variables.
              truncate (bool): If True, truncates the output to remove trailing zeros.
@@ -431,11 +431,11 @@ class HdfResultsMesh:
          }
 
          try:
-             start_time = HdfBase.get_simulation_start_time(hdf_file)
-             time_stamps = HdfBase.get_unsteady_timestamps(hdf_file)
+             start_time = HdfBase.get_simulation_start_time(hdf_path)
+             time_stamps = HdfBase.get_unsteady_timestamps(hdf_path)
 
              if mesh_names is None:
-                 mesh_names = HdfResultsMesh._get_available_meshes(hdf_file)
+                 mesh_names = HdfResultsMesh._get_available_meshes(hdf_path)
              elif isinstance(mesh_names, str):
                  mesh_names = [mesh_names]
 
@@ -450,7 +450,7 @@ class HdfResultsMesh:
              for variable in variables:
                  try:
                      path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, variable)
-                     dataset = hdf_file[path]
+                     dataset = hdf_path[path]
                      values = dataset[:]
                      units = dataset.attrs.get("Units", "").decode("utf-8")
 
@@ -495,12 +495,12 @@ class HdfResultsMesh:
 
 
      @staticmethod
-     def _get_mesh_timeseries_output(hdf_file: h5py.File, mesh_name: str, var: str, truncate: bool = True) -> xr.DataArray:
+     def _get_mesh_timeseries_output(hdf_path: h5py.File, mesh_name: str, var: str, truncate: bool = True) -> xr.DataArray:
          """
          Get timeseries output for a specific mesh and variable.
 
          Args:
-             hdf_file (h5py.File): Open HDF file object.
+             hdf_path (h5py.File): Open HDF file object.
              mesh_name (str): Name of the mesh.
              var (str): Variable name to retrieve.
              truncate (bool): Whether to truncate the output to remove trailing zeros (default True).
@@ -514,18 +514,18 @@ class HdfResultsMesh:
          try:
              path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, var)
 
-             if path not in hdf_file:
+             if path not in hdf_path:
                  raise ValueError(f"Path {path} not found in HDF file")
 
-             dataset = hdf_file[path]
+             dataset = hdf_path[path]
              values = dataset[:]
              units = dataset.attrs.get("Units", "").decode("utf-8")
 
              # Get start time and timesteps
-             start_time = HdfBase.get_simulation_start_time(hdf_file)
+             start_time = HdfBase.get_simulation_start_time(hdf_path)
              # Updated to use the new function name from HdfUtils
              timesteps = HdfUtils.convert_timesteps_to_datetimes(
-                 np.array(hdf_file["Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/Time"][:]),
+                 np.array(hdf_path["Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series/Time"][:]),
                  start_time
              )
 
@@ -555,12 +555,12 @@ class HdfResultsMesh:
 
 
      @staticmethod
-     def _get_mesh_timeseries_output_values_units(hdf_file: h5py.File, mesh_name: str, var: str) -> Tuple[np.ndarray, str]:
+     def _get_mesh_timeseries_output_values_units(hdf_path: h5py.File, mesh_name: str, var: str) -> Tuple[np.ndarray, str]:
          """
          Get the mesh timeseries output values and units for a specific variable from the HDF file.
 
          Args:
-             hdf_file (h5py.File): Open HDF file object.
+             hdf_path (h5py.File): Open HDF file object.
              mesh_name (str): Name of the mesh.
              var (str): Variable name to retrieve.
 
@@ -568,7 +568,7 @@ class HdfResultsMesh:
              Tuple[np.ndarray, str]: A tuple containing the output values and units.
          """
          path = HdfResultsMesh._get_mesh_timeseries_output_path(mesh_name, var)
-         group = hdf_file[path]
+         group = hdf_path[path]
          values = group[:]
          units = group.attrs.get("Units")
          if units is not None:
@@ -577,17 +577,17 @@ class HdfResultsMesh:
 
 
      @staticmethod
-     def _get_available_meshes(hdf_file: h5py.File) -> List[str]:
+     def _get_available_meshes(hdf_path: h5py.File) -> List[str]:
          """
          Get the names of all available meshes in the HDF file.
 
          Args:
-             hdf_file (h5py.File): Open HDF file object.
+             hdf_path (h5py.File): Open HDF file object.
 
          Returns:
              List[str]: A list of mesh names.
          """
-         return HdfMesh.get_mesh_area_names(hdf_file)
+         return HdfMesh.get_mesh_area_names(hdf_path)
 
 
      @staticmethod
@@ -599,7 +599,7 @@ class HdfResultsMesh:
 
          Parameters
          ----------
-         hdf_file : h5py.File
+         hdf_path : h5py.File
              Open HDF file object.
          var : str
              The summary output variable to retrieve.
@@ -734,7 +734,7 @@ class HdfResultsMesh:
          Return the HDF group for a given mesh and summary output variable.
 
          Args:
-             hdf_file (h5py.File): Open HDF file object.
+             hdf_path (h5py.File): Open HDF file object.
              mesh_name (str): Name of the mesh.
              var (str): Name of the summary output variable.
 
@@ -175,6 +175,9 @@ class RasPrj:
              # Set paths for geometry and flow files
              self._set_file_paths()
 
+             # Make sure all plan paths are properly set
+             self._set_plan_paths()
+
          except Exception as e:
              logger.error(f"Error loading project data: {e}")
              raise
@@ -220,6 +223,44 @@ class RasPrj:
                  flow_path = self.project_folder / f"{self.project_name}.{prefix}{row['Flow File']}"
                  self.plan_df.at[idx, 'Flow Path'] = str(flow_path)
 
+     def _set_plan_paths(self):
+         """Set full path information for plan files and their associated geometry and flow files."""
+         if self.plan_df.empty:
+             logger.debug("Plan DataFrame is empty, no paths to set")
+             return
+
+         # Ensure full path is set for all plan entries
+         if 'full_path' not in self.plan_df.columns or self.plan_df['full_path'].isna().any():
+             self.plan_df['full_path'] = self.plan_df['plan_number'].apply(
+                 lambda x: str(self.project_folder / f"{self.project_name}.p{x}")
+             )
+
+         # Create the Geom Path and Flow Path columns if they don't exist
+         if 'Geom Path' not in self.plan_df.columns:
+             self.plan_df['Geom Path'] = None
+         if 'Flow Path' not in self.plan_df.columns:
+             self.plan_df['Flow Path'] = None
+
+         # Update paths for each plan entry
+         for idx, row in self.plan_df.iterrows():
+             try:
+                 # Set geometry path if Geom File exists and Geom Path is missing or invalid
+                 if pd.notna(row['Geom File']):
+                     geom_path = self.project_folder / f"{self.project_name}.g{row['Geom File']}"
+                     self.plan_df.at[idx, 'Geom Path'] = str(geom_path)
+
+                 # Set flow path if Flow File exists and Flow Path is missing or invalid
+                 if pd.notna(row['Flow File']):
+                     # Determine the prefix (u for unsteady, f for steady flow)
+                     prefix = 'u' if pd.notna(row['unsteady_number']) else 'f'
+                     flow_path = self.project_folder / f"{self.project_name}.{prefix}{row['Flow File']}"
+                     self.plan_df.at[idx, 'Flow Path'] = str(flow_path)
+
+                 if not self.suppress_logging:
+                     logger.debug(f"Plan {row['plan_number']} paths set up")
+             except Exception as e:
+                 logger.error(f"Error setting paths for plan {row.get('plan_number', idx)}: {e}")
+
      def _get_geom_file_for_plan(self, plan_number):
          """
          Get the geometry file path for a given plan number.
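For orientation (illustrative; not part of the diff), the path columns populated by the new _set_plan_paths can be read straight off plan_df after initialization. The project folder below is an assumption:

    from ras_commander.RasPrj import init_ras_project, ras

    init_ras_project(r"C:/Projects/MyRASProject", "6.5")   # assumed folder/version
    # Each plan row now carries resolved file locations alongside the raw file numbers.
    print(ras.plan_df[['plan_number', 'full_path', 'Geom Path', 'Flow Path']])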
@@ -939,33 +980,60 @@ class RasPrj:
              return df
 
          if entry_type == 'Plan':
-             # Define column groups
+             # Set required column order
              first_cols = ['plan_number', 'unsteady_number', 'geometry_number']
+
+             # Standard plan key columns in the exact order specified
              plan_key_cols = [
                  'Plan Title', 'Program Version', 'Short Identifier', 'Simulation Date',
-                 'Computation Interval', 'Mapping Interval', 'Run HTab', 'Run UNet',
-                 'Run Sediment', 'Run PostProcess', 'Run WQNet', 'UNET Use Existing IB Tables',
-                 'HDF_Results_Path', 'UNET 1D Methodology'
+                 'Std Step Tol', 'Computation Interval', 'Output Interval', 'Instantaneous Interval',
+                 'Mapping Interval', 'Run HTab', 'Run UNet', 'Run Sediment', 'Run PostProcess',
+                 'Run WQNet', 'Run RASMapper', 'UNET Use Existing IB Tables', 'HDF_Results_Path',
+                 'UNET 1D Methodology', 'Write IC File', 'Write IC File at Fixed DateTime',
+                 'IC Time', 'Write IC File Reoccurance', 'Write IC File at Sim End'
              ]
+
+             # Additional convenience columns
              file_path_cols = ['Geom File', 'Geom Path', 'Flow File', 'Flow Path']
 
-             # Build column list
+             # Special columns that must be preserved
+             special_cols = ['HDF_Results_Path']
+
+             # Build the final column list
              all_cols = first_cols.copy()
-             all_cols.extend([col for col in plan_key_cols if col in df.columns])
-             all_cols.extend([col for col in df.columns if col not in all_cols + file_path_cols + ['full_path']])
+
+             # Add plan key columns if they exist
+             for col in plan_key_cols:
+                 if col in df.columns and col not in all_cols and col not in special_cols:
+                     all_cols.append(col)
+
+             # Add any remaining columns not explicitly specified
+             other_cols = [col for col in df.columns if col not in all_cols + file_path_cols + special_cols + ['full_path']]
+             all_cols.extend(other_cols)
+
+             # Add HDF_Results_Path if it exists (ensure it comes before file paths)
+             for special_col in special_cols:
+                 if special_col in df.columns and special_col not in all_cols:
+                     all_cols.append(special_col)
+
+             # Add file path columns at the end
              all_cols.extend(file_path_cols)
 
-             # Rename and fill missing columns
+             # Rename plan_number column
              df = df.rename(columns={f'{entry_type.lower()}_number': 'plan_number'})
+
+             # Fill in missing columns with None
              for col in all_cols:
                  if col not in df.columns:
                      df[col] = None
 
-             # Add full_path if present
-             if 'full_path' in df.columns:
+             # Make sure full_path column is preserved and included
+             if 'full_path' in df.columns and 'full_path' not in all_cols:
                  all_cols.append('full_path')
 
-             return df[[col for col in all_cols if col in df.columns]]
+             # Return DataFrame with specified column order
+             cols_to_return = [col for col in all_cols if col in df.columns]
+             return df[cols_to_return]
 
          return df
 
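As a rough sanity check of the reordering (illustrative; not part of the diff), assuming an initialized project whose plan_df contains both HDF_Results_Path and the file path columns:

    cols = list(ras.plan_df.columns)
    assert cols[:3] == ['plan_number', 'unsteady_number', 'geometry_number']      # identifying columns first
    assert cols.index('HDF_Results_Path') < cols.index('Geom File')               # results path kept ahead of file path columns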
@@ -1035,24 +1103,6 @@ class RasPrj:
              hdf_path = self.project_folder / f"{self.project_name}.p{entry_number}.hdf"
              entry['HDF_Results_Path'] = str(hdf_path) if hdf_path.exists() else None
 
-     def _set_plan_paths(self):
-         """Helper method to set paths in plan_df."""
-         if not self.plan_df['full_path'].any():
-             self.plan_df['full_path'] = self.plan_df['plan_number'].apply(
-                 lambda x: str(self.project_folder / f"{self.project_name}.p{x}")
-             )
-
-         for idx, row in self.plan_df.iterrows():
-             if pd.notna(row['Geom File']) and pd.isna(row.get('Geom Path')):
-                 self.plan_df.at[idx, 'Geom Path'] = str(self.project_folder / f"{self.project_name}.g{row['Geom File']}")
-
-             if pd.notna(row['Flow File']) and pd.isna(row.get('Flow Path')):
-                 prefix = 'u' if pd.notna(row['unsteady_number']) else 'f'
-                 self.plan_df.at[idx, 'Flow Path'] = str(self.project_folder / f"{self.project_name}.{prefix}{row['Flow File']}")
-
-             if not self.suppress_logging:
-                 logger.info(f"Plan {row['plan_number']} paths set up")
-
 
  # Create a global instance named 'ras'
  # Defining the global instance allows the init_ras_project function to initialize the project.
@@ -10,7 +10,7 @@ try:
      __version__ = version("ras-commander")
  except PackageNotFoundError:
      # package is not installed
-     __version__ = "0.66.0"
+     __version__ = "0.68.0"
 
  # Set up logging
  setup_logging()
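A small aside (illustrative; not part of the diff): the fallback string only matters when the distribution metadata is unavailable; for a normal install the version is read from the package metadata, e.g.:

    import ras_commander
    print(ras_commander.__version__)   # "0.68.0" for this release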
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ras-commander
- Version: 0.66.0
+ Version: 0.68.0
  Summary: A Python library for automating HEC-RAS 6.x operations
  Home-page: https://github.com/gpt-cmdr/ras-commander
  Author: William M. Katzenmeyer, P.E., C.F.M.
@@ -28,7 +28,7 @@ class CustomBuildPy(build_py):
 
  setup(
      name="ras-commander",
-     version="0.66.0",
+     version="0.68.0",
      packages=find_packages(),
      include_package_data=True,
      python_requires='>=3.10',
@@ -1,144 +0,0 @@
- from functools import wraps
- from pathlib import Path
- from typing import Union
- import logging
- import h5py
- import inspect
-
-
- def log_call(func):
-     @wraps(func)
-     def wrapper(*args, **kwargs):
-         logger = logging.getLogger(func.__module__)
-         logger.debug(f"Calling {func.__name__}")
-         result = func(*args, **kwargs)
-         logger.debug(f"Finished {func.__name__}")
-         return result
-     return wrapper
-
- def standardize_input(file_type: str = 'plan_hdf'):
-     """
-     Decorator to standardize input for HDF file operations.
-
-     This decorator processes various input types and converts them to a Path object
-     pointing to the correct HDF file. It handles the following input types:
-     - h5py.File objects
-     - pathlib.Path objects
-     - Strings (file paths or plan/geom numbers)
-     - Integers (interpreted as plan/geom numbers)
-
-     The decorator also manages RAS object references and logging.
-
-     Args:
-         file_type (str): Specifies whether to look for 'plan_hdf' or 'geom_hdf' files.
-
-     Returns:
-         A decorator that wraps the function to standardize its input to a Path object.
-     """
-     def decorator(func):
-         @wraps(func)
-         def wrapper(*args, **kwargs):
-             logger = logging.getLogger(func.__module__)
-
-             # Check if the function expects an hdf_path parameter
-             sig = inspect.signature(func)
-             param_names = list(sig.parameters.keys())
-
-             # If first parameter is 'hdf_file', skip path processing
-             if param_names and param_names[0] == 'hdf_file':
-                 return func(*args, **kwargs)
-
-             # Handle both static method calls and regular function calls
-             if args and isinstance(args[0], type):
-                 # Static method call, remove the class argument
-                 args = args[1:]
-
-             hdf_input = kwargs.pop('hdf_path', None) or kwargs.pop('hdf_input', None) or (args[0] if args else None)
-
-             # Import ras here to ensure we get the most current instance
-             from .RasPrj import ras as ras
-             ras_object = kwargs.pop('ras_object', None) or (args[1] if len(args) > 1 else None)
-             ras_obj = ras_object or ras
-
-             # If no hdf_input provided, return the function unmodified
-             if hdf_input is None:
-                 return func(*args, **kwargs)
-
-             # NEW: If input is already a Path and exists, use it directly regardless of file_type
-             if isinstance(hdf_input, Path) and hdf_input.is_file():
-                 logger.info(f"Using existing HDF file: {hdf_input}")
-                 new_args = (hdf_input,) + args[1:]
-                 return func(*new_args, **kwargs)
-
-             hdf_path = None
-
-             # If hdf_input is already an h5py.File object, use its filename
-             if isinstance(hdf_input, h5py.File):
-                 hdf_path = Path(hdf_input.filename)
-             # Handle Path objects
-             elif isinstance(hdf_input, Path):
-                 if hdf_input.is_file():
-                     hdf_path = hdf_input
-             # Handle string inputs
-             elif isinstance(hdf_input, str):
-                 # Check if it's a file path
-                 if Path(hdf_input).is_file():
-                     hdf_path = Path(hdf_input)
-                 # Check if it's a number (with or without 'p' prefix)
-                 elif hdf_input.isdigit() or (len(hdf_input) > 1 and hdf_input[0] == 'p' and hdf_input[1:].isdigit()):
-                     try:
-                         ras_obj.check_initialized()
-                     except Exception as e:
-                         raise ValueError(f"RAS object is not initialized: {str(e)}")
-
-                     # Extract the numeric part and convert to integer for comparison
-                     number_str = hdf_input if hdf_input.isdigit() else hdf_input[1:]
-                     number_int = int(number_str)
-
-                     if file_type == 'plan_hdf':
-                         # Convert plan_number column to integers for comparison
-                         plan_info = ras_obj.plan_df[ras_obj.plan_df['plan_number'].astype(int) == number_int]
-                         if not plan_info.empty:
-                             hdf_path = Path(plan_info.iloc[0]['HDF_Results_Path'])
-                     elif file_type == 'geom_hdf':
-                         # Convert geom_number column to integers for comparison
-                         geom_info = ras_obj.geom_df[ras_obj.geom_df['geom_number'].astype(int) == number_int]
-                         if not geom_info.empty:
-                             hdf_path = Path(geom_info.iloc[0]['HDF_Path'])
-                     else:
-                         raise ValueError(f"Invalid file type: {file_type}")
-             # Handle integer inputs (assuming they're plan or geom numbers)
-             elif isinstance(hdf_input, int):
-                 try:
-                     ras_obj.check_initialized()
-                 except Exception as e:
-                     raise ValueError(f"RAS object is not initialized: {str(e)}")
-
-                 number_int = hdf_input
-
-                 if file_type == 'plan_hdf':
-                     # Convert plan_number column to integers for comparison
-                     plan_info = ras_obj.plan_df[ras_obj.plan_df['plan_number'].astype(int) == number_int]
-                     if not plan_info.empty:
-                         hdf_path = Path(plan_info.iloc[0]['HDF_Results_Path'])
-                 elif file_type == 'geom_hdf':
-                     # Convert geom_number column to integers for comparison
-                     geom_info = ras_obj.geom_df[ras_obj.geom_df['geom_number'].astype(int) == number_int]
-                     if not geom_info.empty:
-                         hdf_path = Path(geom_info.iloc[0]['HDF_Path'])
-                 else:
-                     raise ValueError(f"Invalid file type: {file_type}")
-
-             if hdf_path is None or not hdf_path.is_file():
-                 error_msg = f"HDF file not found: {hdf_input}"
-                 logger.error(error_msg)
-                 raise FileNotFoundError(error_msg)
-
-             logger.info(f"Using HDF file: {hdf_path}")
-
-             # Pass all original arguments and keywords, replacing hdf_input with standardized hdf_path
-             new_args = (hdf_path,) + args[1:]
-             return func(*new_args, **kwargs)
-
-         return wrapper
-     return decorator