ras-commander 0.52.0__py3-none-any.whl → 0.54.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ras_commander/Decorators.py +137 -127
- ras_commander/HdfBase.py +21 -6
- ras_commander/HdfFluvialPluvial.py +553 -553
- ras_commander/HdfResultsPlan.py +192 -84
- ras_commander/HdfStruc.py +1 -1
- ras_commander/HdfXsec.py +2 -2
- ras_commander/LoggingConfig.py +2 -1
- ras_commander/RasCmdr.py +45 -20
- ras_commander/RasPlan.py +74 -65
- ras_commander/RasPrj.py +934 -917
- ras_commander/RasUnsteady.py +38 -19
- {ras_commander-0.52.0.dist-info → ras_commander-0.54.0.dist-info}/METADATA +92 -49
- {ras_commander-0.52.0.dist-info → ras_commander-0.54.0.dist-info}/RECORD +16 -16
- {ras_commander-0.52.0.dist-info → ras_commander-0.54.0.dist-info}/WHEEL +1 -1
- {ras_commander-0.52.0.dist-info → ras_commander-0.54.0.dist-info}/LICENSE +0 -0
- {ras_commander-0.52.0.dist-info → ras_commander-0.54.0.dist-info}/top_level.txt +0 -0
ras_commander/HdfResultsPlan.py
CHANGED
```diff
@@ -30,6 +30,7 @@ from .HdfResultsXsec import HdfResultsXsec
 from .LoggingConfig import get_logger
 import numpy as np
 from datetime import datetime
+from .RasPrj import ras
 
 logger = get_logger(__name__)
 
```
```diff
@@ -62,6 +63,7 @@ class HdfResultsPlan:
 
         Args:
             hdf_path (Path): Path to the HEC-RAS plan HDF file.
+            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
 
         Returns:
             pd.DataFrame: A DataFrame containing the unsteady attributes.
```
```diff
@@ -95,6 +97,7 @@ class HdfResultsPlan:
 
         Args:
             hdf_path (Path): Path to the HEC-RAS plan HDF file.
+            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
 
         Returns:
             pd.DataFrame: A DataFrame containing the results unsteady summary attributes.
```
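These docstring additions all document the same convention: each accessor can run against either the module-level `ras` instance or an explicitly constructed `RasPrj`. A minimal sketch of that pattern, assuming the package exports `RasPrj` and `init_ras_project` at the top level and that `ras_version` accepts a version string or HEC-RAS executable path (the folder and version shown are hypothetical):

```python
from ras_commander import RasPrj, init_ras_project

# Hypothetical project folder and version, for illustration only.
my_project = RasPrj()  # explicit instance instead of the global `ras`
init_ras_project(
    ras_project_folder="C:/models/MyProject",
    ras_version="6.5",
    ras_object=my_project,  # omit to initialize the global instance instead
)
```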
```diff
@@ -103,7 +106,7 @@ class HdfResultsPlan:
             FileNotFoundError: If the specified HDF file is not found.
             KeyError: If the "Results/Unsteady/Summary" group is not found in the HDF file.
         """
-        try:
+        try:
             with h5py.File(hdf_path, 'r') as hdf_file:
                 if "Results/Unsteady/Summary" not in hdf_file:
                     raise KeyError("Results/Unsteady/Summary group not found in the HDF file.")
```
```diff
@@ -122,29 +125,28 @@ class HdfResultsPlan:
     @staticmethod
     @log_call
     @standardize_input(file_type='plan_hdf')
-    def get_volume_accounting(hdf_path: Path) -> pd.DataFrame:
+    def get_volume_accounting(hdf_path: Path) -> Optional[pd.DataFrame]:
         """
         Get volume accounting attributes from a HEC-RAS HDF plan file.
 
         Args:
             hdf_path (Path): Path to the HEC-RAS plan HDF file.
+            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
 
         Returns:
-            pd.DataFrame:
+            Optional[pd.DataFrame]: DataFrame containing the volume accounting attributes,
+                or None if the group is not found.
 
         Raises:
             FileNotFoundError: If the specified HDF file is not found.
-            KeyError: If the "Results/Unsteady/Summary/Volume Accounting" group is not found in the HDF file.
         """
         try:
             with h5py.File(hdf_path, 'r') as hdf_file:
                 if "Results/Unsteady/Summary/Volume Accounting" not in hdf_file:
-
+                    return None
 
-                # Get attributes and
+                # Get attributes and convert to DataFrame
                 attrs_dict = dict(hdf_file["Results/Unsteady/Summary/Volume Accounting"].attrs)
-
-                # Create DataFrame with a single row index
                 return pd.DataFrame(attrs_dict, index=[0])
 
         except FileNotFoundError:
```
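With this change, a missing `Volume Accounting` group yields `None` instead of raising `KeyError`, so callers should branch on the return value. A minimal sketch, assuming `HdfResultsPlan` is exported at the package top level (the HDF path is hypothetical):

```python
from pathlib import Path

from ras_commander import HdfResultsPlan  # assumed top-level export

vol_df = HdfResultsPlan.get_volume_accounting(Path("C:/models/MyProject/MyProject.p01.hdf"))
if vol_df is None:
    print("No volume accounting output in this plan HDF.")
else:
    print(vol_df.T)  # single row of attributes; transpose for readability
```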
```diff
@@ -160,98 +162,204 @@ class HdfResultsPlan:
 
         Args:
             hdf_path (Path): Path to HEC-RAS plan HDF file
+            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
 
         Returns:
-            Optional[pd.DataFrame]: DataFrame containing
-                - Plan identification (name, file)
-                - Simulation timing (start, end, duration)
-                - Process-specific compute times
-                - Performance metrics (simulation speeds)
-            Returns None if required data cannot be extracted
+            Optional[pd.DataFrame]: DataFrame containing runtime statistics or None if data cannot be extracted
 
         Notes:
             - Times are reported in multiple units (ms, s, hours)
             - Compute speeds are calculated as simulation-time/compute-time ratios
             - Process times include: geometry, preprocessing, event conditions,
-
-
-        Example:
-            >>> runtime_stats = HdfResultsPlan.get_runtime_data('path/to/plan.hdf')
-            >>> if runtime_stats is not None:
-            >>>     print(f"Total compute time: {runtime_stats['Complete Process (hr)'][0]:.2f} hours")
+              and unsteady flow computations
         """
-
-
+        try:
+            if hdf_path is None:
+                logger.error(f"Could not find HDF file for input")
+                return None
+
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                logger.info(f"Extracting Plan Information from: {Path(hdf_file.filename).name}")
+                plan_info = hdf_file.get('/Plan Data/Plan Information')
+                if plan_info is None:
+                    logger.warning("Group '/Plan Data/Plan Information' not found.")
+                    return None
+
+                # Extract plan information
+                plan_name = HdfUtils.convert_ras_string(plan_info.attrs.get('Plan Name', 'Unknown'))
+                start_time_str = HdfUtils.convert_ras_string(plan_info.attrs.get('Simulation Start Time', 'Unknown'))
+                end_time_str = HdfUtils.convert_ras_string(plan_info.attrs.get('Simulation End Time', 'Unknown'))
+
+                try:
+                    # Check if times are already datetime objects
+                    if isinstance(start_time_str, datetime):
+                        start_time = start_time_str
+                    else:
+                        start_time = datetime.strptime(start_time_str, "%d%b%Y %H:%M:%S")
+
+                    if isinstance(end_time_str, datetime):
+                        end_time = end_time_str
+                    else:
+                        end_time = datetime.strptime(end_time_str, "%d%b%Y %H:%M:%S")
+
+                    simulation_duration = end_time - start_time
+                    simulation_hours = simulation_duration.total_seconds() / 3600
+                except ValueError as e:
+                    logger.error(f"Error parsing simulation times: {e}")
+                    return None
+
+                logger.info(f"Plan Name: {plan_name}")
+                logger.info(f"Simulation Duration (hours): {simulation_hours}")
+
+                # Extract compute processes data
+                compute_processes = hdf_file.get('/Results/Summary/Compute Processes')
+                if compute_processes is None:
+                    logger.warning("Dataset '/Results/Summary/Compute Processes' not found.")
+                    return None
+
+                # Process compute times
+                process_names = [HdfUtils.convert_ras_string(name) for name in compute_processes['Process'][:]]
+                filenames = [HdfUtils.convert_ras_string(filename) for filename in compute_processes['Filename'][:]]
+                completion_times = compute_processes['Compute Time (ms)'][:]
+
+                compute_processes_df = pd.DataFrame({
+                    'Process': process_names,
+                    'Filename': filenames,
+                    'Compute Time (ms)': completion_times,
+                    'Compute Time (s)': completion_times / 1000,
+                    'Compute Time (hours)': completion_times / (1000 * 3600)
+                })
+
+                # Create summary DataFrame
+                compute_processes_summary = {
+                    'Plan Name': [plan_name],
+                    'File Name': [Path(hdf_file.filename).name],
+                    'Simulation Start Time': [start_time_str],
+                    'Simulation End Time': [end_time_str],
+                    'Simulation Duration (s)': [simulation_duration.total_seconds()],
+                    'Simulation Time (hr)': [simulation_hours]
+                }
+
+                # Add process-specific times
+                process_types = {
+                    'Completing Geometry': 'Completing Geometry (hr)',
+                    'Preprocessing Geometry': 'Preprocessing Geometry (hr)',
+                    'Completing Event Conditions': 'Completing Event Conditions (hr)',
+                    'Unsteady Flow Computations': 'Unsteady Flow Computations (hr)'
+                }
+
+                for process, column in process_types.items():
+                    time_value = compute_processes_df[
+                        compute_processes_df['Process'] == process
+                    ]['Compute Time (hours)'].values[0] if process in process_names else 'N/A'
+                    compute_processes_summary[column] = [time_value]
+
+                # Add total process time
+                total_time = compute_processes_df['Compute Time (hours)'].sum()
+                compute_processes_summary['Complete Process (hr)'] = [total_time]
+
+                # Calculate speeds
+                if compute_processes_summary['Unsteady Flow Computations (hr)'][0] != 'N/A':
+                    compute_processes_summary['Unsteady Flow Speed (hr/hr)'] = [
+                        simulation_hours / compute_processes_summary['Unsteady Flow Computations (hr)'][0]
+                    ]
+                else:
+                    compute_processes_summary['Unsteady Flow Speed (hr/hr)'] = ['N/A']
+
+                compute_processes_summary['Complete Process Speed (hr/hr)'] = [
+                    simulation_hours / total_time
+                ]
+
+                return pd.DataFrame(compute_processes_summary)
+
+        except Exception as e:
+            logger.error(f"Error in get_runtime_data: {str(e)}")
             return None
 
-
-
-
-
-
-
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_reference_timeseries(hdf_path: Path, reftype: str) -> pd.DataFrame:
+        """
+        Get reference line or point timeseries output from HDF file.
 
-
-
-
+        Args:
+            hdf_path (Path): Path to HEC-RAS plan HDF file
+            reftype (str): Type of reference data ('lines' or 'points')
+            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
 
-
-
-
-
+        Returns:
+            pd.DataFrame: DataFrame containing reference timeseries data
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                base_path = "Results/Unsteady/Output/Output Blocks/Base Output/Unsteady Time Series"
+                ref_path = f"{base_path}/Reference {reftype.capitalize()}"
+
+                if ref_path not in hdf_file:
+                    logger.warning(f"Reference {reftype} data not found in HDF file")
+                    return pd.DataFrame()
 
-
-
-
-
+                ref_group = hdf_file[ref_path]
+                time_data = hdf_file[f"{base_path}/Time"][:]
+
+                dfs = []
+                for ref_name in ref_group.keys():
+                    ref_data = ref_group[ref_name][:]
+                    df = pd.DataFrame(ref_data, columns=[ref_name])
+                    df['Time'] = time_data
+                    dfs.append(df)
 
-
-
-            logger.info(f"Simulation Duration (hours): {simulation_hours}")
+                if not dfs:
+                    return pd.DataFrame()
 
-
-            if compute_processes is None:
-                logger.warning("Dataset '/Results/Summary/Compute Processes' not found.")
-                return None
+                return pd.concat(dfs, axis=1)
 
-
-
-
-
-            compute_processes_df = pd.DataFrame({
-                'Process': process_names,
-                'Filename': filenames,
-                'Compute Time (ms)': completion_times,
-                'Compute Time (s)': completion_times / 1000,
-                'Compute Time (hours)': completion_times / (1000 * 3600)
-            })
-
-            logger.debug("Compute processes DataFrame:")
-            logger.debug(compute_processes_df)
-
-            compute_processes_summary = {
-                'Plan Name': [plan_name],
-                'File Name': [Path(hdf_file.filename).name],
-                'Simulation Start Time': [start_time_str],
-                'Simulation End Time': [end_time_str],
-                'Simulation Duration (s)': [simulation_duration.total_seconds()],
-                'Simulation Time (hr)': [simulation_hours],
-                'Completing Geometry (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Completing Geometry']['Compute Time (hours)'].values[0] if 'Completing Geometry' in compute_processes_df['Process'].values else 'N/A'],
-                'Preprocessing Geometry (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Preprocessing Geometry']['Compute Time (hours)'].values[0] if 'Preprocessing Geometry' in compute_processes_df['Process'].values else 'N/A'],
-                'Completing Event Conditions (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Completing Event Conditions']['Compute Time (hours)'].values[0] if 'Completing Event Conditions' in compute_processes_df['Process'].values else 'N/A'],
-                'Unsteady Flow Computations (hr)': [compute_processes_df[compute_processes_df['Process'] == 'Unsteady Flow Computations']['Compute Time (hours)'].values[0] if 'Unsteady Flow Computations' in compute_processes_df['Process'].values else 'N/A'],
-                'Complete Process (hr)': [compute_processes_df['Compute Time (hours)'].sum()]
-            }
-
-            compute_processes_summary['Unsteady Flow Speed (hr/hr)'] = [simulation_hours / compute_processes_summary['Unsteady Flow Computations (hr)'][0] if compute_processes_summary['Unsteady Flow Computations (hr)'][0] != 'N/A' else 'N/A']
-            compute_processes_summary['Complete Process Speed (hr/hr)'] = [simulation_hours / compute_processes_summary['Complete Process (hr)'][0] if compute_processes_summary['Complete Process (hr)'][0] != 'N/A' else 'N/A']
-
-            compute_summary_df = pd.DataFrame(compute_processes_summary)
-            logger.debug("Compute summary DataFrame:")
-            logger.debug(compute_summary_df)
-
-            return compute_summary_df
+        except Exception as e:
+            logger.error(f"Error reading reference {reftype} timeseries: {str(e)}")
+            return pd.DataFrame()
 
-
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_reference_summary(hdf_path: Path, reftype: str) -> pd.DataFrame:
+        """
+        Get reference line or point summary output from HDF file.
 
+        Args:
+            hdf_path (Path): Path to HEC-RAS plan HDF file
+            reftype (str): Type of reference data ('lines' or 'points')
+            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
 
+        Returns:
+            pd.DataFrame: DataFrame containing reference summary data
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                base_path = "Results/Unsteady/Output/Output Blocks/Base Output/Summary Output"
+                ref_path = f"{base_path}/Reference {reftype.capitalize()}"
+
+                if ref_path not in hdf_file:
+                    logger.warning(f"Reference {reftype} summary data not found in HDF file")
+                    return pd.DataFrame()
+
+                ref_group = hdf_file[ref_path]
+                dfs = []
+
+                for ref_name in ref_group.keys():
+                    ref_data = ref_group[ref_name][:]
+                    if ref_data.ndim == 2:
+                        df = pd.DataFrame(ref_data.T, columns=['Value', 'Time'])
+                    else:
+                        df = pd.DataFrame({'Value': ref_data})
+                    df['Reference'] = ref_name
+                    dfs.append(df)
+
+                if not dfs:
+                    return pd.DataFrame()
+
+                return pd.concat(dfs, ignore_index=True)
+
+        except Exception as e:
+            logger.error(f"Error reading reference {reftype} summary: {str(e)}")
+            return pd.DataFrame()
```
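Taken together, the rewritten `get_runtime_data` and the new reference-output accessors support a quick post-run report. A sketch under the same assumptions as above (hypothetical path; `HdfResultsPlan` assumed exported at the package level; the column names come from the diff itself):

```python
from ras_commander import HdfResultsPlan

plan_hdf = "C:/models/MyProject/MyProject.p01.hdf"  # hypothetical path

runtime = HdfResultsPlan.get_runtime_data(plan_hdf)
if runtime is not None:
    print(f"Total compute time: {runtime['Complete Process (hr)'][0]:.2f} hr")
    print(f"Overall speed: {runtime['Complete Process Speed (hr/hr)'][0]:.1f}x real time")

# Reference line hydrographs; an empty DataFrame is returned when the group is absent.
ref_lines = HdfResultsPlan.get_reference_timeseries(plan_hdf, reftype="lines")
print(ref_lines.head())
```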
ras_commander/HdfStruc.py
CHANGED
```diff
@@ -269,7 +269,7 @@ class HdfStruc:
             if "Geometry/Structures" not in hdf_file:
                 logger.info(f"No structures found in the geometry file: {hdf_path}")
                 return {}
-            return HdfUtils.
+            return HdfUtils.convert_hdf5_attrs_to_dict(hdf_file["Geometry/Structures"].attrs)
         except Exception as e:
             logger.error(f"Error reading geometry structures attributes: {str(e)}")
             return {}
```
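This completes a previously truncated call to `HdfUtils.convert_hdf5_attrs_to_dict`. As a rough sketch of what such a helper has to do (an assumption about its behavior, not the library's actual implementation), HDF5 attributes often arrive as `bytes` or NumPy scalars and need normalizing into plain Python values:

```python
import h5py
import numpy as np

def attrs_to_dict(attrs: h5py.AttributeManager) -> dict:
    """Decode HDF5 attributes into plain Python values (illustrative only)."""
    out = {}
    for key, value in attrs.items():
        if isinstance(value, bytes):
            value = value.decode("utf-8", errors="replace")  # byte strings -> str
        elif isinstance(value, np.generic):
            value = value.item()  # unwrap NumPy scalar to int/float/bool
        out[key] = value
    return out
```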
ras_commander/HdfXsec.py
CHANGED
```diff
@@ -5,7 +5,7 @@ Attribution: A substantial amount of code in this file is sourced or derived
 from the https://github.com/fema-ffrd/rashdf library,
 released under MIT license and Copyright (c) 2024 fema-ffrd
 
-
+This source code has been forked and modified for use in RAS Commander.
 
 -----
 
@@ -409,7 +409,7 @@ class HdfXsec:
                 result_gdf.at[idx, 'points'] = points
 
                 # Add stationing direction based on upstream/downstream info
-                if row['
+                if row['US Type'] == 'Junction' and row['DS Type'] != 'Junction':
                     # Reverse stationing if upstream is junction
                     result_gdf.at[idx, 'station_start'] = total_length
                     result_gdf.at[idx, 'station_end'] = 0.0
```
ras_commander/LoggingConfig.py
CHANGED
```diff
@@ -23,7 +23,8 @@ def setup_logging(log_file=None, log_level=logging.INFO):
 
     # Define log format
     log_format = logging.Formatter(
-        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+        '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+        datefmt='%Y-%m-%d %H:%M:%S'
     )
 
     # Configure console handler
```
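The added `datefmt` pins timestamps to a fixed `YYYY-MM-DD HH:MM:SS` form (dropping the default milliseconds suffix). A self-contained check of the same formatter, using only the standard library:

```python
import logging

formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)
handler = logging.StreamHandler()
handler.setFormatter(formatter)

logger = logging.getLogger("ras_commander.demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)
logger.info("hello")  # e.g. "2024-01-31 12:00:00 - ras_commander.demo - INFO - hello"
```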
ras_commander/RasCmdr.py
CHANGED
```diff
@@ -271,13 +271,13 @@ class RasCmdr:
             logger.info(f"Created worker folder: {worker_folder}")
 
             try:
-
-
+                worker_ras = RasPrj()
+                worker_ras_object = init_ras_project(
                     ras_project_folder=worker_folder,
                     ras_version=ras_obj.ras_exe_path,
-
+                    ras_object=worker_ras
                 )
-                worker_ras_objects[worker_id] =
+                worker_ras_objects[worker_id] = worker_ras_object
             except Exception as e:
                 logger.critical(f"Failed to initialize RAS project for worker {worker_id}: {str(e)}")
                 worker_ras_objects[worker_id] = None
```
```diff
@@ -317,28 +317,53 @@ class RasCmdr:
                 continue
             worker_folder = Path(worker_ras.project_folder)
             try:
-
-
-
-
-
-
-
-
-
-
-
-
-
+                # First, close any open resources in the worker RAS object
+                worker_ras.close() if hasattr(worker_ras, 'close') else None
+
+                # Add a small delay to ensure file handles are released
+                time.sleep(1)
+
+                # Move files with retry mechanism
+                max_retries = 3
+                for retry in range(max_retries):
+                    try:
+                        for item in worker_folder.iterdir():
+                            dest_path = final_dest_folder / item.name
+                            if dest_path.exists():
+                                if dest_path.is_dir():
+                                    shutil.rmtree(dest_path)
+                                else:
+                                    dest_path.unlink()
+                            # Use copy instead of move for more reliability
+                            if item.is_dir():
+                                shutil.copytree(item, dest_path)
+                            else:
+                                shutil.copy2(item, dest_path)
+
+                        # Add another small delay before removal
+                        time.sleep(1)
+
+                        # Try to remove the worker folder
+                        if worker_folder.exists():
+                            shutil.rmtree(worker_folder)
+                        break  # If successful, break the retry loop
+
+                    except PermissionError as pe:
+                        if retry == max_retries - 1:  # If this was the last retry
+                            logger.error(f"Failed to move/remove files after {max_retries} attempts: {str(pe)}")
+                            raise
+                        time.sleep(2 ** retry)  # Exponential backoff
+                        continue
+
             except Exception as e:
                 logger.error(f"Error moving results from {worker_folder} to {final_dest_folder}: {str(e)}")
 
         try:
-
+            final_dest_folder_ras = RasPrj()
             final_dest_folder_ras_obj = init_ras_project(
                 ras_project_folder=final_dest_folder,
                 ras_version=ras_obj.ras_exe_path,
-
+                ras_object=final_dest_folder_ras
            )
            final_dest_folder_ras_obj.check_initialized()
        except Exception as e:
```
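The new cleanup path copies results out of each worker folder with bounded retries and exponential backoff, since Windows file locks from a just-finished HEC-RAS run can linger briefly. The same pattern in isolation, using only the standard library (the folder names are hypothetical):

```python
import shutil
import time
from pathlib import Path

def copy_with_retry(src: Path, dest: Path, max_retries: int = 3) -> None:
    """Copy a directory tree, retrying with exponential backoff on lingering file locks."""
    for retry in range(max_retries):
        try:
            shutil.copytree(src, dest, dirs_exist_ok=True)  # overwrite existing files
            return
        except PermissionError:
            if retry == max_retries - 1:
                raise  # give up after the final attempt
            time.sleep(2 ** retry)  # wait 1s, 2s, ... before retrying

copy_with_retry(Path("project [Worker 1]"), Path("project [Computed]"))
```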
```diff
@@ -379,7 +404,7 @@ class RasCmdr:
         other two compute_ functions. Per the original HEC-RAS test flag, it creates a separate test folder,
         copies the project there, and executes the specified plans in sequential order.
 
-        For most purposes, just copying
+        For most purposes, just copying the project folder, initing that new folder, then running each plan
         with compute_plan is a simpler and more flexible approach. This is shown in the examples provided
         in the ras-commander library.
 
```
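The updated docstring sentence recommends the copy-init-run alternative. A sketch of that workflow under stated assumptions: paths are hypothetical, and while `init_ras_project`'s arguments appear verbatim in this diff, the `RasCmdr.compute_plan` call shape shown here is illustrative only.

```python
import shutil
from ras_commander import RasPrj, RasCmdr, init_ras_project  # assumed top-level exports

# Copy the project to a scratch folder so the original stays untouched.
shutil.copytree("C:/models/MyProject", "C:/models/MyProject [Test]")

test_ras = RasPrj()
init_ras_project(
    ras_project_folder="C:/models/MyProject [Test]",
    ras_version="6.5",
    ras_object=test_ras,
)

for plan_number in ["01", "02"]:
    RasCmdr.compute_plan(plan_number, ras_object=test_ras)  # hypothetical signature
```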