ras-commander 0.80.1__tar.gz → 0.80.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ras_commander-0.80.1/ras_commander.egg-info → ras_commander-0.80.3}/PKG-INFO +1 -1
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfResultsPlan.py +338 -7
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/RasExamples.py +26 -6
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/RasPrj.py +11 -2
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/__init__.py +1 -1
- {ras_commander-0.80.1 → ras_commander-0.80.3/ras_commander.egg-info}/PKG-INFO +1 -1
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander.egg-info/SOURCES.txt +1 -2
- {ras_commander-0.80.1 → ras_commander-0.80.3}/setup.py +1 -1
- ras_commander-0.80.1/tests/test_ras_examples_initialization.py +0 -241
- {ras_commander-0.80.1 → ras_commander-0.80.3}/LICENSE +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/README.md +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/pyproject.toml +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/Decorators.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfBase.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfBndry.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfFluvialPluvial.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfInfiltration.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfMesh.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfPipe.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfPlan.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfPlot.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfPump.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfResultsMesh.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfResultsPlot.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfResultsXsec.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfStruc.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfUtils.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/HdfXsec.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/LoggingConfig.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/RasCmdr.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/RasGeo.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/RasMap.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/RasPlan.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/RasUnsteady.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander/RasUtils.py +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander.egg-info/dependency_links.txt +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander.egg-info/requires.txt +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/ras_commander.egg-info/top_level.txt +0 -0
- {ras_commander-0.80.1 → ras_commander-0.80.3}/setup.cfg +0 -0
ras_commander/HdfResultsPlan.py

@@ -6,14 +6,23 @@ Attribution:
 Copyright (c) 2024 fema-ffrd, MIT license
 
 Description:
-    Provides static methods for extracting unsteady flow results,
-    and reference data from HEC-RAS plan HDF files.
+    Provides static methods for extracting both unsteady and steady flow results,
+    volume accounting, and reference data from HEC-RAS plan HDF files.
 
 Available Functions:
-
-
-
-
+    Unsteady Flow:
+        - get_unsteady_info: Extract unsteady attributes
+        - get_unsteady_summary: Extract unsteady summary data
+        - get_volume_accounting: Extract volume accounting data
+        - get_runtime_data: Extract runtime and compute time data
+        - get_reference_timeseries: Extract reference line/point timeseries
+        - get_reference_summary: Extract reference line/point summary
+
+    Steady Flow:
+        - is_steady_plan: Check if HDF contains steady state results
+        - get_steady_profile_names: Extract steady state profile names
+        - get_steady_wse: Extract WSE data for steady state profiles
+        - get_steady_info: Extract steady flow attributes and metadata
 
 Note:
     All methods are static and designed to be used without class instantiation.
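The hunk that follows adds the implementations of the steady-flow helpers listed above. As a minimal usage sketch (not taken from the package; it assumes a project initialized with init_ras_project and a plan '01' whose HDF holds Results/Steady), the new entry points could be exercised like this:

```python
from pathlib import Path
from ras_commander import HdfResultsPlan, init_ras_project

# Placeholder project folder and HEC-RAS version; adjust to a real project.
init_ras_project(Path(r"C:/Projects/MySteadyProject"), "6.6")

# Dispatch on plan type before requesting steady-only outputs.
if HdfResultsPlan.is_steady_plan("01"):
    profiles = HdfResultsPlan.get_steady_profile_names("01")
    print(f"Steady profiles in plan 01: {profiles}")
else:
    print("Plan 01 holds unsteady results; use the unsteady getters instead.")
```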
@@ -378,4 +387,326 @@ class HdfResultsPlan:
 
         except Exception as e:
             logger.error(f"Error reading reference {reftype} summary: {str(e)}")
-            return pd.DataFrame()
+            return pd.DataFrame()
+
+    # ==================== STEADY STATE METHODS ====================
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def is_steady_plan(hdf_path: Path) -> bool:
+        """
+        Check if HDF file contains steady state results.
+
+        Args:
+            hdf_path (Path): Path to HEC-RAS plan HDF file
+            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
+
+        Returns:
+            bool: True if the HDF contains steady state results, False otherwise
+
+        Notes:
+            - Checks for existence of Results/Steady group
+            - Does not guarantee results are complete or valid
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                return "Results/Steady" in hdf_file
+        except Exception as e:
+            logger.error(f"Error checking if plan is steady: {str(e)}")
+            return False
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_steady_profile_names(hdf_path: Path) -> List[str]:
+        """
+        Extract profile names from steady state results.
+
+        Args:
+            hdf_path (Path): Path to HEC-RAS plan HDF file
+            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
+
+        Returns:
+            List[str]: List of profile names (e.g., ['50Pct', '10Pct', '1Pct'])
+
+        Raises:
+            FileNotFoundError: If the specified HDF file is not found
+            KeyError: If steady state results or profile names are not found
+            ValueError: If the plan is not a steady state plan
+
+        Example:
+            >>> from ras_commander import HdfResultsPlan, init_ras_project
+            >>> init_ras_project(Path('/path/to/project'), '6.6')
+            >>> profiles = HdfResultsPlan.get_steady_profile_names('01')
+            >>> print(profiles)
+            ['50Pct', '20Pct', '10Pct', '4Pct', '2Pct', '1Pct', '0.2Pct']
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                # Check if this is a steady state plan
+                if "Results/Steady" not in hdf_file:
+                    raise ValueError(f"HDF file does not contain steady state results: {hdf_path.name}")
+
+                # Path to profile names
+                profile_names_path = "Results/Steady/Output/Output Blocks/Base Output/Steady Profiles/Profile Names"
+
+                if profile_names_path not in hdf_file:
+                    raise KeyError(f"Profile names not found at: {profile_names_path}")
+
+                # Read profile names dataset
+                profile_names_ds = hdf_file[profile_names_path]
+                profile_names_raw = profile_names_ds[()]
+
+                # Decode byte strings to regular strings
+                profile_names = []
+                for name in profile_names_raw:
+                    if isinstance(name, bytes):
+                        profile_names.append(name.decode('utf-8').strip())
+                    else:
+                        profile_names.append(str(name).strip())
+
+                logger.info(f"Found {len(profile_names)} steady state profiles: {profile_names}")
+                return profile_names
+
+        except FileNotFoundError:
+            raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+        except KeyError as e:
+            raise KeyError(f"Error accessing steady state profile names: {str(e)}")
+        except Exception as e:
+            raise RuntimeError(f"Error reading steady state profile names: {str(e)}")
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_steady_wse(
+        hdf_path: Path,
+        profile_index: Optional[int] = None,
+        profile_name: Optional[str] = None
+    ) -> pd.DataFrame:
+        """
+        Extract water surface elevation (WSE) data for steady state profiles.
+
+        Args:
+            hdf_path (Path): Path to HEC-RAS plan HDF file
+            profile_index (int, optional): Index of profile to extract (0-based). If None, extracts all profiles.
+            profile_name (str, optional): Name of profile to extract (e.g., '1Pct'). If specified, overrides profile_index.
+            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
+
+        Returns:
+            pd.DataFrame: DataFrame containing WSE data with columns:
+                - River: River name
+                - Reach: Reach name
+                - Station: Cross section river station
+                - Profile: Profile name (if multiple profiles)
+                - WSE: Water surface elevation (ft)
+
+        Raises:
+            FileNotFoundError: If the specified HDF file is not found
+            KeyError: If steady state results or WSE data are not found
+            ValueError: If profile_index or profile_name is invalid
+
+        Example:
+            >>> # Extract single profile by index
+            >>> wse_df = HdfResultsPlan.get_steady_wse('01', profile_index=5)  # 100-year profile
+
+            >>> # Extract single profile by name
+            >>> wse_df = HdfResultsPlan.get_steady_wse('01', profile_name='1Pct')
+
+            >>> # Extract all profiles
+            >>> wse_df = HdfResultsPlan.get_steady_wse('01')
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                # Check if this is a steady state plan
+                if "Results/Steady" not in hdf_file:
+                    raise ValueError(f"HDF file does not contain steady state results: {hdf_path.name}")
+
+                # Paths to data
+                wse_path = "Results/Steady/Output/Output Blocks/Base Output/Steady Profiles/Cross Sections/Water Surface"
+                xs_attrs_path = "Results/Steady/Output/Geometry Info/Cross Section Attributes"
+                profile_names_path = "Results/Steady/Output/Output Blocks/Base Output/Steady Profiles/Profile Names"
+
+                # Check required paths exist
+                if wse_path not in hdf_file:
+                    raise KeyError(f"WSE data not found at: {wse_path}")
+                if xs_attrs_path not in hdf_file:
+                    raise KeyError(f"Cross section attributes not found at: {xs_attrs_path}")
+
+                # Get WSE dataset (shape: num_profiles × num_cross_sections)
+                wse_ds = hdf_file[wse_path]
+                wse_data = wse_ds[()]
+                num_profiles, num_xs = wse_data.shape
+
+                # Get profile names
+                if profile_names_path in hdf_file:
+                    profile_names_raw = hdf_file[profile_names_path][()]
+                    profile_names = [
+                        name.decode('utf-8').strip() if isinstance(name, bytes) else str(name).strip()
+                        for name in profile_names_raw
+                    ]
+                else:
+                    # Fallback to numbered profiles
+                    profile_names = [f"Profile_{i+1}" for i in range(num_profiles)]
+
+                # Get cross section attributes
+                xs_attrs = hdf_file[xs_attrs_path][()]
+
+                # Determine which profiles to extract
+                if profile_name is not None:
+                    # Find profile by name
+                    try:
+                        profile_idx = profile_names.index(profile_name)
+                    except ValueError:
+                        raise ValueError(
+                            f"Profile name '{profile_name}' not found. "
+                            f"Available profiles: {profile_names}"
+                        )
+                    profiles_to_extract = [(profile_idx, profile_name)]
+
+                elif profile_index is not None:
+                    # Validate profile index
+                    if profile_index < 0 or profile_index >= num_profiles:
+                        raise ValueError(
+                            f"Profile index {profile_index} out of range. "
+                            f"Valid range: 0 to {num_profiles-1}"
+                        )
+                    profiles_to_extract = [(profile_index, profile_names[profile_index])]
+
+                else:
+                    # Extract all profiles
+                    profiles_to_extract = list(enumerate(profile_names))
+
+                # Build DataFrame
+                rows = []
+                for prof_idx, prof_name in profiles_to_extract:
+                    wse_values = wse_data[prof_idx, :]
+
+                    for xs_idx in range(num_xs):
+                        river = xs_attrs[xs_idx]['River']
+                        reach = xs_attrs[xs_idx]['Reach']
+                        station = xs_attrs[xs_idx]['Station']
+
+                        # Decode byte strings
+                        river = river.decode('utf-8') if isinstance(river, bytes) else str(river)
+                        reach = reach.decode('utf-8') if isinstance(reach, bytes) else str(reach)
+                        station = station.decode('utf-8') if isinstance(station, bytes) else str(station)
+
+                        row = {
+                            'River': river.strip(),
+                            'Reach': reach.strip(),
+                            'Station': station.strip(),
+                            'WSE': float(wse_values[xs_idx])
+                        }
+
+                        # Only add Profile column if extracting multiple profiles
+                        if len(profiles_to_extract) > 1:
+                            row['Profile'] = prof_name
+
+                        rows.append(row)
+
+                df = pd.DataFrame(rows)
+
+                # Reorder columns
+                if 'Profile' in df.columns:
+                    df = df[['River', 'Reach', 'Station', 'Profile', 'WSE']]
+                else:
+                    df = df[['River', 'Reach', 'Station', 'WSE']]
+
+                logger.info(
+                    f"Extracted WSE data for {len(profiles_to_extract)} profile(s), "
+                    f"{num_xs} cross sections"
+                )
+
+                return df
+
+        except FileNotFoundError:
+            raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+        except KeyError as e:
+            raise KeyError(f"Error accessing steady state WSE data: {str(e)}")
+        except Exception as e:
+            raise RuntimeError(f"Error reading steady state WSE data: {str(e)}")
+
+    @staticmethod
+    @log_call
+    @standardize_input(file_type='plan_hdf')
+    def get_steady_info(hdf_path: Path) -> pd.DataFrame:
+        """
+        Get steady flow attributes and metadata from HEC-RAS HDF plan file.
+
+        Args:
+            hdf_path (Path): Path to HEC-RAS plan HDF file
+            ras_object (RasPrj, optional): Specific RAS object to use. If None, uses the global ras instance.
+
+        Returns:
+            pd.DataFrame: DataFrame containing steady flow attributes including:
+                - Program Name
+                - Program Version
+                - Type of Run
+                - Run Time Window
+                - Solution status
+                - And other metadata attributes
+
+        Raises:
+            FileNotFoundError: If the specified HDF file is not found
+            KeyError: If steady state results are not found
+            ValueError: If the plan is not a steady state plan
+
+        Example:
+            >>> info_df = HdfResultsPlan.get_steady_info('01')
+            >>> print(info_df['Solution'].values[0])
+            'Steady Finished Successfully'
+        """
+        try:
+            with h5py.File(hdf_path, 'r') as hdf_file:
+                # Check if this is a steady state plan
+                if "Results/Steady" not in hdf_file:
+                    raise ValueError(f"HDF file does not contain steady state results: {hdf_path.name}")
+
+                attrs_dict = {}
+
+                # Get attributes from Results/Steady/Output
+                output_path = "Results/Steady/Output"
+                if output_path in hdf_file:
+                    output_group = hdf_file[output_path]
+                    for key, value in output_group.attrs.items():
+                        if isinstance(value, bytes):
+                            attrs_dict[key] = value.decode('utf-8')
+                        else:
+                            attrs_dict[key] = value
+
+                # Get attributes from Results/Steady/Summary
+                summary_path = "Results/Steady/Summary"
+                if summary_path in hdf_file:
+                    summary_group = hdf_file[summary_path]
+                    for key, value in summary_group.attrs.items():
+                        if isinstance(value, bytes):
+                            attrs_dict[key] = value.decode('utf-8')
+                        else:
+                            attrs_dict[key] = value
+
+                # Add flow file information from Plan Data
+                plan_info_path = "Plan Data/Plan Information"
+                if plan_info_path in hdf_file:
+                    plan_info = hdf_file[plan_info_path]
+                    for key in ['Flow Filename', 'Flow Title']:
+                        if key in plan_info.attrs:
+                            value = plan_info.attrs[key]
+                            if isinstance(value, bytes):
+                                attrs_dict[key] = value.decode('utf-8')
+                            else:
+                                attrs_dict[key] = value
+
+                if not attrs_dict:
+                    logger.warning("No steady state attributes found in HDF file")
+                    return pd.DataFrame()
+
+                logger.info(f"Extracted {len(attrs_dict)} steady state attributes")
+                return pd.DataFrame(attrs_dict, index=[0])
+
+        except FileNotFoundError:
+            raise FileNotFoundError(f"HDF file not found: {hdf_path}")
+        except KeyError as e:
+            raise KeyError(f"Error accessing steady state info: {str(e)}")
+        except Exception as e:
+            raise RuntimeError(f"Error reading steady state info: {str(e)}")
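Continuing the sketch above (same assumed project and steady plan '01'), get_steady_wse returns an ordinary pandas DataFrame, so downstream analysis needs no HDF handling; the profile label '1Pct' is an assumption taken from the docstring example:

```python
from ras_commander import HdfResultsPlan

# Single profile by name (label assumed; list available labels with get_steady_profile_names).
wse_1pct = HdfResultsPlan.get_steady_wse("01", profile_name="1Pct")
print(wse_1pct.head())

# All profiles at once, then the maximum WSE per cross section across profiles.
wse_all = HdfResultsPlan.get_steady_wse("01")
max_wse = wse_all.groupby(["River", "Reach", "Station"], as_index=False)["WSE"].max()
print(max_wse.head())

# Run metadata such as the Solution attribute.
info_df = HdfResultsPlan.get_steady_info("01")
print(info_df.T)
```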
ras_commander/RasExamples.py

@@ -128,11 +128,14 @@ class RasExamples:
         self._save_to_csv()
 
     @classmethod
-    def extract_project(cls, project_names: Union[str, List[str]]) -> Union[Path, List[Path]]:
+    def extract_project(cls, project_names: Union[str, List[str]], output_path: Union[str, Path] = None) -> Union[Path, List[Path]]:
         """Extract one or more specific HEC-RAS projects from the zip file.
 
         Args:
             project_names: Single project name as string or list of project names
+            output_path: Optional path where the project folder will be placed.
+                Can be a relative path (creates subfolder in current directory)
+                or an absolute path. If None, uses default 'example_projects' folder.
 
         Returns:
             Path: Single Path object if one project extracted

@@ -151,13 +154,26 @@ class RasExamples:
         if isinstance(project_names, str):
            project_names = [project_names]
 
+        # Determine the output directory
+        if output_path is None:
+            # Use default 'example_projects' folder
+            base_output_path = cls.projects_dir
+        else:
+            # Convert to Path object
+            base_output_path = Path(output_path)
+            # If relative path, make it relative to current working directory
+            if not base_output_path.is_absolute():
+                base_output_path = Path.cwd() / base_output_path
+            # Create the directory if it doesn't exist
+            base_output_path.mkdir(parents=True, exist_ok=True)
+
        extracted_paths = []
 
        for project_name in project_names:
            # Check if this is a special project
            if project_name in cls.SPECIAL_PROJECTS:
                try:
-                    special_path = cls._extract_special_project(project_name)
+                    special_path = cls._extract_special_project(project_name, base_output_path)
                    extracted_paths.append(special_path)
                    continue
                except Exception as e:

@@ -167,7 +183,7 @@ class RasExamples:
            # Regular project extraction logic
            logger.info("----- RasExamples Extracting Project -----")
            logger.info(f"Extracting project '{project_name}'")
-            project_path =
+            project_path = base_output_path
 
            if (project_path / project_name).exists():
                logger.info(f"Project '{project_name}' already exists. Deleting existing folder...")

@@ -447,12 +463,13 @@ class RasExamples:
         return int(number * units[unit])
 
     @classmethod
-    def _extract_special_project(cls, project_name: str) -> Path:
+    def _extract_special_project(cls, project_name: str, output_path: Path = None) -> Path:
         """
         Download and extract special projects that are not in the main zip file.
 
         Args:
             project_name: Name of the special project ('NewOrleansMetro' or 'BeaverLake')
+            output_path: Base output directory path. If None, uses cls.projects_dir
 
         Returns:
             Path: Path to the extracted project directory

@@ -466,8 +483,11 @@ class RasExamples:
         logger.info(f"----- RasExamples Extracting Special Project -----")
         logger.info(f"Extracting special project '{project_name}'")
 
+        # Use provided output_path or default
+        base_path = output_path if output_path else cls.projects_dir
+
         # Create the project directory
-        project_path =
+        project_path = base_path / project_name
 
         # Check if already exists
         if project_path.exists():

@@ -484,7 +504,7 @@ class RasExamples:
 
         # Download the zip file
         url = cls.SPECIAL_PROJECTS[project_name]
-        zip_file_path =
+        zip_file_path = base_path / f"{project_name}_temp.zip"
 
         logger.info(f"Downloading special project from: {url}")
         logger.info("This may take a few moments...")
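A short sketch of the new output_path behavior added above (folder names are placeholders; 'Muncie' is one of the bundled example projects): a relative path is resolved against the current working directory and created if missing, while omitting output_path keeps the previous 'example_projects' default.

```python
from ras_commander import RasExamples

# Previous behavior: extract into the default 'example_projects' folder.
default_path = RasExamples.extract_project("Muncie")

# New in 0.80.3: extract into a caller-chosen folder, relative or absolute.
custom_path = RasExamples.extract_project("Muncie", output_path="my_projects")

print(default_path)
print(custom_path)
```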
ras_commander/RasPrj.py

@@ -204,10 +204,19 @@ class RasPrj:
 
             # Set paths for geometry and flow files
             self._set_file_paths()
-
+
             # Make sure all plan paths are properly set
             self._set_plan_paths()
-
+
+            # Add flow_type column for deterministic steady/unsteady identification
+            if not self.plan_df.empty and 'unsteady_number' in self.plan_df.columns:
+                self.plan_df['flow_type'] = self.plan_df['unsteady_number'].apply(
+                    lambda x: 'Unsteady' if pd.notna(x) else 'Steady'
+                )
+            else:
+                if not self.plan_df.empty:
+                    self.plan_df['flow_type'] = 'Unknown'
+
         except Exception as e:
             logger.error(f"Error loading project data: {e}")
             raise
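The new flow_type column makes steady versus unsteady plans filterable straight from plan_df. A minimal sketch (placeholder project path and version; the RasPrj/init_ras_project pattern mirrors the removed test script further below):

```python
from pathlib import Path
from ras_commander import RasPrj, init_ras_project

# Placeholder path and version; ras_object keeps the instance local instead of global.
project = RasPrj()
init_ras_project(Path(r"C:/Projects/MyProject"), ras_version="6.6", ras_object=project)

# 'Unsteady' when a plan references an unsteady flow file, 'Steady' otherwise,
# 'Unknown' if the unsteady_number column is missing entirely.
steady_plans = project.plan_df[project.plan_df["flow_type"] == "Steady"]
print(f"{len(steady_plans)} steady plan(s) found")
print(steady_plans.head())
```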
ras_commander.egg-info/SOURCES.txt

@@ -33,5 +33,4 @@ ras_commander.egg-info/PKG-INFO
 ras_commander.egg-info/SOURCES.txt
 ras_commander.egg-info/dependency_links.txt
 ras_commander.egg-info/requires.txt
-ras_commander.egg-info/top_level.txt
-tests/test_ras_examples_initialization.py
+ras_commander.egg-info/top_level.txt
tests/test_ras_examples_initialization.py (removed)

@@ -1,241 +0,0 @@
-"""
-Test script to verify RasExamples project extraction and initialization.
-
-This script extracts multiple HEC-RAS example projects and attempts to initialize them
-to check for any errors in the extraction or initialization process.
-"""
-
-import os
-import sys
-import tempfile
-import shutil
-from pathlib import Path
-from datetime import datetime
-
-# Add parent directory to path to import ras_commander
-sys.path.insert(0, str(Path(__file__).parent.parent))
-
-from ras_commander import RasExamples, init_ras_project, RasPrj
-
-# List of projects to test
-PROJECTS_TO_TEST = [
-    'BSTEM - Simple Example',
-    'Dredging Example',
-    'Reservoir Video Tutorial',
-    'SIAM Example',
-    'Simple Sediment Transport Example',
-    'Unsteady Sediment with Concentration Rules',
-    'Video Tutorial (Sediment Intro)',
-    'Baxter RAS Mapper',
-    'Chapter 4 Example Data',
-    'ConSpan Culvert',
-    'Mixed Flow Regime Channel',
-    'Wailupe GeoRAS',
-    'Balde Eagle Creek',
-    'Bridge Hydraulics',
-    'ContractionExpansionMinorLosses',
-    'Culvert Hydraulics',
-    'Culverts with Flap Gates',
-    'Dam Breaching',
-    'Elevation Controled Gates',
-    'Inline Structure with Gated Spillways',
-    'Internal Stage and Flow Boundary Condition',
-    'JunctionHydraulics',
-    'Lateral Strcuture with Gates',
-    'Lateral Structure connected to a River Reach',
-    'Lateral Structure Overflow Weir',
-    'Lateral Structure with Culverts and Gates',
-    'Lateral Structure with Culverts',
-    'Levee Breaching',
-    'Mixed Flow Regime',
-    'Multiple Reaches with Hydraulic Structures',
-    'NavigationDam',
-    'Pumping Station with Rules',
-    'Pumping Station',
-    'Rule Operations',
-    'Simplified Physical Breaching',
-    'Storage Area Hydraulic Connection',
-    'UngagedAreaInflows',
-    'Unsteady Flow Encroachment Analysis',
-    'Chippewa_2D',
-    'Weise_2D',
-    'BaldEagleCrkMulti2D',
-    'Muncie',
-    'Example 1 - Critical Creek',
-    'Example 10 - Stream Junction',
-    'Example 11 - Bridge Scour',
-    'Example 12 - Inline Structure',
-    'Example 13 - Singler Bridge (WSPRO)',
-    'Example 14 - Ice Covered River',
-    'Example 15 - Split Flow Junction with Lateral Weir',
-    'Example 16 - Channel Modification',
-    'Example 17 - Unsteady Flow Application',
-    'Example 18 - Advanced Inline Structure',
-    'Example 19 - Hydrologic Routing - ModPuls',
-    'Example 2 - Beaver Creek',
-    'Example 20 - HagerLatWeir',
-    'Example 21 - Overflow Gates',
-    'Example 22 - Groundwater Interflow',
-    'Example 23 - Urban Modeling',
-    'Example 24 - Mannings-n-Calibration',
-    'Example 3 - Single Culvert',
-    'Example 4 - Multiple Culverts',
-    'Example 5 - Multiple Openings',
-    'Example 6 - Floodway Determination',
-    'Example 7 - Multiple Plans',
-    'Example 8 - Looped Network',
-    'Example 9 - Mixed Flow Analysis',
-    'Davis',
-    'Nutrient Example',
-    'NewOrleansMetro',
-    'BeaverLake'
-]
-
-
-def test_project_extraction_and_initialization():
-    """Test extraction and initialization of RAS example projects."""
-
-    # Save current directory
-    original_dir = os.getcwd()
-
-    # Change to parent directory if we're in the tests folder
-    if os.path.basename(os.getcwd()) == 'tests':
-        os.chdir('..')
-        print(f"Changed to parent directory: {os.getcwd()}")
-
-    # Results tracking
-    results = {
-        'successful': [],
-        'extraction_failed': [],
-        'initialization_failed': [],
-        'errors': {}
-    }
-
-    total_projects = len(PROJECTS_TO_TEST)
-
-    print(f"Testing {total_projects} HEC-RAS example projects...")
-    print(f"Projects will be extracted to: {os.path.join(os.getcwd(), 'example_projects')}")
-
-    for i, project_name in enumerate(PROJECTS_TO_TEST, 1):
-        print(f"\n[{i}/{total_projects}] Testing project: {project_name}")
-        print("-" * 60)
-
-        try:
-            # Extract the project (RasExamples handles destination automatically)
-            print(f" Extracting project...")
-            extraction_path = RasExamples.extract_project(project_name)
-
-            if not extraction_path:
-                print(f" [FAILED] Extraction failed: No path returned")
-                results['extraction_failed'].append(project_name)
-                continue
-
-            print(f" [SUCCESS] Extracted to: {extraction_path}")
-
-            # Try to initialize the project
-            print(f" Initializing project...")
-            try:
-                # Create a custom RasPrj object for this project
-                ras_project = RasPrj()
-                init_ras_project(extraction_path, ras_version="6.6", ras_object=ras_project)
-
-                if ras_project:
-                    # Debug what attributes the object has
-                    attrs = [attr for attr in dir(ras_project) if not attr.startswith('_')]
-                    print(f" Debug - RasPrj attributes: {', '.join(attrs[:10])}...")
-
-                    # Check for common attributes
-                    if hasattr(ras_project, 'prj_file') and ras_project.prj_file is not None:
-                        print(f" [SUCCESS] Successfully initialized")
-                        print(f" - Project file: {ras_project.prj_file}")
-                        print(f" - Project name: {ras_project.project_name if hasattr(ras_project, 'project_name') else 'N/A'}")
-                        print(f" - Version: {ras_project.version if hasattr(ras_project, 'version') else 'N/A'}")
-                        print(f" - Plans: {len(ras_project.plan_df) if hasattr(ras_project, 'plan_df') else 0} found")
-                        print(f" - Geometries: {len(ras_project.geom_df) if hasattr(ras_project, 'geom_df') else 0} found")
-                        results['successful'].append(project_name)
-                    else:
-                        print(f" [FAILED] Object missing prj_file attribute or it's None")
-                        results['initialization_failed'].append(project_name)
-                else:
-                    print(f" [FAILED] Initialization returned None")
-                    results['initialization_failed'].append(project_name)
-
-            except Exception as init_error:
-                print(f" [FAILED] Initialization error: {str(init_error)}")
-                results['initialization_failed'].append(project_name)
-                results['errors'][project_name] = {
-                    'stage': 'initialization',
-                    'error': str(init_error)
-                }
-
-        except Exception as extract_error:
-            print(f" [FAILED] Extraction error: {str(extract_error)}")
-            results['extraction_failed'].append(project_name)
-            results['errors'][project_name] = {
-                'stage': 'extraction',
-                'error': str(extract_error)
-            }
-
-    # Print summary
-    print("\n" + "=" * 80)
-    print("TEST SUMMARY")
-    print("=" * 80)
-    print(f"Total projects tested: {total_projects}")
-    print(f"[SUCCESS] Successful: {len(results['successful'])}")
-    print(f"[FAILED] Extraction failed: {len(results['extraction_failed'])}")
-    print(f"[FAILED] Initialization failed: {len(results['initialization_failed'])}")
-
-    if results['extraction_failed']:
-        print(f"\nExtraction Failed Projects:")
-        for proj in results['extraction_failed']:
-            print(f" - {proj}")
-            if proj in results['errors']:
-                print(f" Error: {results['errors'][proj]['error']}")
-
-    if results['initialization_failed']:
-        print(f"\nInitialization Failed Projects:")
-        for proj in results['initialization_failed']:
-            print(f" - {proj}")
-            if proj in results['errors']:
-                print(f" Error: {results['errors'][proj]['error']}")
-
-    # Write detailed results to file
-    results_file = f"test_results_{datetime.now().strftime('%Y%m%d_%H%M%S')}.txt"
-    with open(results_file, 'w') as f:
-        f.write("RAS Examples Test Results\n")
-        f.write("=" * 80 + "\n")
-        f.write(f"Test Date: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
-        f.write(f"Total Projects: {total_projects}\n")
-        f.write(f"Successful: {len(results['successful'])}\n")
-        f.write(f"Extraction Failed: {len(results['extraction_failed'])}\n")
-        f.write(f"Initialization Failed: {len(results['initialization_failed'])}\n")
-        f.write("\n" + "=" * 80 + "\n")
-
-        f.write("\nSUCCESSFUL PROJECTS:\n")
-        for proj in results['successful']:
-            f.write(f" - {proj}\n")
-
-        f.write("\nEXTRACTION FAILED:\n")
-        for proj in results['extraction_failed']:
-            f.write(f" - {proj}\n")
-            if proj in results['errors']:
-                f.write(f" Error: {results['errors'][proj]['error']}\n")
-
-        f.write("\nINITIALIZATION FAILED:\n")
-        for proj in results['initialization_failed']:
-            f.write(f" - {proj}\n")
-            if proj in results['errors']:
-                f.write(f" Error: {results['errors'][proj]['error']}\n")
-
-    print(f"\nDetailed results written to: {results_file}")
-
-    # Restore original directory
-    os.chdir(original_dir)
-
-    return results
-
-
-if __name__ == "__main__":
-    print("Starting RAS Examples Initialization Test")
-    print("=" * 80)
-    results = test_project_extraction_and_initialization()
|