dar-backup 1.0.2-py3-none-any.whl → 1.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dar_backup/__about__.py +1 -1
- dar_backup/cleanup.py +4 -6
- dar_backup/command_runner.py +48 -2
- dar_backup/config_settings.py +0 -1
- dar_backup/dar_backup.py +92 -39
- dar_backup/dar_backup_systemd.py +1 -1
- dar_backup/demo.py +1 -2
- dar_backup/manager.py +793 -11
- dar_backup/util.py +9 -8
- {dar_backup-1.0.2.dist-info → dar_backup-1.1.0.dist-info}/METADATA +243 -25
- dar_backup-1.1.0.dist-info/RECORD +23 -0
- dar_backup/Changelog.md +0 -430
- dar_backup/README.md +0 -2105
- dar_backup-1.0.2.dist-info/RECORD +0 -25
- {dar_backup-1.0.2.dist-info → dar_backup-1.1.0.dist-info}/WHEEL +0 -0
- {dar_backup-1.0.2.dist-info → dar_backup-1.1.0.dist-info}/entry_points.txt +0 -0
- {dar_backup-1.0.2.dist-info → dar_backup-1.1.0.dist-info}/licenses/LICENSE +0 -0
dar_backup/manager.py
CHANGED
@@ -29,6 +29,8 @@ import sys
 import subprocess
 import threading
 import shlex
+import time as time_module
+import dateparser
 
 from inputimeout import inputimeout, TimeoutOccurred
 
@@ -36,6 +38,7 @@ from inputimeout import inputimeout, TimeoutOccurred
 from . import __about__ as about
 from dar_backup.config_settings import ConfigSettings
 from dar_backup.util import setup_logging
+from dar_backup.util import derive_trace_log_path
 from dar_backup.util import CommandResult
 from dar_backup.util import get_config_file
 from dar_backup.util import send_discord_message
@@ -48,12 +51,12 @@ from dar_backup.util import show_scriptname
 
 from dar_backup.command_runner import CommandRunner
 from dar_backup.command_runner import CommandResult
-from dar_backup.util import backup_definition_completer,
+from dar_backup.util import backup_definition_completer, archive_content_completer, add_specific_archive_completer
 
-from datetime import datetime
+from datetime import datetime, tzinfo
 from sys import stderr
 from time import time
-from typing import Dict, List,
+from typing import Dict, List, Tuple, Optional
 
 # Constants
 SCRIPTNAME = os.path.basename(__file__)
@@ -464,6 +467,693 @@ def find_file(file, backup_def, config_settings):
     return process.returncode
 
 
+def restore_at(backup_def: str, paths: List[str], when: str, target: str, config_settings: ConfigSettings, verbose: bool = False) -> int:
+    """
+    Perform a Point-in-Time Recovery (PITR) using dar_manager.
+
+    Args:
+        backup_def: Backup definition name (prefix for the catalog DB, e.g. "example").
+        paths: One or more file or directory paths as stored in the DAR catalog
+            (must be relative, e.g. "tmp/unit-test/.../file.txt").
+        when: Date/time string to restore "as of". Parsed via dateparser and
+            converted to dar_manager format YYYY/MM/DD-HH:MM:SS. If None/empty,
+            the latest version is restored.
+        target: Destination directory for restore output. Required to avoid
+            restoring into an unintended working directory. Restore is rebased
+            under this directory using dar options (-R).
+        config_settings: Loaded ConfigSettings used to locate backup dirs/DB and
+            timeouts.
+        verbose: If True, enables dar_manager verbose logging (-v).
+
+    Returns:
+        Process return code (0 on success, non-zero on failure). If dar_manager
+        reports no files restored for a dated PITR, a fallback path is attempted
+        that selects the correct archive via dar_manager metadata and restores
+        the file(s) directly with dar.
+    """
+    database = f"{backup_def}{DB_SUFFIX}"
+    database_path = os.path.join(get_db_dir(config_settings), database)
+    logger.debug(
+        "PITR restore requested: backup_def=%s paths=%d when=%s target=%s db=%s",
+        backup_def,
+        len(paths),
+        when,
+        target,
+        database_path,
+    )
+
+    if not os.path.exists(database_path):
+        logger.error(f'Database not found: "{database_path}"')
+        return 1
+
+    if not target:
+        logger.error("Restore target directory is required (--target).")
+        return 1
+    unsafe_reason = _restore_target_unsafe_reason(target)
+    if unsafe_reason:
+        logger.error(unsafe_reason)
+        return 1
+
+    # Parse date (or default to "now" for latest restore)
+    parsed_date = None
+    if when:
+        parsed_date = _parse_when(when)
+        if parsed_date:
+            date_arg = parsed_date.strftime("%Y/%m/%d-%H:%M:%S")
+            logger.info("Restoring files as of: %s (from input '%s')", date_arg, when)
+            logger.debug("Parsed PITR timestamp: %s -> %s", when, date_arg)
+        else:
+            logger.error(f"Could not parse date: '{when}'")
+            return 1
+    else:
+        parsed_date = datetime.now()
+        logger.info(
+            "Restoring files as of: %s (no --when provided; using current time)",
+            parsed_date.strftime("%Y/%m/%d-%H:%M:%S"),
+        )
+
+    # Target directory handling: pass -R and -n via dar_manager's -e option so dar
+    # rebases paths and fails fast instead of prompting to overwrite.
+    if target:
+        logger.debug("PITR target directory: %s (cwd=%s)", target, os.getcwd())
+        if not os.path.exists(target):
+            try:
+                os.makedirs(target, exist_ok=True)
+            except Exception as e:
+                logger.error(f"Could not create target directory '{target}': {e}")
+                return 1
+            logger.debug("Created target directory: %s", target)
+        # Fail fast if any requested paths already exist under target.
+        normalized_paths = [os.path.normpath(path.lstrip(os.sep)) for path in paths]
+        if normalized_paths:
+            logger.debug("Normalized restore paths count=%d sample=%s", len(normalized_paths), normalized_paths[:3])
+        existing = []
+        for rel_path in normalized_paths:
+            if not rel_path or rel_path == ".":
+                continue
+            candidate = os.path.join(target, rel_path)
+            if os.path.exists(candidate):
+                existing.append(rel_path)
+        if existing:
+            sample = ", ".join(existing[:3])
+            extra = f" (+{len(existing) - 3} more)" if len(existing) > 3 else ""
+            logger.error(
+                "Restore target '%s' already contains path(s) to restore: %s%s. For safety, PITR restores abort "
+                "without overwriting existing files. Use a clean/empty target.",
+                target,
+                sample,
+                extra,
+            )
+            return 1
+
+    # For PITR restores, skip dar_manager -w restore to avoid interactive prompts.
+    # Use dar_manager metadata for selection, then restore directly with dar.
+    logger.info(
+        "PITR restore uses direct dar restore with catalog-derived chain (non-interactive)."
+    )
+    return _restore_with_dar(backup_def, paths, parsed_date, target, config_settings)
+
+
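A minimal sketch of the `--when` handling above, with an illustrative input string: dateparser accepts free-form dates, and the result is rendered in the `YYYY/MM/DD-HH:MM:SS` shape that dar_manager expects.

```python
import dateparser

when = "yesterday 14:30"          # any phrasing dateparser understands
parsed = dateparser.parse(when)
if parsed is None:
    raise SystemExit(f"Could not parse date: '{when}'")
# dar_manager's date format
print(parsed.strftime("%Y/%m/%d-%H:%M:%S"))
```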
+def _restore_target_unsafe_reason(target: str) -> Optional[str]:
+    target_abs = os.path.abspath(target)
+    target_norm = os.path.normpath(target_abs)
+
+    allow_prefixes = (
+        "/tmp",
+        "/var/tmp",
+        "/home",
+    )
+    if target_norm in allow_prefixes or target_norm.startswith(allow_prefixes):
+        return None
+
+    protected_prefixes = (
+        "/bin",
+        "/sbin",
+        "/usr",
+        "/etc",
+        "/lib",
+        "/lib64",
+        "/boot",
+        "/proc",
+        "/sys",
+        "/dev",
+        "/var",
+        "/root",
+    )
+    if target_norm == "/" or target_norm in protected_prefixes:
+        return f"Restore target '{target_norm}' is a protected system directory. Choose a safer location."
+    if any(target_norm.startswith(prefix + os.sep) for prefix in protected_prefixes):
+        return f"Restore target '{target_norm}' is under a protected system directory. Choose a safer location."
+
+    return None
+
+
+def _local_tzinfo() -> tzinfo:
+    return datetime.now().astimezone().tzinfo
+
+
+def _normalize_when_dt(dt: datetime) -> datetime:
+    if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None:
+        return dt
+    local_tz = _local_tzinfo()
+    return dt.astimezone(local_tz).replace(tzinfo=None)
+
+
+def _parse_when(when: str) -> Optional[datetime]:
+    parsed = dateparser.parse(when)
+    if not parsed:
+        return None
+    normalized = _normalize_when_dt(parsed)
+    if normalized is not parsed:
+        logger.debug("Normalized PITR timestamp with timezone: %s -> %s", parsed, normalized)
+    return normalized
+
+
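The archive timestamps parsed from file names are naive, so an aware `--when` is first converted to local wall-clock time before comparison. A small sketch, assuming dar-backup 1.1.0 is importable:

```python
from datetime import datetime, timezone
from dar_backup.manager import _normalize_when_dt

aware = datetime(2024, 6, 1, 12, 0, tzinfo=timezone.utc)   # illustrative value
naive = _normalize_when_dt(aware)   # local wall-clock time, tzinfo stripped
print(naive, naive.tzinfo)          # e.g. 2024-06-01 14:00:00 None (in CEST)
```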
+def _coerce_timeout(value: Optional[int]) -> Optional[int]:
+    if value is None:
+        return None
+    if isinstance(value, bool):
+        return None
+    if isinstance(value, int):
+        return None if value <= 0 else value
+    if isinstance(value, str):
+        try:
+            value_int = int(value)
+        except ValueError:
+            return None
+        return None if value_int <= 0 else value_int
+    return None
+
+
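Behaviour of `_coerce_timeout` on representative inputs: positive ints (or int-like strings) pass through, and anything else disables the timeout by returning None rather than raising.

```python
from dar_backup.manager import _coerce_timeout

assert _coerce_timeout(30) == 30        # positive ints pass through
assert _coerce_timeout("30") == 30      # int-like strings are converted
assert _coerce_timeout(0) is None       # non-positive disables the timeout
assert _coerce_timeout(True) is None    # bools are explicitly rejected
assert _coerce_timeout("soon") is None  # unparseable strings disable it
```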
+def _parse_archive_map(list_output: str) -> Dict[int, str]:
+    archives: Dict[int, str] = {}
+    for line in list_output.splitlines():
+        stripped = line.strip()
+        if not stripped or stripped.startswith("archive #") or stripped.startswith("-"):
+            continue
+        parts = stripped.split()
+        if len(parts) < 3 or not parts[0].isdigit():
+            continue
+        num = int(parts[0])
+        basename = parts[-1]
+        path = " ".join(parts[1:-1])
+        archives[num] = os.path.join(path, basename)
+    return archives
+
+
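`_parse_archive_map` applied to a synthetic listing; the real layout of `dar_manager --list` output may differ slightly, this only illustrates the "number / directory / basename" rows the parser accepts (headers and separator lines are skipped).

```python
from dar_backup.manager import _parse_archive_map

listing = """archive #   path        basename
---------   ----        --------
1   /backups    example_FULL_2024-01-01
2   /backups    example_DIFF_2024-02-01
"""
print(_parse_archive_map(listing))
# {1: '/backups/example_FULL_2024-01-01', 2: '/backups/example_DIFF_2024-02-01'}
```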
+def _replace_path_prefix(path: str, old_prefix: str, new_prefix: str) -> Optional[str]:
+    old_norm = os.path.normpath(old_prefix)
+    new_norm = os.path.normpath(new_prefix)
+    if path == old_norm:
+        return new_norm
+    if path.startswith(old_norm + os.sep):
+        suffix = path[len(old_norm):]
+        return os.path.normpath(new_norm + suffix)
+    return None
+
+
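How the relocate operation rewrites directory prefixes: only matches on path-component boundaries are rewritten, anything else returns None and is left untouched. Paths are illustrative.

```python
from dar_backup.manager import _replace_path_prefix

print(_replace_path_prefix("/mnt/old/backups", "/mnt/old", "/mnt/new"))
# -> /mnt/new/backups
print(_replace_path_prefix("/mnt/older/backups", "/mnt/old", "/mnt/new"))
# -> None: "/mnt/older" only shares a string prefix, not a path component
```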
+def relocate_archive_paths(
+    backup_def: str,
+    old_prefix: str,
+    new_prefix: str,
+    config_settings: ConfigSettings,
+    dry_run: bool = False,
+) -> int:
+    database = f"{backup_def}{DB_SUFFIX}"
+    database_path = os.path.join(get_db_dir(config_settings), database)
+    if not os.path.exists(database_path):
+        logger.error(f'Database not found: "{database_path}"')
+        return 1
+
+    timeout = _coerce_timeout(getattr(config_settings, "command_timeout_secs", None))
+    list_result = runner.run(["dar_manager", "--base", database_path, "--list"], timeout=timeout)
+    stdout = list_result.stdout or ""
+    stderr = list_result.stderr or ""
+    if list_result.returncode != 0:
+        logger.error(f'Error listing catalogs for: "{database_path}"')
+        logger.error(f"stderr: {stderr}")
+        logger.error(f"stdout: {stdout}")
+        return list_result.returncode
+
+    archive_map = _parse_archive_map(stdout)
+    if not archive_map:
+        logger.error("Could not determine archive list from dar_manager output.")
+        return 1
+
+    updates: List[Tuple[int, str, str, str]] = []
+    for catalog_no, full_path in archive_map.items():
+        current_dir = os.path.dirname(full_path)
+        new_dir = _replace_path_prefix(current_dir, old_prefix, new_prefix)
+        if new_dir and new_dir != current_dir:
+            updates.append((catalog_no, current_dir, new_dir, os.path.basename(full_path)))
+
+    if not updates:
+        logger.info(
+            "No archive paths matched '%s' in database '%s'.",
+            os.path.normpath(old_prefix),
+            database_path,
+        )
+        return 0
+
+    logger.info(
+        "Updating %d archive path(s) from '%s' to '%s' in database '%s'.",
+        len(updates),
+        os.path.normpath(old_prefix),
+        os.path.normpath(new_prefix),
+        database_path,
+    )
+    failures = 0
+    for catalog_no, current_dir, new_dir, basename in updates:
+        logger.info("Archive #%d (%s): %s -> %s", catalog_no, basename, current_dir, new_dir)
+        if dry_run:
+            continue
+        result = runner.run(
+            ["dar_manager", "--base", database_path, "-p", str(catalog_no), new_dir],
+            timeout=timeout,
+        )
+        if result.returncode != 0:
+            failures += 1
+            logger.error(
+                "Failed updating archive #%d path to '%s' (returncode=%s).",
+                catalog_no,
+                new_dir,
+                result.returncode,
+            )
+            logger.error(f"stderr: {result.stderr}")
+
+    if failures:
+        logger.error("Relocate completed with %d failure(s).", failures)
+        return 1
+    logger.info("Relocate completed successfully.")
+    return 0
+
+
+def _parse_archive_info(archive_map: Dict[int, str]) -> List[Tuple[int, datetime, str]]:
+    info: List[Tuple[int, datetime, str]] = []
+    pattern = re.compile(r"^(.*)_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2})(?:_(\d{6}))?(?:_.*)?$")
+    for catalog_no, path in archive_map.items():
+        base = os.path.basename(path)
+        match = pattern.match(base)
+        if not match:
+            continue
+        _, archive_type, date_str, time_str = match.groups()
+        try:
+            if time_str:
+                archive_date = datetime.strptime(f"{date_str}_{time_str}", "%Y-%m-%d_%H%M%S")
+            else:
+                archive_date = datetime.strptime(date_str, "%Y-%m-%d")
+        except ValueError:
+            continue
+        info.append((catalog_no, archive_date, archive_type))
+    return info
+
+
+def _select_archive_chain(archive_info: List[Tuple[int, datetime, str]], when_dt: datetime) -> List[int]:
+    order = {"FULL": 0, "DIFF": 1, "INCR": 2}
+    candidates = [
+        (catalog_no, date, archive_type)
+        for catalog_no, date, archive_type in archive_info
+        if date <= when_dt
+    ]
+    candidates.sort(key=lambda item: (item[1], order.get(item[2], 99), item[0]))
+    last_full = None
+    last_full_key = None
+    for catalog_no, date, archive_type in candidates:
+        if archive_type == "FULL":
+            last_full = catalog_no
+            last_full_key = (date, order["FULL"], catalog_no)
+    if last_full is None:
+        return []
+
+    last_diff = None
+    last_diff_key = None
+    for catalog_no, date, archive_type in candidates:
+        key = (date, order.get(archive_type, 99), catalog_no)
+        if key <= last_full_key:
+            continue
+        if archive_type == "DIFF":
+            last_diff = catalog_no
+            last_diff_key = key
+
+    base_key = last_diff_key or last_full_key
+    last_incr = None
+    for catalog_no, date, archive_type in candidates:
+        key = (date, order.get(archive_type, 99), catalog_no)
+        if key <= base_key:
+            continue
+        if archive_type == "INCR":
+            last_incr = catalog_no
+
+    chain = [last_full]
+    if last_diff is not None:
+        chain.append(last_diff)
+    if last_incr is not None:
+        chain.append(last_incr)
+    return chain
+
+
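A worked example of the chain selection above, on synthetic data: the chain for a point in time is the newest FULL at or before it, then the newest later DIFF, then the newest INCR after that base.

```python
from datetime import datetime
from dar_backup.manager import _select_archive_chain

archive_info = [              # (catalog number, archive date, type)
    (1, datetime(2024, 1, 1), "FULL"),
    (2, datetime(2024, 1, 8), "DIFF"),
    (3, datetime(2024, 1, 9), "INCR"),
    (4, datetime(2024, 1, 15), "FULL"),
]
print(_select_archive_chain(archive_info, datetime(2024, 1, 10)))  # [1, 2, 3]
print(_select_archive_chain(archive_info, datetime(2024, 1, 20)))  # [4]
```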
+def _is_directory_path(path: str) -> bool:
+    return os.path.isdir(os.path.join(os.sep, path))
+
+
+def _looks_like_directory(path: str) -> bool:
+    if not path:
+        return False
+    normalized = path.rstrip(os.sep)
+    if not normalized:
+        return True
+    if path.endswith(os.sep):
+        return True
+    base = os.path.basename(normalized)
+    _, ext = os.path.splitext(base)
+    return ext == ""
+
+
+def _treat_as_directory(path: str) -> bool:
+    if _is_directory_path(path):
+        return True
+    if _looks_like_directory(path):
+        logger.debug("Treating restore path '%s' as directory (heuristic).", path)
+        return True
+    return False
+
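The heuristic above on a few illustrative inputs (POSIX separators assumed): a trailing separator or an extension-less last component is treated as a directory.

```python
from dar_backup.manager import _looks_like_directory

assert _looks_like_directory("home/user/docs/")      # trailing separator
assert _looks_like_directory("home/user/docs")       # extension-less last part
assert not _looks_like_directory("home/user/a.txt")  # has an extension
```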
+def _format_chain_item(
+    catalog_no: int,
+    info_by_no: Dict[int, Tuple[datetime, str]],
+    status: str,
+) -> str:
+    info = info_by_no.get(catalog_no)
+    if info:
+        dt, archive_type = info
+        return f"#{catalog_no} {archive_type}@{dt} [{status}]"
+    return f"#{catalog_no} [unknown] [{status}]"
+
+
+def _describe_archive(
+    catalog_no: int,
+    archive_map: Dict[int, str],
+    info_by_no: Dict[int, Tuple[datetime, str]],
+) -> str:
+    archive_path = archive_map.get(catalog_no)
+    base = os.path.basename(archive_path) if archive_path else "unknown"
+    info = info_by_no.get(catalog_no)
+    if info:
+        dt, archive_type = info
+        dt_str = dt.strftime("%Y-%m-%d %H:%M:%S")
+        return f"#{catalog_no} {archive_type}@{dt_str} {base}"
+    return f"#{catalog_no} {base}"
+
+
+def _missing_chain_elements(chain: List[int], archive_map: Dict[int, str]) -> List[str]:
+    missing = []
+    for catalog_no in chain:
+        archive_path = archive_map.get(catalog_no)
+        if not archive_path:
+            missing.append(f"catalog #{catalog_no} missing from archive map")
+            continue
+        slice_path = f"{archive_path}.1.dar"
+        if not os.path.exists(slice_path):
+            missing.append(slice_path)
+    return missing
+
+
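`_missing_chain_elements` flags a chain as incomplete when slice #1 of an archive (`<base>.1.dar`) is absent on disk; this is what the report below surfaces before any restore is attempted. The paths here are synthetic, so both entries show up as missing.

```python
from dar_backup.manager import _missing_chain_elements

archive_map = {1: "/backups/example_FULL_2024-01-01"}
print(_missing_chain_elements([1, 2], archive_map))
# ['/backups/example_FULL_2024-01-01.1.dar', 'catalog #2 missing from archive map']
```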
+def _pitr_chain_report(
+    backup_def: str,
+    paths: List[str],
+    when: str,
+    config_settings: ConfigSettings,
+) -> int:
+    """
+    Report the PITR archive chain that would be used for a restore at `when`,
+    without performing any restore actions. Returns non-zero if required
+    archives are missing or no chain/candidates can be determined.
+    """
+    if not when:
+        logger.error("PITR report requires --when.")
+        return 1
+
+    parsed_date = _parse_when(when)
+    if not parsed_date:
+        logger.error(f"Could not parse date: '{when}'")
+        return 1
+
+    database = f"{backup_def}{DB_SUFFIX}"
+    database_path = os.path.join(get_db_dir(config_settings), database)
+    timeout = _coerce_timeout(getattr(config_settings, "command_timeout_secs", None))
+    list_result = runner.run(['dar_manager', '--base', database_path, '--list'], timeout=timeout)
+    archive_map = _parse_archive_map(list_result.stdout)
+    if not archive_map:
+        logger.error("Could not determine archive list from dar_manager output.")
+        return 1
+
+    archive_info = _parse_archive_info(archive_map)
+    info_by_no = {catalog_no: (dt, archive_type) for catalog_no, dt, archive_type in archive_info}
+    failures = 0
+    successes = 0
+
+    for path in paths:
+        if _treat_as_directory(path):
+            chain = _select_archive_chain(archive_info, parsed_date)
+            if not chain:
+                logger.error(f"No FULL archive found at or before {parsed_date} for '{path}'")
+                failures += 1
+                continue
+            missing = []
+            chain_display_parts = []
+            for catalog_no in chain:
+                archive_path = archive_map.get(catalog_no)
+                status = "ok"
+                if not archive_path:
+                    status = "missing"
+                    missing.append(f"catalog #{catalog_no} missing from archive map")
+                else:
+                    slice_path = f"{archive_path}.1.dar"
+                    if not os.path.exists(slice_path):
+                        status = "missing"
+                        missing.append(slice_path)
+                chain_display_parts.append(_format_chain_item(catalog_no, info_by_no, status))
+            chain_display = ", ".join(chain_display_parts)
+            logger.info("PITR chain report for '%s': %s", path, chain_display)
+            if missing:
+                for item in missing:
+                    logger.error("PITR chain report missing archive: %s", item)
+                failures += 1
+            else:
+                successes += 1
+            continue
+
+        file_result = runner.run(['dar_manager', '--base', database_path, '-f', path], timeout=timeout)
+        versions = _parse_file_versions(file_result.stdout)
+        candidates = [(num, dt) for num, dt in versions if dt <= parsed_date]
+        candidates.sort(key=lambda item: item[1], reverse=True)
+        logger.info(
+            "PITR chain report candidates for '%s': %s",
+            path,
+            ", ".join(f"#{num}@{dt}" for num, dt in candidates) or "<none>",
+        )
+        if not candidates:
+            logger.error(f"No archive version found for '{path}' at or before {parsed_date}")
+            failures += 1
+            continue
+        catalog_no, dt = candidates[0]
+        archive_path = archive_map.get(catalog_no)
+        if not archive_path:
+            logger.error("PITR chain report missing archive map entry for #%d (%s)", catalog_no, path)
+            failures += 1
+            continue
+        slice_path = f"{archive_path}.1.dar"
+        if not os.path.exists(slice_path):
+            logger.error("PITR chain report missing archive slice: %s", slice_path)
+            failures += 1
+            continue
+        logger.info("PITR chain report selected archive #%d (%s) for '%s'.", catalog_no, dt, path)
+        successes += 1
+
+    logger.info("PITR chain report summary: %d ok, %d failed.", successes, failures)
+    return 0 if failures == 0 else 1
+
+
+def _parse_file_versions(file_output: str) -> List[Tuple[int, datetime]]:
+    versions: List[Tuple[int, datetime]] = []
+    for line in file_output.splitlines():
+        stripped = line.strip()
+        if not stripped:
+            continue
+        match = re.match(r"^(\d+)\s+([A-Za-z]{3}\s+[A-Za-z]{3}\s+\d+\s+\d{2}:\d{2}:\d{2}\s+\d{4})", stripped)
+        if not match:
+            continue
+        try:
+            catalog_no = int(match.group(1))
+            dt = datetime.strptime(match.group(2), "%a %b %d %H:%M:%S %Y")
+        except Exception:
+            continue
+        versions.append((catalog_no, dt))
+    return versions
+
+
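`_parse_file_versions` applied to one synthetic row in the shape the parser accepts — a catalog number followed by a ctime-style date; the real `dar_manager -f` output carries more columns, which are ignored.

```python
from dar_backup.manager import _parse_file_versions

output = "3 Wed Feb 14 09:30:00 2024 saved"
print(_parse_file_versions(output))
# [(3, datetime.datetime(2024, 2, 14, 9, 30))]
```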
+def _guess_darrc_path(config_settings: ConfigSettings) -> Optional[str]:
+    config_dir = os.path.dirname(config_settings.config_file)
+    candidate = os.path.join(config_dir, ".darrc")
+    if os.path.exists(candidate):
+        return candidate
+    script_dir = os.path.dirname(os.path.realpath(__file__))
+    fallback = os.path.join(script_dir, ".darrc")
+    if os.path.exists(fallback):
+        return fallback
+    return None
+
+
+def _restore_with_dar(backup_def: str, paths: List[str], when_dt: datetime, target: str, config_settings: ConfigSettings) -> int:
+    """
+    Restore specific paths by selecting the best matching archive (<= when_dt)
+    using dar_manager metadata, then invoking dar directly.
+
+    This is a fallback for PITR when dar_manager reports that nothing could be
+    restored for a dated request. It inspects the catalog to choose an archive
+    for each path and restores into the provided target directory.
+    """
+    database = f"{backup_def}{DB_SUFFIX}"
+    database_path = os.path.join(get_db_dir(config_settings), database)
+    timeout = _coerce_timeout(getattr(config_settings, "command_timeout_secs", None))
+    list_result = runner.run(['dar_manager', '--base', database_path, '--list'], timeout=timeout)
+    archive_map = _parse_archive_map(list_result.stdout)
+    if not archive_map:
+        logger.error("Could not determine archive list from dar_manager output.")
+        return 1
+    logger.debug("PITR archive map: %s", ", ".join(f"#{k}={v}" for k, v in sorted(archive_map.items())))
+    archive_info = _parse_archive_info(archive_map)
+    info_by_no = {catalog_no: (dt, archive_type) for catalog_no, dt, archive_type in archive_info}
+
+    darrc_path = _guess_darrc_path(config_settings)
+    failures = 0
+    successes = 0
+    missing_archives = set()
+
+    for path in paths:
+        file_result = runner.run(['dar_manager', '--base', database_path, '-f', path], timeout=timeout)
+        if _treat_as_directory(path):
+            chain = _select_archive_chain(archive_info, when_dt)
+            if not chain:
+                logger.error(f"No FULL archive found at or before {when_dt} for '{path}'")
+                failures += 1
+                continue
+            missing = _missing_chain_elements(chain, archive_map)
+            if missing:
+                for item in missing:
+                    missing_archives.add(item)
+                    logger.error("PITR restore missing archive in chain for '%s': %s", path, item)
+                failures += 1
+                continue
+            logger.info(
+                "PITR restore directory '%s' using archive chain: %s",
+                path,
+                ", ".join(_describe_archive(num, archive_map, info_by_no) for num in chain),
+            )
+            restored = True
+            for catalog_no in chain:
+                archive_path = archive_map.get(catalog_no)
+                if not archive_path:
+                    missing_archives.add(f"catalog #{catalog_no} missing from archive map")
+                    logger.error(f"Archive number {catalog_no} missing from archive list; cannot restore '{path}'.")
+                    restored = False
+                    break
+                if not os.path.exists(f"{archive_path}.1.dar"):
+                    missing_archives.add(f"{archive_path}.1.dar")
+                    logger.error(f"Archive slice missing for '{archive_path}.1.dar', cannot complete restore.")
+                    restored = False
+                    break
+                cmd = ['dar', '-x', archive_path, '-wa', '-g', path, '--noconf', '-Q']
+                if target:
+                    cmd.extend(['-R', target])
+                if darrc_path:
+                    cmd.extend(['-B', darrc_path, 'restore-options'])
+                logger.info(
+                    "Applying archive %s for '%s'.",
+                    _describe_archive(catalog_no, archive_map, info_by_no),
+                    path,
+                )
+                result = runner.run(cmd, timeout=timeout)
+                if result.returncode != 0:
+                    logger.error(f"dar restore failed for '{path}' from '{archive_path}': {result.stderr}")
+                    restored = False
+                    break
+            if restored:
+                successes += 1
+            else:
+                failures += 1
+            continue
+
+        versions = _parse_file_versions(file_result.stdout)
+        candidates = [(num, dt) for num, dt in versions if dt <= when_dt]
+        candidates.sort(key=lambda item: item[1], reverse=True)
+        logger.debug(
+            "PITR candidates for '%s': %s",
+            path,
+            ", ".join(f"#{num}@{dt}" for num, dt in candidates) or "<none>",
+        )
+        if not candidates:
+            logger.error(f"No archive version found for '{path}' at or before {when_dt}")
+            failures += 1
+            continue
+
+        restored = False
+        for catalog_no, dt in candidates:
+            archive_path = archive_map.get(catalog_no)
+            if not archive_path:
+                missing_archives.add(f"catalog #{catalog_no} missing from archive map")
+                logger.error(f"Archive number {catalog_no} missing from archive list; cannot restore '{path}'.")
+                restored = False
+                break
+            if not os.path.exists(f"{archive_path}.1.dar"):
+                missing_archives.add(f"{archive_path}.1.dar")
+                logger.error(f"Archive slice missing for '{archive_path}.1.dar', cannot restore '{path}'.")
+                restored = False
+                break
+            logger.info(
+                "PITR restore file '%s' using archive %s.",
+                path,
+                _describe_archive(catalog_no, archive_map, info_by_no),
+            )
+            cmd = ['dar', '-x', archive_path, '-wa', '-g', path, '--noconf', '-Q']
+            if target:
+                cmd.extend(['-R', target])
+            if darrc_path:
+                cmd.extend(['-B', darrc_path, 'restore-options'])
+            logger.info(
+                "Restoring '%s' from archive %s using dar.",
+                path,
+                _describe_archive(catalog_no, archive_map, info_by_no),
+            )
+            result = runner.run(cmd, timeout=timeout)
+            if result.returncode == 0:
+                restored = True
+                successes += 1
+                break
+            logger.error(f"dar restore failed for '{path}' from '{archive_path}': {result.stderr}")
+
+        if not restored:
+            failures += 1
+
+    logger.info("PITR restore summary: %d succeeded, %d failed.", successes, failures)
+    if missing_archives:
+        missing_list = sorted(missing_archives)
+        sample = ", ".join(missing_list[:3])
+        extra = f" (+{len(missing_list) - 3} more)" if len(missing_list) > 3 else ""
+        logger.error("Missing archives detected during PITR restore: %s%s", sample, extra)
+        ts = datetime.now().strftime("%Y-%m-%d %H:%M")
+        send_discord_message(
+            f"{ts} - manager: PITR restore missing archives ({len(missing_list)} missing).",
+            config_settings=config_settings,
+        )
+    if failures:
+        ts = datetime.now().strftime("%Y-%m-%d %H:%M")
+        send_discord_message(
+            f"{ts} - manager: PITR restore completed with failures ({failures} failed, {successes} succeeded).",
+            config_settings=config_settings,
+        )
+    return 0 if failures == 0 else 1
+
+
 def add_specific_archive(archive: str, config_settings: ConfigSettings, directory: str = None) -> int:
     """
     Adds the specified archive to its catalog database. Prompts for confirmation if it's older than existing entries.
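The dar invocation assembled per selected archive in `_restore_with_dar`, isolated as a sketch: `-x` extracts from the archive base name, `-g` limits extraction to the requested path, and `-R` rebases the restored tree under the target directory. All values here are placeholders.

```python
archive_path = "/backups/example_FULL_2024-01-01"  # archive base name (no .1.dar)
path = "home/user/file.txt"                        # catalog-relative path
target = "/tmp/restore"

cmd = ['dar', '-x', archive_path, '-wa', '-g', path, '--noconf', '-Q']
cmd.extend(['-R', target])   # rebase output under the target directory
print(' '.join(cmd))
```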
@@ -573,27 +1263,30 @@ def add_directory(args: argparse.ArgumentParser, config_settings: ConfigSettings
     dar_pattern = re.compile(r'^(.*?_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2}))\.1.dar$') # just read slice #1 of an archive
     # List of DAR archives with their dates and base names
     dar_archives = []
+    type_order = {"FULL": 0, "DIFF": 1, "INCR": 2}
 
     for filename in os.listdir(args.add_dir):
         logger.debug(f"check if '{filename}' is a dar archive slice #1?")
         match = dar_pattern.match(filename)
         if match:
             base_name = match.group(1)
+            archive_type = match.group(2)
             date_str = match.group(3)
             date_obj = datetime.strptime(date_str, '%Y-%m-%d')
-            dar_archives.append((date_obj, base_name))
-            logger.debug(f" -> yes: base name: {base_name}, date: {date_str}")
+            dar_archives.append((date_obj, type_order.get(archive_type, 99), base_name, archive_type))
+            logger.debug(f" -> yes: base name: {base_name}, type: {archive_type}, date: {date_str}")
 
     if not dar_archives or len(dar_archives) == 0:
         logger.info(f"No 'dar' archives found in directory {args.add_dir}")
         return
 
-    # Sort the DAR archives by date
+    # Sort the DAR archives by date then type (FULL -> DIFF -> INCR) to avoid interactive ordering prompts.
     dar_archives.sort()
+    logger.debug("Sorted archives for add-dir: %s", [(d.strftime("%Y-%m-%d"), t, n) for d, t, n, _ in dar_archives])
 
     # Loop over the sorted DAR archives and process them
     result: List[Dict] = []
-    for
+    for _date_obj, _type_order, base_name, _archive_type in dar_archives:
         logger.info(f"Adding dar archive: '{base_name}' to it's catalog database")
         result_archive = add_specific_archive(base_name, config_settings, args.add_dir)
         result.append({ f"{base_name}" : result_archive})
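The effect of the new sort key in isolation: plain tuple sorting on (date, type rank, name) orders same-day archives FULL → DIFF → INCR, so catalogs are added in dependency order. Names are synthetic.

```python
from datetime import datetime

type_order = {"FULL": 0, "DIFF": 1, "INCR": 2}
day = datetime(2024, 3, 1)
dar_archives = [
    (day, type_order["INCR"], "ex_INCR_2024-03-01", "INCR"),
    (day, type_order["FULL"], "ex_FULL_2024-03-01", "FULL"),
    (day, type_order["DIFF"], "ex_DIFF_2024-03-01", "DIFF"),
]
dar_archives.sort()
print([name for _, _, name, _ in dar_archives])
# ['ex_FULL_2024-03-01', 'ex_DIFF_2024-03-01', 'ex_INCR_2024-03-01']
```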
@@ -657,7 +1350,8 @@ def remove_specific_archive(archive: str, config_settings: ConfigSettings) -> in
     cat_no:int = cat_no_for_name(archive, config_settings)
     if cat_no >= 0:
         command = ['dar_manager', '--base', database_path, "--delete", str(cat_no)]
-
+        timeout = _coerce_timeout(getattr(config_settings, "command_timeout_secs", None))
+        process: CommandResult = runner.run(command, timeout=timeout)
         logger.info(f"CommandResult: {process}")
     else:
         logger.warning(f"archive: '{archive}' not found in it's catalog database: {database_path}")
@@ -684,6 +1378,26 @@ def build_arg_parser():
     parser.add_argument('-l', '--list-catalogs', action='store_true', help='List catalogs in databases for all backup definitions')
     parser.add_argument('--list-archive-contents', type=str, help="List contents of the archive's catalog. Argument is the archive name.").completer = archive_content_completer
     parser.add_argument('--find-file', type=str, help="List catalogs containing <path>/file. '-d <definition>' argument is also required")
+    parser.add_argument('--restore-path', nargs='+', help="Restore specific path(s) (Point-in-Time Recovery).")
+    parser.add_argument('--when', type=str, help="Date/time for restoration (used with --restore-path).")
+    parser.add_argument('--target', type=str, default=None, help="Target directory for restoration (default: current dir).")
+    parser.add_argument('--pitr-report', action='store_true', help="Report PITR archive chain for --restore-path/--when without restoring.")
+    parser.add_argument(
+        '--pitr-report-first',
+        action='store_true',
+        help="Run PITR chain report before restore and abort if missing archives.",
+    )
+    parser.add_argument(
+        '--relocate-archive-path',
+        nargs=2,
+        metavar=("OLD", "NEW"),
+        help="Rewrite archive path prefix in the catalog DB (requires --backup-def).",
+    )
+    parser.add_argument(
+        '--relocate-archive-path-dry-run',
+        action='store_true',
+        help="Show archive path changes without applying them (use with --relocate-archive-path).",
+    )
     parser.add_argument('--verbose', action='store_true', help='Be more verbose')
     parser.add_argument('--log-level', type=str, help="`debug` or `trace`, default is `info`", default="info")
     parser.add_argument('--log-stdout', action='store_true', help='also print log messages to stdout')
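The new CLI surface, exercised through the parser itself; the argument values are placeholders, and this assumes no positional arguments are required by the parser.

```python
from dar_backup.manager import build_arg_parser

args = build_arg_parser().parse_args([
    "--backup-def", "example",
    "--restore-path", "home/user/file.txt",
    "--when", "2024-06-01 12:00",
    "--target", "/tmp/restore",
    "--pitr-report-first",
])
print(args.restore_path, args.when, args.target, args.pitr_report_first)
```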
@@ -739,6 +1453,7 @@ def main():
         return
 
     command_output_log = config_settings.logfile_location.replace("dar-backup.log", "dar-backup-commands.log")
+    trace_log_file = derive_trace_log_path(config_settings.logfile_location)
     logger = setup_logging(
         config_settings.logfile_location,
         command_output_log,
@@ -746,6 +1461,7 @@ def main():
         args.log_stdout,
         logfile_max_bytes=config_settings.logfile_max_bytes,
         logfile_backup_count=config_settings.logfile_backup_count,
+        trace_log_file=trace_log_file,
         trace_log_max_bytes=getattr(config_settings, "trace_log_max_bytes", 10485760),
         trace_log_backup_count=getattr(config_settings, "trace_log_backup_count", 1)
     )
@@ -753,6 +1469,7 @@ def main():
     runner = CommandRunner(
         logger=logger,
         command_logger=command_logger,
+        default_timeout=getattr(config_settings, "command_timeout_secs", 30) or 30,
         default_capture_limit_bytes=getattr(config_settings, "command_capture_max_bytes", None)
     )
 
@@ -767,10 +1484,12 @@ def main():
     start_msgs.append(("Config file:", args.config_file))
     args.verbose and start_msgs.append(("Backup dir:", config_settings.backup_dir))
     start_msgs.append(("Logfile:", config_settings.logfile_location))
+    args.verbose and start_msgs.append(("Trace log:", trace_log_file))
     args.verbose and start_msgs.append(("Logfile max size (bytes):", config_settings.logfile_max_bytes))
     args.verbose and start_msgs.append(("Logfile backup count:", config_settings.logfile_backup_count))
     args.verbose and start_msgs.append(("--alternate-archive-dir:", args.alternate_archive_dir))
     args.verbose and start_msgs.append(("--remove-specific-archive:", args.remove_specific_archive))
+    args.verbose and start_msgs.append(("--relocate-archive-path:", args.relocate_archive_path))
     dar_manager_properties = get_binary_info(command='dar_manager')
     start_msgs.append(("dar_manager:", dar_manager_properties['path']))
     start_msgs.append(("dar_manager v.:", dar_manager_properties['version']))
@@ -804,7 +1523,7 @@ def main():
         return
 
     if args.backup_def and not args.backup_def.strip():
-        logger.error(
+        logger.error("No backup definition given to --backup-def")
         sys.exit(1)
         return
 
@@ -816,12 +1535,46 @@ def main():
         return
 
     if args.list_archive_contents and not args.list_archive_contents.strip():
-        logger.error(
+        logger.error("--list-archive-contents <param> not given, exiting")
+        sys.exit(1)
+        return
+
+    if args.relocate_archive_path and not args.backup_def:
+        logger.error("--relocate-archive-path requires the --backup-def, exiting")
+        sys.exit(1)
+        return
+
+    if args.relocate_archive_path_dry_run and not args.relocate_archive_path:
+        logger.error("--relocate-archive-path-dry-run requires --relocate-archive-path, exiting")
         sys.exit(1)
         return
 
     if args.find_file and not args.backup_def:
-        logger.error(
+        logger.error("--find-file requires the --backup-def, exiting")
+        sys.exit(1)
+        return
+
+    if args.restore_path and not args.backup_def:
+        logger.error("--restore-path requires the --backup-def, exiting")
+        sys.exit(1)
+
+    if args.restore_path and not args.target and not args.pitr_report:
+        logger.error("--restore-path requires the --target directory, exiting")
+        sys.exit(1)
+        return
+
+    if args.pitr_report:
+        if not args.restore_path:
+            logger.error("--pitr-report requires --restore-path, exiting")
+            sys.exit(1)
+            return
+        if not args.when:
+            logger.error("--pitr-report requires --when, exiting")
+            sys.exit(1)
+            return
+
+    if args.pitr_report_first and not args.restore_path:
+        logger.error("--pitr-report-first requires --restore-path, exiting")
         sys.exit(1)
         return
 
@@ -879,11 +1632,40 @@ def main():
             sys.exit(result)
             return
 
+        if args.relocate_archive_path:
+            old_prefix, new_prefix = args.relocate_archive_path
+            result = relocate_archive_paths(
+                args.backup_def,
+                old_prefix,
+                new_prefix,
+                config_settings,
+                dry_run=args.relocate_archive_path_dry_run,
+            )
+            sys.exit(result)
+            return
+
 
         if args.find_file:
             result = find_file(args.find_file, args.backup_def, config_settings)
             sys.exit(result)
             return
+
+        if args.pitr_report:
+            result = _pitr_chain_report(args.backup_def, args.restore_path, args.when, config_settings)
+            sys.exit(result)
+            return
+
+        if args.restore_path:
+            if args.pitr_report_first:
+                report_when = args.when or "now"
+                result = _pitr_chain_report(args.backup_def, args.restore_path, report_when, config_settings)
+                if result != 0:
+                    sys.exit(result)
+                    return
+            result = restore_at(args.backup_def, args.restore_path, args.when, args.target, config_settings, verbose=args.verbose)
+            sys.exit(result)
+            return
+
     except Exception as e:
         msg = f"Unexpected error during manager operation: {e}"
         logger.error(msg, exc_info=True)