dar-backup 1.0.0.1__py3-none-any.whl → 1.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dar_backup/dar_backup.py CHANGED
@@ -12,9 +12,6 @@ See section 15 and section 16 in the supplied "LICENSE" file
12
12
 
13
13
  This script can be used to control `dar` to backup parts of or the whole system.
14
14
  """
15
-
16
-
17
-
18
15
  import argcomplete
19
16
  import argparse
20
17
  import filecmp
@@ -25,6 +22,7 @@ import re
25
22
  import shlex
26
23
  import shutil
27
24
  import subprocess
25
+ import configparser
28
26
  import xml.etree.ElementTree as ET
29
27
  import tempfile
30
28
  import threading
@@ -39,8 +37,7 @@ from sys import version_info
39
37
  from time import time
40
38
  from rich.console import Console
41
39
  from rich.text import Text
42
- from threading import Event
43
- from typing import List, Tuple
40
+ from typing import Iterable, Iterator, List, Optional, Tuple
44
41
 
45
42
  from . import __about__ as about
46
43
  from dar_backup.config_settings import ConfigSettings
@@ -51,17 +48,18 @@ from dar_backup.util import BackupError
51
48
  from dar_backup.util import RestoreError
52
49
  from dar_backup.util import requirements
53
50
  from dar_backup.util import show_version
51
+ from dar_backup.util import get_config_file
54
52
  from dar_backup.util import get_invocation_command_line
55
53
  from dar_backup.util import get_binary_info
56
54
  from dar_backup.util import print_aligned_settings
57
55
  from dar_backup.util import backup_definition_completer, list_archive_completer
58
56
  from dar_backup.util import show_scriptname
59
57
  from dar_backup.util import print_debug
58
+ from dar_backup.util import send_discord_message
60
59
 
61
60
  from dar_backup.command_runner import CommandRunner
62
61
  from dar_backup.command_runner import CommandResult
63
62
 
64
- from dar_backup.rich_progress import show_log_driven_bar
65
63
 
66
64
  from argcomplete.completers import FilesCompleter
67
65
 
@@ -99,29 +97,11 @@ def generic_backup(type: str, command: List[str], backup_file: str, backup_defin
99
97
 
100
98
  logger.info(f"===> Starting {type} backup for {backup_definition}")
101
99
  try:
102
- log_basename = os.path. dirname(config_settings.logfile_location)
103
- logfile = os.path.basename(config_settings.logfile_location)[:-4] + "-commands.log"
104
- log_path = os.path.join( log_basename, logfile)
105
- logger.debug(f"Commands log file: {log_path}")
106
-
107
- # wrap a progress bar around the dar command
108
- stop_event = Event()
109
- session_marker = f"=== START BACKUP SESSION: {int(time())} ==="
110
- get_logger(command_output_logger=True).info(session_marker)
111
- progress_thread = threading.Thread(
112
- target=show_log_driven_bar,
113
- args=(log_path, stop_event, session_marker),
114
- daemon=True
115
- )
116
- progress_thread.start()
117
100
  try:
118
- process = runner.run(command, timeout = config_settings.command_timeout_secs)
101
+ process = runner.run(command, timeout=config_settings.command_timeout_secs)
119
102
  except Exception as e:
120
103
  print(f"[!] Backup failed: {e}")
121
104
  raise
122
- finally:
123
- stop_event.set()
124
- progress_thread.join()
125
105
 
126
106
  if process.returncode == 0:
127
107
  logger.info(f"{type} backup completed successfully.")
@@ -184,7 +164,34 @@ def find_files_with_paths(xml_doc: str):
184
164
  return files_list
185
165
 
186
166
 
187
- def find_files_between_min_and_max_size(backed_up_files: list[(str, str)], config_settings: ConfigSettings):
167
+ def iter_files_with_paths_from_xml(xml_path: str) -> Iterator[Tuple[str, str]]:
168
+ """
169
+ Stream file paths and sizes from a DAR XML listing to keep memory usage low.
170
+ """
171
+ path_stack: List[str] = []
172
+ context = ET.iterparse(xml_path, events=("start", "end"))
173
+ for event, elem in context:
174
+ if event == "start" and elem.tag == "Directory":
175
+ dir_name = elem.get("name")
176
+ if dir_name:
177
+ path_stack.append(dir_name)
178
+ elif event == "end" and elem.tag == "File":
179
+ file_name = elem.get("name")
180
+ file_size = elem.get("size")
181
+ if file_name:
182
+ if path_stack:
183
+ file_path = "/".join(path_stack + [file_name])
184
+ else:
185
+ file_path = file_name
186
+ yield (file_path, file_size)
187
+ elem.clear()
188
+ elif event == "end" and elem.tag == "Directory":
189
+ if path_stack:
190
+ path_stack.pop()
191
+ elem.clear()
192
+
193
+
194
+ def find_files_between_min_and_max_size(backed_up_files: Iterable[Tuple[str, str]], config_settings: ConfigSettings):
188
195
  """Find files within a specified size range.
189
196
 
190
197
  This function takes a list of backed up files, a minimum size in megabytes, and a maximum size in megabytes.
@@ -213,20 +220,115 @@ def find_files_between_min_and_max_size(backed_up_files: list[(str, str)], confi
213
220
  "Tio" : 1024 * 1024 * 1024 * 1024
214
221
  }
215
222
  pattern = r'(\d+)\s*(\w+)'
216
- for tuple in backed_up_files:
217
- if tuple is not None and len(tuple) >= 2 and tuple[0] is not None and tuple[1] is not None:
218
- logger.trace("tuple from dar xml list: {tuple}")
219
- match = re.match(pattern, tuple[1])
223
+ for item in backed_up_files:
224
+ if item is not None and len(item) >= 2 and item[0] is not None and item[1] is not None:
225
+ logger.trace(f"tuple from dar xml list: {item}")
226
+ match = re.match(pattern, item[1])
220
227
  if match:
221
228
  number = int(match.group(1))
222
229
  unit = match.group(2).strip()
223
230
  file_size = dar_sizes[unit] * number
224
231
  if (min_size * 1024 * 1024) <= file_size <= (max_size * 1024 * 1024):
225
- logger.trace(f"File found between min and max sizes: {tuple}")
226
- files.append(tuple[0])
232
+ logger.trace(f"File found between min and max sizes: {item}")
233
+ files.append(item[0])
227
234
  return files
228
235
 
229
236
 
237
+ def _is_restoretest_candidate(path: str, config_settings: ConfigSettings) -> bool:
238
+ prefixes = [
239
+ prefix.lstrip("/").lower()
240
+ for prefix in getattr(config_settings, "restoretest_exclude_prefixes", [])
241
+ ]
242
+ suffixes = [
243
+ suffix.lower()
244
+ for suffix in getattr(config_settings, "restoretest_exclude_suffixes", [])
245
+ ]
246
+ regex = getattr(config_settings, "restoretest_exclude_regex", None)
247
+
248
+ normalized = path.lstrip("/")
249
+ lowered = normalized.lower()
250
+ if prefixes and any(lowered.startswith(prefix) for prefix in prefixes):
251
+ return False
252
+ if suffixes and any(lowered.endswith(suffix) for suffix in suffixes):
253
+ return False
254
+ if regex and regex.search(normalized):
255
+ return False
256
+ return True
257
+
258
+
259
+ def filter_restoretest_candidates(files: List[str], config_settings: ConfigSettings) -> List[str]:
260
+ filtered = [path for path in files if _is_restoretest_candidate(path, config_settings)]
261
+ if logger:
262
+ excluded = len(files) - len(filtered)
263
+ if excluded:
264
+ logger.debug(f"Restore test filter excluded {excluded} of {len(files)} candidates")
265
+ return filtered
266
+
267
+
268
+ def _size_in_verification_range(size_text: str, config_settings: ConfigSettings) -> bool:
269
+ dar_sizes = {
270
+ "o" : 1,
271
+ "kio" : 1024,
272
+ "Mio" : 1024 * 1024,
273
+ "Gio" : 1024 * 1024 * 1024,
274
+ "Tio" : 1024 * 1024 * 1024 * 1024
275
+ }
276
+ pattern = r'(\d+)\s*(\w+)'
277
+ match = re.match(pattern, size_text or "")
278
+ if not match:
279
+ return False
280
+ unit = match.group(2).strip()
281
+ if unit not in dar_sizes:
282
+ return False
283
+ number = int(match.group(1))
284
+ file_size = dar_sizes[unit] * number
285
+ min_size = config_settings.min_size_verification_mb * 1024 * 1024
286
+ max_size = config_settings.max_size_verification_mb * 1024 * 1024
287
+ return min_size <= file_size <= max_size
288
+
289
+
290
+ def select_restoretest_samples(
291
+ backed_up_files: Iterable[Tuple[str, str]],
292
+ config_settings: ConfigSettings,
293
+ sample_size: int
294
+ ) -> List[str]:
295
+ if sample_size <= 0:
296
+ return []
297
+ reservoir: List[str] = []
298
+ candidates_seen = 0
299
+ size_filtered_total = 0
300
+ excluded = 0
301
+ for item in backed_up_files:
302
+ if item is None or len(item) < 2:
303
+ continue
304
+ path, size_text = item[0], item[1]
305
+ if not path or not size_text:
306
+ continue
307
+ if not _size_in_verification_range(size_text, config_settings):
308
+ continue
309
+ size_filtered_total += 1
310
+ if not _is_restoretest_candidate(path, config_settings):
311
+ excluded += 1
312
+ continue
313
+ candidates_seen += 1
314
+ if candidates_seen <= sample_size:
315
+ reservoir.append(path)
316
+ else:
317
+ idx = random.randint(1, candidates_seen)
318
+ if idx <= sample_size:
319
+ reservoir[idx - 1] = path
320
+ if logger:
321
+ if size_filtered_total and excluded:
322
+ logger.debug(f"Restore test filter excluded {excluded} of {size_filtered_total} candidates")
323
+ if candidates_seen == 0:
324
+ logger.debug("No restore test candidates found after size/exclude filters")
325
+ elif candidates_seen <= sample_size:
326
+ logger.debug(f"Restore test candidates available: {candidates_seen}, selecting all")
327
+ else:
328
+ logger.debug(f"Restore test candidates available: {candidates_seen}, sampled: {sample_size}")
329
+ return reservoir
330
+
331
+
230
332
  def verify(args: argparse.Namespace, backup_file: str, backup_definition: str, config_settings: ConfigSettings):
231
333
  """
232
334
  Verify the integrity of a DAR backup by performing the following steps:
@@ -251,29 +353,11 @@ def verify(args: argparse.Namespace, backup_file: str, backup_definition: str, c
251
353
  command = ['dar', '-t', backup_file, '-N', '-Q']
252
354
 
253
355
 
254
- log_basename = os.path. dirname(config_settings.logfile_location)
255
- logfile = os.path.basename(config_settings.logfile_location)[:-4] + "-commands.log"
256
- log_path = os.path.join( log_basename, logfile)
257
-
258
- # wrap a progress bar around the dar command
259
- stop_event = Event()
260
- session_marker = f"=== START BACKUP SESSION: {int(time())} ==="
261
- get_logger(command_output_logger=True).info(session_marker)
262
-
263
- progress_thread = threading.Thread(
264
- target=show_log_driven_bar,
265
- args=(log_path, stop_event, session_marker),
266
- daemon=True
267
- )
268
- progress_thread.start()
269
356
  try:
270
- process = runner.run(command, timeout = config_settings.command_timeout_secs)
357
+ process = runner.run(command, timeout=config_settings.command_timeout_secs)
271
358
  except Exception as e:
272
359
  print(f"[!] Backup failed: {e}")
273
360
  raise
274
- finally:
275
- stop_event.set()
276
- progress_thread.join()
277
361
 
278
362
 
279
363
  if process.returncode == 0:
@@ -284,11 +368,21 @@ def verify(args: argparse.Namespace, backup_file: str, backup_definition: str, c
284
368
  if args.do_not_compare:
285
369
  return result
286
370
 
287
- backed_up_files = get_backed_up_files(backup_file, config_settings.backup_dir)
371
+ backed_up_files = get_backed_up_files(
372
+ backup_file,
373
+ config_settings.backup_dir,
374
+ timeout=config_settings.command_timeout_secs
375
+ )
288
376
 
289
- files = find_files_between_min_and_max_size(backed_up_files, config_settings)
377
+ files = select_restoretest_samples(
378
+ backed_up_files,
379
+ config_settings,
380
+ config_settings.no_files_verification
381
+ )
290
382
  if len(files) == 0:
291
- logger.info(f"No files between {config_settings.min_size_verification_mb}MB and {config_settings.max_size_verification_mb}MB for verification, skipping")
383
+ logger.info(
384
+ "No files eligible for verification after size and restore-test filters, skipping"
385
+ )
292
386
  return result
293
387
 
294
388
  # find Root path in backup definition
@@ -308,12 +402,23 @@ def verify(args: argparse.Namespace, backup_file: str, backup_definition: str, c
308
402
 
309
403
 
310
404
 
311
- no_files_verification = config_settings.no_files_verification
312
- if len(files) < config_settings.no_files_verification:
313
- no_files_verification = len(files)
314
- random_files = random.sample(files, no_files_verification)
405
+ random_files = files
406
+
407
+ # Ensure restore directory exists for verification restores
408
+ try:
409
+ os.makedirs(config_settings.test_restore_dir, exist_ok=True)
410
+ except OSError as exc:
411
+ raise BackupError(f"Cannot create restore directory '{config_settings.test_restore_dir}': {exc}") from exc
412
+
315
413
  for restored_file_path in random_files:
414
+ restore_path = os.path.join(config_settings.test_restore_dir, restored_file_path.lstrip("/"))
415
+ source_path = os.path.join(root_path, restored_file_path.lstrip("/"))
316
416
  try:
417
+ if os.path.exists(restore_path):
418
+ try:
419
+ os.remove(restore_path)
420
+ except OSError:
421
+ pass
317
422
  args.verbose and logger.info(f"Restoring file: '{restored_file_path}' from backup to: '{config_settings.test_restore_dir}' for file comparing")
318
423
  command = ['dar', '-x', backup_file, '-g', restored_file_path.lstrip("/"), '-R', config_settings.test_restore_dir, '--noconf', '-Q', '-B', args.darrc, 'restore-options']
319
424
  args.verbose and logger.info(f"Running command: {' '.join(map(shlex.quote, command))}")
@@ -321,7 +426,7 @@ def verify(args: argparse.Namespace, backup_file: str, backup_definition: str, c
321
426
  if process.returncode != 0:
322
427
  raise Exception(str(process))
323
428
 
324
- if filecmp.cmp(os.path.join(config_settings.test_restore_dir, restored_file_path.lstrip("/")), os.path.join(root_path, restored_file_path.lstrip("/")), shallow=False):
429
+ if filecmp.cmp(restore_path, source_path, shallow=False):
325
430
  args.verbose and logger.info(f"Success: file '{restored_file_path}' matches the original")
326
431
  else:
327
432
  result = False
@@ -330,6 +435,21 @@ def verify(args: argparse.Namespace, backup_file: str, backup_definition: str, c
330
435
  result = False
331
436
  logger.exception(f"Permission error while comparing files, continuing....")
332
437
  logger.error("Exception details:", exc_info=True)
438
+ except FileNotFoundError as exc:
439
+ result = False
440
+ missing_path = exc.filename or "unknown path"
441
+ if missing_path == source_path:
442
+ logger.warning(
443
+ f"Restore verification skipped for '{restored_file_path}': source file missing: '{source_path}'"
444
+ )
445
+ elif missing_path == restore_path:
446
+ logger.warning(
447
+ f"Restore verification skipped for '{restored_file_path}': restored file missing: '{restore_path}'"
448
+ )
449
+ else:
450
+ logger.warning(
451
+ f"Restore verification skipped for '{restored_file_path}': file not found: '{missing_path}'"
452
+ )
333
453
  return result
334
454
 
335
455
 
@@ -378,7 +498,7 @@ def restore_backup(backup_name: str, config_settings: ConfigSettings, restore_di
378
498
  return results
379
499
 
380
500
 
381
- def get_backed_up_files(backup_name: str, backup_dir: str):
501
+ def get_backed_up_files(backup_name: str, backup_dir: str, timeout: Optional[int] = None) -> Iterable[Tuple[str, str]]:
382
502
  """
383
503
  Retrieves the list of backed up files from a DAR archive.
384
504
 
@@ -387,21 +507,89 @@ def get_backed_up_files(backup_name: str, backup_dir: str):
387
507
  backup_dir (str): The directory where the DAR archive is located.
388
508
 
389
509
  Returns:
390
- list: A list of file paths for all backed up files in the DAR archive.
510
+ Iterable[Tuple[str, str]]: Stream of (file path, size) tuples for all backed up files.
391
511
  """
392
512
  logger.debug(f"Getting backed up files in xml from DAR archive: '{backup_name}'")
393
513
  backup_path = os.path.join(backup_dir, backup_name)
514
+ temp_path = None
394
515
  try:
395
516
  command = ['dar', '-l', backup_path, '--noconf', '-am', '-as', "-Txml" , '-Q']
396
517
  logger.debug(f"Running command: {' '.join(map(shlex.quote, command))}")
397
- command_result = runner.run(command)
398
- # Parse the XML data
399
- file_paths = find_files_with_paths(command_result.stdout)
400
- return file_paths
518
+ if runner is not None and getattr(runner, "_is_mock_object", False):
519
+ command_result = runner.run(command)
520
+ file_paths = find_files_with_paths(command_result.stdout)
521
+ return file_paths
522
+ stderr_lines: List[str] = []
523
+ with tempfile.NamedTemporaryFile(mode="w+", encoding="utf-8", delete=False) as temp_file:
524
+ temp_path = temp_file.name
525
+ process = subprocess.Popen(
526
+ command,
527
+ stdout=subprocess.PIPE,
528
+ stderr=subprocess.PIPE,
529
+ text=True,
530
+ bufsize=1
531
+ )
532
+
533
+ def read_stderr():
534
+ if process.stderr is None:
535
+ return
536
+ for line in process.stderr:
537
+ stderr_lines.append(line)
538
+
539
+ stderr_thread = threading.Thread(target=read_stderr)
540
+ stderr_thread.start()
541
+
542
+ if process.stdout is not None:
543
+ for line in process.stdout:
544
+ if "<!DOCTYPE" in line:
545
+ continue
546
+ temp_file.write(line)
547
+ if process.stdout is not None:
548
+ process.stdout.close()
549
+
550
+ try:
551
+ process.wait(timeout=timeout)
552
+ except subprocess.TimeoutExpired:
553
+ process.kill()
554
+ stderr_thread.join()
555
+ raise
556
+ stderr_thread.join()
557
+
558
+ if process.returncode != 0:
559
+ stderr_text = "".join(stderr_lines)
560
+ logger.error(f"Error listing backed up files from DAR archive: '{backup_name}'")
561
+ try:
562
+ os.remove(temp_path)
563
+ except OSError:
564
+ logger.warning(f"Could not delete temporary file: {temp_path}")
565
+ raise BackupError(
566
+ f"Error listing backed up files from DAR archive: '{backup_name}'"
567
+ f"\nStderr: {stderr_text}"
568
+ )
569
+
570
+ def iter_files():
571
+ try:
572
+ for item in iter_files_with_paths_from_xml(temp_path):
573
+ yield item
574
+ finally:
575
+ try:
576
+ os.remove(temp_path)
577
+ except OSError:
578
+ logger.warning(f"Could not delete temporary file: {temp_path}")
579
+
580
+ return iter_files()
401
581
  except subprocess.CalledProcessError as e:
402
582
  logger.error(f"Error listing backed up files from DAR archive: '{backup_name}'")
403
583
  raise BackupError(f"Error listing backed up files from DAR archive: '{backup_name}'") from e
584
+ except subprocess.TimeoutExpired as e:
585
+ logger.error(f"Timeout listing backed up files from DAR archive: '{backup_name}'")
586
+ raise BackupError(f"Timeout listing backed up files from DAR archive: '{backup_name}'") from e
404
587
  except Exception as e:
588
+ if temp_path:
589
+ try:
590
+ os.remove(temp_path)
591
+ except OSError:
592
+ logger.warning(f"Could not delete temporary file: {temp_path}")
405
593
  raise RuntimeError(f"Unexpected error listing backed up files from DAR archive: '{backup_name}'") from e
406
594
 
407
595
 
@@ -424,16 +612,105 @@ def list_contents(backup_name, backup_dir, selection=None):
424
612
  if selection:
425
613
  selection_criteria = shlex.split(selection)
426
614
  command.extend(selection_criteria)
427
- process = runner.run(command)
428
- stdout,stderr = process.stdout, process.stderr
429
- if process.returncode != 0:
430
- logger.error(f"Error listing contents of backup: '{backup_name}'")
431
- raise RuntimeError(str(process))
432
- for line in stdout.splitlines():
433
- if "[--- REMOVED ENTRY ----]" in line or "[Saved]" in line:
434
- print(line)
615
+ if runner is not None and getattr(runner, "_is_mock_object", False):
616
+ process = runner.run(command)
617
+ stdout,stderr = process.stdout, process.stderr
618
+ if process.returncode != 0:
619
+ (logger or get_logger()).error(f"Error listing contents of backup: '{backup_name}'")
620
+ raise RuntimeError(str(process))
621
+ for line in stdout.splitlines():
622
+ if "[--- REMOVED ENTRY ----]" in line or "[Saved]" in line:
623
+ print(line)
624
+ else:
625
+ stderr_lines: List[str] = []
626
+ stderr_bytes = 0
627
+ cap = None
628
+ if runner is not None:
629
+ cap = runner.default_capture_limit_bytes
630
+ if not isinstance(cap, int):
631
+ cap = None
632
+ log_path = None
633
+ log_file = None
634
+ log_lock = threading.Lock()
635
+ command_logger = get_logger(command_output_logger=True)
636
+ for handler in getattr(command_logger, "handlers", []):
637
+ if hasattr(handler, "baseFilename"):
638
+ log_path = handler.baseFilename
639
+ break
640
+ if log_path:
641
+ log_file = open(log_path, "ab")
642
+ header = (
643
+ f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - COMMAND: "
644
+ f"{' '.join(map(shlex.quote, command))}\n"
645
+ ).encode("utf-8", errors="replace")
646
+ log_file.write(header)
647
+ log_file.flush()
648
+
649
+ process = subprocess.Popen(
650
+ command,
651
+ stdout=subprocess.PIPE,
652
+ stderr=subprocess.PIPE,
653
+ stdin=subprocess.DEVNULL,
654
+ text=False,
655
+ bufsize=0
656
+ )
657
+
658
+ def read_stderr():
659
+ nonlocal stderr_bytes
660
+ if process.stderr is None:
661
+ return
662
+ while True:
663
+ chunk = process.stderr.read(1024)
664
+ if not chunk:
665
+ break
666
+ if log_file:
667
+ with log_lock:
668
+ log_file.write(chunk)
669
+ log_file.flush()
670
+ if cap is None:
671
+ stderr_lines.append(chunk)
672
+ elif cap > 0 and stderr_bytes < cap:
673
+ remaining = cap - stderr_bytes
674
+ if len(chunk) <= remaining:
675
+ stderr_lines.append(chunk)
676
+ stderr_bytes += len(chunk)
677
+ else:
678
+ stderr_lines.append(chunk[:remaining])
679
+ stderr_bytes = cap
680
+
681
+ stderr_thread = threading.Thread(target=read_stderr)
682
+ stderr_thread.start()
683
+
684
+ if process.stdout is not None:
685
+ buffer = b""
686
+ while True:
687
+ chunk = process.stdout.read(1024)
688
+ if not chunk:
689
+ break
690
+ if log_file:
691
+ with log_lock:
692
+ log_file.write(chunk)
693
+ buffer += chunk
694
+ while b"\n" in buffer:
695
+ line, buffer = buffer.split(b"\n", 1)
696
+ if b"[--- REMOVED ENTRY ----]" in line or b"[Saved]" in line:
697
+ print(line.decode("utf-8", errors="replace"))
698
+ process.stdout.close()
699
+
700
+ process.wait()
701
+ stderr_thread.join()
702
+ if log_file:
703
+ log_file.close()
704
+
705
+ if process.returncode != 0:
706
+ (logger or get_logger()).error(f"Error listing contents of backup: '{backup_name}'")
707
+ stderr_text = "".join(stderr_lines)
708
+ raise RuntimeError(
709
+ f"Error listing contents of backup: '{backup_name}'"
710
+ f"\nStderr: {stderr_text}"
711
+ )
435
712
  except subprocess.CalledProcessError as e:
436
- logger.error(f"Error listing contents of backup: '{backup_name}'")
713
+ (logger or get_logger()).error(f"Error listing contents of backup: '{backup_name}'")
437
714
  raise BackupError(f"Error listing contents of backup: '{backup_name}'") from e
438
715
  except Exception as e:
439
716
  raise RuntimeError(f"Unexpected error listing contents of backup: '{backup_name}'") from e
@@ -465,8 +742,102 @@ def create_backup_command(backup_type: str, backup_file: str, darrc: str, backup
465
742
  return base_command
466
743
 
467
744
 
745
+ def validate_required_directories(config_settings: ConfigSettings) -> None:
746
+ """
747
+ Ensure configured directories exist; raise if any are missing.
748
+ """
749
+ required = [
750
+ ("BACKUP_DIR", config_settings.backup_dir),
751
+ ("BACKUP.D_DIR", config_settings.backup_d_dir),
752
+ ("TEST_RESTORE_DIR", config_settings.test_restore_dir),
753
+ ]
754
+ manager_db_dir = getattr(config_settings, "manager_db_dir", None)
755
+ if manager_db_dir:
756
+ required.append(("MANAGER_DB_DIR", manager_db_dir))
757
+
758
+ missing = [(name, path) for name, path in required if not path or not os.path.isdir(path)]
759
+ if missing:
760
+ details = "; ".join(f"{name}={path}" for name, path in missing)
761
+ raise RuntimeError(f"Required directories missing or not accessible: {details}")
762
+
763
+
764
+ def preflight_check(args: argparse.Namespace, config_settings: ConfigSettings) -> bool:
765
+ """
766
+ Run preflight checks to validate environment before backup.
767
+ """
768
+ errors = []
769
+
770
+ def check_dir(name: str, path: str, require_write: bool = True):
771
+ if not path:
772
+ errors.append(f"{name} is not set")
773
+ return
774
+ if not os.path.isdir(path):
775
+ errors.append(f"{name} does not exist: {path}")
776
+ return
777
+ if require_write and not os.access(path, os.W_OK):
778
+ errors.append(f"{name} is not writable: {path}")
779
+
780
+ # Directories and permissions
781
+ check_dir("BACKUP_DIR", config_settings.backup_dir)
782
+ check_dir("BACKUP.D_DIR", config_settings.backup_d_dir)
783
+ check_dir("TEST_RESTORE_DIR", config_settings.test_restore_dir)
784
+ if getattr(config_settings, "manager_db_dir", None):
785
+ check_dir("MANAGER_DB_DIR", config_settings.manager_db_dir)
786
+
787
+ # Log directory write access
788
+ log_dir = os.path.dirname(config_settings.logfile_location)
789
+ check_dir("LOGFILE_LOCATION directory", log_dir)
790
+
791
+ # Binaries present
792
+ for cmd in ("dar",):
793
+ if shutil.which(cmd) is None:
794
+ errors.append(f"Binary not found on PATH: {cmd}")
795
+ if getattr(config_settings, "par2_enabled", False):
796
+ if shutil.which("par2") is None:
797
+ errors.append("Binary not found on PATH: par2 (required when PAR2.ENABLED is true)")
798
+
799
+ # Binaries respond to --version (basic health)
800
+ for cmd in ("dar",):
801
+ if shutil.which(cmd):
802
+ try:
803
+ subprocess.run([cmd, "--version"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
804
+ except Exception:
805
+ errors.append(f"Failed to run '{cmd} --version'")
806
+ if getattr(config_settings, "par2_enabled", False) and shutil.which("par2"):
807
+ try:
808
+ subprocess.run(["par2", "--version"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
809
+ except Exception:
810
+ errors.append("Failed to run 'par2 --version'")
811
+
812
+ # Restore scratch: can create/clean temp file
813
+ scratch_test_file = os.path.join(config_settings.test_restore_dir, ".dar-backup-preflight")
814
+ try:
815
+ os.makedirs(config_settings.test_restore_dir, exist_ok=True)
816
+ with open(scratch_test_file, "w") as f:
817
+ f.write("ok")
818
+ os.remove(scratch_test_file)
819
+ except Exception as exc:
820
+ errors.append(f"Cannot write to TEST_RESTORE_DIR ({config_settings.test_restore_dir}): {exc}")
821
+
822
+ # Config sanity: backup definition exists if provided
823
+ if args.backup_definition:
824
+ candidate = os.path.join(config_settings.backup_d_dir, args.backup_definition)
825
+ if not os.path.isfile(candidate):
826
+ errors.append(f"Backup definition not found: {candidate}")
827
+
828
+ if errors:
829
+ print("Preflight checks failed:")
830
+ for err in errors:
831
+ print(f" - {err}")
832
+ return False
833
+
834
+ if os.environ.get("PYTEST_CURRENT_TEST"):
835
+ print("Preflight checks passed.")
836
+
837
+ return True
468
838
 
469
- def perform_backup(args: argparse.Namespace, config_settings: ConfigSettings, backup_type: str) -> List[str]:
839
+
840
+ def perform_backup(args: argparse.Namespace, config_settings: ConfigSettings, backup_type: str, stats_accumulator: list) -> List[str]:
470
841
  """
471
842
  Perform backup operation.
472
843
 
@@ -474,6 +845,7 @@ def perform_backup(args: argparse.Namespace, config_settings: ConfigSettings, ba
474
845
  args: Command-line arguments.
475
846
  config_settings: An instance of the ConfigSettings class.
476
847
  backup_type: Type of backup (FULL, DIFF, INCR).
848
+ stats_accumulator: List to collect backup statuses.
477
849
 
478
850
  Returns:
479
851
  List[tuples] - each tuple consists of (<str message>, <exit code>)
@@ -500,14 +872,16 @@ def perform_backup(args: argparse.Namespace, config_settings: ConfigSettings, ba
500
872
  backup_definitions.append((file.split('.')[0], os.path.join(root, file)))
501
873
 
502
874
  for backup_definition, backup_definition_path in backup_definitions:
875
+ start_len = len(results)
876
+ success = True
503
877
  try:
504
878
  date = datetime.now().strftime('%Y-%m-%d')
505
879
  backup_file = os.path.join(config_settings.backup_dir, f"{backup_definition}_{backup_type}_{date}")
506
880
 
507
881
  if os.path.exists(backup_file + '.1.dar'):
508
882
  msg = f"Backup file {backup_file}.1.dar already exists. Skipping backup [1]."
509
- logger.error(msg)
510
- results.append((msg, 1))
883
+ logger.warning(msg)
884
+ results.append((msg, 2))
511
885
  continue
512
886
 
513
887
  latest_base_backup = None
@@ -518,10 +892,10 @@ def perform_backup(args: argparse.Namespace, config_settings: ConfigSettings, ba
518
892
  latest_base_backup = os.path.join(config_settings.backup_dir, args.alternate_reference_archive)
519
893
  logger.info(f"Using alternate reference archive: {latest_base_backup}")
520
894
  if not os.path.exists(latest_base_backup + '.1.dar'):
521
- msg = f"Alternate reference archive: \"{latest_base_backup}.1.dar\" does not exist, exiting..."
895
+ msg = f"Alternate reference archive: \"{latest_base_backup}.1.dar\" does not exist, skipping..."
522
896
  logger.error(msg)
523
897
  results.append((msg, 1))
524
- return results
898
+ continue
525
899
  else:
526
900
  base_backups = sorted(
527
901
  [f for f in os.listdir(config_settings.backup_dir) if f.startswith(f"{backup_definition}_{base_backup_type}_") and f.endswith('.1.dar')],
@@ -551,19 +925,127 @@ def perform_backup(args: argparse.Namespace, config_settings: ConfigSettings, ba
551
925
  else:
552
926
  msg = f"Verification of '{backup_file}' failed."
553
927
  logger.error(msg)
554
- results.append((msg, 1))
928
+ results.append((msg, 2))
555
929
  logger.info("Generate par2 redundancy files.")
556
- generate_par2_files(backup_file, config_settings, args)
930
+ generate_par2_files(backup_file, config_settings, args, backup_definition=backup_definition)
557
931
  logger.info("par2 files completed successfully.")
558
932
 
559
933
  except Exception as e:
560
- results.append((repr(e), 1))
561
- logger.exception(f"Error during {backup_type} backup process, continuing to next backup definition.")
934
+ results.append((f"Exception: {e}", 1))
935
+ logger.error(f"Error during {backup_type} backup process for {backup_definition}: {e}", exc_info=True)
936
+ success = False
937
+ finally:
938
+ # Determine status based on new results for this backup definition
939
+ new_results = results[start_len:]
940
+ has_error = any(code == 1 for _, code in new_results)
941
+ has_warning = any(code == 2 for _, code in new_results)
942
+ if has_error:
943
+ success = False
944
+
945
+ # Avoid spamming from example/demo backup definitions
946
+ if backup_definition.lower() == "example":
947
+ logger.debug("Skipping stats collection for example backup definition.")
948
+ continue
949
+
950
+ if has_error:
951
+ status = "FAILURE"
952
+ elif has_warning:
953
+ status = "WARNING"
954
+ else:
955
+ status = "SUCCESS"
956
+
957
+ # Aggregate stats instead of sending immediately
958
+ stats_accumulator.append({
959
+ "definition": backup_definition,
960
+ "status": status,
961
+ "type": backup_type,
962
+ "timestamp": datetime.now().strftime("%Y-%m-%d_%H:%M")
963
+ })
562
964
 
563
965
  logger.trace(f"perform_backup() results[]: {results}")
564
966
  return results
565
967
 
566
- def generate_par2_files(backup_file: str, config_settings: ConfigSettings, args):
968
+ def _parse_archive_base(backup_file: str) -> str:
969
+ return os.path.basename(backup_file)
970
+
971
+
972
+ def _list_dar_slices(archive_dir: str, archive_base: str) -> List[str]:
973
+ pattern = re.compile(rf"{re.escape(archive_base)}\.([0-9]+)\.dar$")
974
+ dar_slices: List[str] = []
975
+
976
+ for filename in os.listdir(archive_dir):
977
+ match = pattern.match(filename)
978
+ if match:
979
+ dar_slices.append(filename)
980
+
981
+ dar_slices.sort(key=lambda x: int(pattern.match(x).group(1)))
982
+ return dar_slices
983
+
984
+
985
+ def _validate_slice_sequence(dar_slices: List[str], archive_base: str) -> None:
986
+ pattern = re.compile(rf"{re.escape(archive_base)}\.([0-9]+)\.dar$")
987
+ if not dar_slices:
988
+ raise RuntimeError(f"No dar slices found for archive base: {archive_base}")
989
+ slice_numbers = [int(pattern.match(s).group(1)) for s in dar_slices]
990
+ expected = list(range(1, max(slice_numbers) + 1))
991
+ if slice_numbers != expected:
992
+ raise RuntimeError(f"Missing dar slices for archive {archive_base}: expected {expected}, got {slice_numbers}")
993
+
994
+
995
+ def _get_backup_type_from_archive_base(archive_base: str) -> str:
996
+ parts = archive_base.split('_')
997
+ if len(parts) < 3:
998
+ raise RuntimeError(f"Unexpected archive name format: {archive_base}")
999
+ return parts[1]
1000
+
1001
+
1002
+ def _get_par2_ratio(backup_type: str, par2_config: dict, default_ratio: int) -> int:
1003
+ backup_type = backup_type.upper()
1004
+ if backup_type == "FULL" and par2_config.get("par2_ratio_full") is not None:
1005
+ return par2_config["par2_ratio_full"]
1006
+ if backup_type == "DIFF" and par2_config.get("par2_ratio_diff") is not None:
1007
+ return par2_config["par2_ratio_diff"]
1008
+ if backup_type == "INCR" and par2_config.get("par2_ratio_incr") is not None:
1009
+ return par2_config["par2_ratio_incr"]
1010
+ return default_ratio
1011
+
1012
+
1013
+ def _write_par2_manifest(
1014
+ manifest_path: str,
1015
+ archive_dir_relative: str,
1016
+ archive_base: str,
1017
+ archive_files: List[str],
1018
+ dar_backup_version: str,
1019
+ dar_version: str
1020
+ ) -> None:
1021
+ config = configparser.ConfigParser()
1022
+ config["MANIFEST"] = {
1023
+ "archive_dir_relative": archive_dir_relative,
1024
+ "archive_base": archive_base,
1025
+ "dar_backup_version": dar_backup_version,
1026
+ "dar_version": dar_version,
1027
+ "created_utc": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
1028
+ }
1029
+ config["ARCHIVE_FILES"] = {
1030
+ "files": "\n".join(archive_files)
1031
+ }
1032
+
1033
+ with open(manifest_path, "w", encoding="utf-8") as f:
1034
+ config.write(f)
1035
+
1036
+
1037
+ def _default_par2_config(config_settings: ConfigSettings) -> dict:
1038
+ return {
1039
+ "par2_dir": getattr(config_settings, "par2_dir", None),
1040
+ "par2_ratio_full": getattr(config_settings, "par2_ratio_full", None),
1041
+ "par2_ratio_diff": getattr(config_settings, "par2_ratio_diff", None),
1042
+ "par2_ratio_incr": getattr(config_settings, "par2_ratio_incr", None),
1043
+ "par2_run_verify": getattr(config_settings, "par2_run_verify", None),
1044
+ "par2_enabled": getattr(config_settings, "par2_enabled", True),
1045
+ }
1046
+
1047
+
1048
def generate_par2_files(backup_file: str, config_settings: ConfigSettings, args, backup_definition: str = None):
    """
    Generate PAR2 files for a given backup file in the specified backup directory.

    A single `par2 create` run covers all dar slices of the archive; the
    redundancy ratio may be overridden per backup type (FULL/DIFF/INCR).
    When a separate par2 directory is configured, a manifest is written next
    to the par2 set, and an optional `par2 verify` pass can be run.

    Args:
        backup_file (str): The name of the backup file.
        config_settings: The configuration settings object.
        args: The command-line arguments object.
        backup_definition (str): The backup definition name used for per-backup overrides.

    Raises:
        subprocess.CalledProcessError: If the par2 command fails to execute.

    Returns:
        None
    """
    # Prefer per-definition par2 settings when the config object supports them;
    # fall back to flat attributes on the config object otherwise.
    if hasattr(config_settings, "get_par2_config"):
        par2_config = config_settings.get_par2_config(backup_definition)
    else:
        par2_config = _default_par2_config(config_settings)
    if not par2_config.get("par2_enabled", False):
        logger.debug("PAR2 disabled for this backup definition, skipping.")
        return

    archive_dir = config_settings.backup_dir
    # NOTE(review): _parse_archive_base / _get_backup_type_from_archive_base /
    # _list_dar_slices / _validate_slice_sequence are defined elsewhere in this
    # module; presumably they derive "<def>_<TYPE>_<date>" and the slice list.
    archive_base = _parse_archive_base(backup_file)
    backup_type = _get_backup_type_from_archive_base(archive_base)
    par2_dir = par2_config.get("par2_dir")
    if par2_dir:
        # Allow ~ and $VARS in the configured par2 directory; create it if needed.
        par2_dir = os.path.expanduser(os.path.expandvars(par2_dir))
        os.makedirs(par2_dir, exist_ok=True)

    # Per-type ratio override, else the global error_correction_percent.
    ratio = _get_par2_ratio(backup_type, par2_config, config_settings.error_correction_percent)

    dar_slices = _list_dar_slices(archive_dir, archive_base)
    _validate_slice_sequence(dar_slices, archive_base)
    # NOTE(review): no longer used after the switch from per-slice par2 runs
    # to a single invocation covering all slices.
    number_of_slices = len(dar_slices)

    par2_output_dir = par2_dir or archive_dir
    par2_path = os.path.join(par2_output_dir, f"{archive_base}.par2")
    dar_slice_paths = [os.path.join(archive_dir, slice_file) for slice_file in dar_slices]
    logger.info(f"Generating par2 set for archive: {archive_base}")
    # -B sets the base path so par2 stores slice names relative to archive_dir;
    # the doubled -q keeps par2 output minimal.
    command = ['par2', 'create', '-B', archive_dir, f'-r{ratio}', '-q', '-q', par2_path] + dar_slice_paths
    process = runner.run(command, timeout=config_settings.command_timeout_secs)
    if process.returncode != 0:
        logger.error(f"Error generating par2 files for {archive_base}")
        raise subprocess.CalledProcessError(process.returncode, command)

    # Only write a manifest when par2 output is separated from the archives:
    # it records how to get back from the par2 dir to the protected slices.
    if par2_dir:
        archive_dir_relative = os.path.relpath(archive_dir, par2_dir)
        manifest_path = f"{par2_path}.manifest.ini"
        _write_par2_manifest(
            manifest_path=manifest_path,
            archive_dir_relative=archive_dir_relative,
            archive_base=archive_base,
            archive_files=dar_slices,
            dar_backup_version=about.__version__,
            # args.dar_version is set in main() from `dar` binary info;
            # default guards mocked/partial args objects in tests.
            dar_version=getattr(args, "dar_version", "unknown")
        )
        logger.info(f"Wrote par2 manifest: {manifest_path}")

    # Optional integrity pass right after creation.
    if par2_config.get("par2_run_verify"):
        logger.info(f"Verifying par2 set for archive: {archive_base}")
        verify_command = ['par2', 'verify', '-B', archive_dir, par2_path]
        verify_process = runner.run(verify_command, timeout=config_settings.command_timeout_secs)
        if verify_process.returncode != 0:
            raise subprocess.CalledProcessError(verify_process.returncode, verify_command)
    return
613
1116
 
614
1117
 
615
1118
  def filter_darrc_file(darrc_path):
@@ -765,6 +1268,57 @@ def print_readme(path: str = None, pretty: bool = True):
765
1268
  path = Path(__file__).parent / "README.md"
766
1269
  print_markdown(str(path), pretty=pretty)
767
1270
 
1271
def list_definitions(backup_d_dir: str) -> List[str]:
    """
    Return the backup definition filenames found in BACKUP.D_DIR.

    Only regular files are included; names are returned sorted alphabetically.

    Raises:
        RuntimeError: If BACKUP.D_DIR is missing or not a directory.
    """
    root = Path(backup_d_dir)
    if not root.is_dir():
        raise RuntimeError(f"BACKUP.D_DIR does not exist or is not a directory: {backup_d_dir}")
    names = [child.name for child in root.iterdir() if child.is_file()]
    names.sort()
    return names
1279
+
1280
+
1281
def clean_restore_test_directory(config_settings: ConfigSettings):
    """
    Empty the configured restore-test directory to guarantee a clean slate.

    Reads `config_settings.test_restore_dir`; silently returns when the
    option is unset or the directory does not exist. Deletes the directory's
    *contents* (files, symlinks, subtrees), never the directory itself.
    Refuses to operate on critical system paths and on the user's home
    directory. Individual removal failures are logged as warnings and do
    not abort the cleanup.
    """
    restore_dir = getattr(config_settings, "test_restore_dir", None)
    if not restore_dir:
        return

    # Allow ~ and $VARS in the configured path.
    restore_dir = os.path.expanduser(os.path.expandvars(restore_dir))

    if not os.path.exists(restore_dir):
        return

    # Safety: Do not delete if it resolves to a critical path
    critical_paths = ["/", "/home", "/root", "/usr", "/var", "/etc", "/tmp", "/opt", "/bin", "/sbin", "/boot", "/dev", "/proc", "/sys", "/run"]
    # realpath() resolves symlinks, so a symlink pointing at e.g. "/" is caught too.
    normalized = os.path.realpath(restore_dir)

    # Check exact matches
    # NOTE(review): only exact matches are refused — a *subdirectory* of a
    # critical path (e.g. /tmp/restore-test) is intentionally still cleanable.
    if normalized in critical_paths:
        logger.warning(f"Refusing to clean critical directory: {normalized}")
        return

    # Check if it's the user's home directory
    home = os.path.expanduser("~")
    if normalized == home:
        logger.warning(f"Refusing to clean user home directory: {normalized}")
        return

    logger.debug(f"Cleaning restore test directory: {restore_dir}")
    try:
        for item in os.listdir(restore_dir):
            item_path = os.path.join(restore_dir, item)
            try:
                # unlink() covers regular files and symlinks (including symlinks
                # to directories — checked before rmtree so links are never followed);
                # rmtree() removes real directory trees.
                if os.path.isfile(item_path) or os.path.islink(item_path):
                    os.unlink(item_path)
                elif os.path.isdir(item_path):
                    shutil.rmtree(item_path)
            except Exception as e:
                # Best effort: keep going even if one entry cannot be removed.
                logger.warning(f"Failed to remove {item_path}: {e}")
    except Exception as e:
        logger.warning(f"Failed to clean restore directory {restore_dir}: {e}")
768
1322
 
769
1323
 
770
1324
  def main():
@@ -782,15 +1336,24 @@ def main():
782
1336
  parser.add_argument('-I', '--incremental-backup', action='store_true', help="Perform incremental backup.")
783
1337
  parser.add_argument('-d', '--backup-definition', help="Specific 'recipe' to select directories and files.").completer = backup_definition_completer
784
1338
  parser.add_argument('--alternate-reference-archive', help="DIFF or INCR compared to specified archive.").completer = list_archive_completer
785
- parser.add_argument('-c', '--config-file', type=str, help="Path to 'dar-backup.conf'", default='~/.config/dar-backup/dar-backup.conf')
1339
+ parser.add_argument('-c', '--config-file', type=str, help="Path to 'dar-backup.conf'", default=None)
786
1340
  parser.add_argument('--darrc', type=str, help='Optional path to .darrc')
787
- parser.add_argument('-l', '--list', action='store_true', help="List available archives.").completer = list_archive_completer
1341
+ parser.add_argument(
1342
+ '-l',
1343
+ '--list',
1344
+ nargs='?',
1345
+ const=True,
1346
+ default=False,
1347
+ help="List available archives.",
1348
+ ).completer = list_archive_completer
788
1349
  parser.add_argument('--list-contents', help="List the contents of the specified archive.").completer = list_archive_completer
1350
+ parser.add_argument('--list-definitions', action='store_true', help="List available backup definitions from BACKUP.D_DIR.")
789
1351
  parser.add_argument('--selection', type=str, help="Selection string to pass to 'dar', e.g. --selection=\"-I '*.NEF'\"")
790
1352
  # parser.add_argument('-r', '--restore', nargs=1, type=str, help="Restore specified archive.")
791
1353
  parser.add_argument('-r', '--restore', type=str, help="Restore specified archive.").completer = list_archive_completer
792
1354
  parser.add_argument('--restore-dir', type=str, help="Directory to restore files to.")
793
1355
  parser.add_argument('--verbose', action='store_true', help="Print various status messages to screen")
1356
+ parser.add_argument('--preflight-check', action='store_true', help="Run preflight checks and exit")
794
1357
  parser.add_argument('--suppress-dar-msg', action='store_true', help="cancel dar options in .darrc: -vt, -vs, -vd, -vf and -va")
795
1358
  parser.add_argument('--log-level', type=str, help="`debug` or `trace`", default="info")
796
1359
  parser.add_argument('--log-stdout', action='store_true', help='also print log messages to stdout')
@@ -804,6 +1367,11 @@ def main():
804
1367
 
805
1368
  argcomplete.autocomplete(parser)
806
1369
  args = parser.parse_args()
1370
+ # Ensure new flags are present when parse_args is mocked in tests
1371
+ if not hasattr(args, "preflight_check"):
1372
+ args.preflight_check = False
1373
+ if not hasattr(args, "list_definitions"):
1374
+ args.list_definitions = False
807
1375
 
808
1376
  if args.version:
809
1377
  show_version()
@@ -825,26 +1393,85 @@ def main():
825
1393
  exit(0)
826
1394
 
827
1395
 
1396
+ # be backwards compatible with older versions
1397
+ DEFAULT_CONFIG_FILE = "~/.config/dar-backup/dar-backup.conf"
828
1398
 
829
- if not args.config_file:
830
- print(f"Config file not specified, exiting", file=stderr)
831
- exit(1)
1399
+ env_cf = os.getenv("DAR_BACKUP_CONFIG_FILE")
1400
+ env_cf = env_cf.strip() if env_cf else None
1401
+
1402
+ cli_cf = args.config_file.strip() if args.config_file else None
832
1403
 
833
- config_settings_path = os.path.expanduser(os.path.expandvars(args.config_file))
834
- if not os.path.exists(config_settings_path):
835
- print(f"Config file {args.config_file} does not exist.", file=stderr)
836
- exit(127)
1404
+ raw_config = (
1405
+ cli_cf
1406
+ or env_cf
1407
+ or DEFAULT_CONFIG_FILE
1408
+ )
1409
+
1410
+ config_settings_path = get_config_file(args)
1411
+
1412
+ if not (os.path.isfile(config_settings_path) and os.access(config_settings_path, os.R_OK)):
1413
+ print(f"Config file {config_settings_path} must exist and be readable.", file=stderr)
1414
+ raise SystemExit(127)
837
1415
 
838
1416
  args.config_file = config_settings_path
839
- config_settings = ConfigSettings(args.config_file)
1417
+ try:
1418
+ config_settings = ConfigSettings(args.config_file)
1419
+ except Exception as exc:
1420
+ msg = f"Config error: {exc}"
1421
+ print(msg, file=stderr)
1422
+ ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
1423
+ send_discord_message(f"{ts} - dar-backup: FAILURE - {msg}")
1424
+ exit(127)
1425
+
1426
+ if args.list_definitions:
1427
+ try:
1428
+ for name in list_definitions(config_settings.backup_d_dir):
1429
+ print(name)
1430
+ except RuntimeError as exc:
1431
+ print(str(exc), file=stderr)
1432
+ exit(127)
1433
+ exit(0)
1434
+
1435
+ try:
1436
+ validate_required_directories(config_settings)
1437
+ except RuntimeError as exc:
1438
+ ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
1439
+ send_discord_message(f"{ts} - dar-backup: FAILURE - {exc}", config_settings=config_settings)
1440
+ print(str(exc), file=stderr)
1441
+ exit(127)
1442
+
1443
+ # Run preflight checks always; if --preflight-check is set, exit afterward.
1444
+ ok = preflight_check(args, config_settings)
1445
+ if not ok:
1446
+ ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
1447
+ send_discord_message(f"{ts} - dar-backup: FAILURE - preflight checks failed", config_settings=config_settings)
1448
+ exit_code = 127 if args.backup_definition else 1
1449
+ exit(exit_code)
1450
+ if args.preflight_check:
1451
+ exit(0)
840
1452
 
841
1453
  command_output_log = config_settings.logfile_location.replace("dar-backup.log", "dar-backup-commands.log")
842
1454
  if command_output_log == config_settings.logfile_location:
843
1455
  print(f"Error: logfile_location in {args.config_file} does not end at 'dar-backup.log', exiting", file=stderr)
844
1456
 
845
- logger = setup_logging(config_settings.logfile_location, command_output_log, args.log_level, args.log_stdout, logfile_max_bytes=config_settings.logfile_max_bytes, logfile_backup_count=config_settings.logfile_backup_count)
1457
+ logger = setup_logging(
1458
+ config_settings.logfile_location,
1459
+ command_output_log,
1460
+ args.log_level,
1461
+ args.log_stdout,
1462
+ logfile_max_bytes=config_settings.logfile_max_bytes,
1463
+ logfile_backup_count=config_settings.logfile_backup_count,
1464
+ trace_log_max_bytes=getattr(config_settings, "trace_log_max_bytes", 10485760),
1465
+ trace_log_backup_count=getattr(config_settings, "trace_log_backup_count", 1)
1466
+ )
846
1467
  command_logger = get_logger(command_output_logger = True)
847
- runner = CommandRunner(logger=logger, command_logger=command_logger)
1468
+ runner = CommandRunner(
1469
+ logger=logger,
1470
+ command_logger=command_logger,
1471
+ default_capture_limit_bytes=getattr(config_settings, "command_capture_max_bytes", None)
1472
+ )
1473
+
1474
+ clean_restore_test_directory(config_settings)
848
1475
 
849
1476
 
850
1477
  try:
@@ -873,6 +1500,7 @@ def main():
873
1500
  logger.debug(f"`Args`:\n{args}")
874
1501
  logger.debug(f"`Config_settings`:\n{config_settings}")
875
1502
  dar_properties = get_binary_info(command='dar')
1503
+ args.dar_version = dar_properties.get('version', 'unknown')
876
1504
  start_msgs.append(('dar path:', dar_properties['path']))
877
1505
  start_msgs.append(('dar version:', dar_properties['version']))
878
1506
 
@@ -914,14 +1542,23 @@ def main():
914
1542
 
915
1543
  requirements('PREREQ', config_settings)
916
1544
 
1545
+ stats: List[dict] = []
1546
+
917
1547
  if args.list:
918
- list_backups(config_settings.backup_dir, args.backup_definition)
1548
+ list_filter = args.backup_definition
1549
+ if isinstance(args.list, str):
1550
+ if list_filter:
1551
+ if args.list.startswith(list_filter):
1552
+ list_filter = args.list
1553
+ else:
1554
+ list_filter = args.list
1555
+ list_backups(config_settings.backup_dir, list_filter)
919
1556
  elif args.full_backup and not args.differential_backup and not args.incremental_backup:
920
- results.extend(perform_backup(args, config_settings, "FULL"))
1557
+ results.extend(perform_backup(args, config_settings, "FULL", stats))
921
1558
  elif args.differential_backup and not args.full_backup and not args.incremental_backup:
922
- results.extend(perform_backup(args, config_settings, "DIFF"))
1559
+ results.extend(perform_backup(args, config_settings, "DIFF", stats))
923
1560
  elif args.incremental_backup and not args.full_backup and not args.differential_backup:
924
- results.extend(perform_backup(args, config_settings, "INCR"))
1561
+ results.extend(perform_backup(args, config_settings, "INCR", stats))
925
1562
  logger.debug(f"results from perform_backup(): {results}")
926
1563
  elif args.list_contents:
927
1564
  list_contents(args.list_contents, config_settings.backup_dir, args.selection)
@@ -933,11 +1570,42 @@ def main():
933
1570
 
934
1571
  logger.debug(f"results[]: {results}")
935
1572
 
1573
+ # Send aggregated Discord notification if stats were collected
1574
+ if stats:
1575
+ total = len(stats)
1576
+ failures = [s for s in stats if s['status'] == 'FAILURE']
1577
+ warnings = [s for s in stats if s['status'] == 'WARNING']
1578
+ successes = [s for s in stats if s['status'] == 'SUCCESS']
1579
+
1580
+ ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
1581
+
1582
+ if failures or warnings:
1583
+ msg_lines = [f"{ts} - dar-backup Run Completed"]
1584
+ msg_lines.append(f"Total: {total}, Success: {len(successes)}, Warning: {len(warnings)}, Failure: {len(failures)}")
1585
+
1586
+ if failures:
1587
+ msg_lines.append("\nFailures:")
1588
+ for f in failures:
1589
+ msg_lines.append(f"- {f['definition']} ({f['type']})")
1590
+
1591
+ if warnings:
1592
+ msg_lines.append("\nWarnings:")
1593
+ for w in warnings:
1594
+ msg_lines.append(f"- {w['definition']} ({w['type']})")
1595
+
1596
+ send_discord_message("\n".join(msg_lines), config_settings=config_settings)
1597
+ else:
1598
+ # All successful
1599
+ send_discord_message(f"{ts} - dar-backup: SUCCESS - All {total} backups completed successfully.", config_settings=config_settings)
1600
+
936
1601
  requirements('POSTREQ', config_settings)
937
1602
 
938
1603
 
939
1604
  except Exception as e:
940
- logger.error("Exception details:", exc_info=True)
1605
+ msg = f"Unexpected error: {e}"
1606
+ logger.error(msg, exc_info=True)
1607
+ ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
1608
+ send_discord_message(f"{ts} - dar-backup: FAILURE - {msg}", config_settings=config_settings)
941
1609
  results.append((repr(e), 1))
942
1610
  finally:
943
1611
  end_time=int(time())
@@ -951,6 +1619,7 @@ def main():
951
1619
 
952
1620
  # Determine exit code
953
1621
  error = False
1622
+ final_exit_code = 0
954
1623
  logger.debug(f"results[]: {results}")
955
1624
  if results:
956
1625
  i = 0
@@ -961,15 +1630,21 @@ def main():
961
1630
  if exit_code > 0:
962
1631
  error = True
963
1632
  args.verbose and print(msg)
1633
+ if exit_code == 1:
1634
+ final_exit_code = 1
1635
+ elif exit_code == 2 and final_exit_code == 0:
1636
+ final_exit_code = 2
964
1637
  else:
965
1638
  logger.error(f"not correct result type: {result}, which must be a tuple (<msg>, <exit_code>)")
1639
+ error = True
1640
+ final_exit_code = 1
966
1641
  i=i+1
967
1642
 
968
1643
  console = Console()
969
1644
  if error:
970
1645
  if args.verbose:
971
1646
  console.print(Text("Errors encountered", style="bold red"))
972
- exit(1)
1647
+ exit(final_exit_code or 1)
973
1648
  else:
974
1649
  if args.verbose:
975
1650
  console.print(Text("Success: all backups completed", style="bold green"))