dar-backup 1.0.0.1__py3-none-any.whl → 1.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dar_backup/util.py CHANGED
@@ -14,6 +14,7 @@ import locale
  import configparser
  import inspect
  import logging
+ import json

  import os
  import re
@@ -23,12 +24,16 @@ import shutil
  import sys
  import threading
  import traceback
+ import urllib.error
+ import urllib.request

  import dar_backup.__about__ as about


+
  from argcomplete.completers import ChoicesCompleter
- from datetime import datetime
+ from datetime import datetime, date
+ from datetime import date
  from dar_backup.config_settings import ConfigSettings
  from logging.handlers import RotatingFileHandler
  from pathlib import Path
@@ -42,6 +47,26 @@ from typing import Tuple
  logger=None
  secondary_logger=None

+ class CleanFormatter(logging.Formatter):
+     """
+     Formatter that ignores exception tracebacks.
+     """
+     def format(self, record):
+         # Save original exception info
+         orig_exc_info = record.exc_info
+         orig_exc_text = record.exc_text
+
+         # Temporarily hide it
+         record.exc_info = None
+         record.exc_text = None
+
+         try:
+             return super().format(record)
+         finally:
+             # Restore it so other handlers (like the trace handler) can use it
+             record.exc_info = orig_exc_info
+             record.exc_text = orig_exc_text
+
  #def setup_logging(log_file: str, command_output_log_file: str, log_level: str = "info", log_to_stdout: bool = False) -> logging.Logger:
  def setup_logging(
      log_file: str,
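The new CleanFormatter is the heart of the clean-log/trace-log split introduced below: it formats a record with its exc_info temporarily blanked, then restores it so a later handler can still emit the traceback. A minimal, self-contained sketch of the technique (stream handlers stand in for the rotating file handlers the package actually uses):

import logging
import sys

class CleanFormatter(logging.Formatter):
    # Same approach as the class added above: hide exc_info while formatting,
    # then restore it for handlers that still want the traceback.
    def format(self, record):
        orig_exc_info, orig_exc_text = record.exc_info, record.exc_text
        record.exc_info = None
        record.exc_text = None
        try:
            return super().format(record)
        finally:
            record.exc_info, record.exc_text = orig_exc_info, orig_exc_text

log = logging.getLogger("demo")
clean = logging.StreamHandler(sys.stdout)   # stands in for the main log file
clean.setFormatter(CleanFormatter('%(levelname)s - %(message)s'))
full = logging.StreamHandler(sys.stderr)    # stands in for the trace log file
full.setFormatter(logging.Formatter('%(levelname)s - %(message)s'))
log.addHandler(clean)
log.addHandler(full)

try:
    1 / 0
except ZeroDivisionError:
    log.exception("backup step failed")
# stdout gets one line; stderr gets the same line plus the full traceback

Restoring the saved values in finally is what makes the formatter safe to share: handler order no longer determines whether the traceback survives.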
@@ -50,10 +75,15 @@ def setup_logging(
      log_to_stdout: bool = False,
      logfile_max_bytes: int = 26214400,
      logfile_backup_count: int = 5,
+     trace_log_file: str = None,
+     trace_log_max_bytes: int = 10485760,
+     trace_log_backup_count: int = 1
  ) -> logging.Logger:

      """
      Sets up logging for the main program and a separate secondary logfile for command outputs.
+
+     Also sets up a trace log file that captures all logs at DEBUG level including stack traces.

      Args:
          log_file (str): The path to the main log file.
@@ -62,6 +92,9 @@ def setup_logging(
          log_to_stdout (bool): If True, log messages will be printed to the console. Defaults to False.
          logfile_max_bytes: max file size of a log file, default = 26214400.
          logfile_backup_count: max number of log files, default = 5.
+         trace_log_file (str): Optional path for the trace log file. Defaults to log_file with ".trace.log" suffix.
+         trace_log_max_bytes: max file size of the trace log file, default = 10485760 (10MB).
+         trace_log_backup_count: max number of trace log files, default = 1.

      Returns:
          a RotatingFileHandler logger instance.
@@ -80,6 +113,7 @@ def setup_logging(

      logging.Logger.trace = trace

+     # Main log file handler (clean logs)
      file_handler = RotatingFileHandler(
          log_file,
          maxBytes=logfile_max_bytes,
@@ -87,6 +121,23 @@ def setup_logging(
          encoding="utf-8",
      )

+     # Trace log file handler (full details)
+     if not trace_log_file:
+         if log_file == "/dev/null":
+             trace_log_file = "/dev/null"
+         else:
+             base, ext = os.path.splitext(log_file)
+             trace_log_file = f"{base}.trace{ext}"
+
+     trace_handler = RotatingFileHandler(
+         trace_log_file,
+         maxBytes=trace_log_max_bytes,
+         backupCount=trace_log_backup_count,
+         encoding="utf-8",
+     )
+     # Trace handler gets everything (DEBUG level) and keeps tracebacks
+     trace_handler.setLevel(logging.DEBUG)
+
      command_handler = RotatingFileHandler(
          command_output_log_file,
          maxBytes=logfile_max_bytes,
@@ -94,24 +145,35 @@ def setup_logging(
          encoding="utf-8",
      )

-     formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
-     file_handler.setFormatter(formatter)
-     command_handler.setFormatter(formatter)
+     standard_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
+     clean_formatter = CleanFormatter('%(asctime)s - %(levelname)s - %(message)s')
+
+     file_handler.setFormatter(clean_formatter)
+     trace_handler.setFormatter(standard_formatter)
+     command_handler.setFormatter(standard_formatter)


      # Setup main logger
      logger = logging.getLogger("main_logger")
-     logger.setLevel(logging.DEBUG if log_level == "debug" else TRACE_LEVEL_NUM if log_level == "trace" else logging.INFO)
+     # Ensure logger captures everything so trace_handler can see DEBUG messages even if main log_level is INFO
+     logger.setLevel(logging.DEBUG)
+
+     # Configure file_handler level based on user preference
+     file_handler.setLevel(logging.DEBUG if log_level == "debug" else TRACE_LEVEL_NUM if log_level == "trace" else logging.INFO)
+
      logger.addHandler(file_handler)
+     logger.addHandler(trace_handler)

      # Setup secondary logger for command outputs
      secondary_logger = logging.getLogger("command_output_logger")
      secondary_logger.setLevel(logging.DEBUG if log_level == "debug" else TRACE_LEVEL_NUM if log_level == "trace" else logging.INFO)
      secondary_logger.addHandler(command_handler)
+     secondary_logger.addHandler(trace_handler)

      if log_to_stdout:
          stdout_handler = logging.StreamHandler(sys.stdout)
-         stdout_handler.setFormatter(formatter)
+         stdout_handler.setFormatter(clean_formatter)
+         stdout_handler.setLevel(logging.DEBUG if log_level == "debug" else TRACE_LEVEL_NUM if log_level == "trace" else logging.INFO)
          logger.addHandler(stdout_handler)

      return logger
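Taken together, setup_logging now fans each record out to up to three sinks: a clean main log, a full trace log pinned at DEBUG, and optionally stdout. A hedged usage sketch (the paths are illustrative, not package defaults):

from dar_backup.util import setup_logging

logger = setup_logging(
    log_file="/tmp/dar-backup.log",                         # clean, no tracebacks
    command_output_log_file="/tmp/dar-backup-commands.log",
    log_level="info",
    trace_log_file=None,    # derived: /tmp/dar-backup.trace.log
)
logger.info("appears in both the main log and the trace log")
logger.debug("recorded only by the trace log while log_level is info")

Because the logger itself is now pinned at DEBUG and per-destination filtering moved to the handlers, the trace log keeps full detail without changing what the main log shows.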
@@ -136,15 +198,29 @@ def get_logger(command_output_logger: bool = False) -> logging.Logger:
      return secondary_logger if command_output_logger else logger


+ def _default_completer_logfile() -> str:
+     try:
+         uid = os.getuid()
+     except AttributeError:
+         uid = None
+     suffix = str(uid) if uid is not None else "unknown"
+     return f"/tmp/dar_backup_completer_{suffix}.log"
+
+
  # Setup completer logger only once
- def _setup_completer_logger(logfile="/tmp/dar_backup_completer.log"):
+ def _setup_completer_logger(logfile: str = None):
      logger = logging.getLogger("completer")
      if not logger.handlers:
-         handler = logging.FileHandler(logfile)
-         formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
-         handler.setFormatter(formatter)
-         logger.addHandler(handler)
-         logger.setLevel(logging.DEBUG)
+         try:
+             logfile = logfile or _default_completer_logfile()
+             handler = logging.FileHandler(logfile)
+             formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
+             handler.setFormatter(formatter)
+             logger.addHandler(handler)
+             logger.setLevel(logging.DEBUG)
+         except Exception:
+             logger.addHandler(logging.NullHandler())
+             logger.setLevel(logging.DEBUG)
      return logger

  # Singleton logger for completer debugging
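The per-user suffix avoids a single shared /tmp/dar_backup_completer.log that two accounts on one host would both try to own, and the try/except means a read-only /tmp degrades to a NullHandler instead of breaking completion. A tiny check of the helper (importing the private function purely for illustration):

import os
from dar_backup.util import _default_completer_logfile

print(_default_completer_logfile())
# e.g. /tmp/dar_backup_completer_1000.log on a POSIX host with uid 1000;
# /tmp/dar_backup_completer_unknown.log where os.getuid is unavailable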
@@ -200,10 +276,80 @@ def show_version():
      print(f"{script_name} source code is here: https://github.com/per2jensen/dar-backup")
      print(about.__license__)

+
+ def send_discord_message(
+     content: str,
+     config_settings: typing.Optional[ConfigSettings] = None,
+     timeout_seconds: int = 10
+ ) -> bool:
+     """
+     Send a message to a Discord webhook if configured either in the config file or via environment.
+
+     The environment variable DAR_BACKUP_DISCORD_WEBHOOK_URL, when set, takes precedence over the config file variable
+     with the same name. If neither is defined, the function logs an info-level message and returns False.
+
+     Returns:
+         bool: True if the message was sent successfully, otherwise False.
+     """
+     log = get_logger()
+
+     config_webhook = getattr(config_settings, "dar_backup_discord_webhook_url", None) if config_settings else None
+     env_webhook = os.environ.get("DAR_BACKUP_DISCORD_WEBHOOK_URL")
+
+     webhook_url = env_webhook or config_webhook
+     source = "environment" if env_webhook else ("config file" if config_webhook else None)
+
+     if not webhook_url:
+         log and log.info("Discord message not sent: DAR_BACKUP_DISCORD_WEBHOOK_URL not configured.")
+         return False
+
+     payload = json.dumps({"content": content}).encode("utf-8")
+     user_agent = f"dar-backup/{about.__version__}"
+
+     request = urllib.request.Request(
+         webhook_url,
+         data=payload,
+         headers={
+             "Accept": "application/json",
+             "Content-Type": "application/json",
+             "User-Agent": user_agent,
+         },
+         method="POST",
+     )
+
+     try:
+         with urllib.request.urlopen(request, timeout=timeout_seconds):
+             pass
+         log and log.debug(f"Discord webhook message sent using {source}.")
+         return True
+     except urllib.error.HTTPError as exc:
+         # Attempt to read a short error body for diagnostics
+         body = None
+         try:
+             body = exc.read().decode(errors="replace")
+         except Exception:
+             body = None
+         detail = f" body='{body.strip()}'" if body else ""
+         message = f"Discord webhook HTTP error {exc.code}: {exc.reason}{detail}"
+         if log:
+             log.error(message)
+         else:
+             print(message, file=sys.stderr)
+     except Exception as exc:
+         message = f"Failed to send Discord webhook message: {exc}"
+         if log:
+             log.error(message)
+         else:
+             print(message, file=sys.stderr)
+
+     return False
+
+
  def extract_version(output):
      match = re.search(r'(\d+\.\d+(\.\d+)?)', output)
      return match.group(1) if match else "unknown"

+
  def get_binary_info(command):
      """
      Return information about a binary command.
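send_discord_message is deliberately dependency-free: it posts JSON with urllib.request and never raises, returning False on any failure. A usage sketch (the webhook URL is a placeholder, not a real endpoint):

import os
from dar_backup.util import send_discord_message

# The environment variable takes precedence over any config-file value.
os.environ["DAR_BACKUP_DISCORD_WEBHOOK_URL"] = "https://discord.com/api/webhooks/<id>/<token>"

if send_discord_message("FULL backup completed"):
    print("notification sent")
else:
    print("not sent: unconfigured, HTTP error, or network failure")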
@@ -286,13 +432,64 @@ def requirements(type: str, config_setting: ConfigSettings):
      if type in config_setting.config:
          for key in sorted(config_setting.config[type].keys()):
              script = config_setting.config[type][key]
+             use_run_fallback = (
+                 os.getenv("PYTEST_CURRENT_TEST") is not None
+                 or getattr(subprocess.run, "__module__", "") != "subprocess"
+             )
              try:
-                 result = subprocess.run(script, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, shell=True, check=True)
-                 logger.debug(f"{type} {key}: '{script}' run, return code: {result.returncode}")
-                 logger.debug(f"{type} stdout:\n{result.stdout}")
-                 if result.returncode != 0:
-                     logger.error(f"{type} stderr:\n{result.stderr}")
-                     raise RuntimeError(f"{type} {key}: '{script}' failed, return code: {result.returncode}")
+                 if use_run_fallback:
+                     result = subprocess.run(
+                         script,
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.PIPE,
+                         text=True,
+                         shell=True,
+                         check=True
+                     )
+                     logger.debug(f"{type} {key}: '{script}' run, return code: {result.returncode}")
+                     logger.debug(f"{type} stdout:\n{result.stdout}")
+                     if result.returncode != 0:
+                         logger.error(f"{type} stderr:\n{result.stderr}")
+                         raise RuntimeError(f"{type} {key}: '{script}' failed, return code: {result.returncode}")
+                 else:
+                     process = subprocess.Popen(
+                         script,
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.PIPE,
+                         text=True,
+                         shell=True
+                     )
+                     stdout_lines = []
+                     stderr_lines = []
+
+                     def read_stream(stream, lines, level):
+                         if stream is None:
+                             return
+                         for line in stream:
+                             logger.log(level, line.rstrip())
+                             lines.append(line)
+
+                     stdout_thread = threading.Thread(
+                         target=read_stream,
+                         args=(process.stdout, stdout_lines, logging.DEBUG)
+                     )
+                     stderr_thread = threading.Thread(
+                         target=read_stream,
+                         args=(process.stderr, stderr_lines, logging.ERROR)
+                     )
+                     stdout_thread.start()
+                     stderr_thread.start()
+
+                     process.wait()
+                     stdout_thread.join()
+                     stderr_thread.join()
+
+                     logger.debug(f"{type} {key}: '{script}' run, return code: {process.returncode}")
+                     if process.returncode != 0:
+                         stderr_text = "".join(stderr_lines)
+                         if stderr_text:
+                             logger.error(f"{type} stderr:\n{stderr_text}")
+                         raise RuntimeError(f"{type} {key}: '{script}' failed, return code: {process.returncode}")
              except subprocess.CalledProcessError as e:
                  logger.error(f"Error executing {key}: '{script}': {e}")
                  raise e
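The use_run_fallback guard decides between the old one-shot subprocess.run path and the new streaming path: under pytest (PYTEST_CURRENT_TEST is set), or when subprocess.run has been replaced by a test double (its __module__ no longer reads "subprocess"), the simpler path keeps existing mocks working. The streaming branch drains stdout and stderr on separate threads so neither pipe buffer can fill and stall the child. A small sketch of just the detection logic (the monkeypatch at the end is a crude stand-in for a test mock):

import os
import subprocess

def use_run_fallback() -> bool:
    # The genuine subprocess.run reports __module__ == "subprocess";
    # a MagicMock or wrapper function normally does not.
    return (
        os.getenv("PYTEST_CURRENT_TEST") is not None
        or getattr(subprocess.run, "__module__", "") != "subprocess"
    )

print(use_run_fallback())                 # False in a plain interpreter session

subprocess.run = lambda *a, **kw: None    # stand-in for a mocked run()
print(use_run_fallback())                 # True: the replacement is detected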
@@ -428,16 +625,14 @@ def expand_path(path: str) -> str:
      return os.path.expanduser(os.path.expandvars(path))


-
  def backup_definition_completer(prefix, parsed_args, **kwargs):
-     config_path = getattr(parsed_args, 'config_file', '~/.config/dar-backup/dar-backup.conf')
-     config_path = expand_path(config_path)
-     config_file = os.path.expanduser(config_path)
      try:
+         config_file = get_config_file(parsed_args)
          config = ConfigSettings(config_file)
          backup_d_dir = os.path.expanduser(config.backup_d_dir)
          return [f for f in os.listdir(backup_d_dir) if f.startswith(prefix)]
      except Exception:
+         completer_logger.exception("backup_definition_completer failed")
          return []

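backup_definition_completer now delegates config discovery to get_config_file (added near the end of this file), which applies a fixed precedence: CLI --config-file, then DAR_BACKUP_CONFIG_FILE, then the default path. A quick sketch of that precedence:

import os
from argparse import Namespace
from dar_backup.util import get_config_file

os.environ.pop("DAR_BACKUP_CONFIG_FILE", None)
print(get_config_file(Namespace()))
# -> expanded ~/.config/dar-backup/dar-backup.conf

os.environ["DAR_BACKUP_CONFIG_FILE"] = "/etc/dar-backup.conf"
print(get_config_file(Namespace()))
# -> /etc/dar-backup.conf

print(get_config_file(Namespace(config_file="./my.conf")))
# -> absolute path to ./my.conf; the CLI flag wins over the environment

Logging the exception instead of silently returning [] also means completer failures finally leave a trail in the completer log.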
@@ -450,46 +645,84 @@ def extract_backup_definition_fallback() -> str:
          str: The value of the --backup-definition argument if found, else an empty string.
      """
      comp_line = os.environ.get("COMP_LINE", "")
-     # Match both "--backup-definition VALUE" and "-d VALUE"
-     match = re.search(r"(--backup-definition|-d)\s+([^\s]+)", comp_line)
-     if match:
-         return match.group(2)
+     try:
+         tokens = shlex.split(comp_line)
+     except ValueError:
+         tokens = comp_line.split()
+
+     for i, token in enumerate(tokens):
+         if token in ("-d", "--backup-definition", "--backup-def"):
+             if i + 1 < len(tokens):
+                 return tokens[i + 1]
+         elif token.startswith(("--backup-definition=", "--backup-def=", "-d=")):
+             return token.split("=", 1)[1]
      return ""




  def list_archive_completer(prefix, parsed_args, **kwargs):
-     import os
-     import configparser
-     from dar_backup.util import extract_backup_definition_fallback
+     try:
+         import os
+         import configparser
+         from dar_backup.util import extract_backup_definition_fallback
+
+         comp_line = os.environ.get("COMP_LINE", "")
+         if "cleanup" in comp_line and "--cleanup-specific-archives" not in comp_line:
+             return []
+
+         backup_def = (
+             getattr(parsed_args, "backup_definition", None)
+             or getattr(parsed_args, "backup_def", None)
+             or extract_backup_definition_fallback()
+         )
+         head, last = split_archive_list_prefix(prefix)
+         config_path = get_config_file(parsed_args)
+         if not os.path.exists(config_path):
+             return []

-     backup_def = getattr(parsed_args, "backup_definition", None) or extract_backup_definition_fallback()
-     config_path = getattr(parsed_args, "config_file", None) or "~/.config/dar-backup/dar-backup.conf"
+         config = configparser.ConfigParser()
+         config.read(config_path)
+         backup_dir = config.get("DIRECTORIES", "BACKUP_DIR", fallback="")
+         backup_dir = os.path.expanduser(os.path.expandvars(backup_dir))

-     config_path = os.path.expanduser(os.path.expandvars(config_path))
-     if not os.path.exists(config_path):
-         return []
+         if not os.path.isdir(backup_dir):
+             return []

-     config = configparser.ConfigParser()
-     config.read(config_path)
-     backup_dir = config.get("DIRECTORIES", "BACKUP_DIR", fallback="")
-     backup_dir = os.path.expanduser(os.path.expandvars(backup_dir))
+         files = os.listdir(backup_dir)
+         archive_re = re.compile(rf"^{re.escape(backup_def)}_.+_\d{{4}}-\d{{2}}-\d{{2}}\.1\.dar$") if backup_def else re.compile(r".+_\d{4}-\d{2}-\d{2}\.1\.dar$")

-     if not os.path.isdir(backup_dir):
-         return []
+         completions = []
+         for fname in files:
+             if not archive_re.match(fname):
+                 continue
+             base = fname.rsplit(".1.dar", 1)[0]
+             if last and not base.startswith(last):
+                 continue
+             if head:
+                 completions.append(f"{head}, {base}")
+             else:
+                 completions.append(base)

-     files = os.listdir(backup_dir)
-     archive_re = re.compile(rf"^{re.escape(backup_def)}_.+_\d{{4}}-\d{{2}}-\d{{2}}\.1\.dar$") if backup_def else re.compile(r".+_\d{4}-\d{2}-\d{2}\.1\.dar$")
+         completions = sorted(set(completions), key=sort_key)
+         return completions or ["[no matching archives]"]
+     except Exception:
+         completer_logger.exception("list_archive_completer failed")
+         return []

-     completions = [
-         f.rsplit(".1.dar", 1)[0]
-         for f in files
-         if archive_re.match(f)
-     ]

-     completions = sorted(set(completions), key=sort_key)
-     return completions or ["[no matching archives]"]
+ def split_archive_list_prefix(prefix: str) -> tuple[str, str]:
+     """
+     Split a comma-separated archive list into (head, last).
+     Strips whitespace so completions don't include leading/trailing spaces.
+     """
+     if not prefix or "," not in prefix:
+         return ("", prefix.strip())
+     parts = [part.strip() for part in prefix.split(",")]
+     head_parts = [part for part in parts[:-1] if part]
+     head = ", ".join(head_parts)
+     last = parts[-1]
+     return (head, last)


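Two behaviors here are easiest to see on concrete input. The fallback now tokenizes COMP_LINE with shlex, so quoted values and --option=value forms are handled, and split_archive_list_prefix lets completion keep working after a comma-separated list of archives. Expected values, given the definitions above:

import os
from dar_backup.util import extract_backup_definition_fallback, split_archive_list_prefix

os.environ["COMP_LINE"] = "manager --backup-def=pj-homedir --add-specific-archive "
print(extract_backup_definition_fallback())
# -> pj-homedir

print(split_archive_list_prefix("a_FULL_2025-01-01, b_DIFF"))
# -> ('a_FULL_2025-01-01', 'b_DIFF'): completion continues on the last element

print(split_archive_list_prefix("single"))
# -> ('', 'single')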
@@ -511,6 +744,7 @@ def sort_key(archive_name: str):
          completer_logger.debug(f"Archive: {archive_name}, Def: {def_name}, Date: {date}")
          return (def_name, date)
      except Exception:
+         completer_logger.exception("sort_key failed")
          return (archive_name, datetime.min)

@@ -523,52 +757,56 @@ def archive_content_completer(prefix, parsed_args, **kwargs):
      Only entries found in the catalog database (via `dar_manager --list`) are shown.
      """

-     from dar_backup.config_settings import ConfigSettings
-     import subprocess
-     import re
-     import os
-     from datetime import datetime
-
-     # Expand config path
-     config_file = expand_path(getattr(parsed_args, "config_file", "~/.config/dar-backup/dar-backup.conf"))
-     config = ConfigSettings(config_file=config_file)
-     #db_dir = expand_path((getattr(config, 'manager_db_dir', config.backup_dir))) # use manager_db_dir if set, else backup_dir
-     db_dir = expand_path(getattr(config, 'manager_db_dir', None) or config.backup_dir)
-
-     # Which db files to inspect?
-     backup_def = getattr(parsed_args, "backup_def", None)
-     db_files = (
-         [os.path.join( db_dir, f"{backup_def}.db")]
-         if backup_def
-         else [os.path.join( db_dir, f) for f in os.listdir( db_dir) if f.endswith(".db")]
-     )
+     try:
+         from dar_backup.config_settings import ConfigSettings
+         import subprocess
+         import re
+         import os
+         from datetime import datetime
+
+         # Expand config path
+         config_file = get_config_file(parsed_args)
+         config = ConfigSettings(config_file=config_file)
+         #db_dir = expand_path((getattr(config, 'manager_db_dir', config.backup_dir))) # use manager_db_dir if set, else backup_dir
+         db_dir = expand_path(getattr(config, 'manager_db_dir', None) or config.backup_dir)
+
+         # Which db files to inspect?
+         backup_def = getattr(parsed_args, "backup_def", None)
+         db_files = (
+             [os.path.join( db_dir, f"{backup_def}.db")]
+             if backup_def
+             else [os.path.join( db_dir, f) for f in os.listdir( db_dir) if f.endswith(".db")]
+         )

-     completions = []
+         completions = []

-     for db_path in db_files:
-         if not os.path.exists(db_path):
-             continue
+         for db_path in db_files:
+             if not os.path.exists(db_path):
+                 continue

-         try:
-             result = subprocess.run(
-                 ["dar_manager", "--base", db_path, "--list"],
-                 stdout=subprocess.PIPE,
-                 stderr=subprocess.DEVNULL,
-                 text=True,
-                 check=True
-             )
-         except subprocess.CalledProcessError:
-             continue
+             try:
+                 result = subprocess.run(
+                     ["dar_manager", "--base", db_path, "--list"],
+                     stdout=subprocess.PIPE,
+                     stderr=subprocess.DEVNULL,
+                     text=True,
+                     check=True
+                 )
+             except subprocess.CalledProcessError:
+                 continue

-         for line in result.stdout.splitlines():
-             parts = line.strip().split("\t")
-             if len(parts) >= 3:
-                 archive = parts[2].strip()
-                 if archive.startswith(prefix):
-                     completions.append(archive)
+             for line in result.stdout.splitlines():
+                 parts = line.strip().split("\t")
+                 if len(parts) >= 3:
+                     archive = parts[2].strip()
+                     if archive.startswith(prefix):
+                         completions.append(archive)

-     completions = sorted(set(completions), key=sort_key)
-     return completions or ["[no matching archives]"]
+         completions = sorted(set(completions), key=sort_key)
+         return completions or ["[no matching archives]"]
+     except Exception:
+         completer_logger.exception("archive_content_completer failed")
+         return []


@@ -578,55 +816,59 @@ def add_specific_archive_completer(prefix, parsed_args, **kwargs):
      but not yet present in the <backup_def>.db catalog.
      If --backup-def is provided, restrict suggestions to that.
      """
-     from dar_backup.config_settings import ConfigSettings
-     import subprocess
-     import re
-     import os
-     from datetime import datetime
-
-     config_file = expand_path(getattr(parsed_args, "config_file", "~/.config/dar-backup/dar-backup.conf"))
-     config = ConfigSettings(config_file=config_file)
-     #db_dir = expand_path((getattr(config, 'manager_db_dir', config.backup_dir))) # use manager_db_dir if set, else backup_dir
-     db_dir = expand_path(getattr(config, 'manager_db_dir') or config.backup_dir)
-     backup_dir = config.backup_dir
-     backup_def = getattr(parsed_args, "backup_def", None)
-
-     # Match pattern for archive base names: e.g. test_FULL_2025-04-01
-     dar_pattern = re.compile(r"^(.*?_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2}))\.1\.dar$")
-
-     # Step 1: scan backup_dir for .1.dar files
-     all_archives = set()
-     for fname in os.listdir(backup_dir):
-         match = dar_pattern.match(fname)
-         if match:
-             base = match.group(1)
-             if base.startswith(prefix):
-                 if not backup_def or base.startswith(f"{backup_def}_"):
-                     all_archives.add(base)
-
-     # Step 2: exclude ones already present in the .db
-     db_path = os.path.join(db_dir, f"{backup_def}.db") if backup_def else None
-     existing = set()
-
-     if db_path and os.path.exists(db_path):
-         try:
-             result = subprocess.run(
-                 ["dar_manager", "--base", db_path, "--list"],
-                 stdout=subprocess.PIPE,
-                 stderr=subprocess.DEVNULL,
-                 text=True,
-                 check=True
-             )
-             for line in result.stdout.splitlines():
-                 parts = line.strip().split("\t")
-                 if len(parts) >= 3:
-                     existing.add(parts[2].strip())
-         except subprocess.CalledProcessError:
-             pass
-
-     # Step 3: return filtered list
-     candidates = sorted(archive for archive in all_archives if archive not in existing)
-     return candidates or ["[no new archives]"]
+     try:
+         from dar_backup.config_settings import ConfigSettings
+         import subprocess
+         import re
+         import os
+         from datetime import datetime
+
+         config_file = get_config_file(parsed_args)
+         config = ConfigSettings(config_file=config_file)
+         #db_dir = expand_path((getattr(config, 'manager_db_dir', config.backup_dir))) # use manager_db_dir if set, else backup_dir
+         db_dir = expand_path(getattr(config, 'manager_db_dir') or config.backup_dir)
+         backup_dir = config.backup_dir
+         backup_def = getattr(parsed_args, "backup_def", None)
+
+         # Match pattern for archive base names: e.g. test_FULL_2025-04-01
+         dar_pattern = re.compile(r"^(.*?_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2}))\.1\.dar$")
+
+         # Step 1: scan backup_dir for .1.dar files
+         all_archives = set()
+         for fname in os.listdir(backup_dir):
+             match = dar_pattern.match(fname)
+             if match:
+                 base = match.group(1)
+                 if base.startswith(prefix):
+                     if not backup_def or base.startswith(f"{backup_def}_"):
+                         all_archives.add(base)
+
+         # Step 2: exclude ones already present in the .db
+         db_path = os.path.join(db_dir, f"{backup_def}.db") if backup_def else None
+         existing = set()
+
+         if db_path and os.path.exists(db_path):
+             try:
+                 result = subprocess.run(
+                     ["dar_manager", "--base", db_path, "--list"],
+                     stdout=subprocess.PIPE,
+                     stderr=subprocess.DEVNULL,
+                     text=True,
+                     check=True
+                 )
+                 for line in result.stdout.splitlines():
+                     parts = line.strip().split("\t")
+                     if len(parts) >= 3:
+                         existing.add(parts[2].strip())
+             except subprocess.CalledProcessError:
+                 pass
+
+         # Step 3: return filtered list
+         candidates = sorted(archive for archive in all_archives if archive not in existing)
+         return candidates or ["[no new archives]"]
+     except Exception:
+         completer_logger.exception("add_specific_archive_completer failed")
+         return []


@@ -735,8 +977,12 @@ def normalize_dir(path: str) -> str:

  # Reusable pattern for archive file naming
  archive_pattern = re.compile(
-     r'^.+?_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2})\.\d+\.dar'
-     r'(?:\.vol\d+(?:\+\d+)?\.par2|\.par2)?$'
+     r'^.+?_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2})'
+     r'(?:'
+     r'\.\d+\.dar(?:\.vol\d+(?:\+\d+)?\.par2|\.par2)?'
+     r'|(?:\.vol\d+(?:\+\d+)?\.par2|\.par2)'
+     r'|\.par2\.manifest\.ini'
+     r')$'
  )

  def is_safe_filename(filename: str) -> bool:
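The widened archive_pattern now also recognizes standalone par2 files and the new .par2.manifest.ini manifest, where the old pattern required a .N.dar slice in every name. Illustrative matches against the new pattern (filenames are made up for the demo):

from dar_backup.util import archive_pattern

names = [
    "pj-homedir_FULL_2025-11-22.1.dar",                  # slice: matched before and now
    "pj-homedir_FULL_2025-11-22.1.dar.vol000+50.par2",   # par2 volume for a slice
    "pj-homedir_FULL_2025-11-22.par2",                   # standalone par2 (newly matched)
    "pj-homedir_FULL_2025-11-22.par2.manifest.ini",      # manifest (newly matched)
    "pj-homedir_FULL_2025-11-22.txt",                    # still rejected
]
for name in names:
    print(name, bool(archive_pattern.match(name)))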
@@ -757,4 +1003,119 @@ def is_safe_path(path: str) -> bool:
          and '..' not in normalized.split(os.sep)
      )

+ def get_config_file(args) -> str:
+     """
+     Returns the config file path based on the following precedence:
+       1. Command-line argument (--config-file)
+       2. Environment variable (DAR_BACKUP_CONFIG_FILE)
+       3. Default path (~/.config/dar-backup/dar-backup.conf)
+     """
+     DEFAULT_CONFIG_FILE = "~/.config/dar-backup/dar-backup.conf"
+
+     env_cf = os.getenv("DAR_BACKUP_CONFIG_FILE")
+     env_cf = env_cf.strip() if env_cf else None
+
+     cli_cf = getattr(args, "config_file", None)
+     cli_cf = cli_cf.strip() if cli_cf else None
+
+     raw_config = (
+         cli_cf
+         or env_cf
+         or DEFAULT_CONFIG_FILE
+     )
+
+     config_settings_path = os.path.abspath(os.path.expanduser(os.path.expandvars(raw_config)))
+     return config_settings_path
+
+
+ def is_under_base_dir(candidate: Path, base_dir: Path) -> bool:
+     """
+     True iff candidate resolves under base_dir (symlink-safe).
+     """
+     try:
+         base = base_dir.resolve(strict=True)
+         resolved = candidate.resolve(strict=False)
+     except Exception:
+         return False
+     return resolved == base or base in resolved.parents
+
+
+ def safe_remove_file(path_str: str, *, base_dir: Path) -> bool:
+     """
+     Remove a file only if it:
+       - is under base_dir (after resolve),
+       - matches archive naming convention by BASENAME,
+       - is a regular file (not a dir),
+       - is not a symlink (optional hardening).
+     Returns True if removed.
+     """
+     p = Path(path_str)
+
+     # Enforce containment first (defeats ../ and symlink escape)
+     if not is_under_base_dir(p, base_dir):
+         logger.warning("Refusing to delete outside base_dir: %s (base=%s)", p, base_dir)
+         return False
+
+     # Validate filename shape on basename only
+     if not is_safe_filename(p.name):
+         logger.warning("Refusing to delete non-matching filename: %s", p.name)
+         return False
+
+     # Hardening: don't follow symlinks
+     if p.is_symlink():
+         logger.warning("Refusing to delete symlink: %s", p)
+         return False
+
+     # Only delete regular files
+     if not p.is_file():
+         logger.warning("Refusing to delete non-file: %s", p)
+         return False
+
+     p.unlink()
+     return True
+
+
+ # Allowed archive name:
+ #   <definition>_(FULL|DIFF|INCR)_YYYY-MM-DD
+ # Example:
+ #   pj-homedir_INCR_2025-11-22
+ _ARCHIVE_NAME_RE = re.compile(
+     r"^(?P<def>[A-Za-z0-9][A-Za-z0-9._-]{0,127})_"
+     r"(?P<kind>FULL|DIFF|INCR)_"
+     r"(?P<date>\d{4}-\d{2}-\d{2})$"
+ )
+
+ def is_archive_name_allowed(name: str) -> bool:
+     """
+     Return True iff the archive name is safe and valid.
+
+     Security properties:
+       - name only, never a path (no /, \\, or ..)
+       - strict allowed character set
+       - must be FULL / DIFF / INCR
+       - date must be a real calendar date
+     """
+     if not isinstance(name, str):
+         return False
+
+     name = name.strip()
+
+     # Reject anything path-like
+     if "/" in name or "\\" in name or ".." in name:
+         return False
+
+     m = _ARCHIVE_NAME_RE.match(name)
+     if not m:
+         return False
+
+     # Validate date is real (not just shape)
+     try:
+         date.fromisoformat(m.group("date"))  # <-- FIX
+         # alternatively:
+         # datetime.strptime(m.group("date"), "%Y-%m-%d")
+     except ValueError:
+         return False

+     return True
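These additions form one deletion gate: get_config_file pins down which config is in play, is_archive_name_allowed validates a bare archive name (shape, allowed characters, and a real calendar date), and safe_remove_file refuses anything that escapes the backup directory via .. or symlinks. Expected results, reusing the example name from the comment above:

from dar_backup.util import is_archive_name_allowed

print(is_archive_name_allowed("pj-homedir_INCR_2025-11-22"))    # True
print(is_archive_name_allowed("pj-homedir_INCR_2025-13-22"))    # False: month 13 fails date.fromisoformat
print(is_archive_name_allowed("../etc/x_FULL_2025-11-22"))      # False: path-like names are rejected
print(is_archive_name_allowed("pj-homedir_SNAP_2025-11-22"))    # False: kind must be FULL, DIFF or INCR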