dar-backup 1.0.0.1__py3-none-any.whl → 1.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,6 +8,10 @@ import os
 import re
 import shlex
 import sys
+try:
+    import termios
+except ImportError:
+    termios = None
 import tempfile
 sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src")))
 from typing import List, Optional, Union
@@ -80,11 +84,13 @@ class CommandRunner:
         self,
         logger: Optional[logging.Logger] = None,
         command_logger: Optional[logging.Logger] = None,
-        default_timeout: int = 30
+        default_timeout: int = 30,
+        default_capture_limit_bytes: Optional[int] = None
     ):
         self.logger = logger or get_logger()
         self.command_logger = command_logger or get_logger(command_output_logger=True)
         self.default_timeout = default_timeout
+        self.default_capture_limit_bytes = default_capture_limit_bytes
 
         if not self.logger or not self.command_logger:
             self.logger_fallback()
@@ -125,124 +131,213 @@ class CommandRunner:
         timeout: Optional[int] = None,
         check: bool = False,
         capture_output: bool = True,
-        text: bool = True
+        capture_output_limit_bytes: Optional[int] = None,
+        log_output: bool = True,
+        text: bool = True,
+        cwd: Optional[str] = None,
+        stdin: Optional[int] = subprocess.DEVNULL
     ) -> CommandResult:
-        self._text_mode = text
+        self._text_mode = text
         timeout = timeout or self.default_timeout
-
-        cmd_sanitized = None
-
-        try:
-            cmd_sanitized = sanitize_cmd(cmd)
-        except ValueError as e:
-            stack = traceback.format_exc()
-            self.logger.error(f"Command sanitation failed: {e}")
-            return CommandResult(
-                returncode=-1,
-                note=f"Sanitizing failed: command: {' '.join(cmd)}",
-                stdout='',
-                stderr=str(e),
-                stack=stack,
-
-            )
-        finally:
-            cmd = cmd_sanitized
-
-        #command = f"Executing command: {' '.join(cmd)} (timeout={timeout}s)"
-        command = f"Executing command: {' '.join(shlex.quote(arg) for arg in cmd)} (timeout={timeout}s)"
-
-
-        self.command_logger.info(command)
-        self.logger.debug(command)
-
-        stdout_lines = []
-        stderr_lines = []
+        if capture_output_limit_bytes is None:
+            capture_output_limit_bytes = self.default_capture_limit_bytes
+        if capture_output_limit_bytes is not None and capture_output_limit_bytes < 0:
+            capture_output_limit_bytes = None
+
+        tty_fd = None
+        tty_file = None
+        saved_tty_attrs = None
+        if termios is not None:
+            try:
+                if os.path.exists("/dev/tty"):
+                    tty_file = open("/dev/tty")
+                    tty_fd = tty_file.fileno()
+                elif sys.stdin and sys.stdin.isatty():
+                    tty_fd = sys.stdin.fileno()
+                if tty_fd is not None:
+                    saved_tty_attrs = termios.tcgetattr(tty_fd)
+            except Exception:
+                tty_fd = None
+                saved_tty_attrs = None
+                if tty_file:
+                    tty_file.close()
+                    tty_file = None
 
         try:
-            process = subprocess.Popen(
-                cmd,
-                stdout=subprocess.PIPE if capture_output else None,
-                stderr=subprocess.PIPE if capture_output else None,
-                text=False,
-                bufsize=-1
-            )
-        except Exception as e:
-            stack = traceback.format_exc()
-            return CommandResult(
-                returncode=-1,
-                stdout='',
-                stderr=str(e),
-                stack=stack
-            )
+            cmd_sanitized = None
 
-        def stream_output(stream, lines, level):
             try:
-                while True:
-                    chunk = stream.read(1024)
-                    if not chunk:
-                        break
-                    if self._text_mode:
-                        decoded = chunk.decode('utf-8', errors='replace')
-                        lines.append(decoded)
-                        self.command_logger.log(level, decoded.strip())
-                    else:
-                        lines.append(chunk)
-                        # Avoid logging raw binary data to prevent garbled logs
-            except Exception as e:
-                self.logger.warning(f"stream_output decode error: {e}")
+                cmd_sanitized = sanitize_cmd(cmd)
+            except ValueError as e:
+                stack = traceback.format_exc()
+                self.logger.error(f"Command sanitation failed: {e}")
+                if isinstance(cmd, list):
+                    cmd_text = " ".join(map(str, cmd))
+                else:
+                    cmd_text = str(cmd)
+                return CommandResult(
+                    returncode=-1,
+                    note=f"Sanitizing failed: command: {cmd_text}",
+                    stdout='',
+                    stderr=str(e),
+                    stack=stack,
+
+                )
             finally:
-                stream.close()
-
+                cmd = cmd_sanitized
 
+            #command = f"Executing command: {' '.join(cmd)} (timeout={timeout}s)"
+            command = f"Executing command: {' '.join(shlex.quote(arg) for arg in cmd)} (timeout={timeout}s)"
 
-        threads = []
-        if capture_output and process.stdout:
-            t_out = threading.Thread(target=stream_output, args=(process.stdout, stdout_lines, logging.INFO))
-            t_out.start()
-            threads.append(t_out)
-        if capture_output and process.stderr:
-            t_err = threading.Thread(target=stream_output, args=(process.stderr, stderr_lines, logging.ERROR))
-            t_err.start()
-            threads.append(t_err)
-
-        try:
-            process.wait(timeout=timeout)
-        except subprocess.TimeoutExpired:
-            process.kill()
-            log_msg = f"Command timed out after {timeout} seconds: {' '.join(cmd)}:\n"
-            self.logger.error(log_msg)
-            return CommandResult(-1, ''.join(stdout_lines), log_msg.join(stderr_lines))
-        except Exception as e:
-            stack = traceback.format_exc()
-            log_msg = f"Command execution failed: {' '.join(cmd)} with error: {e}\n"
-            self.logger.error(log_msg)
-            return CommandResult(-1, ''.join(stdout_lines), log_msg.join(stderr_lines), stack)
 
-        for t in threads:
-            t.join()
+            self.command_logger.info(command)
+            self.logger.debug(command)
 
+            stdout_lines = []
+            stderr_lines = []
+            truncated_stdout = {"value": False}
+            truncated_stderr = {"value": False}
 
+            try:
+                use_pipes = capture_output or log_output
+                process = subprocess.Popen(
+                    cmd,
+                    stdout=subprocess.PIPE if use_pipes else None,
+                    stderr=subprocess.PIPE if use_pipes else None,
+                    stdin=stdin,
+                    text=False,
+                    bufsize=-1,
+                    cwd=cwd
+                )
+            except Exception as e:
+                stack = traceback.format_exc()
+                return CommandResult(
+                    returncode=-1,
+                    stdout='',
+                    stderr=str(e),
+                    stack=stack
+                )
+
+            def stream_output(stream, lines, level, truncated_flag):
+                captured_bytes = 0
+                try:
+                    while True:
+                        chunk = stream.read(1024)
+                        if not chunk:
+                            break
+                        if self._text_mode:
+                            decoded = chunk.decode('utf-8', errors='replace')
+                            if log_output:
+                                self.command_logger.log(level, decoded.strip())
+                            if capture_output:
+                                if capture_output_limit_bytes is None:
+                                    lines.append(decoded)
+                                else:
+                                    remaining = capture_output_limit_bytes - captured_bytes
+                                    if remaining > 0:
+                                        if len(chunk) <= remaining:
+                                            lines.append(decoded)
+                                            captured_bytes += len(chunk)
+                                        else:
+                                            piece = chunk[:remaining]
+                                            lines.append(piece.decode('utf-8', errors='replace'))
+                                            captured_bytes = capture_output_limit_bytes
+                                            truncated_flag["value"] = True
+                                    else:
+                                        truncated_flag["value"] = True
+                        else:
+                            if capture_output:
+                                if capture_output_limit_bytes is None:
+                                    lines.append(chunk)
+                                else:
+                                    remaining = capture_output_limit_bytes - captured_bytes
+                                    if remaining > 0:
+                                        if len(chunk) <= remaining:
+                                            lines.append(chunk)
+                                            captured_bytes += len(chunk)
+                                        else:
+                                            lines.append(chunk[:remaining])
+                                            captured_bytes = capture_output_limit_bytes
+                                            truncated_flag["value"] = True
+                                    else:
+                                        truncated_flag["value"] = True
+                            # Avoid logging raw binary data to prevent garbled logs
+                except Exception as e:
+                    self.logger.warning(f"stream_output decode error: {e}")
+                finally:
+                    stream.close()
+
+            threads = []
+            if (capture_output or log_output) and process.stdout:
+                t_out = threading.Thread(
+                    target=stream_output,
+                    args=(process.stdout, stdout_lines, logging.INFO, truncated_stdout)
+                )
+                t_out.start()
+                threads.append(t_out)
+            if (capture_output or log_output) and process.stderr:
+                t_err = threading.Thread(
+                    target=stream_output,
+                    args=(process.stderr, stderr_lines, logging.ERROR, truncated_stderr)
+                )
+                t_err.start()
+                threads.append(t_err)
 
-        if self._text_mode:
-            stdout_combined = ''.join(stdout_lines)
-            stderr_combined = ''.join(stderr_lines)
-        else:
-            stdout_combined = b''.join(stdout_lines)
-            stderr_combined = b''.join(stderr_lines)
-
+            try:
+                process.wait(timeout=timeout)
+            except subprocess.TimeoutExpired:
+                process.kill()
+                log_msg = f"Command timed out after {timeout} seconds: {' '.join(cmd)}:\n"
+                self.logger.error(log_msg)
+                return CommandResult(-1, ''.join(stdout_lines), log_msg.join(stderr_lines))
+            except Exception as e:
+                stack = traceback.format_exc()
+                log_msg = f"Command execution failed: {' '.join(cmd)} with error: {e}\n"
+                self.logger.error(log_msg)
+                return CommandResult(-1, ''.join(stdout_lines), log_msg.join(stderr_lines), stack)
+
+            for t in threads:
+                t.join()
+
+            if self._text_mode:
+                stdout_combined = ''.join(stdout_lines)
+                stderr_combined = ''.join(stderr_lines)
+            else:
+                stdout_combined = b''.join(stdout_lines)
+                stderr_combined = b''.join(stderr_lines)
+
+            note = None
+            if truncated_stdout["value"] or truncated_stderr["value"]:
+                parts = []
+                if truncated_stdout["value"]:
+                    parts.append("stdout truncated")
+                if truncated_stderr["value"]:
+                    parts.append("stderr truncated")
+                note = ", ".join(parts)
+
+            if check and process.returncode != 0:
+                self.logger.error(f"Command failed with exit code {process.returncode}")
+                return CommandResult(
+                    process.returncode,
+                    stdout_combined,
+                    stderr_combined,
+                    stack=traceback.format_stack()
+                )
 
-        if check and process.returncode != 0:
-            self.logger.error(f"Command failed with exit code {process.returncode}")
             return CommandResult(
                 process.returncode,
                 stdout_combined,
                 stderr_combined,
-                stack=traceback.format_stack()
+                note=note
             )
-
-        return CommandResult(
-            process.returncode,
-            stdout_combined,
-            stderr_combined
-        )
-
+        finally:
+            if termios is not None and saved_tty_attrs is not None and tty_fd is not None:
+                try:
+                    termios.tcsetattr(tty_fd, termios.TCSADRAIN, saved_tty_attrs)
+                except Exception:
+                    self.logger.debug("Failed to restore terminal attributes", exc_info=True)
+            if tty_file is not None:
+                try:
+                    tty_file.close()
+                except Exception:
+                    self.logger.debug("Failed to close /dev/tty handle", exc_info=True)
@@ -1,9 +1,11 @@
 # SPDX-License-Identifier: GPL-3.0-or-later
 
 import configparser
+import re
 from dataclasses import dataclass, field, fields
 from os.path import expandvars, expanduser
 from pathlib import Path
+from typing import Optional, Pattern
 
 from dar_backup.exceptions import ConfigSettingsError
 
@@ -37,6 +39,7 @@ class ConfigSettings:
     min_size_verification_mb: int = field(init=False)
     no_files_verification: int = field(init=False)
     command_timeout_secs: int = field(init=False)
+    command_capture_max_bytes: Optional[int] = field(init=False, default=None)
     backup_dir: str = field(init=False)
     test_restore_dir: str = field(init=False)
     backup_d_dir: str = field(init=False)
@@ -44,8 +47,19 @@ class ConfigSettings:
     incr_age: int = field(init=False)
     error_correction_percent: int = field(init=False)
     par2_enabled: bool = field(init=False)
+    par2_dir: Optional[str] = field(init=False, default=None)
+    par2_ratio_full: Optional[int] = field(init=False, default=None)
+    par2_ratio_diff: Optional[int] = field(init=False, default=None)
+    par2_ratio_incr: Optional[int] = field(init=False, default=None)
+    par2_run_verify: Optional[bool] = field(init=False, default=None)
     logfile_max_bytes: int = field(init=False)
     logfile_no_count: int = field(init=False)
+    trace_log_max_bytes: int = field(init=False)
+    trace_log_backup_count: int = field(init=False)
+    dar_backup_discord_webhook_url: Optional[str] = field(init=False, default=None)
+    restoretest_exclude_prefixes: list[str] = field(init=False, default_factory=list)
+    restoretest_exclude_suffixes: list[str] = field(init=False, default_factory=list)
+    restoretest_exclude_regex: Optional[Pattern[str]] = field(init=False, default=None)
 
 
     OPTIONAL_CONFIG_FIELDS = [
@@ -70,6 +84,34 @@ class ConfigSettings:
             "type": int,
             "default": 5,
         },
+        {
+            "section": "MISC",
+            "key": "TRACE_LOG_MAX_BYTES",
+            "attr": "trace_log_max_bytes",
+            "type": int,
+            "default": 10485760,  # 10 MB
+        },
+        {
+            "section": "MISC",
+            "key": "TRACE_LOG_BACKUP_COUNT",
+            "attr": "trace_log_backup_count",
+            "type": int,
+            "default": 1,
+        },
+        {
+            "section": "MISC",
+            "key": "DAR_BACKUP_DISCORD_WEBHOOK_URL",
+            "attr": "dar_backup_discord_webhook_url",
+            "type": str,
+            "default": None,
+        },
+        {
+            "section": "MISC",
+            "key": "COMMAND_CAPTURE_MAX_BYTES",
+            "attr": "command_capture_max_bytes",
+            "type": int,
+            "default": 102400,
+        },
         # Add more optional fields here
     ]
 
@@ -78,7 +120,7 @@ class ConfigSettings:
             raise ConfigSettingsError("`config_file` must be specified.")
 
         try:
-            self.config = configparser.ConfigParser()
+            self.config = configparser.ConfigParser(inline_comment_prefixes=['#'])
             loaded_files = self.config.read(self.config_file)
             if not loaded_files:
                 raise RuntimeError(f"Configuration file not found or unreadable: '{self.config_file}'")
@@ -103,6 +145,27 @@ class ConfigSettings:
             else:
                 raise ConfigSettingsError(f"Invalid boolean value for 'ENABLED' in [PAR2]: '{val}'")
 
+            self.par2_dir = self._get_optional_str("PAR2", "PAR2_DIR", default=None)
+            self.par2_ratio_full = self._get_optional_int("PAR2", "PAR2_RATIO_FULL", default=None)
+            self.par2_ratio_diff = self._get_optional_int("PAR2", "PAR2_RATIO_DIFF", default=None)
+            self.par2_ratio_incr = self._get_optional_int("PAR2", "PAR2_RATIO_INCR", default=None)
+            self.par2_run_verify = self._get_optional_bool("PAR2", "PAR2_RUN_VERIFY", default=None)
+            self.restoretest_exclude_prefixes = self._get_optional_csv_list(
+                "MISC",
+                "RESTORETEST_EXCLUDE_PREFIXES",
+                default=[]
+            )
+            self.restoretest_exclude_suffixes = self._get_optional_csv_list(
+                "MISC",
+                "RESTORETEST_EXCLUDE_SUFFIXES",
+                default=[]
+            )
+            self.restoretest_exclude_regex = self._get_optional_regex(
+                "MISC",
+                "RESTORETEST_EXCLUDE_REGEX",
+                default=None
+            )
+
             # Load optional fields
             for opt in self.OPTIONAL_CONFIG_FIELDS:
                 if self.config.has_option(opt['section'], opt['key']):
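The optional settings remain table-driven: each OPTIONAL_CONFIG_FIELDS entry names the INI section and key, the target attribute, a type, and a default. The body of the loading loop lies outside this hunk; a plausible shape, shown only to make the mechanism concrete, is:

    # Hypothetical continuation of the loop shown above; the real body is not in this diff.
    for opt in self.OPTIONAL_CONFIG_FIELDS:
        if self.config.has_option(opt['section'], opt['key']):
            raw = self.config.get(opt['section'], opt['key']).strip()
            setattr(self, opt['attr'], opt['type'](raw))   # e.g. int("102400") -> 102400
        else:
            setattr(self, opt['attr'], opt['default'])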
@@ -144,4 +207,98 @@ class ConfigSettings:
         ]
         return f"<ConfigSettings({', '.join(safe_fields)})>"
 
+    def _get_optional_str(self, section: str, key: str, default: Optional[str] = None) -> Optional[str]:
+        if self.config.has_option(section, key):
+            return self.config.get(section, key).strip()
+        return default
+
+    def _get_optional_int(self, section: str, key: str, default: Optional[int] = None) -> Optional[int]:
+        if self.config.has_option(section, key):
+            raw = self.config.get(section, key).strip()
+            return int(raw)
+        return default
+
+    def _get_optional_bool(self, section: str, key: str, default: Optional[bool] = None) -> Optional[bool]:
+        if not self.config.has_option(section, key):
+            return default
+        val = self.config.get(section, key).strip().lower()
+        if val in ('true', '1', 'yes'):
+            return True
+        if val in ('false', '0', 'no'):
+            return False
+        raise ConfigSettingsError(f"Invalid boolean value for '{key}' in [{section}]: '{val}'")
+
+    def _get_optional_csv_list(self, section: str, key: str, default: Optional[list[str]] = None) -> list[str]:
+        if not self.config.has_option(section, key):
+            return default if default is not None else []
+        raw = self.config.get(section, key).strip()
+        if not raw:
+            return default if default is not None else []
+        return [item.strip() for item in raw.split(",") if item.strip()]
+
+    def _get_optional_regex(
+        self,
+        section: str,
+        key: str,
+        default: Optional[Pattern[str]] = None
+    ) -> Optional[Pattern[str]]:
+        if not self.config.has_option(section, key):
+            return default
+        raw = self.config.get(section, key).strip()
+        if not raw:
+            return default
+        try:
+            return re.compile(raw, re.IGNORECASE)
+        except re.error as exc:
+            raise ConfigSettingsError(
+                f"Invalid regex for '{key}' in [{section}]: {exc}"
+            ) from exc
+
+    def get_par2_config(self, backup_definition: Optional[str] = None) -> dict:
+        """
+        Return PAR2 settings, applying per-backup overrides when present.
+        """
+        par2_config = {
+            "par2_dir": self.par2_dir,
+            "par2_ratio_full": self.par2_ratio_full,
+            "par2_ratio_diff": self.par2_ratio_diff,
+            "par2_ratio_incr": self.par2_ratio_incr,
+            "par2_run_verify": self.par2_run_verify,
+            "par2_enabled": self.par2_enabled,
+        }
+
+        if not backup_definition or not self.config.has_section(backup_definition):
+            return par2_config
+
+        section = self.config[backup_definition]
+        for raw_key, raw_value in section.items():
+            key = raw_key.upper()
+            value = raw_value.strip()
+            if not key.startswith("PAR2_"):
+                continue
+            if key == "PAR2_DIR":
+                par2_config["par2_dir"] = value
+            elif key == "PAR2_RATIO_FULL":
+                par2_config["par2_ratio_full"] = int(value)
+            elif key == "PAR2_RATIO_DIFF":
+                par2_config["par2_ratio_diff"] = int(value)
+            elif key == "PAR2_RATIO_INCR":
+                par2_config["par2_ratio_incr"] = int(value)
+            elif key == "PAR2_RUN_VERIFY":
+                val = value.lower()
+                if val in ('true', '1', 'yes'):
+                    par2_config["par2_run_verify"] = True
+                elif val in ('false', '0', 'no'):
+                    par2_config["par2_run_verify"] = False
+                else:
+                    raise ConfigSettingsError(f"Invalid boolean value for 'PAR2_RUN_VERIFY' in [{backup_definition}]: '{value}'")
+            elif key == "PAR2_ENABLED":
+                val = value.lower()
+                if val in ('true', '1', 'yes'):
+                    par2_config["par2_enabled"] = True
+                elif val in ('false', '0', 'no'):
+                    par2_config["par2_enabled"] = False
+                else:
+                    raise ConfigSettingsError(f"Invalid boolean value for 'PAR2_ENABLED' in [{backup_definition}]: '{value}'")
 
+        return par2_config
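get_par2_config() starts from the global [PAR2] values and then applies any PAR2_* keys found in a section named after the backup definition (the backup.d filename stem). A hedged usage sketch with invented paths and section names:

    # Assumes a config file with a [PAR2] section and a per-backup [media] section
    # that overrides PAR2_DIR and PAR2_RATIO_FULL; names and paths are examples only.
    settings = ConfigSettings(config_file="/etc/dar-backup/dar-backup.conf")

    defaults = settings.get_par2_config()
    media = settings.get_par2_config("media")

    print(defaults["par2_enabled"])     # True, taken from [PAR2] ENABLED
    print(media["par2_dir"])            # e.g. '/samba/par2/media'
    print(media["par2_ratio_full"])     # e.g. 15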
@@ -9,10 +9,22 @@ LOGFILE_LOCATION = ~/dar-backup/dar-backup.log
 MAX_SIZE_VERIFICATION_MB = 20
 MIN_SIZE_VERIFICATION_MB = 1
 NO_FILES_VERIFICATION = 5
+# Optional restore test filters (case-insensitive)
+# RESTORETEST_EXCLUDE_PREFIXES = .cache/, .local/share/Trash/, .mozilla/
+# RESTORETEST_EXCLUDE_SUFFIXES = .sqlite-wal, .sqlite-shm, .log, .tmp, .lock, .journal
+# RESTORETEST_EXCLUDE_REGEX = (^|/)(Cache|Logs)/ # optional extra noise
 # timeout in seconds for backup, test, restore and par2 operations
 # The author has such `dar` tasks running for 10-15 hours on the yearly backups, so a value of 24 hours is used.
 # If a timeout is not specified when using the CommandRunner, a default timeout of 30 secs is used.
 COMMAND_TIMEOUT_SECS = 86400
+# Optional limit on captured command output (in bytes). Output beyond this
+# size is still logged but not kept in memory. Use 0 to avoid buffering entirely.
+# Default is 102400.
+# COMMAND_CAPTURE_MAX_BYTES = 102400
+#DAR_BACKUP_DISCORD_WEBHOOK_URL = https://discord.com/api/webhooks/<id>/<token>
+# Optional Trace log configuration (debug level logs with stack traces)
+# TRACE_LOG_MAX_BYTES = 10485760 # 10 MB default
+# TRACE_LOG_BACKUP_COUNT = 1 # 1 backup file default
 
 [DIRECTORIES]
 BACKUP_DIR = @@BACKUP_DIR@@
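COMMAND_CAPTURE_MAX_BYTES is registered in OPTIONAL_CONFIG_FIELDS with a 102400-byte default and lands on ConfigSettings.command_capture_max_bytes; the natural consumer is the runner's new default_capture_limit_bytes. The glue code is not part of this diff, so the wiring below is an assumption:

    # Assumed wiring; dar-backup's actual call site is not shown in this diff.
    settings = ConfigSettings(config_file="dar-backup.conf")
    runner = CommandRunner(
        default_timeout=settings.command_timeout_secs,
        default_capture_limit_bytes=settings.command_capture_max_bytes,
    )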
@@ -31,6 +43,12 @@ INCR_AGE = 40
 [PAR2]
 ERROR_CORRECTION_PERCENT = 5
 ENABLED = True
+# Optional PAR2 configuration
+# PAR2_DIR = /path/to/par2-store
+# PAR2_RATIO_FULL = 10
+# PAR2_RATIO_DIFF = 5
+# PAR2_RATIO_INCR = 5
+# PAR2_RUN_VERIFY = false
 
 [PREREQ]
 #SCRIPT_1 = <pre-script 1>
@@ -29,10 +29,19 @@ LOGFILE_LOCATION = {{ vars_map.DAR_BACKUP_DIR -}}/dar-backup.log
 # optional parameters
 # LOGFILE_MAX_BYTES = 26214400 # 25 MB default, change as neeeded
 # LOGFILE_BACKUP_COUNT = 5 # default, change as needed
+# DAR_BACKUP_DISCORD_WEBHOOK_URL **should really** be given as an environment variable for security reasons
+# DAR_BACKUP_DISCORD_WEBHOOK_URL = https://discord.com/api/webhooks/<id>/<token>
+# Optional Trace log configuration (debug level logs with stack traces)
+# TRACE_LOG_MAX_BYTES = 10485760 # 10 MB default
+# TRACE_LOG_BACKUP_COUNT = 1 # 1 backup file default
 
 MAX_SIZE_VERIFICATION_MB = 2
 MIN_SIZE_VERIFICATION_MB = 0
 NO_FILES_VERIFICATION = 1
+# Optional restore test filters (case-insensitive)
+# RESTORETEST_EXCLUDE_PREFIXES = .cache/, .local/share/Trash/, .mozilla/
+# RESTORETEST_EXCLUDE_SUFFIXES = .sqlite-wal, .sqlite-shm, .log, .tmp, .lock, .journal
+# RESTORETEST_EXCLUDE_REGEX = (^|/)(Cache|Logs)/
 # timeout in seconds for backup, test, restore and par2 operations
 # The author has such `dar` tasks running for 10-15 hours on the yearly backups, so a value of 24 hours is used.
 # If a timeout is not specified when using the CommandRunner, a default timeout of 30 secs is used.
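The RESTORETEST_EXCLUDE_* keys shown above become a prefix list, a suffix list, and a case-insensitive compiled regex on ConfigSettings. How the restore test applies them is outside this diff; one plausible filter over candidate paths, purely for illustration:

    # Hypothetical helper; the package's real restore-test filtering is not shown here.
    def is_excluded(rel_path: str, settings) -> bool:
        lowered = rel_path.lower()
        if any(lowered.startswith(p.lower()) for p in settings.restoretest_exclude_prefixes):
            return True
        if any(lowered.endswith(s.lower()) for s in settings.restoretest_exclude_suffixes):
            return True
        regex = settings.restoretest_exclude_regex
        return bool(regex and regex.search(rel_path))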
@@ -54,7 +63,15 @@ INCR_AGE = 40
 
 [PAR2]
 ERROR_CORRECTION_PERCENT = 5
+# Enable or disable PAR2 generation globally
 ENABLED = True
+# Optional PAR2 configuration
+# PAR2_DIR = /path/to/par2-store
+# PAR2_RATIOs are meuasured as percentages. Same function as ERROR_CORRECTION_PERCENT
+# PAR2_RATIO_FULL = 10
+# PAR2_RATIO_DIFF = 5
+# PAR2_RATIO_INCR = 5
+# PAR2_RUN_VERIFY = false
 
 [PREREQ]
 #SCRIPT_1 = <pre-script 1>
@@ -62,3 +79,30 @@ ENABLED = True
 [POSTREQ]
 #SCRIPT_1 = <post-script 1>
 
+#######################################################################
+## Per-backup configuration example overrides
+#######################################################################
+#
+## --------------------------------------------------------------------
+## Per-backup overrides (section name must match backup.d filename stem)
+## Example: backup.d/home.conf -> [home]
+## --------------------------------------------------------------------
+#
+##[home]
+## Disable PAR2 entirely for this backup definition
+#PAR2_ENABLED = false
+##
+##[media]
+## Store PAR2 files in a separate location for this backup definition
+##PAR2_DIR = /samba/par2/media
+## Raise redundancy only for FULL
+##
+#[documents]
+## Run verify par2 sets after creation
+#PAR2_RUN_VERIFY = true
+##
+##[etc]
+## Keep global PAR2 settings but tweak ratios for this backup definition
+##PAR2_RATIO_FULL = 15
+##PAR2_RATIO_DIFF = 8
+##PAR2_RATIO_INCR = 8