dar-backup 1.0.1-py3-none-any.whl → 1.0.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dar_backup/Changelog.md +29 -0
- dar_backup/README.md +75 -15
- dar_backup/__about__.py +1 -2
- dar_backup/clean_log.py +102 -63
- dar_backup/cleanup.py +136 -102
- dar_backup/command_runner.py +75 -13
- dar_backup/config_settings.py +25 -11
- dar_backup/dar-backup.conf +7 -0
- dar_backup/dar-backup.conf.j2 +3 -1
- dar_backup/dar_backup.py +438 -64
- dar_backup/demo.py +18 -9
- dar_backup/installer.py +18 -1
- dar_backup/manager.py +295 -88
- dar_backup/util.py +119 -11
- {dar_backup-1.0.1.dist-info → dar_backup-1.0.2.dist-info}/METADATA +78 -18
- dar_backup-1.0.2.dist-info/RECORD +25 -0
- dar_backup-1.0.1.dist-info/RECORD +0 -25
- {dar_backup-1.0.1.dist-info → dar_backup-1.0.2.dist-info}/WHEEL +0 -0
- {dar_backup-1.0.1.dist-info → dar_backup-1.0.2.dist-info}/entry_points.txt +0 -0
- {dar_backup-1.0.1.dist-info → dar_backup-1.0.2.dist-info}/licenses/LICENSE +0 -0
dar_backup/cleanup.py
CHANGED
@@ -48,6 +48,7 @@ from dar_backup.util import is_archive_name_allowed
 from dar_backup.util import is_safe_filename
 from dar_backup.util import safe_remove_file
 from dar_backup.util import show_scriptname
+from dar_backup.util import send_discord_message

 from dar_backup.command_runner import CommandRunner
 from dar_backup.command_runner import CommandResult
@@ -67,7 +68,6 @@ def _delete_par2_files(
     else:
         par2_config = {
             "par2_dir": None,
-            "par2_mode": None,
         }

     par2_dir = par2_config.get("par2_dir") or backup_dir
@@ -76,49 +76,33 @@
         logger.warning(f"PAR2 directory not found, skipping cleanup: {par2_dir}")
         return

-
-
-
-
-    targets
-    manifest_path = os.path.join(par2_dir, f"{archive_name}.par2.manifest.ini")
-    if os.path.exists(manifest_path):
-        targets.append(manifest_path)
-    if not targets:
-        logger.info("No par2 files matched the per-archive cleanup pattern.")
-        return
-    for file_path in sorted(set(targets)):
-        try:
-            if dry_run:
-                logger.info(f"Dry run: would delete PAR2 file: {file_path}")
-            else:
-                safe_remove_file(file_path, base_dir=Path(par2_dir))
-                logger.info(f"Deleted PAR2 file: {file_path}")
-        except Exception as e:
-            logger.error(f"Error deleting PAR2 file {file_path}: {e}")
-    return
-
-    if par2_mode != "per-slice":
-        logger.error(f"Unsupported PAR2_MODE during cleanup: {par2_mode}")
-        return
+    par2_glob = os.path.join(par2_dir, f"{archive_name}*.par2")
+    targets = set(glob.glob(par2_glob))
+    manifest_path = os.path.join(par2_dir, f"{archive_name}.par2.manifest.ini")
+    if os.path.exists(manifest_path):
+        targets.add(manifest_path)

     par2_regex = re.compile(rf"^{re.escape(archive_name)}\.[0-9]+\.dar.*\.par2$")
-
-
+    for entry in os.scandir(par2_dir):
+        if not entry.is_file():
+            continue
+        filename = entry.name
         if par2_regex.match(filename):
-
-            try:
-                if dry_run:
-                    logger.info(f"Dry run: would delete PAR2 file: {file_path}")
-                else:
-                    safe_remove_file(file_path, base_dir=Path(par2_dir))
-                    logger.info(f"Deleted PAR2 file: {file_path}")
-                    files_deleted = True
-            except Exception as e:
-                logger.error(f"Error deleting PAR2 file {file_path}: {e}")
+            targets.add(entry.path)

-    if not
-        logger.info("No
+    if not targets:
+        logger.info("No par2 files matched the cleanup patterns.")
+        return
+
+    for file_path in sorted(targets):
+        try:
+            if dry_run:
+                logger.info(f"Dry run: would delete PAR2 file: {file_path}")
+            else:
+                safe_remove_file(file_path, base_dir=Path(par2_dir))
+                logger.info(f"Deleted PAR2 file: {file_path}")
+        except Exception as e:
+            logger.error(f"Error deleting PAR2 file {file_path}: {e}")


 def delete_old_backups(backup_dir, age, backup_type, args, backup_definition=None, config_settings: ConfigSettings = None):
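
In 1.0.2 `_delete_par2_files` no longer branches on a PAR2 mode; it builds one deduplicated set of candidates (a `<archive>*.par2` glob, the optional manifest file, and per-slice names matched by regex while scanning the directory) and deletes them in a single loop. A minimal, self-contained sketch of that collection step; the directory and archive name below are placeholders, and plain printing stands in for the package's `safe_remove_file` deletion:

import glob
import os
import re


def collect_par2_targets(par2_dir: str, archive_name: str) -> list:
    """Gather every PAR2 artifact belonging to one archive, deduplicated."""
    targets = set(glob.glob(os.path.join(par2_dir, f"{archive_name}*.par2")))

    manifest_path = os.path.join(par2_dir, f"{archive_name}.par2.manifest.ini")
    if os.path.exists(manifest_path):
        targets.add(manifest_path)

    # Per-slice repair files such as "<archive>.1.dar.par2" (and their volume files)
    par2_regex = re.compile(rf"^{re.escape(archive_name)}\.[0-9]+\.dar.*\.par2$")
    for entry in os.scandir(par2_dir):
        if entry.is_file() and par2_regex.match(entry.name):
            targets.add(entry.path)

    # Sorting gives deterministic dry-run output and log order.
    return sorted(targets)


demo_dir = "/backups/par2"   # placeholder location
if os.path.isdir(demo_dir):
    for path in collect_par2_targets(demo_dir, "example_FULL_2024-01-01"):
        print(f"would delete: {path}")
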
@@ -138,7 +122,10 @@ def delete_old_backups(backup_dir, age, backup_type, args, backup_definition=Non
     archives_deleted = {}

     dry_run = getattr(args, "dry_run", False) is True
-    for
+    for entry in os.scandir(backup_dir):
+        if not entry.is_file():
+            continue
+        filename = entry.name
         if not filename.endswith('.dar'):
             continue
         if backup_definition and not filename.startswith(backup_definition):
@@ -152,7 +139,7 @@ def delete_old_backups(backup_dir, age, backup_type, args, backup_definition=Non
             raise

         if file_date < cutoff_date:
-            file_path =
+            file_path = entry.path
             try:
                 if dry_run:
                     logger.info(f"Dry run: would delete {backup_type} backup: {file_path}")
@@ -190,9 +177,12 @@ def delete_archive(backup_dir, archive_name, args, config_settings: ConfigSettin
     # Delete the specified .dar files according to the naming convention
     files_deleted = False
     dry_run = getattr(args, "dry_run", False) is True
-    for
+    for entry in os.scandir(backup_dir):
+        if not entry.is_file():
+            continue
+        filename = entry.name
         if archive_regex.match(filename):
-            file_path =
+            file_path = entry.path
             try:
                 if dry_run:
                     logger.info(f"Dry run: would delete archive slice: {file_path}")
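
For the age-based deletion, only the directory walk changed: `delete_old_backups` keeps the existing `file_date < cutoff_date` test but now iterates `os.scandir()` entries, so non-files are skipped up front and `entry.path` replaces a manual path join. A rough sketch of that shape, assuming a `<definition>_<TYPE>_<YYYY-MM-DD>.<slice>.dar` slice naming; the naming, the directory, and the lenient error handling are illustrative, not the package's exact logic:

import os
from datetime import datetime, timedelta


def old_dar_slices(backup_dir: str, age_days: int, prefix: str = ""):
    """Yield paths of .dar slices whose embedded date is older than the cutoff."""
    cutoff_date = datetime.now() - timedelta(days=age_days)
    for entry in os.scandir(backup_dir):
        if not entry.is_file():
            continue
        if not entry.name.endswith(".dar"):
            continue
        if prefix and not entry.name.startswith(prefix):
            continue
        try:
            # Assumed layout: "<definition>_<TYPE>_<YYYY-MM-DD>.<slice>.dar"
            date_str = entry.name.split("_")[-1].split(".")[0]
            file_date = datetime.strptime(date_str, "%Y-%m-%d")
        except ValueError:
            continue   # undated names are simply skipped in this sketch
        if file_date < cutoff_date:
            yield entry.path   # full path, no os.path.join needed


if os.path.isdir("/backups"):   # placeholder directory
    for path in old_dar_slices("/backups", age_days=100, prefix="example"):
        print("candidate for deletion:", path)
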
@@ -308,15 +298,35 @@ def main():
         raise SystemExit(127)
     args.config_file = config_settings_path

-
+    try:
+        config_settings = ConfigSettings(args.config_file)
+    except Exception as exc:
+        msg = f"Config error: {exc}"
+        print(msg, file=stderr)
+        ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
+        send_discord_message(f"{ts} - cleanup: FAILURE - {msg}")
+        sys.exit(127)

     start_time=int(time())

     # command_output_log = os.path.join(config_settings.logfile_location.removesuffix("dar-backup.log"), "dar-backup-commands.log")
     command_output_log = config_settings.logfile_location.replace("dar-backup.log", "dar-backup-commands.log")
-    logger = setup_logging(
+    logger = setup_logging(
+        config_settings.logfile_location,
+        command_output_log,
+        args.log_level,
+        args.log_stdout,
+        logfile_max_bytes=config_settings.logfile_max_bytes,
+        logfile_backup_count=config_settings.logfile_backup_count,
+        trace_log_max_bytes=getattr(config_settings, "trace_log_max_bytes", 10485760),
+        trace_log_backup_count=getattr(config_settings, "trace_log_backup_count", 1)
+    )
     command_logger = get_logger(command_output_logger = True)
-    runner = CommandRunner(
+    runner = CommandRunner(
+        logger=logger,
+        command_logger=command_logger,
+        default_capture_limit_bytes=getattr(config_settings, "command_capture_max_bytes", None)
+    )

     start_msgs: List[Tuple[str, str]] = []

@@ -341,65 +351,89 @@ def main():
     print_aligned_settings(start_msgs, highlight_keywords=dangerous_keywords, quiet=not args.verbose)

     # run PREREQ scripts
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        requirements('PREREQ', config_settings)
+    except Exception as exc:
+        msg = f"PREREQ failed: {exc}"
+        logger.error(msg)
+        ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
+        send_discord_message(f"{ts} - cleanup: FAILURE - {msg}", config_settings=config_settings)
+        sys.exit(1)
+
+    try:
+        if args.alternate_archive_dir:
+            if not os.path.exists(args.alternate_archive_dir):
+                logger.error(f"Alternate archive directory does not exist: {args.alternate_archive_dir}, exiting")
+                sys.exit(1)
+            if not os.path.isdir(args.alternate_archive_dir):
+                logger.error(f"Alternate archive directory is not a directory, exiting")
+                sys.exit(1)
+            config_settings.backup_dir = args.alternate_archive_dir
+
+        if args.cleanup_specific_archives is None and args.test_mode:
+            logger.info("No --cleanup-specific-archives provided; skipping specific archive deletion in test mode.")
+
+        if args.cleanup_specific_archives or args.cleanup_specific_archives_list:
+            combined = []
+            if args.cleanup_specific_archives:
+                combined.extend(args.cleanup_specific_archives.split(','))
+            combined.extend(args.cleanup_specific_archives_list or [])
+            archive_names = [name.strip() for name in combined if name.strip()]
+            logger.info(f"Cleaning up specific archives: {', '.join(archive_names)}")
+            for archive_name in archive_names:
+                if not is_archive_name_allowed(archive_name):
+                    logger.error(f"Refusing unsafe archive name: {archive_name}")
                     continue
-
-
-
-
-
-
-
-
-            backup_definitions.append(args.backup_definition)
+                if "_FULL_" in archive_name:
+                    if not confirm_full_archive_deletion(archive_name, args.test_mode):
+                        continue
+                archive_path = os.path.join(config_settings.backup_dir, archive_name.strip())
+                logger.info(f"Deleting archive: {archive_path}")
+                delete_archive(config_settings.backup_dir, archive_name.strip(), args, config_settings)
+        elif args.list:
+            list_backups(config_settings.backup_dir, args.backup_definition)
         else:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            backup_definitions = []
+            if args.backup_definition:
+                backup_definitions.append(args.backup_definition)
+            else:
+                for root, _, files in os.walk(config_settings.backup_d_dir):
+                    for file in files:
+                        backup_definitions.append(file.split('.')[0])
+
+            for definition in backup_definitions:
+                delete_old_backups(
+                    config_settings.backup_dir,
+                    config_settings.diff_age,
+                    'DIFF',
+                    args,
+                    backup_definition=definition,
+                    config_settings=config_settings
+                )
+                delete_old_backups(
+                    config_settings.backup_dir,
+                    config_settings.incr_age,
+                    'INCR',
+                    args,
+                    backup_definition=definition,
+                    config_settings=config_settings
+                )
+    except Exception as e:
+        msg = f"Unexpected error during cleanup: {e}"
+        logger.error(msg, exc_info=True)
+        ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
+        send_discord_message(f"{ts} - cleanup: FAILURE - {msg}", config_settings=config_settings)
+        sys.exit(1)

     # run POST scripts
-
+    try:
+        requirements('POSTREQ', config_settings)
+    except Exception as exc:
+        msg = f"POSTREQ failed: {exc}"
+        logger.error(msg)
+        ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
+        send_discord_message(f"{ts} - cleanup: FAILURE - {msg}", config_settings=config_settings)
+        sys.exit(1)


     end_time=int(time())
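
Most of the new code in `main()` repeats one pattern: run a phase inside try/except, log the error, send a timestamped `cleanup: FAILURE` message to the optional Discord webhook, and exit non-zero (127 for config errors, 1 otherwise). A condensed illustration of that pattern; the `notify_failure` helper and the printing stub for `send_discord_message` are hypothetical, not part of the package:

import sys
from datetime import datetime


def send_discord_message(message: str, config_settings=None) -> None:
    """Stand-in for dar_backup.util.send_discord_message; just prints here."""
    print(f"[discord] {message}")


def notify_failure(phase: str, exc: Exception, exit_code: int, config_settings=None) -> None:
    """The log-and-notify shape cleanup uses for config, PREREQ, POSTREQ and cleanup errors."""
    msg = f"{phase} failed: {exc}"
    print(msg, file=sys.stderr)
    ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
    send_discord_message(f"{ts} - cleanup: FAILURE - {msg}", config_settings=config_settings)
    sys.exit(exit_code)


try:
    raise RuntimeError("simulated PREREQ script error")   # placeholder failure
except Exception as exc:
    notify_failure("PREREQ", exc, exit_code=1)
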
dar_backup/command_runner.py
CHANGED
@@ -84,11 +84,13 @@ class CommandRunner:
         self,
         logger: Optional[logging.Logger] = None,
         command_logger: Optional[logging.Logger] = None,
-        default_timeout: int = 30
+        default_timeout: int = 30,
+        default_capture_limit_bytes: Optional[int] = None
     ):
         self.logger = logger or get_logger()
         self.command_logger = command_logger or get_logger(command_output_logger=True)
         self.default_timeout = default_timeout
+        self.default_capture_limit_bytes = default_capture_limit_bytes

         if not self.logger or not self.command_logger:
             self.logger_fallback()
@@ -129,12 +131,18 @@ class CommandRunner:
         timeout: Optional[int] = None,
         check: bool = False,
         capture_output: bool = True,
+        capture_output_limit_bytes: Optional[int] = None,
+        log_output: bool = True,
         text: bool = True,
         cwd: Optional[str] = None,
         stdin: Optional[int] = subprocess.DEVNULL
     ) -> CommandResult:
         self._text_mode = text
         timeout = timeout or self.default_timeout
+        if capture_output_limit_bytes is None:
+            capture_output_limit_bytes = self.default_capture_limit_bytes
+        if capture_output_limit_bytes is not None and capture_output_limit_bytes < 0:
+            capture_output_limit_bytes = None

         tty_fd = None
         tty_file = None
@@ -163,9 +171,13 @@ class CommandRunner:
         except ValueError as e:
             stack = traceback.format_exc()
             self.logger.error(f"Command sanitation failed: {e}")
+            if isinstance(cmd, list):
+                cmd_text = " ".join(map(str, cmd))
+            else:
+                cmd_text = str(cmd)
             return CommandResult(
                 returncode=-1,
-                note=f"Sanitizing failed: command: {
+                note=f"Sanitizing failed: command: {cmd_text}",
                 stdout='',
                 stderr=str(e),
                 stack=stack,
@@ -183,12 +195,15 @@ class CommandRunner:

         stdout_lines = []
         stderr_lines = []
+        truncated_stdout = {"value": False}
+        truncated_stderr = {"value": False}

         try:
+            use_pipes = capture_output or log_output
             process = subprocess.Popen(
                 cmd,
-                stdout=subprocess.PIPE if
-                stderr=subprocess.PIPE if
+                stdout=subprocess.PIPE if use_pipes else None,
+                stderr=subprocess.PIPE if use_pipes else None,
                 stdin=stdin,
                 text=False,
                 bufsize=-1,
@@ -203,7 +218,8 @@ class CommandRunner:
                     stack=stack
                 )

-        def stream_output(stream, lines, level):
+        def stream_output(stream, lines, level, truncated_flag):
+            captured_bytes = 0
             try:
                 while True:
                     chunk = stream.read(1024)
@@ -211,10 +227,40 @@ class CommandRunner:
                         break
                     if self._text_mode:
                         decoded = chunk.decode('utf-8', errors='replace')
-
-
+                        if log_output:
+                            self.command_logger.log(level, decoded.strip())
+                        if capture_output:
+                            if capture_output_limit_bytes is None:
+                                lines.append(decoded)
+                            else:
+                                remaining = capture_output_limit_bytes - captured_bytes
+                                if remaining > 0:
+                                    if len(chunk) <= remaining:
+                                        lines.append(decoded)
+                                        captured_bytes += len(chunk)
+                                    else:
+                                        piece = chunk[:remaining]
+                                        lines.append(piece.decode('utf-8', errors='replace'))
+                                        captured_bytes = capture_output_limit_bytes
+                                        truncated_flag["value"] = True
+                                else:
+                                    truncated_flag["value"] = True
                     else:
-
+                        if capture_output:
+                            if capture_output_limit_bytes is None:
+                                lines.append(chunk)
+                            else:
+                                remaining = capture_output_limit_bytes - captured_bytes
+                                if remaining > 0:
+                                    if len(chunk) <= remaining:
+                                        lines.append(chunk)
+                                        captured_bytes += len(chunk)
+                                    else:
+                                        lines.append(chunk[:remaining])
+                                        captured_bytes = capture_output_limit_bytes
+                                        truncated_flag["value"] = True
+                                else:
+                                    truncated_flag["value"] = True
                         # Avoid logging raw binary data to prevent garbled logs
             except Exception as e:
                 self.logger.warning(f"stream_output decode error: {e}")
@@ -222,12 +268,18 @@ class CommandRunner:
                 stream.close()

         threads = []
-        if capture_output and process.stdout:
-            t_out = threading.Thread(
+        if (capture_output or log_output) and process.stdout:
+            t_out = threading.Thread(
+                target=stream_output,
+                args=(process.stdout, stdout_lines, logging.INFO, truncated_stdout)
+            )
             t_out.start()
             threads.append(t_out)
-        if capture_output and process.stderr:
-            t_err = threading.Thread(
+        if (capture_output or log_output) and process.stderr:
+            t_err = threading.Thread(
+                target=stream_output,
+                args=(process.stderr, stderr_lines, logging.ERROR, truncated_stderr)
+            )
             t_err.start()
             threads.append(t_err)

@@ -254,6 +306,15 @@ class CommandRunner:
             stdout_combined = b''.join(stdout_lines)
             stderr_combined = b''.join(stderr_lines)

+            note = None
+            if truncated_stdout["value"] or truncated_stderr["value"]:
+                parts = []
+                if truncated_stdout["value"]:
+                    parts.append("stdout truncated")
+                if truncated_stderr["value"]:
+                    parts.append("stderr truncated")
+                note = ", ".join(parts)
+
             if check and process.returncode != 0:
                 self.logger.error(f"Command failed with exit code {process.returncode}")
                 return CommandResult(
@@ -266,7 +327,8 @@ class CommandRunner:
             return CommandResult(
                 process.returncode,
                 stdout_combined,
-                stderr_combined
+                stderr_combined,
+                note=note
             )
         finally:
             if termios is not None and saved_tty_attrs is not None and tty_fd is not None:
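
The heart of the CommandRunner change is the capped capture: output is read in 1 KiB chunks, every chunk can still go to the command logger, but at most `capture_output_limit_bytes` bytes are kept in memory, and a truncation flag ends up in the result's `note` ("stdout truncated", "stderr truncated"). A stripped-down sketch of that accounting, driven by an in-memory stream instead of a real subprocess pipe:

import io
from typing import List, Optional, Tuple


def capped_capture(stream, limit: Optional[int]) -> Tuple[bytes, bool]:
    """Read a byte stream in 1 KiB chunks, keeping at most `limit` bytes."""
    lines: List[bytes] = []
    captured = 0
    truncated = False
    while True:
        chunk = stream.read(1024)
        if not chunk:
            break
        # The real runner would log the decoded chunk here regardless of the cap.
        if limit is None:
            lines.append(chunk)
            continue
        remaining = limit - captured
        if remaining <= 0:
            truncated = True
            continue
        if len(chunk) <= remaining:
            lines.append(chunk)
            captured += len(chunk)
        else:
            lines.append(chunk[:remaining])
            captured = limit
            truncated = True
    return b"".join(lines), truncated


fake_stdout = io.BytesIO(b"x" * 5000)          # pretend this is a command's stdout
data, truncated = capped_capture(fake_stdout, limit=2048)
print(len(data), "bytes kept, truncated =", truncated)   # 2048 bytes kept, truncated = True
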
dar_backup/config_settings.py
CHANGED
@@ -39,6 +39,7 @@ class ConfigSettings:
     min_size_verification_mb: int = field(init=False)
     no_files_verification: int = field(init=False)
     command_timeout_secs: int = field(init=False)
+    command_capture_max_bytes: Optional[int] = field(init=False, default=None)
     backup_dir: str = field(init=False)
     test_restore_dir: str = field(init=False)
     backup_d_dir: str = field(init=False)
@@ -47,14 +48,14 @@ class ConfigSettings:
     error_correction_percent: int = field(init=False)
     par2_enabled: bool = field(init=False)
     par2_dir: Optional[str] = field(init=False, default=None)
-    par2_layout: Optional[str] = field(init=False, default=None)
-    par2_mode: Optional[str] = field(init=False, default=None)
     par2_ratio_full: Optional[int] = field(init=False, default=None)
     par2_ratio_diff: Optional[int] = field(init=False, default=None)
     par2_ratio_incr: Optional[int] = field(init=False, default=None)
     par2_run_verify: Optional[bool] = field(init=False, default=None)
     logfile_max_bytes: int = field(init=False)
     logfile_no_count: int = field(init=False)
+    trace_log_max_bytes: int = field(init=False)
+    trace_log_backup_count: int = field(init=False)
     dar_backup_discord_webhook_url: Optional[str] = field(init=False, default=None)
     restoretest_exclude_prefixes: list[str] = field(init=False, default_factory=list)
     restoretest_exclude_suffixes: list[str] = field(init=False, default_factory=list)
@@ -83,6 +84,20 @@ class ConfigSettings:
             "type": int,
             "default": 5,
         },
+        {
+            "section": "MISC",
+            "key": "TRACE_LOG_MAX_BYTES",
+            "attr": "trace_log_max_bytes",
+            "type": int,
+            "default": 10485760,  # 10 MB
+        },
+        {
+            "section": "MISC",
+            "key": "TRACE_LOG_BACKUP_COUNT",
+            "attr": "trace_log_backup_count",
+            "type": int,
+            "default": 1,
+        },
         {
             "section": "MISC",
             "key": "DAR_BACKUP_DISCORD_WEBHOOK_URL",
@@ -90,6 +105,13 @@ class ConfigSettings:
             "type": str,
             "default": None,
         },
+        {
+            "section": "MISC",
+            "key": "COMMAND_CAPTURE_MAX_BYTES",
+            "attr": "command_capture_max_bytes",
+            "type": int,
+            "default": 102400,
+        },
         # Add more optional fields here
     ]

@@ -98,7 +120,7 @@ class ConfigSettings:
             raise ConfigSettingsError("`config_file` must be specified.")

         try:
-            self.config = configparser.ConfigParser()
+            self.config = configparser.ConfigParser(inline_comment_prefixes=['#'])
             loaded_files = self.config.read(self.config_file)
             if not loaded_files:
                 raise RuntimeError(f"Configuration file not found or unreadable: '{self.config_file}'")
@@ -124,8 +146,6 @@ class ConfigSettings:
             raise ConfigSettingsError(f"Invalid boolean value for 'ENABLED' in [PAR2]: '{val}'")

         self.par2_dir = self._get_optional_str("PAR2", "PAR2_DIR", default=None)
-        self.par2_layout = self._get_optional_str("PAR2", "PAR2_LAYOUT", default="by-backup")
-        self.par2_mode = self._get_optional_str("PAR2", "PAR2_MODE", default=None)
         self.par2_ratio_full = self._get_optional_int("PAR2", "PAR2_RATIO_FULL", default=None)
         self.par2_ratio_diff = self._get_optional_int("PAR2", "PAR2_RATIO_DIFF", default=None)
         self.par2_ratio_incr = self._get_optional_int("PAR2", "PAR2_RATIO_INCR", default=None)
@@ -240,8 +260,6 @@ class ConfigSettings:
         """
         par2_config = {
             "par2_dir": self.par2_dir,
-            "par2_layout": self.par2_layout,
-            "par2_mode": self.par2_mode,
             "par2_ratio_full": self.par2_ratio_full,
             "par2_ratio_diff": self.par2_ratio_diff,
             "par2_ratio_incr": self.par2_ratio_incr,
@@ -260,10 +278,6 @@ class ConfigSettings:
                 continue
             if key == "PAR2_DIR":
                 par2_config["par2_dir"] = value
-            elif key == "PAR2_LAYOUT":
-                par2_config["par2_layout"] = value
-            elif key == "PAR2_MODE":
-                par2_config["par2_mode"] = value
             elif key == "PAR2_RATIO_FULL":
                 par2_config["par2_ratio_full"] = int(value)
             elif key == "PAR2_RATIO_DIFF":
dar_backup/dar-backup.conf
CHANGED
@@ -17,7 +17,14 @@ NO_FILES_VERIFICATION = 5
 # The author has such `dar` tasks running for 10-15 hours on the yearly backups, so a value of 24 hours is used.
 # If a timeout is not specified when using the CommandRunner, a default timeout of 30 secs is used.
 COMMAND_TIMEOUT_SECS = 86400
+# Optional limit on captured command output (in bytes). Output beyond this
+# size is still logged but not kept in memory. Use 0 to avoid buffering entirely.
+# Default is 102400.
+# COMMAND_CAPTURE_MAX_BYTES = 102400
 #DAR_BACKUP_DISCORD_WEBHOOK_URL = https://discord.com/api/webhooks/<id>/<token>
+# Optional Trace log configuration (debug level logs with stack traces)
+# TRACE_LOG_MAX_BYTES = 10485760   # 10 MB default
+# TRACE_LOG_BACKUP_COUNT = 1       # 1 backup file default

 [DIRECTORIES]
 BACKUP_DIR = @@BACKUP_DIR@@
dar_backup/dar-backup.conf.j2
CHANGED
@@ -31,6 +31,9 @@ LOGFILE_LOCATION = {{ vars_map.DAR_BACKUP_DIR -}}/dar-backup.log
 # LOGFILE_BACKUP_COUNT = 5 # default, change as needed
 # DAR_BACKUP_DISCORD_WEBHOOK_URL **should really** be given as an environment variable for security reasons
 # DAR_BACKUP_DISCORD_WEBHOOK_URL = https://discord.com/api/webhooks/<id>/<token>
+# Optional Trace log configuration (debug level logs with stack traces)
+# TRACE_LOG_MAX_BYTES = 10485760   # 10 MB default
+# TRACE_LOG_BACKUP_COUNT = 1       # 1 backup file default

 MAX_SIZE_VERIFICATION_MB = 2
 MIN_SIZE_VERIFICATION_MB = 0
@@ -64,7 +67,6 @@ ERROR_CORRECTION_PERCENT = 5
 ENABLED = True
 # Optional PAR2 configuration
 # PAR2_DIR = /path/to/par2-store
-# PAR2_LAYOUT = by-backup
 # PAR2_RATIOs are meuasured as percentages. Same function as ERROR_CORRECTION_PERCENT
 # PAR2_RATIO_FULL = 10
 # PAR2_RATIO_DIFF = 5