dar-backup 1.0.1__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dar_backup/__about__.py +1 -2
- dar_backup/clean_log.py +102 -63
- dar_backup/cleanup.py +139 -107
- dar_backup/command_runner.py +123 -15
- dar_backup/config_settings.py +25 -12
- dar_backup/dar-backup.conf +7 -0
- dar_backup/dar-backup.conf.j2 +3 -1
- dar_backup/dar_backup.py +529 -102
- dar_backup/dar_backup_systemd.py +1 -1
- dar_backup/demo.py +19 -11
- dar_backup/installer.py +18 -1
- dar_backup/manager.py +1085 -96
- dar_backup/util.py +128 -19
- {dar_backup-1.0.1.dist-info → dar_backup-1.1.0.dist-info}/METADATA +320 -42
- dar_backup-1.1.0.dist-info/RECORD +23 -0
- dar_backup/Changelog.md +0 -401
- dar_backup/README.md +0 -2045
- dar_backup-1.0.1.dist-info/RECORD +0 -25
- {dar_backup-1.0.1.dist-info → dar_backup-1.1.0.dist-info}/WHEEL +0 -0
- {dar_backup-1.0.1.dist-info → dar_backup-1.1.0.dist-info}/entry_points.txt +0 -0
- {dar_backup-1.0.1.dist-info → dar_backup-1.1.0.dist-info}/licenses/LICENSE +0 -0
dar_backup/manager.py
CHANGED
@@ -27,6 +27,10 @@ import os
 import re
 import sys
 import subprocess
+import threading
+import shlex
+import time as time_module
+import dateparser
 
 from inputimeout import inputimeout, TimeoutOccurred
 
@@ -34,8 +38,10 @@ from inputimeout import inputimeout, TimeoutOccurred
 from . import __about__ as about
 from dar_backup.config_settings import ConfigSettings
 from dar_backup.util import setup_logging
+from dar_backup.util import derive_trace_log_path
 from dar_backup.util import CommandResult
 from dar_backup.util import get_config_file
+from dar_backup.util import send_discord_message
 from dar_backup.util import get_logger
 from dar_backup.util import get_binary_info
 from dar_backup.util import show_version
@@ -45,12 +51,12 @@ from dar_backup.util import show_scriptname
 
 from dar_backup.command_runner import CommandRunner
 from dar_backup.command_runner import CommandResult
-from dar_backup.util import backup_definition_completer,
+from dar_backup.util import backup_definition_completer, archive_content_completer, add_specific_archive_completer
 
-from datetime import datetime
+from datetime import datetime, tzinfo
 from sys import stderr
 from time import time
-from typing import Dict, List,
+from typing import Dict, List, Tuple, Optional
 
 # Constants
 SCRIPTNAME = os.path.basename(__file__)
@@ -62,6 +68,25 @@ logger = None
 runner = None
 
 
+def _open_command_log(command: List[str]):
+    command_logger = get_logger(command_output_logger=True)
+    log_path = None
+    for handler in getattr(command_logger, "handlers", []):
+        if hasattr(handler, "baseFilename"):
+            log_path = handler.baseFilename
+            break
+    if not log_path:
+        return None, None
+    log_file = open(log_path, "ab")
+    header = (
+        f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - COMMAND: "
+        f"{' '.join(map(shlex.quote, command))}\n"
+    ).encode("utf-8", errors="replace")
+    log_file.write(header)
+    log_file.flush()
+    return log_file, threading.Lock()
+
+
 def get_db_dir(config_settings: ConfigSettings) -> str:
     """
     Return the correct directory for storing catalog databases.
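A minimal sketch of the handler introspection `_open_command_log` relies on: standard-library file handlers expose their open file's path as `baseFilename`, so the command log can be re-opened in binary append mode. The logger name and path below are illustrative, not part of dar-backup.

```python
import logging

demo_logger = logging.getLogger("demo-command-output")  # illustrative logger
demo_logger.addHandler(logging.FileHandler("/tmp/demo-commands.log"))

log_path = None
for handler in demo_logger.handlers:
    if hasattr(handler, "baseFilename"):  # FileHandler/RotatingFileHandler only
        log_path = handler.baseFilename
        break
print(log_path)  # /tmp/demo-commands.log
```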
@@ -131,24 +156,106 @@ def list_catalogs(backup_def: str, config_settings: ConfigSettings, suppress_out
         return CommandResult(1, '', error_msg)
 
     command = ['dar_manager', '--base', database_path, '--list']
-
-
+    if runner is not None and not hasattr(runner, "default_capture_limit_bytes"):
+        process = runner.run(command, capture_output_limit_bytes=-1)
+        stdout, stderr = process.stdout, process.stderr
 
-
-
-
-
-
+        if process.returncode != 0:
+            logger.error(f'Error listing catalogs for: "{database_path}"')
+            logger.error(f"stderr: {stderr}")
+            logger.error(f"stdout: {stdout}")
+            return process
+
+        # Extract only archive basenames from stdout
+        archive_names = []
+        archive_lines = []
+        for line in stdout.splitlines():
+            line = line.strip()
+            if not line or "archive #" in line or "dar path" in line or "compression" in line:
+                continue
+            parts = line.split("\t")
+            if len(parts) >= 3:
+                archive_names.append(parts[2].strip())
+                archive_lines.append(line)
+    else:
+        stderr_lines: List[str] = []
+        stderr_bytes = 0
+        cap = getattr(config_settings, "command_capture_max_bytes", None)
+        if not isinstance(cap, int):
+            cap = None
+        log_file, log_lock = _open_command_log(command)
+
+        process = subprocess.Popen(
+            command,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            text=False,
+            bufsize=0
+        )
 
-
-
-
-
-
-
-
-
-
+        def read_stderr():
+            nonlocal stderr_bytes
+            if process.stderr is None:
+                return
+            while True:
+                chunk = process.stderr.read(1024)
+                if not chunk:
+                    break
+                if log_file:
+                    with log_lock:
+                        log_file.write(chunk)
+                        log_file.flush()
+                if cap is None:
+                    stderr_lines.append(chunk)
+                elif cap > 0 and stderr_bytes < cap:
+                    remaining = cap - stderr_bytes
+                    if len(chunk) <= remaining:
+                        stderr_lines.append(chunk)
+                        stderr_bytes += len(chunk)
+                    else:
+                        stderr_lines.append(chunk[:remaining])
+                        stderr_bytes = cap
+
+        stderr_thread = threading.Thread(target=read_stderr)
+        stderr_thread.start()
+
+        archive_names = []
+        archive_lines = []
+        if process.stdout is not None:
+            buffer = b""
+            while True:
+                chunk = process.stdout.read(1024)
+                if not chunk:
+                    break
+                if log_file:
+                    with log_lock:
+                        log_file.write(chunk)
+                buffer += chunk
+                while b"\n" in buffer:
+                    line, buffer = buffer.split(b"\n", 1)
+                    stripped = line.strip()
+                    if not stripped:
+                        continue
+                    decoded = stripped.decode("utf-8", errors="replace")
+                    if "archive #" in decoded or "dar path" in decoded or "compression" in decoded:
+                        continue
+                    parts = decoded.split("\t")
+                    if len(parts) >= 3:
+                        archive_names.append(parts[2].strip())
+                        archive_lines.append(decoded)
+            process.stdout.close()
+
+        process.wait()
+        stderr_thread.join()
+        if log_file:
+            log_file.close()
+
+        if process.returncode != 0:
+            logger.error(f'Error listing catalogs for: "{database_path}"')
+            stderr_text = "".join(stderr_lines)
+            if stderr_text:
+                logger.error(f"stderr: {stderr_text}")
+            return CommandResult(process.returncode, "", stderr_text)
 
     # Sort by prefix and date
     def extract_date(arch_name):
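The cap/remaining arithmetic above keeps at most `command_capture_max_bytes` of stderr in memory while still streaming everything to the command log. A self-contained sketch of just that accounting (the function name is mine, not the package's):

```python
def capped_append(chunks: list, captured: int, cap, chunk: bytes) -> int:
    """Keep at most `cap` bytes across `chunks`; None = unlimited, <=0 = none."""
    if cap is None:
        chunks.append(chunk)
        return captured + len(chunk)
    if cap > 0 and captured < cap:
        remaining = cap - captured
        if len(chunk) <= remaining:
            chunks.append(chunk)
            return captured + len(chunk)
        chunks.append(chunk[:remaining])  # truncate the chunk that crosses the cap
        return cap
    return captured

chunks, captured = [], 0
for c in (b"aaaa", b"bbbb", b"cccc"):
    captured = capped_append(chunks, captured, 6, c)
print(b"".join(chunks))  # b'aaaabb' -- capture stops at the 6-byte cap
```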
@@ -167,7 +274,7 @@ def list_catalogs(backup_def: str, config_settings: ConfigSettings, suppress_out
         for name in archive_names:
             print(name)
 
-        return
+        return CommandResult(0, "\n".join(archive_lines), "")
 
 
 def cat_no_for_name(archive: str, config_settings: ConfigSettings) -> int:
@@ -184,9 +291,7 @@ def cat_no_for_name(archive: str, config_settings: ConfigSettings) -> int:
     if process.returncode != 0:
         logger.error(f"Error listing catalogs for backup def: '{backup_def}'")
         return -1
-    line_no = 1
     for line in process.stdout.splitlines():
-        line_no += 1
         search = re.search(rf".*?(\d+)\s+.*?({archive}).*", line)
         if search:
             logger.info(f"Found archive: '{archive}', catalog #: '{search.group(1)}'")
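The search pattern above pulls the catalog number out of a listing row by matching the first integer and the archive name on the same line. A quick check on a made-up row (the real `dar_manager` column layout may differ):

```python
import re

archive = "example_FULL_2024-01-01"
line = "\t6\t/backups\texample_FULL_2024-01-01"  # hypothetical listing row
search = re.search(rf".*?(\d+)\s+.*?({archive}).*", line)
if search:
    print(search.group(1))  # 6
```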
@@ -215,26 +320,104 @@ def list_archive_contents(archive: str, config_settings: ConfigSettings) -> int:
 
 
     command = ['dar_manager', '--base', database_path, '-u', f"{cat_no}"]
-
+    if runner is not None and not hasattr(runner, "default_capture_limit_bytes"):
+        process = runner.run(command, timeout=10)
+        stdout = process.stdout or ""
+        stderr = process.stderr or ""
+        if process.returncode != 0:
+            logger.error(f'Error listing catalogs for: "{database_path}"')
+            logger.error(f"stderr: {stderr}")
+            logger.error(f"stdout: {stdout}")
+            return process.returncode
 
+        combined_lines = (stdout + "\n" + stderr).splitlines()
+        file_lines = [line for line in combined_lines if line.strip().startswith("[ Saved ]")]
 
-
-
+        if file_lines:
+            for line in file_lines:
+                print(line)
+        else:
+            print(f"[info] Archive '{archive}' is empty.")
+
+        return process.returncode
+
+    stderr_lines: List[str] = []
+    stderr_bytes = 0
+    cap = getattr(config_settings, "command_capture_max_bytes", None)
+    log_file, log_lock = _open_command_log(command)
+
+    process = subprocess.Popen(
+        command,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        text=False,
+        bufsize=0
+    )
+
+    def read_stderr():
+        nonlocal stderr_bytes
+        if process.stderr is None:
+            return
+        while True:
+            chunk = process.stderr.read(1024)
+            if not chunk:
+                break
+            if log_file:
+                with log_lock:
+                    log_file.write(chunk)
+                    log_file.flush()
+            if cap is None:
+                stderr_lines.append(chunk)
+            elif cap > 0 and stderr_bytes < cap:
+                remaining = cap - stderr_bytes
+                if len(chunk) <= remaining:
+                    stderr_lines.append(chunk)
+                    stderr_bytes += len(chunk)
+                else:
+                    stderr_lines.append(chunk[:remaining])
+                    stderr_bytes = cap
+
+    stderr_thread = threading.Thread(target=read_stderr)
+    stderr_thread.start()
+
+    found = False
+    if process.stdout is not None:
+        buffer = b""
+        while True:
+            chunk = process.stdout.read(1024)
+            if not chunk:
+                break
+            if log_file:
+                with log_lock:
+                    log_file.write(chunk)
+            buffer += chunk
+            while b"\n" in buffer:
+                line, buffer = buffer.split(b"\n", 1)
+                if line.strip().startswith(b"[ Saved ]"):
+                    print(line.decode("utf-8", errors="replace"))
+                    found = True
+        process.stdout.close()
+
+    try:
+        process.wait(timeout=10)
+    except subprocess.TimeoutExpired:
+        process.kill()
+        stderr_thread.join()
+        logger.error(f"Timeout listing contents of archive: '{archive}'")
+        return 1
 
+    stderr_thread.join()
+    if log_file:
+        log_file.close()
 
     if process.returncode != 0:
         logger.error(f'Error listing catalogs for: "{database_path}"')
-
-
-
+        stderr_text = "".join(stderr_lines)
+        if stderr_text:
+            logger.error(f"stderr: {stderr_text}")
+        return process.returncode
 
-
-    file_lines = [line for line in combined_lines if line.strip().startswith("[ Saved ]")]
-
-    if file_lines:
-        for line in file_lines:
-            print(line)
-    else:
+    if not found:
         print(f"[info] Archive '{archive}' is empty.")
 
     return process.returncode
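The buffered line-splitting used above is worth seeing in isolation: bytes arrive in arbitrary chunks, complete lines are peeled off as soon as a newline appears, and any partial tail is kept for the next chunk. A minimal sketch:

```python
buffer = b""
for chunk in (b"[ Saved ] /etc/ho", b"sts\n[ Saved ] /etc/fstab\npartial"):
    buffer += chunk
    while b"\n" in buffer:
        line, buffer = buffer.split(b"\n", 1)
        print(line.decode("utf-8", errors="replace"))
# `buffer` still holds b"partial" until more data (or EOF) arrives.
```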
@@ -252,7 +435,7 @@ def list_catalog_contents(catalog_number: int, backup_def: str, config_settings:
         logger.error(f'Catalog database not found: "{database_path}"')
         return 1
     command = ['dar_manager', '--base', database_path, '-u', f"{catalog_number}"]
-    process = runner.run(command)
+    process = runner.run(command, capture_output_limit_bytes=-1)
     stdout, stderr = process.stdout, process.stderr
     if process.returncode != 0:
         logger.error(f'Error listing catalogs for: "{database_path}"')
@@ -273,7 +456,7 @@ def find_file(file, backup_def, config_settings):
         logger.error(f'Database not found: "{database_path}"')
         return 1
     command = ['dar_manager', '--base', database_path, '-f', f"{file}"]
-    process = runner.run(command)
+    process = runner.run(command, capture_output_limit_bytes=-1)
     stdout, stderr = process.stdout, process.stderr
     if process.returncode != 0:
         logger.error(f'Error finding file: {file} in: "{database_path}"')
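Both call sites now pass `capture_output_limit_bytes=-1` where the full listing is needed. The runner's exact semantics are not shown in this diff; a common convention, sketched here purely as an assumption (not dar-backup's actual CommandRunner), is to treat a negative limit as "no cap":

```python
def effective_cap(limit: int | None) -> int | None:
    # Assumed convention: None -> use the runner default, negative ->
    # capture everything, positive -> hard cap in bytes.
    if limit is not None and limit < 0:
        return None
    return limit

print(effective_cap(-1))     # None, i.e. unlimited capture
print(effective_cap(65536))  # 65536
```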
@@ -284,6 +467,693 @@ def find_file(file, backup_def, config_settings):
     return process.returncode
 
 
+def restore_at(backup_def: str, paths: List[str], when: str, target: str, config_settings: ConfigSettings, verbose: bool = False) -> int:
+    """
+    Perform a Point-in-Time Recovery (PITR) using dar_manager.
+
+    Args:
+        backup_def: Backup definition name (prefix for the catalog DB, e.g. "example").
+        paths: One or more file or directory paths as stored in the DAR catalog
+            (must be relative, e.g. "tmp/unit-test/.../file.txt").
+        when: Date/time string to restore "as of". Parsed via dateparser and
+            converted to dar_manager format YYYY/MM/DD-HH:MM:SS. If None/empty,
+            the latest version is restored.
+        target: Destination directory for restore output. Required to avoid
+            restoring into an unintended working directory. Restore is rebased
+            under this directory using dar options (-R).
+        config_settings: Loaded ConfigSettings used to locate backup dirs/DB and
+            timeouts.
+        verbose: If True, enables dar_manager verbose logging (-v).
+
+    Returns:
+        Process return code (0 on success, non-zero on failure). If dar_manager
+        reports no files restored for a dated PITR, a fallback path is attempted
+        that selects the correct archive via dar_manager metadata and restores
+        the file(s) directly with dar.
+    """
+    database = f"{backup_def}{DB_SUFFIX}"
+    database_path = os.path.join(get_db_dir(config_settings), database)
+    logger.debug(
+        "PITR restore requested: backup_def=%s paths=%d when=%s target=%s db=%s",
+        backup_def,
+        len(paths),
+        when,
+        target,
+        database_path,
+    )
+
+    if not os.path.exists(database_path):
+        logger.error(f'Database not found: "{database_path}"')
+        return 1
+
+    if not target:
+        logger.error("Restore target directory is required (--target).")
+        return 1
+    unsafe_reason = _restore_target_unsafe_reason(target)
+    if unsafe_reason:
+        logger.error(unsafe_reason)
+        return 1
+
+    # Parse date (or default to "now" for latest restore)
+    parsed_date = None
+    if when:
+        parsed_date = _parse_when(when)
+        if parsed_date:
+            date_arg = parsed_date.strftime("%Y/%m/%d-%H:%M:%S")
+            logger.info("Restoring files as of: %s (from input '%s')", date_arg, when)
+            logger.debug("Parsed PITR timestamp: %s -> %s", when, date_arg)
+        else:
+            logger.error(f"Could not parse date: '{when}'")
+            return 1
+    else:
+        parsed_date = datetime.now()
+        logger.info(
+            "Restoring files as of: %s (no --when provided; using current time)",
+            parsed_date.strftime("%Y/%m/%d-%H:%M:%S"),
+        )
+
+    # Target directory handling: pass -R and -n via dar_manager's -e option so dar
+    # rebases paths and fails fast instead of prompting to overwrite.
+    if target:
+        logger.debug("PITR target directory: %s (cwd=%s)", target, os.getcwd())
+        if not os.path.exists(target):
+            try:
+                os.makedirs(target, exist_ok=True)
+            except Exception as e:
+                logger.error(f"Could not create target directory '{target}': {e}")
+                return 1
+            logger.debug("Created target directory: %s", target)
+        # Fail fast if any requested paths already exist under target.
+        normalized_paths = [os.path.normpath(path.lstrip(os.sep)) for path in paths]
+        if normalized_paths:
+            logger.debug("Normalized restore paths count=%d sample=%s", len(normalized_paths), normalized_paths[:3])
+        existing = []
+        for rel_path in normalized_paths:
+            if not rel_path or rel_path == ".":
+                continue
+            candidate = os.path.join(target, rel_path)
+            if os.path.exists(candidate):
+                existing.append(rel_path)
+        if existing:
+            sample = ", ".join(existing[:3])
+            extra = f" (+{len(existing) - 3} more)" if len(existing) > 3 else ""
+            logger.error(
+                "Restore target '%s' already contains path(s) to restore: %s%s. For safety, PITR restores abort "
+                "without overwriting existing files. Use a clean/empty target.",
+                target,
+                sample,
+                extra,
+            )
+            return 1
+
+    # For PITR restores, skip dar_manager -w restore to avoid interactive prompts.
+    # Use dar_manager metadata for selection, then restore directly with dar.
+    logger.info(
+        "PITR restore uses direct dar restore with catalog-derived chain (non-interactive)."
+    )
+    return _restore_with_dar(backup_def, paths, parsed_date, target, config_settings)
+
+
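The docstring above pins down the timestamp shape handed to dar_manager. A small sketch of the `--when` handling: free-form input goes through dateparser, then is rendered as `YYYY/MM/DD-HH:MM:SS`:

```python
import dateparser  # same third-party parser the new import list pulls in

parsed = dateparser.parse("2024-06-01 13:45")
if parsed is not None:
    print(parsed.strftime("%Y/%m/%d-%H:%M:%S"))  # 2024/06/01-13:45:00
```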
+def _restore_target_unsafe_reason(target: str) -> Optional[str]:
+    target_abs = os.path.abspath(target)
+    target_norm = os.path.normpath(target_abs)
+
+    allow_prefixes = (
+        "/tmp",
+        "/var/tmp",
+        "/home",
+    )
+    if target_norm in allow_prefixes or target_norm.startswith(allow_prefixes):
+        return None
+
+    protected_prefixes = (
+        "/bin",
+        "/sbin",
+        "/usr",
+        "/etc",
+        "/lib",
+        "/lib64",
+        "/boot",
+        "/proc",
+        "/sys",
+        "/dev",
+        "/var",
+        "/root",
+    )
+    if target_norm == "/" or target_norm in protected_prefixes:
+        return f"Restore target '{target_norm}' is a protected system directory. Choose a safer location."
+    if any(target_norm.startswith(prefix + os.sep) for prefix in protected_prefixes):
+        return f"Restore target '{target_norm}' is under a protected system directory. Choose a safer location."
+
+    return None
+
+
+def _local_tzinfo() -> tzinfo:
+    return datetime.now().astimezone().tzinfo
+
+
+def _normalize_when_dt(dt: datetime) -> datetime:
+    if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None:
+        return dt
+    local_tz = _local_tzinfo()
+    return dt.astimezone(local_tz).replace(tzinfo=None)
+
+
+def _parse_when(when: str) -> Optional[datetime]:
+    parsed = dateparser.parse(when)
+    if not parsed:
+        return None
+    normalized = _normalize_when_dt(parsed)
+    if normalized is not parsed:
+        logger.debug("Normalized PITR timestamp with timezone: %s -> %s", parsed, normalized)
+    return normalized
+
+
+def _coerce_timeout(value: Optional[int]) -> Optional[int]:
+    if value is None:
+        return None
+    if isinstance(value, bool):
+        return None
+    if isinstance(value, int):
+        return None if value <= 0 else value
+    if isinstance(value, str):
+        try:
+            value_int = int(value)
+        except ValueError:
+            return None
+        return None if value_int <= 0 else value_int
+    return None
+
+
+def _parse_archive_map(list_output: str) -> Dict[int, str]:
+    archives: Dict[int, str] = {}
+    for line in list_output.splitlines():
+        stripped = line.strip()
+        if not stripped or stripped.startswith("archive #") or stripped.startswith("-"):
+            continue
+        parts = stripped.split()
+        if len(parts) < 3 or not parts[0].isdigit():
+            continue
+        num = int(parts[0])
+        basename = parts[-1]
+        path = " ".join(parts[1:-1])
+        archives[num] = os.path.join(path, basename)
+    return archives
+
+
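What `_parse_archive_map` produces, shown on a made-up listing shaped like the number/directory/basename rows it expects (the real `dar_manager --list` layout may differ; the parser only assumes those three columns):

```python
import os

sample = """archive #   path    basename
1   /backups/example    example_FULL_2024-01-01
2   /backups/example    example_DIFF_2024-01-15"""

archives = {}
for line in sample.splitlines():
    stripped = line.strip()
    if not stripped or stripped.startswith("archive #") or stripped.startswith("-"):
        continue
    parts = stripped.split()
    if len(parts) < 3 or not parts[0].isdigit():
        continue
    archives[int(parts[0])] = os.path.join(" ".join(parts[1:-1]), parts[-1])
print(archives[1])  # /backups/example/example_FULL_2024-01-01
```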
+def _replace_path_prefix(path: str, old_prefix: str, new_prefix: str) -> Optional[str]:
+    old_norm = os.path.normpath(old_prefix)
+    new_norm = os.path.normpath(new_prefix)
+    if path == old_norm:
+        return new_norm
+    if path.startswith(old_norm + os.sep):
+        suffix = path[len(old_norm):]
+        return os.path.normpath(new_norm + suffix)
+    return None
+
+
+def relocate_archive_paths(
+    backup_def: str,
+    old_prefix: str,
+    new_prefix: str,
+    config_settings: ConfigSettings,
+    dry_run: bool = False,
+) -> int:
+    database = f"{backup_def}{DB_SUFFIX}"
+    database_path = os.path.join(get_db_dir(config_settings), database)
+    if not os.path.exists(database_path):
+        logger.error(f'Database not found: "{database_path}"')
+        return 1
+
+    timeout = _coerce_timeout(getattr(config_settings, "command_timeout_secs", None))
+    list_result = runner.run(["dar_manager", "--base", database_path, "--list"], timeout=timeout)
+    stdout = list_result.stdout or ""
+    stderr = list_result.stderr or ""
+    if list_result.returncode != 0:
+        logger.error(f'Error listing catalogs for: "{database_path}"')
+        logger.error(f"stderr: {stderr}")
+        logger.error(f"stdout: {stdout}")
+        return list_result.returncode
+
+    archive_map = _parse_archive_map(stdout)
+    if not archive_map:
+        logger.error("Could not determine archive list from dar_manager output.")
+        return 1
+
+    updates: List[Tuple[int, str, str, str]] = []
+    for catalog_no, full_path in archive_map.items():
+        current_dir = os.path.dirname(full_path)
+        new_dir = _replace_path_prefix(current_dir, old_prefix, new_prefix)
+        if new_dir and new_dir != current_dir:
+            updates.append((catalog_no, current_dir, new_dir, os.path.basename(full_path)))
+
+    if not updates:
+        logger.info(
+            "No archive paths matched '%s' in database '%s'.",
+            os.path.normpath(old_prefix),
+            database_path,
+        )
+        return 0
+
+    logger.info(
+        "Updating %d archive path(s) from '%s' to '%s' in database '%s'.",
+        len(updates),
+        os.path.normpath(old_prefix),
+        os.path.normpath(new_prefix),
+        database_path,
+    )
+    failures = 0
+    for catalog_no, current_dir, new_dir, basename in updates:
+        logger.info("Archive #%d (%s): %s -> %s", catalog_no, basename, current_dir, new_dir)
+        if dry_run:
+            continue
+        result = runner.run(
+            ["dar_manager", "--base", database_path, "-p", str(catalog_no), new_dir],
+            timeout=timeout,
+        )
+        if result.returncode != 0:
+            failures += 1
+            logger.error(
+                "Failed updating archive #%d path to '%s' (returncode=%s).",
+                catalog_no,
+                new_dir,
+                result.returncode,
+            )
+            logger.error(f"stderr: {result.stderr}")
+
+    if failures:
+        logger.error("Relocate completed with %d failure(s).", failures)
+        return 1
+    logger.info("Relocate completed successfully.")
+    return 0
+
+
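The relocate logic hinges on component-aware prefix matching: `/old-disk2` must not match an `/old-disk` prefix. The same check as `_replace_path_prefix` above, runnable on its own (paths are illustrative):

```python
import os

def replace_prefix(path: str, old: str, new: str):
    old_norm, new_norm = os.path.normpath(old), os.path.normpath(new)
    if path == old_norm:
        return new_norm
    if path.startswith(old_norm + os.sep):  # whole-component match only
        return os.path.normpath(new_norm + path[len(old_norm):])
    return None

print(replace_prefix("/old-disk/backups", "/old-disk", "/new-disk"))   # /new-disk/backups
print(replace_prefix("/old-disk2/backups", "/old-disk", "/new-disk"))  # None
```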
+def _parse_archive_info(archive_map: Dict[int, str]) -> List[Tuple[int, datetime, str]]:
+    info: List[Tuple[int, datetime, str]] = []
+    pattern = re.compile(r"^(.*)_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2})(?:_(\d{6}))?(?:_.*)?$")
+    for catalog_no, path in archive_map.items():
+        base = os.path.basename(path)
+        match = pattern.match(base)
+        if not match:
+            continue
+        _, archive_type, date_str, time_str = match.groups()
+        try:
+            if time_str:
+                archive_date = datetime.strptime(f"{date_str}_{time_str}", "%Y-%m-%d_%H%M%S")
+            else:
+                archive_date = datetime.strptime(date_str, "%Y-%m-%d")
+        except ValueError:
+            continue
+        info.append((catalog_no, archive_date, archive_type))
+    return info
+
+
+def _select_archive_chain(archive_info: List[Tuple[int, datetime, str]], when_dt: datetime) -> List[int]:
+    order = {"FULL": 0, "DIFF": 1, "INCR": 2}
+    candidates = [
+        (catalog_no, date, archive_type)
+        for catalog_no, date, archive_type in archive_info
+        if date <= when_dt
+    ]
+    candidates.sort(key=lambda item: (item[1], order.get(item[2], 99), item[0]))
+    last_full = None
+    last_full_key = None
+    for catalog_no, date, archive_type in candidates:
+        if archive_type == "FULL":
+            last_full = catalog_no
+            last_full_key = (date, order["FULL"], catalog_no)
+    if last_full is None:
+        return []
+
+    last_diff = None
+    last_diff_key = None
+    for catalog_no, date, archive_type in candidates:
+        key = (date, order.get(archive_type, 99), catalog_no)
+        if key <= last_full_key:
+            continue
+        if archive_type == "DIFF":
+            last_diff = catalog_no
+            last_diff_key = key
+
+    base_key = last_diff_key or last_full_key
+    last_incr = None
+    for catalog_no, date, archive_type in candidates:
+        key = (date, order.get(archive_type, 99), catalog_no)
+        if key <= base_key:
+            continue
+        if archive_type == "INCR":
+            last_incr = catalog_no
+
+    chain = [last_full]
+    if last_diff is not None:
+        chain.append(last_diff)
+    if last_incr is not None:
+        chain.append(last_incr)
+    return chain
+
+
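A worked example of the selection rules above, with made-up archives: the newest FULL at or before the cut-off anchors the chain, then the newest DIFF after that FULL, then the newest INCR after the DIFF. This assumes the installed package exposes the private helper for import:

```python
from datetime import datetime
from dar_backup.manager import _select_archive_chain  # private helper shown above

archive_info = [
    (1, datetime(2024, 1, 1), "FULL"),
    (2, datetime(2024, 1, 10), "DIFF"),
    (3, datetime(2024, 1, 12), "INCR"),
    (4, datetime(2024, 2, 1), "FULL"),  # newer than the cut-off; ignored
]
when = datetime(2024, 1, 20)
print(_select_archive_chain(archive_info, when))  # [1, 2, 3]
```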
+def _is_directory_path(path: str) -> bool:
+    return os.path.isdir(os.path.join(os.sep, path))
+
+
+def _looks_like_directory(path: str) -> bool:
+    if not path:
+        return False
+    normalized = path.rstrip(os.sep)
+    if not normalized:
+        return True
+    if path.endswith(os.sep):
+        return True
+    base = os.path.basename(normalized)
+    _, ext = os.path.splitext(base)
+    return ext == ""
+
+
+def _treat_as_directory(path: str) -> bool:
+    if _is_directory_path(path):
+        return True
+    if _looks_like_directory(path):
+        logger.debug("Treating restore path '%s' as directory (heuristic).", path)
+        return True
+    return False
+
+def _format_chain_item(
+    catalog_no: int,
+    info_by_no: Dict[int, Tuple[datetime, str]],
+    status: str,
+) -> str:
+    info = info_by_no.get(catalog_no)
+    if info:
+        dt, archive_type = info
+        return f"#{catalog_no} {archive_type}@{dt} [{status}]"
+    return f"#{catalog_no} [unknown] [{status}]"
+
+
+def _describe_archive(
+    catalog_no: int,
+    archive_map: Dict[int, str],
+    info_by_no: Dict[int, Tuple[datetime, str]],
+) -> str:
+    archive_path = archive_map.get(catalog_no)
+    base = os.path.basename(archive_path) if archive_path else "unknown"
+    info = info_by_no.get(catalog_no)
+    if info:
+        dt, archive_type = info
+        dt_str = dt.strftime("%Y-%m-%d %H:%M:%S")
+        return f"#{catalog_no} {archive_type}@{dt_str} {base}"
+    return f"#{catalog_no} {base}"
+
+
+def _missing_chain_elements(chain: List[int], archive_map: Dict[int, str]) -> List[str]:
+    missing = []
+    for catalog_no in chain:
+        archive_path = archive_map.get(catalog_no)
+        if not archive_path:
+            missing.append(f"catalog #{catalog_no} missing from archive map")
+            continue
+        slice_path = f"{archive_path}.1.dar"
+        if not os.path.exists(slice_path):
+            missing.append(slice_path)
+    return missing
+
+
+def _pitr_chain_report(
+    backup_def: str,
+    paths: List[str],
+    when: str,
+    config_settings: ConfigSettings,
+) -> int:
+    """
+    Report the PITR archive chain that would be used for a restore at `when`,
+    without performing any restore actions. Returns non-zero if required
+    archives are missing or no chain/candidates can be determined.
+    """
+    if not when:
+        logger.error("PITR report requires --when.")
+        return 1
+
+    parsed_date = _parse_when(when)
+    if not parsed_date:
+        logger.error(f"Could not parse date: '{when}'")
+        return 1
+
+    database = f"{backup_def}{DB_SUFFIX}"
+    database_path = os.path.join(get_db_dir(config_settings), database)
+    timeout = _coerce_timeout(getattr(config_settings, "command_timeout_secs", None))
+    list_result = runner.run(['dar_manager', '--base', database_path, '--list'], timeout=timeout)
+    archive_map = _parse_archive_map(list_result.stdout)
+    if not archive_map:
+        logger.error("Could not determine archive list from dar_manager output.")
+        return 1
+
+    archive_info = _parse_archive_info(archive_map)
+    info_by_no = {catalog_no: (dt, archive_type) for catalog_no, dt, archive_type in archive_info}
+    failures = 0
+    successes = 0
+
+    for path in paths:
+        if _treat_as_directory(path):
+            chain = _select_archive_chain(archive_info, parsed_date)
+            if not chain:
+                logger.error(f"No FULL archive found at or before {parsed_date} for '{path}'")
+                failures += 1
+                continue
+            missing = []
+            chain_display_parts = []
+            for catalog_no in chain:
+                archive_path = archive_map.get(catalog_no)
+                status = "ok"
+                if not archive_path:
+                    status = "missing"
+                    missing.append(f"catalog #{catalog_no} missing from archive map")
+                else:
+                    slice_path = f"{archive_path}.1.dar"
+                    if not os.path.exists(slice_path):
+                        status = "missing"
+                        missing.append(slice_path)
+                chain_display_parts.append(_format_chain_item(catalog_no, info_by_no, status))
+            chain_display = ", ".join(chain_display_parts)
+            logger.info("PITR chain report for '%s': %s", path, chain_display)
+            if missing:
+                for item in missing:
+                    logger.error("PITR chain report missing archive: %s", item)
+                failures += 1
+            else:
+                successes += 1
+            continue
+
+        file_result = runner.run(['dar_manager', '--base', database_path, '-f', path], timeout=timeout)
+        versions = _parse_file_versions(file_result.stdout)
+        candidates = [(num, dt) for num, dt in versions if dt <= parsed_date]
+        candidates.sort(key=lambda item: item[1], reverse=True)
+        logger.info(
+            "PITR chain report candidates for '%s': %s",
+            path,
+            ", ".join(f"#{num}@{dt}" for num, dt in candidates) or "<none>",
+        )
+        if not candidates:
+            logger.error(f"No archive version found for '{path}' at or before {parsed_date}")
+            failures += 1
+            continue
+        catalog_no, dt = candidates[0]
+        archive_path = archive_map.get(catalog_no)
+        if not archive_path:
+            logger.error("PITR chain report missing archive map entry for #%d (%s)", catalog_no, path)
+            failures += 1
+            continue
+        slice_path = f"{archive_path}.1.dar"
+        if not os.path.exists(slice_path):
+            logger.error("PITR chain report missing archive slice: %s", slice_path)
+            failures += 1
+            continue
+        logger.info("PITR chain report selected archive #%d (%s) for '%s'.", catalog_no, dt, path)
+        successes += 1
+
+    logger.info("PITR chain report summary: %d ok, %d failed.", successes, failures)
+    return 0 if failures == 0 else 1
+
+
+def _parse_file_versions(file_output: str) -> List[Tuple[int, datetime]]:
+    versions: List[Tuple[int, datetime]] = []
+    for line in file_output.splitlines():
+        stripped = line.strip()
+        if not stripped:
+            continue
+        match = re.match(r"^(\d+)\s+([A-Za-z]{3}\s+[A-Za-z]{3}\s+\d+\s+\d{2}:\d{2}:\d{2}\s+\d{4})", stripped)
+        if not match:
+            continue
+        try:
+            catalog_no = int(match.group(1))
+            dt = datetime.strptime(match.group(2), "%a %b %d %H:%M:%S %Y")
+        except Exception:
+            continue
+        versions.append((catalog_no, dt))
+    return versions
+
+
+def _guess_darrc_path(config_settings: ConfigSettings) -> Optional[str]:
+    config_dir = os.path.dirname(config_settings.config_file)
+    candidate = os.path.join(config_dir, ".darrc")
+    if os.path.exists(candidate):
+        return candidate
+    script_dir = os.path.dirname(os.path.realpath(__file__))
+    fallback = os.path.join(script_dir, ".darrc")
+    if os.path.exists(fallback):
+        return fallback
+    return None
+
+
+def _restore_with_dar(backup_def: str, paths: List[str], when_dt: datetime, target: str, config_settings: ConfigSettings) -> int:
+    """
+    Restore specific paths by selecting the best matching archive (<= when_dt)
+    using dar_manager metadata, then invoking dar directly.
+
+    This is a fallback for PITR when dar_manager reports that nothing could be
+    restored for a dated request. It inspects the catalog to choose an archive
+    for each path and restores into the provided target directory.
+    """
+    database = f"{backup_def}{DB_SUFFIX}"
+    database_path = os.path.join(get_db_dir(config_settings), database)
+    timeout = _coerce_timeout(getattr(config_settings, "command_timeout_secs", None))
+    list_result = runner.run(['dar_manager', '--base', database_path, '--list'], timeout=timeout)
+    archive_map = _parse_archive_map(list_result.stdout)
+    if not archive_map:
+        logger.error("Could not determine archive list from dar_manager output.")
+        return 1
+    logger.debug("PITR archive map: %s", ", ".join(f"#{k}={v}" for k, v in sorted(archive_map.items())))
+    archive_info = _parse_archive_info(archive_map)
+    info_by_no = {catalog_no: (dt, archive_type) for catalog_no, dt, archive_type in archive_info}
+
+    darrc_path = _guess_darrc_path(config_settings)
+    failures = 0
+    successes = 0
+    missing_archives = set()
+
+    for path in paths:
+        file_result = runner.run(['dar_manager', '--base', database_path, '-f', path], timeout=timeout)
+        if _treat_as_directory(path):
+            chain = _select_archive_chain(archive_info, when_dt)
+            if not chain:
+                logger.error(f"No FULL archive found at or before {when_dt} for '{path}'")
+                failures += 1
+                continue
+            missing = _missing_chain_elements(chain, archive_map)
+            if missing:
+                for item in missing:
+                    missing_archives.add(item)
+                    logger.error("PITR restore missing archive in chain for '%s': %s", path, item)
+                failures += 1
+                continue
+            logger.info(
+                "PITR restore directory '%s' using archive chain: %s",
+                path,
+                ", ".join(_describe_archive(num, archive_map, info_by_no) for num in chain),
+            )
+            restored = True
+            for catalog_no in chain:
+                archive_path = archive_map.get(catalog_no)
+                if not archive_path:
+                    missing_archives.add(f"catalog #{catalog_no} missing from archive map")
+                    logger.error(f"Archive number {catalog_no} missing from archive list; cannot restore '{path}'.")
+                    restored = False
+                    break
+                if not os.path.exists(f"{archive_path}.1.dar"):
+                    missing_archives.add(f"{archive_path}.1.dar")
+                    logger.error(f"Archive slice missing for '{archive_path}.1.dar', cannot complete restore.")
+                    restored = False
+                    break
+                cmd = ['dar', '-x', archive_path, '-wa', '-g', path, '--noconf', '-Q']
+                if target:
+                    cmd.extend(['-R', target])
+                if darrc_path:
+                    cmd.extend(['-B', darrc_path, 'restore-options'])
+                logger.info(
+                    "Applying archive %s for '%s'.",
+                    _describe_archive(catalog_no, archive_map, info_by_no),
+                    path,
+                )
+                result = runner.run(cmd, timeout=timeout)
+                if result.returncode != 0:
+                    logger.error(f"dar restore failed for '{path}' from '{archive_path}': {result.stderr}")
+                    restored = False
+                    break
+            if restored:
+                successes += 1
+            else:
+                failures += 1
+            continue
+
+        versions = _parse_file_versions(file_result.stdout)
+        candidates = [(num, dt) for num, dt in versions if dt <= when_dt]
+        candidates.sort(key=lambda item: item[1], reverse=True)
+        logger.debug(
+            "PITR candidates for '%s': %s",
+            path,
+            ", ".join(f"#{num}@{dt}" for num, dt in candidates) or "<none>",
+        )
+        if not candidates:
+            logger.error(f"No archive version found for '{path}' at or before {when_dt}")
+            failures += 1
+            continue
+
+        restored = False
+        for catalog_no, dt in candidates:
+            archive_path = archive_map.get(catalog_no)
+            if not archive_path:
+                missing_archives.add(f"catalog #{catalog_no} missing from archive map")
+                logger.error(f"Archive number {catalog_no} missing from archive list; cannot restore '{path}'.")
+                restored = False
+                break
+            if not os.path.exists(f"{archive_path}.1.dar"):
+                missing_archives.add(f"{archive_path}.1.dar")
+                logger.error(f"Archive slice missing for '{archive_path}.1.dar', cannot restore '{path}'.")
+                restored = False
+                break
+            logger.info(
+                "PITR restore file '%s' using archive %s.",
+                path,
+                _describe_archive(catalog_no, archive_map, info_by_no),
+            )
+            cmd = ['dar', '-x', archive_path, '-wa', '-g', path, '--noconf', '-Q']
+            if target:
+                cmd.extend(['-R', target])
+            if darrc_path:
+                cmd.extend(['-B', darrc_path, 'restore-options'])
+            logger.info(
+                "Restoring '%s' from archive %s using dar.",
+                path,
+                _describe_archive(catalog_no, archive_map, info_by_no),
+            )
+            result = runner.run(cmd, timeout=timeout)
+            if result.returncode == 0:
+                restored = True
+                successes += 1
+                break
+            logger.error(f"dar restore failed for '{path}' from '{archive_path}': {result.stderr}")
+
+        if not restored:
+            failures += 1
+
+    logger.info("PITR restore summary: %d succeeded, %d failed.", successes, failures)
+    if missing_archives:
+        missing_list = sorted(missing_archives)
+        sample = ", ".join(missing_list[:3])
+        extra = f" (+{len(missing_list) - 3} more)" if len(missing_list) > 3 else ""
+        logger.error("Missing archives detected during PITR restore: %s%s", sample, extra)
+        ts = datetime.now().strftime("%Y-%m-%d %H:%M")
+        send_discord_message(
+            f"{ts} - manager: PITR restore missing archives ({len(missing_list)} missing).",
+            config_settings=config_settings,
+        )
+    if failures:
+        ts = datetime.now().strftime("%Y-%m-%d %H:%M")
+        send_discord_message(
+            f"{ts} - manager: PITR restore completed with failures ({failures} failed, {successes} succeeded).",
+            config_settings=config_settings,
+        )
+    return 0 if failures == 0 else 1
+
+
 def add_specific_archive(archive: str, config_settings: ConfigSettings, directory: str = None) -> int:
     """
     Adds the specified archive to its catalog database. Prompts for confirmation if it's older than existing entries.
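`_parse_file_versions` above matches ctime-style stamps such as `Wed Mar 05 10:22:33 2025` in the `-f` output and compares them against the PITR cut-off. A quick check of that parse (the sample stamp is made up):

```python
from datetime import datetime

dt = datetime.strptime("Wed Mar 05 10:22:33 2025", "%a %b %d %H:%M:%S %Y")
print(dt <= datetime(2025, 3, 6))  # True -- this version qualifies
```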
@@ -393,27 +1263,30 @@ def add_directory(args: argparse.ArgumentParser, config_settings: ConfigSettings
     dar_pattern = re.compile(r'^(.*?_(FULL|DIFF|INCR)_(\d{4}-\d{2}-\d{2}))\.1.dar$') # just read slice #1 of an archive
     # List of DAR archives with their dates and base names
     dar_archives = []
+    type_order = {"FULL": 0, "DIFF": 1, "INCR": 2}
 
     for filename in os.listdir(args.add_dir):
         logger.debug(f"check if '{filename}' is a dar archive slice #1?")
         match = dar_pattern.match(filename)
         if match:
             base_name = match.group(1)
+            archive_type = match.group(2)
             date_str = match.group(3)
             date_obj = datetime.strptime(date_str, '%Y-%m-%d')
-            dar_archives.append((date_obj, base_name))
-            logger.debug(f" -> yes: base name: {base_name}, date: {date_str}")
+            dar_archives.append((date_obj, type_order.get(archive_type, 99), base_name, archive_type))
+            logger.debug(f" -> yes: base name: {base_name}, type: {archive_type}, date: {date_str}")
 
     if not dar_archives or len(dar_archives) == 0:
         logger.info(f"No 'dar' archives found in directory {args.add_dir}")
         return
 
-    # Sort the DAR archives by date
+    # Sort the DAR archives by date then type (FULL -> DIFF -> INCR) to avoid interactive ordering prompts.
     dar_archives.sort()
+    logger.debug("Sorted archives for add-dir: %s", [(d.strftime("%Y-%m-%d"), t, n) for d, t, n, _ in dar_archives])
 
     # Loop over the sorted DAR archives and process them
     result: List[Dict] = []
-    for
+    for _date_obj, _type_order, base_name, _archive_type in dar_archives:
         logger.info(f"Adding dar archive: '{base_name}' to it's catalog database")
         result_archive = add_specific_archive(base_name, config_settings, args.add_dir)
         result.append({ f"{base_name}" : result_archive})
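Why the new 4-tuples sort FULL before DIFF before INCR on the same day: Python compares tuples element-wise, so equal dates fall through to the numeric type rank. A tiny demonstration (archive names are made up):

```python
from datetime import datetime

type_order = {"FULL": 0, "DIFF": 1, "INCR": 2}
archives = [
    (datetime(2024, 5, 1), type_order["INCR"], "ex_INCR_2024-05-01", "INCR"),
    (datetime(2024, 5, 1), type_order["FULL"], "ex_FULL_2024-05-01", "FULL"),
]
archives.sort()
print([name for _, _, name, _ in archives])
# ['ex_FULL_2024-05-01', 'ex_INCR_2024-05-01']
```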
@@ -477,7 +1350,8 @@ def remove_specific_archive(archive: str, config_settings: ConfigSettings) -> in
     cat_no:int = cat_no_for_name(archive, config_settings)
     if cat_no >= 0:
         command = ['dar_manager', '--base', database_path, "--delete", str(cat_no)]
-
+        timeout = _coerce_timeout(getattr(config_settings, "command_timeout_secs", None))
+        process: CommandResult = runner.run(command, timeout=timeout)
         logger.info(f"CommandResult: {process}")
     else:
         logger.warning(f"archive: '{archive}' not found in it's catalog database: {database_path}")
@@ -504,6 +1378,26 @@ def build_arg_parser():
     parser.add_argument('-l', '--list-catalogs', action='store_true', help='List catalogs in databases for all backup definitions')
     parser.add_argument('--list-archive-contents', type=str, help="List contents of the archive's catalog. Argument is the archive name.").completer = archive_content_completer
     parser.add_argument('--find-file', type=str, help="List catalogs containing <path>/file. '-d <definition>' argument is also required")
+    parser.add_argument('--restore-path', nargs='+', help="Restore specific path(s) (Point-in-Time Recovery).")
+    parser.add_argument('--when', type=str, help="Date/time for restoration (used with --restore-path).")
+    parser.add_argument('--target', type=str, default=None, help="Target directory for restoration (default: current dir).")
+    parser.add_argument('--pitr-report', action='store_true', help="Report PITR archive chain for --restore-path/--when without restoring.")
+    parser.add_argument(
+        '--pitr-report-first',
+        action='store_true',
+        help="Run PITR chain report before restore and abort if missing archives.",
+    )
+    parser.add_argument(
+        '--relocate-archive-path',
+        nargs=2,
+        metavar=("OLD", "NEW"),
+        help="Rewrite archive path prefix in the catalog DB (requires --backup-def).",
+    )
+    parser.add_argument(
+        '--relocate-archive-path-dry-run',
+        action='store_true',
+        help="Show archive path changes without applying them (use with --relocate-archive-path).",
+    )
     parser.add_argument('--verbose', action='store_true', help='Be more verbose')
     parser.add_argument('--log-level', type=str, help="`debug` or `trace`, default is `info`", default="info")
    parser.add_argument('--log-stdout', action='store_true', help='also print log messages to stdout')
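A sketch of how the new PITR options come back from argparse. The long-flag spellings are taken from the `add_argument` calls above; `-d` for the backup definition is an assumption based on the `--find-file` help text, and the sketch assumes the parser has no other required flags:

```python
from dar_backup.manager import build_arg_parser

args = build_arg_parser().parse_args([
    "-d", "example",
    "--restore-path", "home/user/file.txt",
    "--when", "2024-06-01 13:45",
    "--target", "/tmp/restore-here",
])
print(args.restore_path, args.when, args.target)
# ['home/user/file.txt'] 2024-06-01 13:45 /tmp/restore-here
```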
@@ -544,7 +1438,14 @@ def main():
         raise SystemExit(127)
     args.config_file = config_settings_path
 
-
+    try:
+        config_settings = ConfigSettings(args.config_file)
+    except Exception as exc:
+        msg = f"Config error: {exc}"
+        print(msg, file=stderr)
+        ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
+        send_discord_message(f"{ts} - manager: FAILURE - {msg}")
+        sys.exit(127)
 
     if not os.path.dirname(config_settings.logfile_location):
         print(f"Directory for log file '{config_settings.logfile_location}' does not exist, exiting")
@@ -552,9 +1453,25 @@ def main():
         return
 
     command_output_log = config_settings.logfile_location.replace("dar-backup.log", "dar-backup-commands.log")
-
+    trace_log_file = derive_trace_log_path(config_settings.logfile_location)
+    logger = setup_logging(
+        config_settings.logfile_location,
+        command_output_log,
+        args.log_level,
+        args.log_stdout,
+        logfile_max_bytes=config_settings.logfile_max_bytes,
+        logfile_backup_count=config_settings.logfile_backup_count,
+        trace_log_file=trace_log_file,
+        trace_log_max_bytes=getattr(config_settings, "trace_log_max_bytes", 10485760),
+        trace_log_backup_count=getattr(config_settings, "trace_log_backup_count", 1)
+    )
    command_logger = get_logger(command_output_logger=True)
-    runner = CommandRunner(
+    runner = CommandRunner(
+        logger=logger,
+        command_logger=command_logger,
+        default_timeout=getattr(config_settings, "command_timeout_secs", 30) or 30,
+        default_capture_limit_bytes=getattr(config_settings, "command_capture_max_bytes", None)
+    )
 
     start_msgs: List[Tuple[str, str]] = []
 
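The `or 30` guard on the runner's default timeout matters because `getattr()` returns whatever the attribute holds: a falsy `0` or `None` from a misconfigured setting would otherwise become the timeout. A tiny illustration (the class is hypothetical):

```python
class Cfg:
    command_timeout_secs = 0  # misconfigured / unset

print(getattr(Cfg, "command_timeout_secs", 30))        # 0
print(getattr(Cfg, "command_timeout_secs", 30) or 30)  # 30
```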
@@ -567,10 +1484,12 @@ def main():
     start_msgs.append(("Config file:", args.config_file))
     args.verbose and start_msgs.append(("Backup dir:", config_settings.backup_dir))
     start_msgs.append(("Logfile:", config_settings.logfile_location))
+    args.verbose and start_msgs.append(("Trace log:", trace_log_file))
     args.verbose and start_msgs.append(("Logfile max size (bytes):", config_settings.logfile_max_bytes))
     args.verbose and start_msgs.append(("Logfile backup count:", config_settings.logfile_backup_count))
     args.verbose and start_msgs.append(("--alternate-archive-dir:", args.alternate_archive_dir))
     args.verbose and start_msgs.append(("--remove-specific-archive:", args.remove_specific_archive))
+    args.verbose and start_msgs.append(("--relocate-archive-path:", args.relocate_archive_path))
     dar_manager_properties = get_binary_info(command='dar_manager')
     start_msgs.append(("dar_manager:", dar_manager_properties['path']))
     start_msgs.append(("dar_manager v.:", dar_manager_properties['version']))
@@ -604,7 +1523,7 @@ def main():
         return
 
     if args.backup_def and not args.backup_def.strip():
-        logger.error(
+        logger.error("No backup definition given to --backup-def")
         sys.exit(1)
         return
 
@@ -616,73 +1535,143 @@ def main():
         return
 
     if args.list_archive_contents and not args.list_archive_contents.strip():
-        logger.error(
+        logger.error("--list-archive-contents <param> not given, exiting")
+        sys.exit(1)
+        return
+
+    if args.relocate_archive_path and not args.backup_def:
+        logger.error("--relocate-archive-path requires the --backup-def, exiting")
+        sys.exit(1)
+        return
+
+    if args.relocate_archive_path_dry_run and not args.relocate_archive_path:
+        logger.error("--relocate-archive-path-dry-run requires --relocate-archive-path, exiting")
         sys.exit(1)
         return
 
     if args.find_file and not args.backup_def:
-        logger.error(
+        logger.error("--find-file requires the --backup-def, exiting")
         sys.exit(1)
         return
 
-
-
-
-
+    if args.restore_path and not args.backup_def:
+        logger.error("--restore-path requires the --backup-def, exiting")
+        sys.exit(1)
+
+    if args.restore_path and not args.target and not args.pitr_report:
+        logger.error("--restore-path requires the --target directory, exiting")
+        sys.exit(1)
+        return
+
+    if args.pitr_report:
+        if not args.restore_path:
+            logger.error("--pitr-report requires --restore-path, exiting")
             sys.exit(1)
             return
-
-
-
-    if args.create_db:
-        if args.backup_def:
-            sys.exit(create_db(args.backup_def, config_settings, logger, runner))
+        if not args.when:
+            logger.error("--pitr-report requires --when, exiting")
+            sys.exit(1)
             return
-        else:
-            for root, dirs, files in os.walk(config_settings.backup_d_dir):
-                for file in files:
-                    current_backupdef = os.path.basename(file)
-                    logger.debug(f"Create catalog db for backup definition: '{current_backupdef}'")
-                    result = create_db(current_backupdef, config_settings, logger, runner)
-                    if result != 0:
-                        sys.exit(result)
-            return
-
-    if args.add_specific_archive:
-        sys.exit(add_specific_archive(args.add_specific_archive, config_settings))
-        return
 
-    if args.
-
+    if args.pitr_report_first and not args.restore_path:
+        logger.error("--pitr-report-first requires --restore-path, exiting")
+        sys.exit(1)
         return
 
-
-
+    # --- Modify settings ---
+    try:
+        if args.alternate_archive_dir:
+            if not os.path.exists(args.alternate_archive_dir):
+                logger.error(f"Alternate archive dir '{args.alternate_archive_dir}' does not exist, exiting")
+                sys.exit(1)
+                return
+            config_settings.backup_dir = args.alternate_archive_dir
+
+        # --- Functional logic ---
+        if args.create_db:
+            if args.backup_def:
+                sys.exit(create_db(args.backup_def, config_settings, logger, runner))
+                return
+            else:
+                for root, dirs, files in os.walk(config_settings.backup_d_dir):
+                    for file in files:
+                        current_backupdef = os.path.basename(file)
+                        logger.debug(f"Create catalog db for backup definition: '{current_backupdef}'")
+                        result = create_db(current_backupdef, config_settings, logger, runner)
+                        if result != 0:
+                            sys.exit(result)
+            return
+
+        if args.add_specific_archive:
+            sys.exit(add_specific_archive(args.add_specific_archive, config_settings))
+            return
 
-
-
-
-            result = process.returncode
-        else:
-            result = 0
-            for root, dirs, files in os.walk(config_settings.backup_d_dir):
-                for file in files:
-                    current_backupdef = os.path.basename(file)
-                    if list_catalogs(current_backupdef, config_settings).returncode != 0:
-                        result = 1
-            sys.exit(result)
-            return
+        if args.add_dir:
+            sys.exit(add_directory(args, config_settings))
+            return
 
-
-
-
-
+        if args.remove_specific_archive:
+            return remove_specific_archive(args.remove_specific_archive, config_settings)
+
+        if args.list_catalogs:
+            if args.backup_def:
+                process = list_catalogs(args.backup_def, config_settings)
+                result = process.returncode
+            else:
+                result = 0
+                for root, dirs, files in os.walk(config_settings.backup_d_dir):
+                    for file in files:
+                        current_backupdef = os.path.basename(file)
+                        if list_catalogs(current_backupdef, config_settings).returncode != 0:
+                            result = 1
+            sys.exit(result)
+            return
 
+        if args.list_archive_contents:
+            result = list_archive_contents(args.list_archive_contents, config_settings)
+            sys.exit(result)
+            return
 
-
-
-
-
+        if args.relocate_archive_path:
+            old_prefix, new_prefix = args.relocate_archive_path
+            result = relocate_archive_paths(
+                args.backup_def,
+                old_prefix,
+                new_prefix,
+                config_settings,
+                dry_run=args.relocate_archive_path_dry_run,
+            )
+            sys.exit(result)
+            return
+
+
+        if args.find_file:
+            result = find_file(args.find_file, args.backup_def, config_settings)
+            sys.exit(result)
+            return
+
+        if args.pitr_report:
+            result = _pitr_chain_report(args.backup_def, args.restore_path, args.when, config_settings)
+            sys.exit(result)
+            return
+
+        if args.restore_path:
+            if args.pitr_report_first:
+                report_when = args.when or "now"
+                result = _pitr_chain_report(args.backup_def, args.restore_path, report_when, config_settings)
+                if result != 0:
+                    sys.exit(result)
+                    return
+            result = restore_at(args.backup_def, args.restore_path, args.when, args.target, config_settings, verbose=args.verbose)
+            sys.exit(result)
+            return
+
+    except Exception as e:
+        msg = f"Unexpected error during manager operation: {e}"
+        logger.error(msg, exc_info=True)
+        ts = datetime.now().strftime("%Y-%m-%d_%H:%M")
+        send_discord_message(f"{ts} - manager: FAILURE - {msg}", config_settings=config_settings)
+        sys.exit(1)
 
 
 if __name__ == "__main__":