dar-backup 0.6.17__py3-none-any.whl → 0.6.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dar_backup/Changelog.md +232 -0
- dar_backup/README.md +1117 -0
- dar_backup/__about__.py +1 -1
- dar_backup/clean_log.py +14 -7
- dar_backup/cleanup.py +41 -43
- dar_backup/command_runner.py +59 -9
- dar_backup/dar_backup.py +156 -41
- dar_backup/dar_backup_systemd.py +119 -0
- dar_backup/installer.py +39 -23
- dar_backup/manager.py +210 -89
- dar_backup/rich_progress.py +101 -0
- dar_backup/util.py +289 -46
- {dar_backup-0.6.17.dist-info → dar_backup-0.6.19.dist-info}/METADATA +212 -27
- dar_backup-0.6.19.dist-info/RECORD +21 -0
- {dar_backup-0.6.17.dist-info → dar_backup-0.6.19.dist-info}/entry_points.txt +1 -0
- dar_backup-0.6.17.dist-info/RECORD +0 -17
- {dar_backup-0.6.17.dist-info → dar_backup-0.6.19.dist-info}/WHEEL +0 -0
- {dar_backup-0.6.17.dist-info → dar_backup-0.6.19.dist-info}/licenses/LICENSE +0 -0
dar_backup/manager.py
CHANGED
@@ -20,11 +20,14 @@
 This script creates and maintains `dar` databases with catalogs.
 """
 
-
+import argcomplete
 import argparse
 import os
 import re
 import sys
+import subprocess
+
+from inputimeout import inputimeout, TimeoutOccurred
 
 
 from . import __about__ as about
@@ -32,9 +35,11 @@ from dar_backup.config_settings import ConfigSettings
 from dar_backup.util import setup_logging
 from dar_backup.util import CommandResult
 from dar_backup.util import get_logger
+from dar_backup.util import get_binary_info
 
 from dar_backup.command_runner import CommandRunner
 from dar_backup.command_runner import CommandResult
+from dar_backup.util import backup_definition_completer, list_archive_completer, archive_content_completer, add_specific_archive_completer
 
 from datetime import datetime
 from time import time
@@ -82,40 +87,58 @@ def create_db(backup_def: str, config_settings: ConfigSettings):
 
     return process.returncode
 
-
-def list_catalogs(backup_def: str, config_settings: ConfigSettings) -> NamedTuple:
+def list_catalogs(backup_def: str, config_settings: ConfigSettings, suppress_output=False) -> CommandResult:
     """
+    List catalogs from the database for the given backup definition.
+
     Returns:
-
-    - process: of type subprocess.CompletedProcess: The result of the command execution.
-    - stdout: of type str: The standard output of the command.
-    - stderr: of type str: The standard error of the command.
-    - returncode: of type int: The return code of the command.
-    - timeout: of type int: The timeout value in seconds used to run the command.
-    - command: of type list[str): The command executed.
+        A CommandResult containing the raw stdout/stderr and return code.
     """
     database = f"{backup_def}{DB_SUFFIX}"
     database_path = os.path.join(config_settings.backup_dir, database)
+
     if not os.path.exists(database_path):
         error_msg = f'Database not found: "{database_path}"'
         logger.error(error_msg)
-
-        commandResult = CommandResult(
-            stdout='',
-            stderr=error_msg,
-            returncode=1,
-            timeout=1,
-            command=[])
-        return commandResult
+        return CommandResult(1, '', error_msg)
+
     command = ['dar_manager', '--base', database_path, '--list']
     process = runner.run(command)
-    stdout, stderr = process.stdout, process.stderr
+    stdout, stderr = process.stdout, process.stderr
+
     if process.returncode != 0:
         logger.error(f'Error listing catalogs for: "{database_path}"')
-        logger.error(f"stderr: {stderr}")
+        logger.error(f"stderr: {stderr}")
         logger.error(f"stdout: {stdout}")
-
-
+        return process
+
+    # Extract only archive basenames from stdout
+    archive_names = []
+    for line in stdout.splitlines():
+        line = line.strip()
+        if not line or "archive #" in line or "dar path" in line or "compression" in line:
+            continue
+        parts = line.split("\t")
+        if len(parts) >= 3:
+            archive_names.append(parts[2].strip())
+
+    # Sort by prefix and date
+    def extract_date(arch_name):
+        match = re.search(r"(\d{4}-\d{2}-\d{2})", arch_name)
+        if match:
+            return datetime.strptime(match.group(1), "%Y-%m-%d")
+        return datetime.min
+
+    def sort_key(name):
+        prefix = name.split("_", 1)[0]
+        return (prefix, extract_date(name))
+
+    archive_names = sorted(archive_names, key=sort_key)
+
+    if not suppress_output:
+        for name in archive_names:
+            print(name)
+
     return process
 
 
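`list_catalogs` now post-processes the `dar_manager --list` output: it keeps only archive basenames and sorts them by backup-definition prefix, then by the date embedded in the name. A minimal standalone sketch of that ordering (the archive names below are hypothetical examples of the `<definition>_<type>_<YYYY-MM-DD>` pattern):

```python
import re
from datetime import datetime

def extract_date(arch_name: str) -> datetime:
    # Pull an ISO date out of the archive name; undated names sort first.
    match = re.search(r"(\d{4}-\d{2}-\d{2})", arch_name)
    return datetime.strptime(match.group(1), "%Y-%m-%d") if match else datetime.min

def sort_key(name: str):
    # The backup definition is everything before the first "_".
    prefix = name.split("_", 1)[0]
    return (prefix, extract_date(name))

names = ["media_FULL_2024-02-04", "docs_DIFF_2024-03-02", "docs_FULL_2024-01-07"]
print(sorted(names, key=sort_key))
# ['docs_FULL_2024-01-07', 'docs_DIFF_2024-03-02', 'media_FULL_2024-02-04']
```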
@@ -127,47 +150,65 @@ def cat_no_for_name(archive: str, config_settings: ConfigSettings) -> int:
     - the found number, if the archive catalog is present in the database
     - "-1" if the archive is not found
     """
+
     backup_def = backup_def_from_archive(archive)
-    process = list_catalogs(backup_def, config_settings)
+    process = list_catalogs(backup_def, config_settings, suppress_output=True)
     if process.returncode != 0:
         logger.error(f"Error listing catalogs for backup def: '{backup_def}'")
         return -1
     line_no = 1
     for line in process.stdout.splitlines():
-        #print(f"{line_no}: '{line}'")
         line_no += 1
-        search = re.search(
+        search = re.search(rf".*?(\d+)\s+.*?({archive}).*", line)
         if search:
-            #print(f"FOUND: archive: {search.group(2)}, catalog #: '{search.group(1)}'")
             logger.info(f"Found archive: '{archive}', catalog #: '{search.group(1)}'")
             return int(search.group(1))
     return -1
 
 
-
-def list_archive_contents(archive: str, config_settings: ConfigSettings) -> int :
+def list_archive_contents(archive: str, config_settings: ConfigSettings) -> int:
     """
-    List the contents of a specific archive, given the archive name
+    List the contents of a specific archive, given the archive name.
+    Prints only actual file entries (lines beginning with '[ Saved ]').
+    If none are found, a notice is printed instead.
     """
     backup_def = backup_def_from_archive(archive)
     database = f"{backup_def}{DB_SUFFIX}"
     database_path = os.path.join(config_settings.backup_dir, database)
+
     if not os.path.exists(database_path):
         logger.error(f'Database not found: "{database_path}"')
         return 1
+
     cat_no = cat_no_for_name(archive, config_settings)
     if cat_no < 0:
         logger.error(f"archive: '{archive}' not found in database: '{database_path}'")
         return 1
+
+
     command = ['dar_manager', '--base', database_path, '-u', f"{cat_no}"]
-    process = runner.run(command)
-
+    process = runner.run(command, timeout = 10)
+
+
+    stdout = process.stdout or ""
+    stderr = process.stderr or ""
+
+
     if process.returncode != 0:
         logger.error(f'Error listing catalogs for: "{database_path}"')
-        logger.error(f"stderr: {stderr}")
+        logger.error(f"stderr: {stderr}")
         logger.error(f"stdout: {stdout}")
+
+
+    combined_lines = (stdout + "\n" + stderr).splitlines()
+    file_lines = [line for line in combined_lines if line.strip().startswith("[ Saved ]")]
+
+    if file_lines:
+        for line in file_lines:
+            print(line)
     else:
-        print(
+        print(f"[info] Archive '{archive}' is empty.")
+
     return process.returncode
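The reworked `list_archive_contents` treats only lines whose status column starts with `[ Saved ]` as file entries. A reduced sketch of that filter, using hypothetical stand-ins for `dar_manager -u <n>` output:

```python
# Hypothetical listing output; real dar_manager output has more columns.
raw_output = """\
[ Saved ]  home/user/notes.txt
[ Saved ]  home/user/photos/cat.jpg
some other dar_manager chatter
"""

# Keep only status-prefixed file entries, as the new code does.
file_lines = [
    line for line in raw_output.splitlines()
    if line.strip().startswith("[ Saved ]")
]
for line in file_lines:
    print(line)
```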
@@ -214,42 +255,82 @@ def find_file(file, backup_def, config_settings):
     return process.returncode
 
 
-def add_specific_archive(archive: str, config_settings: ConfigSettings, directory: str =None) -> int:
-
+def add_specific_archive(archive: str, config_settings: ConfigSettings, directory: str = None) -> int:
+    """
+    Adds the specified archive to its catalog database. Prompts for confirmation if it's older than existing entries.
+
+    Returns:
+        0 on success
+        1 on failure
+    """
+    # Determine archive path
     if not directory:
         directory = config_settings.backup_dir
-    archive = os.path.basename(archive) #
-    archive_path = os.path.join(directory,
+    archive = os.path.basename(archive) # strip path if present
+    archive_path = os.path.join(directory, archive)
+    archive_test_path = os.path.join(directory, f'{archive}.1.dar')
 
-    archive_test_path = os.path.join(directory, f'{archive}.1.dar')
     if not os.path.exists(archive_test_path):
         logger.error(f'dar backup: "{archive_test_path}" not found, exiting')
         return 1
-
-    #
+
+    # Validate backup definition
     backup_definition = archive.split('_')[0]
     backup_def_path = os.path.join(config_settings.backup_d_dir, backup_definition)
     if not os.path.exists(backup_def_path):
         logger.error(f'backup definition "{backup_definition}" not found (--add-specific-archive option probably not correct), exiting')
         return 1
-
+
+    # Determine catalog DB path
     database = f"{backup_definition}{DB_SUFFIX}"
     database_path = os.path.realpath(os.path.join(config_settings.backup_dir, database))
+
+    # Safety check: is archive older than latest in catalog?
+    try:
+        result = subprocess.run(
+            ["dar_manager", "--base", database_path, "--list"],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.DEVNULL,
+            text=True,
+            check=True
+        )
+        all_lines = result.stdout.splitlines()
+        date_pattern = re.compile(r"\d{4}-\d{2}-\d{2}")
+
+        catalog_dates = [
+            datetime.strptime(date_match.group(), "%Y-%m-%d")
+            for line in all_lines
+            if (date_match := date_pattern.search(line))
+        ]
+
+        if catalog_dates:
+            latest_date = max(catalog_dates)
+            archive_date_match = date_pattern.search(archive)
+            if archive_date_match:
+                archive_date = datetime.strptime(archive_date_match.group(), "%Y-%m-%d")
+                if archive_date < latest_date:
+                    if not confirm_add_old_archive(archive, latest_date.strftime("%Y-%m-%d")):
+                        logger.info(f"Archive {archive} skipped due to user declining to add older archive.")
+                        return 1
+
+    except subprocess.CalledProcessError:
+        logger.warning("Could not determine latest catalog date for chronological check.")
+
     logger.info(f'Add "{archive_path}" to catalog: "{database}"')
-
-    command = ['dar_manager', '--base', database_path, "--add", archive_path, "-Q"]
+
+    command = ['dar_manager', '--base', database_path, "--add", archive_path, "-Q", "--alter=ignore-order"]
     process = runner.run(command)
     stdout, stderr = process.stdout, process.stderr
 
     if process.returncode == 0:
-        logger.info(f'"{archive_path}" added to
+        logger.info(f'"{archive_path}" added to its catalog')
     elif process.returncode == 5:
-        logger.warning(f'Something did not go completely right adding "{archive_path}" to
-    else:
-        logger.error(f'something went wrong adding "{archive_path}" to
+        logger.warning(f'Something did not go completely right adding "{archive_path}" to its catalog, dar_manager error: "{process.returncode}"')
+    else:
+        logger.error(f'something went wrong adding "{archive_path}" to its catalog, dar_manager error: "{process.returncode}"')
         logger.error(f"stderr: {stderr}")
         logger.error(f"stdout: {stdout}")
-
+
     return process.returncode
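The new safety check harvests every date from the catalog listing with an assignment expression inside the comprehension's filter, so non-matching lines are skipped and matched dates are parsed in a single pass. A self-contained sketch of the same idiom, with hypothetical listing lines:

```python
import re
from datetime import datetime

# Hypothetical stand-ins for `dar_manager --list` output lines.
all_lines = [
    "1   /backups   docs_FULL_2024-01-07",
    "2   /backups   docs_DIFF_2024-02-04",
    "dar path    :",   # header line without a date
]
date_pattern = re.compile(r"\d{4}-\d{2}-\d{2}")

# The walrus operator (:=) binds the match object in the filter clause
# and reuses it in the expression, avoiding a second regex search.
catalog_dates = [
    datetime.strptime(m.group(), "%Y-%m-%d")
    for line in all_lines
    if (m := date_pattern.search(line))
]
print(max(catalog_dates))   # 2024-02-04 00:00:00
```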
@@ -327,6 +408,31 @@ def backup_def_from_archive(archive: str) -> str:
     return None
 
 
+def confirm_add_old_archive(archive_name: str, latest_known_date: str, timeout_secs: int = 20) -> bool:
+    """
+    Confirm with the user if they want to proceed with adding an archive older than the most recent in the catalog.
+    Returns True if the user confirms with "yes", False otherwise.
+    """
+    try:
+        prompt = (
+            f"⚠️ Archive '{archive_name}' is older than the latest in the catalog ({latest_known_date}).\n"
+            f"Adding older archives may lead to inconsistent restore chains.\n"
+            f"Are you sure you want to continue? (yes/no): "
+        )
+        confirmation = inputimeout(prompt=prompt, timeout=timeout_secs)
+
+        if confirmation is None:
+            logger.info(f"No confirmation received for old archive: {archive_name}. Skipping.")
+            return False
+        return confirmation.strip().lower() == "yes"
+
+    except TimeoutOccurred:
+        logger.info(f"Timeout waiting for confirmation for old archive: {archive_name}. Skipping.")
+        return False
+    except KeyboardInterrupt:
+        logger.info(f"User interrupted confirmation for old archive: {archive_name}. Skipping.")
+        return False
+
 
 def remove_specific_archive(archive: str, config_settings: ConfigSettings) -> int:
     """
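For reference, the `inputimeout` library signals an unanswered prompt by raising `TimeoutOccurred` rather than returning a value, so the `confirmation is None` branch above is defensive. A reduced sketch of the same prompt pattern:

```python
from inputimeout import inputimeout, TimeoutOccurred

def ask_yes_no(prompt: str, timeout_secs: int = 20) -> bool:
    """Return True only on an explicit 'yes'; timeout or Ctrl-C means no."""
    try:
        answer = inputimeout(prompt=prompt, timeout=timeout_secs)
        return answer.strip().lower() == "yes"
    except (TimeoutOccurred, KeyboardInterrupt):
        # No answer in time, or the user interrupted: fail safe.
        return False

if __name__ == "__main__":
    print(ask_yes_no("Proceed? (yes/no): ", timeout_secs=5))
```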
@@ -357,26 +463,17 @@ def remove_specific_archive(archive: str, config_settings: ConfigSettings) -> int:
         return 1
 
 
-
-def main():
-    global logger, runner
-
-    MIN_PYTHON_VERSION = (3, 9)
-    if sys.version_info < MIN_PYTHON_VERSION:
-        sys.stderr.write(f"Error: This script requires Python {'.'.join(map(str, MIN_PYTHON_VERSION))} or higher.\n")
-        sys.exit(1)
-
+def build_arg_parser():
     parser = argparse.ArgumentParser(description="Creates/maintains `dar` database catalogs")
     parser.add_argument('-c', '--config-file', type=str, help="Path to 'dar-backup.conf'", default='~/.config/dar-backup/dar-backup.conf')
     parser.add_argument('--create-db', action='store_true', help='Create missing databases for all backup definitions')
     parser.add_argument('--alternate-archive-dir', type=str, help='Use this directory instead of BACKUP_DIR in config file')
     parser.add_argument('--add-dir', type=str, help='Add all archive catalogs in this directory to databases')
-    parser.add_argument('-d', '--backup-def', type=str, help='Restrict to work only on this backup definition')
-    parser.add_argument('--add-specific-archive', type=str, help='Add this archive to catalog database')
-    parser.add_argument('--remove-specific-archive', type=str, help='Remove this archive from catalog database')
+    parser.add_argument('-d', '--backup-def', type=str, help='Restrict to work only on this backup definition').completer = backup_definition_completer
+    parser.add_argument('--add-specific-archive', type=str, help='Add this archive to catalog database').completer = add_specific_archive_completer
+    parser.add_argument('--remove-specific-archive', type=str, help='Remove this archive from catalog database').completer = archive_content_completer
     parser.add_argument('-l', '--list-catalogs', action='store_true', help='List catalogs in databases for all backup definitions')
-    parser.add_argument('--list-
-    parser.add_argument('--list-archive-contents', type=str, help="List contents of the archive's catalog.")
+    parser.add_argument('--list-archive-contents', type=str, help="List contents of the archive's catalog. Argument is the archive name.").completer = archive_content_completer
     parser.add_argument('--find-file', type=str, help="List catalogs containing <path>/file. '-d <definition>' argument is also required")
     parser.add_argument('--verbose', action='store_true', help='Be more verbose')
     parser.add_argument('--log-level', type=str, help="`debug` or `trace`, default is `info`", default="info")
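The `.completer` attributes attached above follow argcomplete's standard pattern: a callable set on the argparse action returns candidate strings for the word being completed, and `argcomplete.autocomplete(parser)` must run before `parse_args()`. A self-contained sketch (the option and completer here are illustrative, not dar-backup's):

```python
#!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
import argparse
import argcomplete

def fruit_completer(prefix, parsed_args, **kwargs):
    # argcomplete passes the partial word as `prefix`; return the matches.
    return [f for f in ("apple", "apricot", "banana") if f.startswith(prefix)]

parser = argparse.ArgumentParser()
parser.add_argument("--fruit").completer = fruit_completer
argcomplete.autocomplete(parser)   # must be called before parse_args()
args = parser.parse_args()
```

Completion additionally requires the shell hook to be registered for the entry point, e.g. `eval "$(register-python-argcomplete <entry-point>)"` in bash.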
@@ -384,11 +481,31 @@ def main():
     parser.add_argument('--more-help', action='store_true', help='Show extended help message')
     parser.add_argument('--version', action='store_true', help='Show version & license')
 
+    return parser
+
+
+
+
+def main():
+    global logger, runner
+
+    MIN_PYTHON_VERSION = (3, 9)
+    if sys.version_info < MIN_PYTHON_VERSION:
+        sys.stderr.write(f"Error: This script requires Python {'.'.join(map(str, MIN_PYTHON_VERSION))} or higher.\n")
+        sys.exit(1)
+        return
+
+    parser = argparse.ArgumentParser(description="Creates/maintains `dar` database catalogs")
+    parser = build_arg_parser()
+
+    argcomplete.autocomplete(parser)
+
     args = parser.parse_args()
 
     if args.more_help:
         show_more_help()
         sys.exit(0)
+        return
@@ -397,86 +514,91 @@ def main():
     THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW, not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 See section 15 and section 16 in the supplied "LICENSE" file.''')
         sys.exit(0)
+        return
 
-    # setup logging
     args.config_file = os.path.expanduser(os.path.expandvars(args.config_file))
     config_settings = ConfigSettings(args.config_file)
     if not os.path.dirname(config_settings.logfile_location):
         print(f"Directory for log file '{config_settings.logfile_location}' does not exist, exiting")
-        sys.exit(1)
+        sys.exit(1)
+        return
 
-    # command_output_log = os.path.join(config_settings.logfile_location.removesuffix("dar-backup.log"), "dar-backup-commands.log")
     command_output_log = config_settings.logfile_location.replace("dar-backup.log", "dar-backup-commands.log")
     logger = setup_logging(config_settings.logfile_location, command_output_log, args.log_level, args.log_stdout)
-    command_logger = get_logger(command_output_logger
+    command_logger = get_logger(command_output_logger=True)
     runner = CommandRunner(logger=logger, command_logger=command_logger)
 
-
-    start_time=int(time())
-    logger.info(f"=====================================")
+    start_time = int(time())
     logger.info(f"{SCRIPTNAME} started, version: {about.__version__}")
     logger.info(f"START TIME: {start_time}")
     logger.debug(f"`args`:\n{args}")
     logger.debug(f"`config_settings`:\n{config_settings}")
+    dar_manager_properties = get_binary_info(command='dar_manager')
+    logger.debug(f"dar_manager location: {dar_manager_properties['path']}")
+    logger.debug(f"dar_manager version: {dar_manager_properties['version']}")
+
 
-
-    # Sanity checks before starting
+    # --- Sanity checks ---
     if args.add_dir and not args.add_dir.strip():
         logger.error("archive dir not given, exiting")
         sys.exit(1)
+        return
 
-    if args.add_specific_archive and not args.add_specific_archive.strip():
+    if args.add_specific_archive is not None and not args.add_specific_archive.strip():
         logger.error("specific archive to add not given, exiting")
         sys.exit(1)
+        return
 
     if args.remove_specific_archive and not args.remove_specific_archive.strip():
         logger.error("specific archive to remove not given, exiting")
         sys.exit(1)
+        return
 
     if args.add_specific_archive and args.remove_specific_archive:
         logger.error("you can't add and remove archives in the same operation, exiting")
         sys.exit(1)
+        return
 
     if args.add_dir and args.add_specific_archive:
         logger.error("you cannot add both a directory and an archive")
         sys.exit(1)
+        return
 
     if args.backup_def and not args.backup_def.strip():
         logger.error(f"No backup definition given to --backup-def")
+        sys.exit(1)
+        return
 
     if args.backup_def:
         backup_def_path = os.path.join(config_settings.backup_d_dir, args.backup_def)
         if not os.path.exists(backup_def_path):
             logger.error(f"Backup definition {args.backup_def} does not exist, exiting")
             sys.exit(1)
-
+            return
 
     if args.list_archive_contents and not args.list_archive_contents.strip():
         logger.error(f"--list-archive-contents <param> not given, exiting")
         sys.exit(1)
-
-
-    if args.list_catalog_contents and not args.backup_def:
-        logger.error(f"--list-catalog-contents requires the --backup-def, exiting")
-        sys.exit(1)
+        return
 
     if args.find_file and not args.backup_def:
         logger.error(f"--find-file requires the --backup-def, exiting")
         sys.exit(1)
-
-
+        return
 
-    # Modify
+    # --- Modify settings ---
     if args.alternate_archive_dir:
         if not os.path.exists(args.alternate_archive_dir):
             logger.error(f"Alternate archive dir '{args.alternate_archive_dir}' does not exist, exiting")
             sys.exit(1)
+            return
         config_settings.backup_dir = args.alternate_archive_dir
 
-
+    # --- Functional logic ---
     if args.create_db:
         if args.backup_def:
             sys.exit(create_db(args.backup_def, config_settings))
+            return
         else:
             for root, dirs, files in os.walk(config_settings.backup_d_dir):
                 for file in files:
@@ -485,19 +607,19 @@ See section 15 and section 16 in the supplied "LICENSE" file.''')
                     result = create_db(current_backupdef, config_settings)
                     if result != 0:
                         sys.exit(result)
+                        return
 
     if args.add_specific_archive:
         sys.exit(add_specific_archive(args.add_specific_archive, config_settings))
+        return
 
     if args.add_dir:
         sys.exit(add_directory(args, config_settings))
-
+        return
 
     if args.remove_specific_archive:
         return remove_specific_archive(args.remove_specific_archive, config_settings)
 
-
-
     if args.list_catalogs:
         if args.backup_def:
             process = list_catalogs(args.backup_def, config_settings)
@@ -510,19 +632,18 @@ See section 15 and section 16 in the supplied "LICENSE" file.''')
             if list_catalogs(current_backupdef, config_settings).returncode != 0:
                 result = 1
         sys.exit(result)
-
+        return
 
     if args.list_archive_contents:
         result = list_archive_contents(args.list_archive_contents, config_settings)
         sys.exit(result)
+        return
 
-    if args.list_catalog_contents:
-        result = list_catalog_contents(args.list_catalog_contents, args.backup_def, config_settings)
-        sys.exit(result)
 
     if args.find_file:
         result = find_file(args.find_file, args.backup_def, config_settings)
         sys.exit(result)
+        return
 
 
 if __name__ == "__main__":
dar_backup/rich_progress.py
ADDED
@@ -0,0 +1,101 @@
+import os
+import time
+from threading import Event
+from rich.console import Console, Group
+from rich.live import Live
+from rich.text import Text
+
+def is_terminal():
+    return Console().is_terminal
+
+def tail_log_file(log_path, stop_event, session_marker=None):
+    """Yields new lines from the log file, starting only after the session_marker is found."""
+    last_size = 0
+    marker_found = session_marker is None
+
+    while not stop_event.is_set():
+        if not os.path.exists(log_path):
+            time.sleep(0.5)
+            continue
+
+        try:
+            with open(log_path, "r") as f:
+                if last_size > os.path.getsize(log_path):
+                    f.seek(0)
+                else:
+                    f.seek(last_size)
+
+                while not stop_event.is_set():
+                    line = f.readline()
+                    if not line:
+                        break
+
+                    line = line.strip()
+                    last_size = f.tell()
+
+                    if not marker_found:
+                        if session_marker in line:
+                            marker_found = True
+                        continue
+
+                    yield line
+
+        except Exception as e:
+            print(f"[!] Error reading log: {e}")
+
+        time.sleep(0.5)
+
+def get_green_shade(step, max_width):
+    """Returns a green color from light to dark across the bar."""
+    start = 180
+    end = 20
+    value = int(start - ((start - end) * (step / max_width)))
+    return f"rgb(0,{value},0)"
+
+def show_log_driven_bar(log_path: str, stop_event: Event, session_marker: str, max_width=50):
+    console = Console()
+
+    if not console.is_terminal:
+        console.log("[~] Not a terminal — progress bar skipped.")
+        return
+
+    progress = 0
+    dir_count = 0
+    last_dir = "Waiting for directory..."
+
+
+
+    with Live(console=console, refresh_per_second=5) as live:
+        for line in tail_log_file(log_path, stop_event, session_marker):
+            lowered = line.lower()
+
+            updated = False
+
+            # Update directory name on "Inspecting"
+            if "inspecting directory" in lowered and "finished" not in lowered:
+                last_dir = line.split("Inspecting directory")[-1].strip()
+                updated = True
+
+            # Advance progress on "Finished"
+            if "finished inspecting directory" in lowered:
+                dir_count += 1
+                progress = (progress + 1) % (max_width + 1)
+                updated = True
+
+            if updated:
+                bar_text = ""
+                for i in range(max_width):
+                    if i < progress:
+                        color = get_green_shade(i, max_width)
+                        bar_text += f"[{color}]#[/{color}]"
+                    else:
+                        bar_text += "-"
+
+                bar = Text.from_markup(f"[white][{bar_text}][/white] [dim]Dirs: {dir_count}[/dim]")
+                dir_display = Text(f"📂 {last_dir}", style="dim")
+
+                live.update(Group(bar, dir_display))
+
+            if stop_event.is_set():
+                break
+
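A minimal usage sketch for the new module, assuming it is importable as `dar_backup.rich_progress` (the log path and session marker below are hypothetical): the bar runs in a worker thread, follows the log as it grows, and exits once the `Event` is set.

```python
from threading import Event, Thread
from dar_backup.rich_progress import show_log_driven_bar

stop_event = Event()
marker = "session-2024-01-07"   # hypothetical marker line written to the log

worker = Thread(
    target=show_log_driven_bar,
    args=("/tmp/dar-backup.log", stop_event, marker),
    daemon=True,
)
worker.start()

# ... run the long dar operation here, which appends to the log ...

stop_event.set()   # tail_log_file() polls this Event, so the bar stops
worker.join()
```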