dar-backup 1.0.0__py3-none-any.whl → 1.0.1__py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- dar_backup/Changelog.md +35 -1
- dar_backup/README.md +283 -22
- dar_backup/__about__.py +3 -1
- dar_backup/cleanup.py +153 -38
- dar_backup/command_runner.py +135 -102
- dar_backup/config_settings.py +143 -0
- dar_backup/dar-backup.conf +11 -0
- dar_backup/dar-backup.conf.j2 +42 -0
- dar_backup/dar_backup.py +391 -90
- dar_backup/manager.py +9 -3
- dar_backup/util.py +383 -130
- {dar_backup-1.0.0.dist-info → dar_backup-1.0.1.dist-info}/METADATA +285 -24
- dar_backup-1.0.1.dist-info/RECORD +25 -0
- {dar_backup-1.0.0.dist-info → dar_backup-1.0.1.dist-info}/WHEEL +1 -1
- dar_backup-1.0.0.dist-info/RECORD +0 -25
- {dar_backup-1.0.0.dist-info → dar_backup-1.0.1.dist-info}/entry_points.txt +0 -0
- {dar_backup-1.0.0.dist-info → dar_backup-1.0.1.dist-info}/licenses/LICENSE +0 -0
dar_backup/cleanup.py
CHANGED
```diff
@@ -24,24 +24,29 @@ import sys
 sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")))
 
 
-
 from datetime import datetime, timedelta
 from inputimeout import inputimeout, TimeoutOccurred
+from pathlib import Path
+from sys import stderr
 from time import time
 from typing import Dict, List, NamedTuple, Tuple
+import glob
 
 
 from . import __about__ as about
 from dar_backup.config_settings import ConfigSettings
 from dar_backup.util import list_backups
 from dar_backup.util import setup_logging
+from dar_backup.util import get_config_file
 from dar_backup.util import get_logger
 from dar_backup.util import requirements
 from dar_backup.util import show_version
 from dar_backup.util import get_invocation_command_line
 from dar_backup.util import print_aligned_settings
 from dar_backup.util import backup_definition_completer, list_archive_completer
+from dar_backup.util import is_archive_name_allowed
 from dar_backup.util import is_safe_filename
+from dar_backup.util import safe_remove_file
 from dar_backup.util import show_scriptname
 
 from dar_backup.command_runner import CommandRunner
@@ -50,7 +55,73 @@ from dar_backup.command_runner import CommandResult
 logger = None
 runner = None
 
-def
+def _delete_par2_files(
+    archive_name: str,
+    backup_dir: str,
+    config_settings: ConfigSettings = None,
+    backup_definition: str = None,
+    dry_run: bool = False,
+) -> None:
+    if config_settings and hasattr(config_settings, "get_par2_config"):
+        par2_config = config_settings.get_par2_config(backup_definition)
+    else:
+        par2_config = {
+            "par2_dir": None,
+            "par2_mode": None,
+        }
+
+    par2_dir = par2_config.get("par2_dir") or backup_dir
+    par2_dir = os.path.expanduser(os.path.expandvars(par2_dir))
+    if not os.path.isdir(par2_dir):
+        logger.warning(f"PAR2 directory not found, skipping cleanup: {par2_dir}")
+        return
+
+    par2_mode = (par2_config.get("par2_mode") or "per-slice").lower()
+
+    if par2_mode == "per-archive":
+        par2_glob = os.path.join(par2_dir, f"{archive_name}*.par2")
+        targets = glob.glob(par2_glob)
+        manifest_path = os.path.join(par2_dir, f"{archive_name}.par2.manifest.ini")
+        if os.path.exists(manifest_path):
+            targets.append(manifest_path)
+        if not targets:
+            logger.info("No par2 files matched the per-archive cleanup pattern.")
+            return
+        for file_path in sorted(set(targets)):
+            try:
+                if dry_run:
+                    logger.info(f"Dry run: would delete PAR2 file: {file_path}")
+                else:
+                    safe_remove_file(file_path, base_dir=Path(par2_dir))
+                    logger.info(f"Deleted PAR2 file: {file_path}")
+            except Exception as e:
+                logger.error(f"Error deleting PAR2 file {file_path}: {e}")
+        return
+
+    if par2_mode != "per-slice":
+        logger.error(f"Unsupported PAR2_MODE during cleanup: {par2_mode}")
+        return
+
+    par2_regex = re.compile(rf"^{re.escape(archive_name)}\.[0-9]+\.dar.*\.par2$")
+    files_deleted = False
+    for filename in sorted(os.listdir(par2_dir)):
+        if par2_regex.match(filename):
+            file_path = os.path.join(par2_dir, filename)
+            try:
+                if dry_run:
+                    logger.info(f"Dry run: would delete PAR2 file: {file_path}")
+                else:
+                    safe_remove_file(file_path, base_dir=Path(par2_dir))
+                    logger.info(f"Deleted PAR2 file: {file_path}")
+                files_deleted = True
+            except Exception as e:
+                logger.error(f"Error deleting PAR2 file {file_path}: {e}")
+
+    if not files_deleted:
+        logger.info("No .par2 matched the regex for deletion.")
+
+
+def delete_old_backups(backup_dir, age, backup_type, args, backup_definition=None, config_settings: ConfigSettings = None):
     """
     Delete backups older than the specified age in days.
     Only .dar and .par2 files are considered for deletion.
@@ -66,8 +137,9 @@ def delete_old_backups(backup_dir, age, backup_type, args, backup_definition=Non
 
     archives_deleted = {}
 
+    dry_run = getattr(args, "dry_run", False) is True
     for filename in sorted(os.listdir(backup_dir)):
-        if not
+        if not filename.endswith('.dar'):
             continue
         if backup_definition and not filename.startswith(backup_definition):
             continue
@@ -82,8 +154,11 @@ def delete_old_backups(backup_dir, age, backup_type, args, backup_definition=Non
         if file_date < cutoff_date:
             file_path = os.path.join(backup_dir, filename)
             try:
-
-
+                if dry_run:
+                    logger.info(f"Dry run: would delete {backup_type} backup: {file_path}")
+                else:
+                    safe_remove_file(file_path, base_dir=Path(backup_dir))
+                    logger.info(f"Deleted {backup_type} backup: {file_path}")
                 archive_name = filename.split('.')[0]
                 if not archive_name in archives_deleted:
                     logger.debug(f"Archive name: '{archive_name}' added to catalog deletion list")
@@ -92,10 +167,17 @@ def delete_old_backups(backup_dir, age, backup_type, args, backup_definition=Non
                 logger.error(f"Error deleting file {file_path}: {e}")
 
     for archive_name in archives_deleted.keys():
-
+        if not is_archive_name_allowed(archive_name):
+            raise ValueError(f"Refusing unsafe archive name: {archive_name}")
+        archive_definition = archive_name.split('_')[0]
+        _delete_par2_files(archive_name, backup_dir, config_settings, archive_definition, dry_run=dry_run)
+        if dry_run:
+            logger.info(f"Dry run: would run manager to delete archive '{archive_name}'")
+        else:
+            delete_catalog(archive_name, args)
 
 
-def delete_archive(backup_dir, archive_name, args):
+def delete_archive(backup_dir, archive_name, args, config_settings: ConfigSettings = None):
     """
     Delete all .dar and .par2 files in the backup directory for the given archive name.
 
@@ -107,36 +189,30 @@ def delete_archive(backup_dir, archive_name, args):
 
     # Delete the specified .dar files according to the naming convention
    files_deleted = False
+    dry_run = getattr(args, "dry_run", False) is True
     for filename in sorted(os.listdir(backup_dir)):
         if archive_regex.match(filename):
             file_path = os.path.join(backup_dir, filename)
             try:
-
-
+                if dry_run:
+                    logger.info(f"Dry run: would delete archive slice: {file_path}")
+                else:
+                    is_safe_filename(file_path) and os.remove(file_path)
+                    logger.info(f"Deleted archive slice: {file_path}")
                 files_deleted = True
             except Exception as e:
                 logger.error(f"Error deleting archive slice {file_path}: {e}")
 
     if files_deleted:
-
+        if dry_run:
+            logger.info(f"Dry run: would run manager to delete archive '{archive_name}'")
+        else:
+            delete_catalog(archive_name, args)
     else:
         logger.info("No .dar files matched the regex for deletion.")
 
-
-
-    files_deleted = False
-    for filename in sorted(os.listdir(backup_dir)):
-        if par2_regex.match(filename):
-            file_path = os.path.join(backup_dir, filename)
-            try:
-                is_safe_filename(file_path) and os.remove(file_path)
-                logger.info(f"Deleted PAR2 file: {file_path}")
-                files_deleted = True
-            except Exception as e:
-                logger.error(f"Error deleting PAR2 file {file_path}: {e}")
-
-    if not files_deleted:
-        logger.info("No .par2 matched the regex for deletion.")
+    archive_definition = archive_name.split('_')[0]
+    _delete_par2_files(archive_name, backup_dir, config_settings, archive_definition, dry_run=dry_run)
 
 
 def delete_catalog(catalog_name: str, args: NamedTuple) -> bool:
@@ -190,27 +266,48 @@ def main():
 
     parser = argparse.ArgumentParser(description="Cleanup old archives according to AGE configuration.")
     parser.add_argument('-d', '--backup-definition', help="Specific backup definition to cleanup.").completer = backup_definition_completer
-    parser.add_argument('-c', '--config-file', '-c', type=str, help="Path to 'dar-backup.conf'", default=
+    parser.add_argument('-c', '--config-file', '-c', type=str, help="Path to 'dar-backup.conf'", default=None)
     parser.add_argument('-v', '--version', action='store_true', help="Show version information.")
     parser.add_argument('--alternate-archive-dir', type=str, help="Cleanup in this directory instead of the default one.")
-    parser.add_argument(
+    parser.add_argument(
+        '--cleanup-specific-archives',
+        type=str,
+        nargs='?',
+        const="",
+        default=None,
+        help="Comma separated list of archives to cleanup",
+    ).completer = list_archive_completer
+    parser.add_argument(
+        'cleanup_specific_archives_list',
+        nargs='*',
+        help=argparse.SUPPRESS,
+    ).completer = list_archive_completer
     parser.add_argument('-l', '--list', action='store_true', help="List available archives.")
     parser.add_argument('--verbose', action='store_true', help="Print various status messages to screen")
     parser.add_argument('--log-level', type=str, help="`debug` or `trace`, default is `info`", default="info")
     parser.add_argument('--log-stdout', action='store_true', help='also print log messages to stdout')
     parser.add_argument('--test-mode', action='store_true', help='Read envvars in order to run some pytest cases')
+    parser.add_argument('--dry-run', action='store_true', help='Show what would be deleted without removing files')
 
-
+    comp_line = os.environ.get("COMP_LINE", "")
+    only_archives = "--cleanup-specific-archives" in comp_line
+    argcomplete.autocomplete(parser, always_complete_options=not only_archives)
 
     args = parser.parse_args()
 
-    args.config_file = os.path.expanduser(os.path.expandvars(args.config_file))
-
-
     if args.version:
         show_version()
         sys.exit(0)
 
+    config_settings_path = get_config_file(args)
+    if not (os.path.isfile(config_settings_path) and os.access(config_settings_path, os.R_OK)):
+        if args.test_mode or os.getenv("PYTEST_CURRENT_TEST"):
+            args.config_file = config_settings_path
+        else:
+            print(f"Config file {config_settings_path} must exist and be readable.", file=stderr)
+            raise SystemExit(127)
+    args.config_file = config_settings_path
+
     config_settings = ConfigSettings(args.config_file)
 
     start_time=int(time())
@@ -225,7 +322,6 @@ def main():
 
     start_msgs.append((f"{show_scriptname()}:", about.__version__))
 
-    logger.info(f"START TIME: {start_time}")
     logger.debug(f"Command line: {get_invocation_command_line()}")
     logger.debug(f"`args`:\n{args}")
     logger.debug(f"`config_settings`:\n{config_settings}")
@@ -239,6 +335,7 @@ def main():
     args.verbose and start_msgs.append(("Logfile backup count:", config_settings.logfile_backup_count))
     args.verbose and start_msgs.append(("--alternate-archive-dir:", args.alternate_archive_dir))
     args.verbose and start_msgs.append(("--cleanup-specific-archives:", args.cleanup_specific_archives))
+    args.verbose and start_msgs.append(("--dry-run:", args.dry_run))
 
     dangerous_keywords = ["--cleanup", "_FULL_"] # TODO: add more dangerous keywords
     print_aligned_settings(start_msgs, highlight_keywords=dangerous_keywords, quiet=not args.verbose)
@@ -258,16 +355,20 @@ def main():
     if args.cleanup_specific_archives is None and args.test_mode:
         logger.info("No --cleanup-specific-archives provided; skipping specific archive deletion in test mode.")
 
-    if args.cleanup_specific_archives:
-
-
+    if args.cleanup_specific_archives or args.cleanup_specific_archives_list:
+        combined = []
+        if args.cleanup_specific_archives:
+            combined.extend(args.cleanup_specific_archives.split(','))
+        combined.extend(args.cleanup_specific_archives_list or [])
+        archive_names = [name.strip() for name in combined if name.strip()]
+        logger.info(f"Cleaning up specific archives: {', '.join(archive_names)}")
        for archive_name in archive_names:
            if "_FULL_" in archive_name:
                if not confirm_full_archive_deletion(archive_name, args.test_mode):
                    continue
            archive_path = os.path.join(config_settings.backup_dir, archive_name.strip())
            logger.info(f"Deleting archive: {archive_path}")
-           delete_archive(config_settings.backup_dir, archive_name.strip(), args)
+           delete_archive(config_settings.backup_dir, archive_name.strip(), args, config_settings)
     elif args.list:
         list_backups(config_settings.backup_dir, args.backup_definition)
     else:
@@ -280,8 +381,22 @@ def main():
                 backup_definitions.append(file.split('.')[0])
 
         for definition in backup_definitions:
-            delete_old_backups(
-
+            delete_old_backups(
+                config_settings.backup_dir,
+                config_settings.diff_age,
+                'DIFF',
+                args,
+                backup_definition=definition,
+                config_settings=config_settings
+            )
+            delete_old_backups(
+                config_settings.backup_dir,
+                config_settings.incr_age,
+                'INCR',
+                args,
+                backup_definition=definition,
+                config_settings=config_settings
+            )
 
     # run POST scripts
     requirements('POSTREQ', config_settings)
```
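The recurring pattern in the cleanup.py changes is that every deletion site now honours the new `--dry-run` flag and routes real removals through `safe_remove_file(..., base_dir=...)` from `dar_backup.util`, so a file is only unlinked when it resolves to a path inside the expected backup directory. That helper is not part of this diff; the sketch below is a minimal, hypothetical illustration of the guard-then-delete pattern (the `remove_within()` name and its exact behaviour are assumptions, not the shipped API):

```python
import logging
from pathlib import Path

logger = logging.getLogger(__name__)


def remove_within(file_path: str, base_dir: Path, dry_run: bool = False) -> bool:
    """Delete file_path only if it resolves to a location inside base_dir.

    Hypothetical stand-in for the safe_remove_file()/--dry-run pattern used in
    cleanup.py above; the real helper lives in dar_backup/util.py and may differ.
    """
    target = Path(file_path).resolve()
    base = base_dir.resolve()
    # Refuse anything that escapes the backup directory (symlinks, "..", etc.).
    if base != target and base not in target.parents:
        logger.error("Refusing to delete outside %s: %s", base, target)
        return False
    if dry_run:
        logger.info("Dry run: would delete %s", target)
        return True
    target.unlink(missing_ok=True)
    logger.info("Deleted %s", target)
    return True
```

With a guard of this kind, the dry-run branch can log the would-be deletion while the real branch still refuses anything that escapes the backup directory, for example an archive name containing `../`.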
dar_backup/command_runner.py
CHANGED
```diff
@@ -8,6 +8,10 @@ import os
 import re
 import shlex
 import sys
+try:
+    import termios
+except ImportError:
+    termios = None
 import tempfile
 sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../src")))
 from typing import List, Optional, Union
@@ -125,124 +129,153 @@ class CommandRunner:
         timeout: Optional[int] = None,
         check: bool = False,
         capture_output: bool = True,
-        text: bool = True
+        text: bool = True,
+        cwd: Optional[str] = None,
+        stdin: Optional[int] = subprocess.DEVNULL
     ) -> CommandResult:
-        self._text_mode = text
+        self._text_mode = text
         timeout = timeout or self.default_timeout
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        #command = f"Executing command: {' '.join(cmd)} (timeout={timeout}s)"
-        command = f"Executing command: {' '.join(shlex.quote(arg) for arg in cmd)} (timeout={timeout}s)"
-
-
-        self.command_logger.info(command)
-        self.logger.debug(command)
-
-        stdout_lines = []
-        stderr_lines = []
+        tty_fd = None
+        tty_file = None
+        saved_tty_attrs = None
+        if termios is not None:
+            try:
+                if os.path.exists("/dev/tty"):
+                    tty_file = open("/dev/tty")
+                    tty_fd = tty_file.fileno()
+                elif sys.stdin and sys.stdin.isatty():
+                    tty_fd = sys.stdin.fileno()
+                if tty_fd is not None:
+                    saved_tty_attrs = termios.tcgetattr(tty_fd)
+            except Exception:
+                tty_fd = None
+                saved_tty_attrs = None
+                if tty_file:
+                    tty_file.close()
+                    tty_file = None
 
         try:
-
-                cmd,
-                stdout=subprocess.PIPE if capture_output else None,
-                stderr=subprocess.PIPE if capture_output else None,
-                text=False,
-                bufsize=-1
-            )
-        except Exception as e:
-            stack = traceback.format_exc()
-            return CommandResult(
-                returncode=-1,
-                stdout='',
-                stderr=str(e),
-                stack=stack
-            )
+            cmd_sanitized = None
 
-        def stream_output(stream, lines, level):
             try:
-
-
-
-
-
-
-
-
-
-
-
-                self.logger.warning(f"stream_output decode error: {e}")
+                cmd_sanitized = sanitize_cmd(cmd)
+            except ValueError as e:
+                stack = traceback.format_exc()
+                self.logger.error(f"Command sanitation failed: {e}")
+                return CommandResult(
+                    returncode=-1,
+                    note=f"Sanitizing failed: command: {' '.join(cmd)}",
+                    stdout='',
+                    stderr=str(e),
+                    stack=stack,
+
+                )
             finally:
-
+                cmd = cmd_sanitized
 
+            #command = f"Executing command: {' '.join(cmd)} (timeout={timeout}s)"
+            command = f"Executing command: {' '.join(shlex.quote(arg) for arg in cmd)} (timeout={timeout}s)"
 
 
-
-
-            t_out = threading.Thread(target=stream_output, args=(process.stdout, stdout_lines, logging.INFO))
-            t_out.start()
-            threads.append(t_out)
-            if capture_output and process.stderr:
-                t_err = threading.Thread(target=stream_output, args=(process.stderr, stderr_lines, logging.ERROR))
-                t_err.start()
-                threads.append(t_err)
-
-            try:
-                process.wait(timeout=timeout)
-            except subprocess.TimeoutExpired:
-                process.kill()
-                log_msg = f"Command timed out after {timeout} seconds: {' '.join(cmd)}:\n"
-                self.logger.error(log_msg)
-                return CommandResult(-1, ''.join(stdout_lines), log_msg.join(stderr_lines))
-            except Exception as e:
-                stack = traceback.format_exc()
-                log_msg = f"Command execution failed: {' '.join(cmd)} with error: {e}\n"
-                self.logger.error(log_msg)
-                return CommandResult(-1, ''.join(stdout_lines), log_msg.join(stderr_lines), stack)
-
-            for t in threads:
-                t.join()
-
+            self.command_logger.info(command)
+            self.logger.debug(command)
 
+            stdout_lines = []
+            stderr_lines = []
 
-
-
-
-
-
-
+            try:
+                process = subprocess.Popen(
+                    cmd,
+                    stdout=subprocess.PIPE if capture_output else None,
+                    stderr=subprocess.PIPE if capture_output else None,
+                    stdin=stdin,
+                    text=False,
+                    bufsize=-1,
+                    cwd=cwd
+                )
+            except Exception as e:
+                stack = traceback.format_exc()
+                return CommandResult(
+                    returncode=-1,
+                    stdout='',
+                    stderr=str(e),
+                    stack=stack
+                )
+
+            def stream_output(stream, lines, level):
+                try:
+                    while True:
+                        chunk = stream.read(1024)
+                        if not chunk:
+                            break
+                        if self._text_mode:
+                            decoded = chunk.decode('utf-8', errors='replace')
+                            lines.append(decoded)
+                            self.command_logger.log(level, decoded.strip())
+                        else:
+                            lines.append(chunk)
+                            # Avoid logging raw binary data to prevent garbled logs
+                except Exception as e:
+                    self.logger.warning(f"stream_output decode error: {e}")
+                finally:
+                    stream.close()
+
+            threads = []
+            if capture_output and process.stdout:
+                t_out = threading.Thread(target=stream_output, args=(process.stdout, stdout_lines, logging.INFO))
+                t_out.start()
+                threads.append(t_out)
+            if capture_output and process.stderr:
+                t_err = threading.Thread(target=stream_output, args=(process.stderr, stderr_lines, logging.ERROR))
+                t_err.start()
+                threads.append(t_err)
 
+            try:
+                process.wait(timeout=timeout)
+            except subprocess.TimeoutExpired:
+                process.kill()
+                log_msg = f"Command timed out after {timeout} seconds: {' '.join(cmd)}:\n"
+                self.logger.error(log_msg)
+                return CommandResult(-1, ''.join(stdout_lines), log_msg.join(stderr_lines))
+            except Exception as e:
+                stack = traceback.format_exc()
+                log_msg = f"Command execution failed: {' '.join(cmd)} with error: {e}\n"
+                self.logger.error(log_msg)
+                return CommandResult(-1, ''.join(stdout_lines), log_msg.join(stderr_lines), stack)
+
+            for t in threads:
+                t.join()
+
+            if self._text_mode:
+                stdout_combined = ''.join(stdout_lines)
+                stderr_combined = ''.join(stderr_lines)
+            else:
+                stdout_combined = b''.join(stdout_lines)
+                stderr_combined = b''.join(stderr_lines)
+
+            if check and process.returncode != 0:
+                self.logger.error(f"Command failed with exit code {process.returncode}")
+                return CommandResult(
+                    process.returncode,
+                    stdout_combined,
+                    stderr_combined,
+                    stack=traceback.format_stack()
+                )
 
-        if check and process.returncode != 0:
-            self.logger.error(f"Command failed with exit code {process.returncode}")
             return CommandResult(
                 process.returncode,
-                stdout_combined,
-                stderr_combined,
-                stack=traceback.format_stack()
-            )
-
-            return CommandResult(
-                process.returncode,
                stdout_combined,
                stderr_combined
-
-
+            )
+        finally:
+            if termios is not None and saved_tty_attrs is not None and tty_fd is not None:
+                try:
+                    termios.tcsetattr(tty_fd, termios.TCSADRAIN, saved_tty_attrs)
+                except Exception:
+                    self.logger.debug("Failed to restore terminal attributes", exc_info=True)
+            if tty_file is not None:
+                try:
+                    tty_file.close()
+                except Exception:
+                    self.logger.debug("Failed to close /dev/tty handle", exc_info=True)
```