utils_devops 0.1.129__tar.gz → 0.1.136__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {utils_devops-0.1.129 → utils_devops-0.1.136}/PKG-INFO +1 -1
- {utils_devops-0.1.129 → utils_devops-0.1.136}/pyproject.toml +1 -1
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/core/systems.py +241 -95
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/docker_ops.py +1 -2
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/ssh_ops.py +137 -9
- {utils_devops-0.1.129 → utils_devops-0.1.136}/README.md +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/__init__.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/core/__init__.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/core/datetimes.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/core/envs.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/core/files.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/core/logs.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/core/script_helpers.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/core/strings.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/__init__.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/aws_ops.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/git_ops.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/interaction_ops.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/metrics_ops.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/network_ops.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/nginx_ops.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/notification_ops.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/performance_ops.py +0 -0
- {utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/vault_ops.py +0 -0
{utils_devops-0.1.129 → utils_devops-0.1.136}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: utils_devops
-Version: 0.1.129
+Version: 0.1.136
 Summary: Lightweight DevOps utilities for automation scripts: config editing (YAML/JSON/INI/.env), templating, diffing, and CLI tools
 License: MIT
 Keywords: devops,automation,nginx,cli,jinja2,yaml,config,diff,templating,logging,docker,compose,file-ops
{utils_devops-0.1.129 → utils_devops-0.1.136}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "utils_devops"
-version = "0.1.129"
+version = "0.1.136" # Bumped for new string features + diffing
 description = "Lightweight DevOps utilities for automation scripts: config editing (YAML/JSON/INI/.env), templating, diffing, and CLI tools"
 authors = ["Hamed Sheikhan <sh.sheikhan.m@gmail.com>"]
 license = "MIT"
{utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/core/systems.py

@@ -11,6 +11,7 @@ import shutil
 import time
 import socket
 import getpass
+import threading
 import ctypes  # For Windows admin check
 from typing import Optional, List, Dict, Union, Any, Callable, Tuple
 from rich.console import Console
@@ -293,157 +294,301 @@ def run(
     elevated: bool = False,
     capture: bool = True,
     logger: Optional[logger] = None,
-    stream: bool = False
+    stream: bool = False
 ) -> subprocess.CompletedProcess:
     """
+    Robust run() with cross-platform threaded streaming + smart carriage-return handling.
+
+    - stream=True : streams output in real-time using threads, understands '\r' updates.
+    - capture controls whether stdout/stderr are returned in CompletedProcess (when stream=True,
+      capture=True will also collect into strings).
+    - If streaming fails for any reason we fallback to communicate() to collect remaining output.
+    - Works on Linux and Windows.
     """
-    logger = logger or DEFAULT_LOGGER
+    logger = logger or DEFAULT_LOGGER
+
     if shell is None:
         shell = isinstance(cmd, str)
+
+    # Normalize command forms
     if isinstance(cmd, (list, tuple)):
         cmd_str = subprocess.list2cmdline(cmd)
         cmd_list: Union[List[str], str] = list(cmd)
     else:
         cmd_str = str(cmd)
         cmd_list = cmd_str if shell else [cmd_str]
+
     if dry_run:
         logger.info(f"[DRY-RUN] {cmd_str}")
         return subprocess.CompletedProcess(cmd_list if not shell else cmd_str, 0, stdout="", stderr="")
+
     stdin_input: Optional[str] = None
     use_list: Union[List[str], str] = cmd_list
+
+    # Determine platform
+    try:
+        is_win = (is_windows())
+    except NameError:
+        is_win = (os.name == "nt") or sys.platform.startswith("win")
+
+    # Handle elevated
     if elevated:
+        if is_win:
+            # Windows: use Start-Process via powershell RunAs
             ps = f"Start-Process -Verb RunAs -FilePath powershell -ArgumentList '-NoProfile','-Command','{cmd_str}' -Wait -PassThru"
             use_list = ["powershell", "-NoProfile", "-Command", ps]
             shell = False
         else:
-            # Unix:
+            # Unix: attempt to get cached sudo password via user-provided helper
+            try:
+                pw = _get_sudo_password()
+            except NameError:
+                raise RuntimeError("elevated=True requested but _get_sudo_password() is not implemented in the environment.")
             if isinstance(cmd, (list, tuple)):
                 base_list = list(cmd)
             else:
-                # if original was a string and shell=True, pass the string to sudo as a single shell invocation
                 base_list = [cmd_str] if shell else [cmd_str]
             use_list = ["sudo", "-S"] + base_list
             stdin_input = (pw + "\n") if pw is not None else None
-            shell = False
+            shell = False
+
+    # Log final command (avoid logging sensitive data)
     try:
         if isinstance(use_list, list):
             logger.debug(f"Executing (list): {' '.join(use_list)}")
         else:
             logger.debug(f"Executing (shell): {use_list}")
+
         proc = subprocess.Popen(
             use_list if not shell else (cmd_str),
             cwd=str(cwd) if cwd else None,
             env=env,
-            stdout=subprocess.PIPE if capture else None,
-            stderr=subprocess.PIPE if capture else None,
+            stdout=subprocess.PIPE if (capture or stream) else None,
+            stderr=subprocess.PIPE if (capture or stream) else None,
             stdin=subprocess.PIPE if stdin_input is not None else None,
             text=True,
             shell=shell,
-            bufsize=1,
+            bufsize=1,
             universal_newlines=True,
         )
+
+        # Send sudo password if needed
         if stdin_input is not None and proc.stdin:
-            # write password and flush; do not keep password in logs
             try:
                 proc.stdin.write(stdin_input)
                 proc.stdin.flush()
                 proc.stdin.close()
             except Exception:
-                # If writing fails, ensure we close and continue to wait for process
                 try:
                     proc.stdin.close()
                 except Exception:
                     pass
+
+        # Output collectors
+        stdout_lines: List[str] = []
+        stderr_lines: List[str] = []
+
+        # --- Smart threaded streaming implementation ---
+        def _smart_reader(pipe, log_func, collector: Optional[List[str]], stop_event: threading.Event):
+            """
+            Read from pipe in chunks, handle '\r' (line replace) and '\n' (new line).
+            Appends to collector (if provided) and logs via log_func.
+            """
+            try:
+                buffer = ""
+                last_rendered = None  # used to avoid repeated identical logs for \r updates
+
+                # We'll read in reasonably-sized chunks. read() will block, but on separate thread that's fine.
+                while not stop_event.is_set():
+                    chunk = pipe.read(1024)
+                    if not chunk:
+                        # EOF reached
+                        break
+                    buffer += chunk
+
+                    # Process as long as there's control chars
+                    while True:
+                        # find next control char indices
+                        idx_n = buffer.find("\n")
+                        idx_r = buffer.find("\r")
+
+                        if idx_n == -1 and idx_r == -1:
+                            break
+
+                        # Which control comes first?
+                        if idx_r != -1 and (idx_n == -1 or idx_r < idx_n):
+                            # Carriage return: replace current line
+                            line = buffer[:idx_r]
+                            buffer = buffer[idx_r + 1:]
+                            # Only log if changed (avoids spamming identical updates)
+                            if line != last_rendered:
+                                # strip trailing CR/LF but preserve internal whitespace
+                                to_log = line.rstrip("\r\n")
+                                try:
+                                    log_func(to_log)
+                                except Exception:
+                                    # logging should not raise to user
+                                    pass
+                                if collector is not None:
+                                    collector.append(line + ("\n" if collector is not None else ""))
+                                last_rendered = line
+                        else:
+                            # Newline: finalize this line
+                            line = buffer[:idx_n]
+                            buffer = buffer[idx_n + 1:]
+                            to_log = line.rstrip("\r\n")
+                            try:
+                                log_func(to_log)
+                            except Exception:
+                                pass
+                            if collector is not None:
+                                collector.append(line + "\n")
+                            last_rendered = None
+
+                # Flush whatever remains in buffer
+                if buffer:
+                    buf_strip = buffer.rstrip("\r\n")
+                    if buf_strip:
+                        try:
+                            log_func(buf_strip)
+                        except Exception:
+                            pass
+                    if collector is not None:
+                        collector.append(buffer)
+                # close pipe
+                try:
+                    pipe.close()
+                except Exception:
+                    pass
+            except Exception as exc:
+                # Ensure we don't crash the thread; bubble up via logging
+                logger.exception(f"stream reader error: {exc}")
+                try:
+                    pipe.close()
+                except Exception:
+                    pass
+                # re-raise to let outer context know (we'll catch in caller via threads status)
+                raise
+
+        rc = None
+        if stream and (proc.stdout is not None or proc.stderr is not None):
             logger.info("Streaming command output...")
-            while True:
-                # Check if process has terminated
-                if proc.poll() is not None:
-                    break
-
-                # Use select to wait for data
-                read_fds = []
+            stop_event = threading.Event()
+            threads: List[threading.Thread] = []
+            stream_exception = None
+
+            # Start threads
+            try:
                 if proc.stdout:
+                    t_out = threading.Thread(
+                        target=_smart_reader,
+                        args=(proc.stdout, logger.info, stdout_lines if capture else None, stop_event),
+                        daemon=True
+                    )
+                    t_out.start()
+                    threads.append(t_out)
+
                 if proc.stderr:
+                    t_err = threading.Thread(
+                        target=_smart_reader,
+                        args=(proc.stderr, logger.warning, stderr_lines if capture else None, stop_event),
+                        daemon=True
+                    )
+                    t_err.start()
+                    threads.append(t_err)
+
+                # Wait for process while allowing KeyboardInterrupt
+                try:
+                    rc = proc.wait()
+                except KeyboardInterrupt:
+                    logger.debug("KeyboardInterrupt caught: terminating child process")
+                    stop_event.set()
+                    try:
+                        proc.terminate()
+                    except Exception:
+                        pass
+                    # wait a bit then force kill
+                    try:
+                        proc.wait(timeout=2)
+                    except Exception:
+                        try:
+                            proc.kill()
+                        except Exception:
+                            pass
+                    rc = proc.returncode if proc.returncode is not None else -1
+
+            except Exception as exc:
+                # If any exception occurs while starting/monitoring threads -> fallback
+                stream_exception = exc
+                logger.exception(f"Streaming failed, falling back to communicate(): {exc}")
+            finally:
+                # Signal threads to stop and join
+                stop_event.set()
+                for t in threads:
+                    t.join(timeout=1)
+
+            # If streaming had exception, fallback to communicate to collect remaining output safely.
+            if stream_exception is not None:
+                try:
+                    # communicate will return remaining output that reader threads didn't capture
+                    comm_out, comm_err = proc.communicate(timeout=5)
+                except Exception:
+                    try:
+                        # best-effort: kill and read whatever
+                        proc.kill()
+                    except Exception:
+                        pass
+                    try:
+                        comm_out, comm_err = proc.communicate(timeout=5)
+                    except Exception:
+                        comm_out, comm_err = ("", "")
+                # append remaining to collectors if capture True
+                if capture:
+                    if comm_out:
+                        stdout_lines.append(comm_out)
+                    if comm_err:
+                        stderr_lines.append(comm_err)
+                # set rc if not set
+                if rc is None:
+                    rc = proc.returncode if proc.returncode is not None else 0
+
+            # Compose final stdout/stderr
+            stdout = "".join(stdout_lines) if capture else ""
+            stderr = "".join(stderr_lines) if capture else ""
+            if rc is None:
+                rc = proc.returncode if proc.returncode is not None else 0
+
         else:
+            # original behavior: blocking wait & capture (or not)
+            try:
+                stdout, stderr = proc.communicate()
+            except Exception as exc:
+                # if communicate fails, try to kill and fallback
+                logger.exception(f"proc.communicate() failed: {exc}")
+                try:
+                    proc.kill()
+                except Exception:
+                    pass
+                try:
+                    stdout, stderr = proc.communicate(timeout=5)
+                except Exception:
+                    stdout, stderr = ("", "")
             rc = proc.returncode
+
+        # Build CompletedProcess result
         result = subprocess.CompletedProcess(
             use_list if not shell else cmd_str,
             rc,
             stdout=stdout or "",
             stderr=stderr or "",
         )
+
+        # Post-run handling & sudo auth checks (Unix)
         if rc == 0:
             logger.info(f"Command succeeded (rc={rc})")
         else:
-            if elevated and not is_windows():
+            if elevated and not is_win:
                 lowerr = (stderr or "").lower()
-                # Common sudo auth failure tokens
                 auth_tokens = [
                     "incorrect password",
                     "authentication failure",
@@ -454,25 +599,26 @@ def run(
                     "pam_authenticate",
                 ]
                 if any(tok in lowerr for tok in auth_tokens):
+                    try:
+                        clear_sudo_password()
+                    except NameError:
+                        logger.error("sudo authentication failed and clear_sudo_password() not implemented.")
+                    else:
+                        logger.error("sudo authentication failed. Cached sudo password cleared.")
                     raise subprocess.CalledProcessError(rc, use_list if not shell else cmd_str, output=stdout, stderr=stderr)
+            logger.error(f"Command failed (rc={rc}) – { (stderr or '').strip() }")
+
         if rc != 0 and not no_die:
             raise subprocess.CalledProcessError(rc, use_list if not shell else cmd_str, output=stdout, stderr=stderr)
+
         return result
+
     except subprocess.CalledProcessError:
-        # Re-raise CalledProcessError so callers can handle; do not clear cached password here
         raise
     except Exception as e:
         logger.exception(f"Unexpected error running command: {e}")
         raise
+
 # -------------------------------------------------
 # Exec helper – shows command + output + logs
 # -------------------------------------------------
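For context, a minimal usage sketch of the new streaming mode (a hypothetical call site, not part of the package; only parameters visible in the signature above are used):

```python
# Hypothetical usage of run() with the new threaded streaming (a sketch, not package code).
from utils_devops.core.systems import run

# A long command whose progress output uses '\r' updates: with stream=True the reader
# threads log each update as it arrives; capture=True still collects the full output.
result = run(["pip", "install", "--upgrade", "pip"], stream=True, capture=True)
print(result.returncode)
print(result.stdout[-200:])   # tail of the collected stdout

# stream=False keeps the original blocking communicate() behavior.
run("echo done", stream=False)
```

With `stream=True` and `capture=False`, output is only logged, not returned, per the docstring above.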
{utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/docker_ops.py

@@ -430,8 +430,7 @@ def read_compose_file(compose_file: str, env_file: Optional[str] = None) -> Dict
     envs.import_env_to_system(get_env_compose(env_file))

     # Read compose file content
-
-    content = strings.parse_yaml(compose_file)
+    content = files.read_file(compose_file)

     # Expand environment variables in the content
     data = _expand_env_vars_in_compose(content)
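The change above reads the compose file as raw text before env expansion rather than parsing YAML first. A standalone sketch of that read-then-expand order (the `${VAR}` syntax, `os.path.expandvars`, and PyYAML here are assumptions for illustration; `_expand_env_vars_in_compose` itself is not shown in this diff):

```python
# Standalone sketch of "read raw text, expand env vars, then parse" (assumed behavior,
# not the package's implementation).
import os
import yaml  # PyYAML, assumed available for the illustration

os.environ.setdefault("TAG", "1.2.3")

raw = open("docker-compose.yml").read()   # e.g. contains "image: app:${TAG}"
expanded = os.path.expandvars(raw)        # -> "image: app:1.2.3"
data = yaml.safe_load(expanded)           # parse only after expansion
```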
{utils_devops-0.1.129 → utils_devops-0.1.136}/src/utils_devops/extras/ssh_ops.py

@@ -340,18 +340,35 @@ def ssh_connect(
     key_path, key_content = _resolve_ssh_key(key_file)

     if key_content:
+        # **FIXED HERE**: Ensure proper line endings in the key content
+        # SSH keys MUST have newlines at specific positions
+        key_content = _normalize_ssh_key_content(key_content)
+
         # Create temporary file for key content
         import tempfile
         temp_key_file = tempfile.NamedTemporaryFile(
-            mode='w',
+            mode='w',  # Text mode for proper newline handling
             suffix='_ssh_key',
-            delete=False
+            delete=False,
+            newline='\n'  # Explicitly use LF newlines
         )
+
+        # **CRITICAL FIX**: Write with proper line endings
         temp_key_file.write(key_content)
+        temp_key_file.flush()  # Ensure data is written
+        os.fsync(temp_key_file.fileno())  # Force sync to disk
         temp_key_file.close()

-        # Set permissions
+        # Set strict permissions for SSH keys
         os.chmod(temp_key_file.name, 0o600)
+
+        # **DEBUG**: Verify what was written
+        with open(temp_key_file.name, 'r') as f:
+            written_content = f.read()
+            _logger.debug(f"Written key length: {len(written_content)}")
+            _logger.debug(f"Key first 100 chars: {written_content[:100]}")
+            _logger.debug(f"Key lines: {written_content.count(chr(10))}")
+
         connect_kwargs['key_filename'] = temp_key_file.name
         _logger.debug(f"Using temporary SSH key file: {temp_key_file.name}")

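The `newline='\n'` argument matters mostly on Windows, where text-mode writes would otherwise translate `\n` to `\r\n` and corrupt the key. A standalone illustration (not package code):

```python
# Why newline='\n' is passed above: it disables newline translation in text mode,
# so the key keeps LF endings even on Windows (standalone illustration).
import tempfile

with tempfile.NamedTemporaryFile(mode="w", suffix="_ssh_key", delete=False, newline="\n") as f:
    f.write("-----BEGIN OPENSSH PRIVATE KEY-----\nAAAA\n-----END OPENSSH PRIVATE KEY-----\n")
    name = f.name

with open(name, "rb") as f:
    assert b"\r\n" not in f.read()  # LF endings preserved regardless of platform
```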
@@ -390,8 +407,8 @@ def ssh_connect(
         try:
             os.unlink(temp_key_file.name)
             _logger.debug(f"Cleaned up temporary key file: {temp_key_file.name}")
-        except:
+        except Exception as e:
+            _logger.warning(f"Failed to cleanup temp key file: {e}")

     if client:
         try:
@@ -400,6 +417,117 @@ def ssh_connect(
         except:
             pass

+
+def _normalize_ssh_key_content(key_content: str) -> str:
+    """
+    Normalize SSH key content to ensure proper formatting.
+
+    SSH keys require specific line breaks:
+    - Must start with -----BEGIN ...-----
+    - Base64 content should be on separate lines (64 chars max per line)
+    - Must end with -----END ...-----
+
+    Args:
+        key_content: Raw key content (may be single line or malformed)
+
+    Returns:
+        Properly formatted SSH key
+    """
+    # If already looks properly formatted, return as-is
+    if "-----BEGIN" in key_content and "-----END" in key_content and '\n' in key_content:
+        # Verify line endings
+        lines = key_content.splitlines()
+        if len(lines) > 1:
+            return key_content
+
+    # Parse and reformat
+    lines = []
+    current_line = []
+
+    # Remove all existing whitespace and split
+    clean_content = key_content.strip()
+
+    if clean_content.startswith("-----BEGIN"):
+        # Extract header
+        header_end = clean_content.find("-----", 10) + 5
+        header = clean_content[:header_end]
+        lines.append(header)
+
+        # Get base64 content between headers
+        base64_start = header_end
+        base64_end = clean_content.find("-----END")
+        base64_content = clean_content[base64_start:base64_end].strip()
+
+        # Remove all whitespace from base64
+        base64_clean = ''.join(base64_content.split())
+
+        # Split into 64 character lines (standard for SSH keys)
+        for i in range(0, len(base64_clean), 64):
+            lines.append(base64_clean[i:i+64])
+
+        # Add footer
+        footer_start = base64_end
+        lines.append(clean_content[footer_start:].strip())
+    else:
+        # Assume it's just base64, wrap in OpenSSH header/footer
+        lines.append("-----BEGIN OPENSSH PRIVATE KEY-----")
+        clean_base64 = ''.join(clean_content.split())
+        for i in range(0, len(clean_base64), 64):
+            lines.append(clean_base64[i:i+64])
+        lines.append("-----END OPENSSH PRIVATE KEY-----")
+
+    return '\n'.join(lines) + '\n'
+
+
+# Also update your _resolve_ssh_key function to preserve newlines:
+def _resolve_ssh_key(key_input: Optional[Union[str, Path]]) -> Tuple[Optional[Path], Optional[str]]:
+    """
+    Resolve SSH key input to either a file path or key content.
+
+    Args:
+        key_input: Could be:
+            - Path to key file
+            - Environment variable name ($VAR)
+            - Raw key content string
+
+    Returns:
+        Tuple of (file_path, key_content)
+    """
+    if not key_input:
+        return None, None
+
+    # If it's a Path object
+    if isinstance(key_input, Path):
+        if key_input.exists():
+            return key_input, None
+        else:
+            raise FileNotFoundError(f"SSH key file not found: {key_input}")
+
+    key_input_str = str(key_input)
+
+    # Check if it's an environment variable reference
+    if key_input_str.startswith('$'):
+        env_var = key_input_str[1:]
+        key_content = os.environ.get(env_var)
+        if not key_content:
+            raise ValueError(f"Environment variable not set: {env_var}")
+
+        # **CRITICAL**: Don't strip newlines from env var content!
+        # SSH keys in env vars often have escaped newlines (\n)
+        # Replace literal \n with actual newlines
+        if '\\n' in key_content:
+            key_content = key_content.replace('\\n', '\n')
+
+        return None, key_content
+
+    # Check if it's a file path
+    if os.path.exists(key_input_str):
+        return Path(key_input_str), None
+
+    # Assume it's raw key content
+    # **IMPORTANT**: Preserve any newlines in the content
+    return None, key_input_str
+
 def ssh_execute_command(
     host: str,
     command: Union[str, List[str]],
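For context, a hypothetical call site for the two helpers above, passing a key through an environment variable (the variable name, the key material, and any `ssh_connect` arguments beyond `host`/`key_file` are assumptions):

```python
# Hypothetical: supplying the SSH key via an environment variable (a sketch, not package code).
import os
from utils_devops.extras.ssh_ops import ssh_connect

# CI systems often store keys with escaped "\n" sequences on a single line:
os.environ["DEPLOY_KEY"] = (
    "-----BEGIN OPENSSH PRIVATE KEY-----\\n"
    "b3BlbnNzaC1rZXktdjEAAAAA...\\n"          # placeholder key material
    "-----END OPENSSH PRIVATE KEY-----"
)

# "$DEPLOY_KEY" is resolved by _resolve_ssh_key(): literal "\n" becomes a real newline,
# and _normalize_ssh_key_content() re-wraps a single-line base64 body into 64-char lines
# before ssh_connect() writes the key to the 0600 temp file shown earlier.
client = ssh_connect("deploy.example.com", key_file="$DEPLOY_KEY")
```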
@@ -1465,10 +1593,10 @@ def validate_machine_config(machine: MachineConfig) -> List[str]:
         errors.append(f"Invalid deploy_dir format: {e}")

     # Validate key file if specified
-    if machine.key_file:
-        key_path = Path(machine.key_file).expanduser()
-        if not key_path.exists():
-            errors.append(f"SSH key file not found: {key_path}")
+    # if machine.key_file:
+    #     key_path = Path(machine.key_file).expanduser()
+    #     if not key_path.exists():
+    #         errors.append(f"SSH key file not found: {key_path}")

     return errors

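The existence check is commented out rather than deleted, presumably because `key_file` can now hold a `$VAR` reference or inline key material instead of a path (see `_resolve_ssh_key` above). A sketch of a check that tolerates those forms, assuming the `MachineConfig.key_file` field shown here:

```python
# Hypothetical replacement for the disabled check (not in the package): only treat
# key_file as a filesystem path when it is neither a "$VAR" reference nor inline key text.
from pathlib import Path
from typing import Optional

def _key_file_error(key_file: str) -> Optional[str]:
    if key_file.startswith("$") or "-----BEGIN" in key_file:
        return None  # env-var reference or raw key content: nothing to stat
    key_path = Path(key_file).expanduser()
    if not key_path.exists():
        return f"SSH key file not found: {key_path}"
    return None
```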