redundanet 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- redundanet/__init__.py +16 -0
- redundanet/__main__.py +6 -0
- redundanet/auth/__init__.py +9 -0
- redundanet/auth/gpg.py +323 -0
- redundanet/auth/keyserver.py +219 -0
- redundanet/cli/__init__.py +5 -0
- redundanet/cli/main.py +247 -0
- redundanet/cli/network.py +194 -0
- redundanet/cli/node.py +305 -0
- redundanet/cli/storage.py +267 -0
- redundanet/core/__init__.py +31 -0
- redundanet/core/config.py +200 -0
- redundanet/core/exceptions.py +84 -0
- redundanet/core/manifest.py +325 -0
- redundanet/core/node.py +135 -0
- redundanet/network/__init__.py +11 -0
- redundanet/network/discovery.py +218 -0
- redundanet/network/dns.py +180 -0
- redundanet/network/validation.py +279 -0
- redundanet/storage/__init__.py +13 -0
- redundanet/storage/client.py +306 -0
- redundanet/storage/furl.py +196 -0
- redundanet/storage/introducer.py +175 -0
- redundanet/storage/storage.py +195 -0
- redundanet/utils/__init__.py +15 -0
- redundanet/utils/files.py +165 -0
- redundanet/utils/logging.py +93 -0
- redundanet/utils/process.py +226 -0
- redundanet/vpn/__init__.py +12 -0
- redundanet/vpn/keys.py +173 -0
- redundanet/vpn/mesh.py +201 -0
- redundanet/vpn/tinc.py +323 -0
- redundanet-2.0.0.dist-info/LICENSE +674 -0
- redundanet-2.0.0.dist-info/METADATA +265 -0
- redundanet-2.0.0.dist-info/RECORD +37 -0
- redundanet-2.0.0.dist-info/WHEEL +4 -0
- redundanet-2.0.0.dist-info/entry_points.txt +3 -0
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
"""File utility functions for RedundaNet."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import shutil
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import yaml
|
|
10
|
+
|
|
11
|
+
from redundanet.utils.logging import get_logger
|
|
12
|
+
|
|
13
|
+
logger = get_logger(__name__)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def ensure_dir(path: Path | str, mode: int = 0o755) -> Path:
|
|
17
|
+
"""Ensure a directory exists, creating it if necessary.
|
|
18
|
+
|
|
19
|
+
Args:
|
|
20
|
+
path: Directory path
|
|
21
|
+
mode: Permission mode for the directory
|
|
22
|
+
|
|
23
|
+
Returns:
|
|
24
|
+
The Path object for the directory
|
|
25
|
+
"""
|
|
26
|
+
path = Path(path)
|
|
27
|
+
if not path.exists():
|
|
28
|
+
path.mkdir(parents=True, mode=mode)
|
|
29
|
+
logger.debug("Created directory", path=str(path))
|
|
30
|
+
return path
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def read_yaml(path: Path | str) -> dict[str, Any]:
|
|
34
|
+
"""Read and parse a YAML file.
|
|
35
|
+
|
|
36
|
+
Args:
|
|
37
|
+
path: Path to the YAML file
|
|
38
|
+
|
|
39
|
+
Returns:
|
|
40
|
+
Parsed YAML content as dictionary
|
|
41
|
+
|
|
42
|
+
Raises:
|
|
43
|
+
FileNotFoundError: If file doesn't exist
|
|
44
|
+
yaml.YAMLError: If YAML parsing fails
|
|
45
|
+
"""
|
|
46
|
+
path = Path(path)
|
|
47
|
+
if not path.exists():
|
|
48
|
+
raise FileNotFoundError(f"YAML file not found: {path}")
|
|
49
|
+
|
|
50
|
+
with path.open() as f:
|
|
51
|
+
data = yaml.safe_load(f)
|
|
52
|
+
|
|
53
|
+
return data if isinstance(data, dict) else {}
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def write_yaml(path: Path | str, data: dict[str, Any], mode: int = 0o644) -> None:
    """Serialize *data* as YAML to *path*, creating parent dirs as needed.

    Args:
        path: Path to write to
        data: Dictionary to serialize as YAML
        mode: File permission mode
    """
    out_path = Path(path)
    ensure_dir(out_path.parent)

    # Block style, insertion order preserved — keeps diffs readable.
    with out_path.open("w") as handle:
        yaml.dump(data, handle, default_flow_style=False, sort_keys=False)

    out_path.chmod(mode)
    logger.debug("Wrote YAML file", path=str(out_path))
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def safe_copy(src: Path | str, dst: Path | str, mode: int | None = None) -> Path:
    """Copy *src* to *dst* (metadata preserved), optionally forcing a mode.

    Args:
        src: Source file path
        dst: Destination file path
        mode: Optional permission mode to set on destination

    Returns:
        Path to the destination file
    """
    source = Path(src)
    target = Path(dst)

    ensure_dir(target.parent)
    # copy2 keeps timestamps and permission bits from the source.
    shutil.copy2(source, target)

    if mode is not None:
        target.chmod(mode)

    logger.debug("Copied file", src=str(source), dst=str(target))
    return target
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def read_file(path: Path | str) -> str:
|
|
99
|
+
"""Read a text file.
|
|
100
|
+
|
|
101
|
+
Args:
|
|
102
|
+
path: Path to the file
|
|
103
|
+
|
|
104
|
+
Returns:
|
|
105
|
+
File contents as string
|
|
106
|
+
"""
|
|
107
|
+
path = Path(path)
|
|
108
|
+
return path.read_text()
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def write_file(
    path: Path | str,
    content: str,
    mode: int = 0o644,
    executable: bool = False,
) -> Path:
    """Write *content* to *path*, creating parent dirs and setting permissions.

    Args:
        path: Path to write to
        content: Content to write
        mode: File permission mode
        executable: If True, make the file executable

    Returns:
        Path to the written file
    """
    out_path = Path(path)
    ensure_dir(out_path.parent)

    out_path.write_text(content)

    # Add the execute bits on top of the requested mode when asked.
    final_mode = mode | 0o111 if executable else mode
    out_path.chmod(final_mode)
    logger.debug("Wrote file", path=str(out_path))
    return out_path
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def remove_path(path: Path | str, ignore_errors: bool = False) -> bool:
|
|
142
|
+
"""Remove a file or directory.
|
|
143
|
+
|
|
144
|
+
Args:
|
|
145
|
+
path: Path to remove
|
|
146
|
+
ignore_errors: If True, don't raise errors
|
|
147
|
+
|
|
148
|
+
Returns:
|
|
149
|
+
True if removal succeeded, False otherwise
|
|
150
|
+
"""
|
|
151
|
+
path = Path(path)
|
|
152
|
+
try:
|
|
153
|
+
if path.is_file():
|
|
154
|
+
path.unlink()
|
|
155
|
+
elif path.is_dir():
|
|
156
|
+
shutil.rmtree(path)
|
|
157
|
+
else:
|
|
158
|
+
return False
|
|
159
|
+
logger.debug("Removed path", path=str(path))
|
|
160
|
+
return True
|
|
161
|
+
except (OSError, PermissionError) as e:
|
|
162
|
+
if ignore_errors:
|
|
163
|
+
logger.warning("Failed to remove path", path=str(path), error=str(e))
|
|
164
|
+
return False
|
|
165
|
+
raise
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
"""Structured logging configuration for RedundaNet."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import contextlib
|
|
6
|
+
import logging
|
|
7
|
+
import sys
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
import structlog
|
|
11
|
+
from structlog.typing import Processor
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def setup_logging(
    level: str = "INFO",
    json_output: bool = False,
    log_file: str | None = None,
) -> None:
    """Configure structured logging for the application.

    Args:
        level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        json_output: If True, output logs as JSON (useful for production)
        log_file: Optional file path to write logs to
    """
    numeric_level = getattr(logging, level.upper())

    # Route stdlib logging to stderr; structlog rides on top of it.
    logging.basicConfig(
        format="%(message)s",
        stream=sys.stderr,
        level=numeric_level,
    )

    # Processors shared by both output formats.
    shared_processors: list[Processor] = [
        structlog.contextvars.merge_contextvars,
        structlog.stdlib.add_log_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.UnicodeDecoder(),
    ]

    if json_output:
        # Machine-readable output for production.
        tail: list[Processor] = [
            structlog.processors.format_exc_info,
            structlog.processors.JSONRenderer(),
        ]
    else:
        # Human-friendly colored output for development.
        tail = [structlog.dev.ConsoleRenderer(colors=True)]

    structlog.configure(
        processors=shared_processors + tail,
        wrapper_class=structlog.stdlib.BoundLogger,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    # Mirror logs to a file when requested.
    if log_file:
        file_handler = logging.FileHandler(log_file)
        file_handler.setLevel(numeric_level)
        logging.getLogger().addHandler(file_handler)
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def get_logger(name: str | None = None) -> structlog.stdlib.BoundLogger:
    """Return a structlog logger bound to *name*.

    Args:
        name: Logger name (typically __name__)

    Returns:
        A configured structlog logger
    """
    bound_logger = structlog.get_logger(name)
    return bound_logger  # type: ignore[no-any-return]
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def log_context(**kwargs: Any) -> contextlib.AbstractContextManager[None]:
    """Context manager to add context to all logs within the block.

    Backed by contextvars, so the bound values follow the current task/thread
    and are removed again when the block exits.

    Example:
        with log_context(node_name="node1", operation="sync"):
            logger.info("Starting operation")
            # All logs here will include node_name and operation
    """
    return structlog.contextvars.bound_contextvars(**kwargs)
|
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
"""Process management utilities for RedundaNet."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import asyncio
import shlex
import shutil
import subprocess
from collections.abc import Sequence
from dataclasses import dataclass
from pathlib import Path

from redundanet.utils.logging import get_logger
|
|
13
|
+
|
|
14
|
+
logger = get_logger(__name__)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@dataclass
class CommandResult:
    """Outcome of one executed command."""

    # Process exit status; 0 means success (-1 is used by the runners for
    # timeouts and missing binaries).
    returncode: int
    stdout: str
    stderr: str
    # Display form of the command, kept for logging and error reporting.
    command: str

    @property
    def success(self) -> bool:
        """True when the command exited with status 0."""
        return not self.returncode

    def check(self) -> None:
        """Raise CalledProcessError if the command failed."""
        if self.success:
            return
        raise subprocess.CalledProcessError(
            self.returncode,
            self.command,
            self.stdout,
            self.stderr,
        )
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def run_command(
    command: str | Sequence[str],
    *,
    cwd: Path | str | None = None,
    env: dict[str, str] | None = None,
    timeout: float | None = None,
    capture_output: bool = True,
    check: bool = False,
    input_text: str | None = None,
) -> CommandResult:
    """Execute a command synchronously and collect its output.

    Args:
        command: Command to run (string or list of arguments)
        cwd: Working directory
        env: Environment variables
        timeout: Timeout in seconds
        capture_output: Whether to capture stdout/stderr
        check: If True, raise exception on non-zero exit
        input_text: Text to send to stdin

    Returns:
        CommandResult with return code, stdout, stderr
    """
    # Normalize to both an argv list (for subprocess) and a display string
    # (for logging / CommandResult.command).
    if isinstance(command, str):
        cmd_str = command
        args = shlex.split(command)
    else:
        args = list(command)
        cmd_str = " ".join(args)

    logger.debug("Running command", command=cmd_str, cwd=str(cwd) if cwd else None)

    try:
        completed = subprocess.run(
            args,
            cwd=cwd,
            env=env,
            timeout=timeout,
            capture_output=capture_output,
            text=True,
            input=input_text,
        )
    except subprocess.TimeoutExpired:
        # Timeouts are reported as a failed result, not an exception.
        logger.error("Command timed out", command=cmd_str, timeout=timeout)
        return CommandResult(
            returncode=-1,
            stdout="",
            stderr=f"Command timed out after {timeout}s",
            command=cmd_str,
        )
    except FileNotFoundError:
        logger.error("Command not found", command=args[0])
        return CommandResult(
            returncode=-1,
            stdout="",
            stderr=f"Command not found: {args[0]}",
            command=cmd_str,
        )

    outcome = CommandResult(
        returncode=completed.returncode,
        stdout=completed.stdout if capture_output else "",
        stderr=completed.stderr if capture_output else "",
        command=cmd_str,
    )
    if check:
        # Raises subprocess.CalledProcessError on non-zero exit.
        outcome.check()
    return outcome
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
async def run_command_async(
    command: str | Sequence[str],
    *,
    cwd: Path | str | None = None,
    env: dict[str, str] | None = None,
    timeout: float | None = None,
    input_text: str | None = None,
) -> CommandResult:
    """Execute a command asynchronously and collect its output.

    Args:
        command: Command to run (string or list of arguments)
        cwd: Working directory
        env: Environment variables
        timeout: Timeout in seconds
        input_text: Text to send to stdin

    Returns:
        CommandResult with return code, stdout, stderr
    """
    # Only open a stdin pipe when there is something to send.
    stdin_pipe = asyncio.subprocess.PIPE if input_text else None

    if isinstance(command, str):
        # String commands go through the shell.
        cmd_str = command
        process = await asyncio.create_subprocess_shell(
            command,
            cwd=cwd,
            env=env,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            stdin=stdin_pipe,
        )
    else:
        cmd_str = " ".join(command)
        process = await asyncio.create_subprocess_exec(
            *command,
            cwd=cwd,
            env=env,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            stdin=stdin_pipe,
        )

    logger.debug("Running async command", command=cmd_str)

    try:
        payload = input_text.encode() if input_text else None
        stdout, stderr = await asyncio.wait_for(
            process.communicate(input=payload),
            timeout=timeout,
        )
    except asyncio.TimeoutError:
        # Kill the straggler and report the timeout as a failed result.
        process.kill()
        await process.wait()
        logger.error("Async command timed out", command=cmd_str, timeout=timeout)
        return CommandResult(
            returncode=-1,
            stdout="",
            stderr=f"Command timed out after {timeout}s",
            command=cmd_str,
        )

    return CommandResult(
        # returncode may be None in odd teardown cases; coerce to 0.
        returncode=process.returncode or 0,
        stdout=stdout.decode() if stdout else "",
        stderr=stderr.decode() if stderr else "",
        command=cmd_str,
    )
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
def is_command_available(command: str) -> bool:
    """Check if a command is available in PATH.

    Uses shutil.which instead of spawning a `which` subprocess: it is
    faster, and portable to systems where the `which` binary is absent.

    Args:
        command: Command name to check

    Returns:
        True if command is available
    """
    return shutil.which(command) is not None
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def get_pid_of(process_name: str) -> list[int]:
    """Get PIDs of processes matching a name.

    Args:
        process_name: Pattern matched against full command lines (pgrep -f)

    Returns:
        List of PIDs (empty when nothing matches or pgrep is unavailable)
    """
    # Pass an argv list so patterns containing spaces or shell
    # metacharacters are not mangled by shlex-splitting a command string.
    result = run_command(["pgrep", "-f", process_name], check=False)
    if result.success and result.stdout.strip():
        return [int(pid) for pid in result.stdout.strip().split("\n")]
    return []
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
def kill_process(pid: int, force: bool = False) -> bool:
    """Kill a process by PID.

    Args:
        pid: Process ID
        force: If True, use SIGKILL instead of SIGTERM

    Returns:
        True if successful
    """
    # `sig` rather than `signal`, to avoid shadowing the stdlib module name;
    # argv list avoids an unnecessary shlex round-trip.
    sig = "-9" if force else "-15"
    result = run_command(["kill", sig, str(pid)], check=False)
    return result.success
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"""VPN management module for RedundaNet."""
|
|
2
|
+
|
|
3
|
+
from redundanet.vpn.keys import VPNKeyManager
|
|
4
|
+
from redundanet.vpn.mesh import MeshNetwork
|
|
5
|
+
from redundanet.vpn.tinc import TincConfig, TincManager
|
|
6
|
+
|
|
7
|
+
__all__ = [
|
|
8
|
+
"TincManager",
|
|
9
|
+
"TincConfig",
|
|
10
|
+
"VPNKeyManager",
|
|
11
|
+
"MeshNetwork",
|
|
12
|
+
]
|
redundanet/vpn/keys.py
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
"""VPN key management for RedundaNet."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import TYPE_CHECKING
|
|
7
|
+
|
|
8
|
+
from redundanet.core.exceptions import VPNError
|
|
9
|
+
from redundanet.utils.files import ensure_dir, read_file, write_file
|
|
10
|
+
from redundanet.utils.logging import get_logger
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
from redundanet.vpn.tinc import TincConfig
|
|
14
|
+
|
|
15
|
+
logger = get_logger(__name__)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class VPNKeyManager:
    """Manages VPN keys for node authentication."""

    def __init__(self, config: TincConfig) -> None:
        # Tinc network configuration; supplies hosts_dir, network_dir, node_name.
        self.config = config
        self._keys_dir = config.network_dir / "keys"

    def ensure_keys_directory(self) -> Path:
        """Ensure the keys directory exists."""
        # 0o700: key material must be readable by the owner only.
        return ensure_dir(self._keys_dir, mode=0o700)

    def import_public_key(self, node_name: str, public_key: str) -> Path:
        """Import a public key for a remote node.

        Replaces any key already present in the node's tinc host file while
        preserving the surrounding non-key configuration lines.

        Args:
            node_name: Name of the remote node
            public_key: The public key content

        Returns:
            Path to the saved key file

        Raises:
            VPNError: If the key is empty or lacks an RSA PEM header
        """
        if not public_key.strip():
            raise VPNError(f"Empty public key for node {node_name}")

        # Validate it looks like an RSA public key
        if "BEGIN RSA PUBLIC KEY" not in public_key:
            raise VPNError(f"Invalid public key format for node {node_name}")

        host_file = self.config.hosts_dir / node_name

        # Read existing host file or create new one
        if host_file.exists():
            content = read_file(host_file)
            # Remove old key if present
            # Small state machine: drop every line from BEGIN through END
            # (inclusive), keeping whatever configuration surrounds the key.
            lines = content.split("\n")
            new_lines = []
            in_key = False
            for line in lines:
                if "BEGIN RSA PUBLIC KEY" in line:
                    in_key = True
                if not in_key:
                    new_lines.append(line)
                if "END RSA PUBLIC KEY" in line:
                    in_key = False

            content = "\n".join(new_lines)
        else:
            content = f"# Host file for {node_name}\n"

        # Append the new key
        content = content.rstrip() + "\n\n" + public_key.strip() + "\n"

        write_file(host_file, content, mode=0o644)
        logger.info("Imported public key", node=node_name)

        return host_file

    def export_public_key(self) -> str:
        """Export this node's public key.

        Returns:
            The public key as a string

        Raises:
            VPNError: If this node's host file or key block is missing
        """
        host_file = self.config.hosts_dir / self.config.node_name

        if not host_file.exists():
            raise VPNError(f"No public key found for {self.config.node_name}")

        content = read_file(host_file)

        # Extract just the key portion
        # Collect lines from BEGIN through END (inclusive), then stop.
        lines = content.split("\n")
        key_lines = []
        in_key = False

        for line in lines:
            if "BEGIN RSA PUBLIC KEY" in line:
                in_key = True
            if in_key:
                key_lines.append(line)
            if "END RSA PUBLIC KEY" in line:
                break

        if not key_lines:
            raise VPNError(f"No public key found in host file for {self.config.node_name}")

        return "\n".join(key_lines)

    def export_host_file(self) -> str:
        """Export the complete host file for this node.

        Returns:
            The complete host file content

        Raises:
            VPNError: If this node's host file is missing
        """
        host_file = self.config.hosts_dir / self.config.node_name

        if not host_file.exists():
            raise VPNError(f"No host file found for {self.config.node_name}")

        return read_file(host_file)

    def list_imported_keys(self) -> list[str]:
        """List all imported node keys.

        Returns:
            Sorted list of node names with imported keys
        """
        nodes = []
        if self.config.hosts_dir.exists():
            for host_file in self.config.hosts_dir.iterdir():
                # Skip our own host file; only count peers whose host file
                # actually carries a key block.
                if host_file.is_file() and host_file.name != self.config.node_name:
                    content = read_file(host_file)
                    if "BEGIN RSA PUBLIC KEY" in content:
                        nodes.append(host_file.name)
        return sorted(nodes)

    def remove_key(self, node_name: str) -> bool:
        """Remove a node's key.

        Deletes the node's whole host file, not just the key block.

        Args:
            node_name: Name of the node to remove

        Returns:
            True if removed, False if not found

        Raises:
            VPNError: If asked to remove this node's own key
        """
        if node_name == self.config.node_name:
            raise VPNError("Cannot remove own node's key")

        host_file = self.config.hosts_dir / node_name
        if host_file.exists():
            host_file.unlink()
            logger.info("Removed key", node=node_name)
            return True
        return False

    def verify_key(self, node_name: str) -> bool:
        """Verify that a node's key is properly formatted.

        Args:
            node_name: Name of the node to verify

        Returns:
            True if key is valid
        """
        host_file = self.config.hosts_dir / node_name
        if not host_file.exists():
            return False

        content = read_file(host_file)

        # Check for required components
        # A usable tinc host file needs a Subnet line plus a complete
        # BEGIN/END RSA key block.
        has_subnet = "Subnet = " in content
        has_key_start = "BEGIN RSA PUBLIC KEY" in content
        has_key_end = "END RSA PUBLIC KEY" in content

        return has_subnet and has_key_start and has_key_end
|