mrok 0.1.6__py3-none-any.whl → 0.1.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. mrok/__init__.py +6 -0
  2. mrok/agent/__init__.py +0 -0
  3. mrok/agent/sidecar/__init__.py +3 -0
  4. mrok/agent/sidecar/app.py +30 -0
  5. mrok/agent/sidecar/main.py +27 -0
  6. mrok/agent/ziticorn.py +29 -0
  7. mrok/cli/__init__.py +3 -0
  8. mrok/cli/commands/__init__.py +7 -0
  9. mrok/cli/commands/admin/__init__.py +12 -0
  10. mrok/cli/commands/admin/bootstrap.py +58 -0
  11. mrok/cli/commands/admin/list/__init__.py +8 -0
  12. mrok/cli/commands/admin/list/extensions.py +144 -0
  13. mrok/cli/commands/admin/list/instances.py +167 -0
  14. mrok/cli/commands/admin/register/__init__.py +8 -0
  15. mrok/cli/commands/admin/register/extensions.py +46 -0
  16. mrok/cli/commands/admin/register/instances.py +60 -0
  17. mrok/cli/commands/admin/unregister/__init__.py +8 -0
  18. mrok/cli/commands/admin/unregister/extensions.py +33 -0
  19. mrok/cli/commands/admin/unregister/instances.py +34 -0
  20. mrok/cli/commands/admin/utils.py +49 -0
  21. mrok/cli/commands/agent/__init__.py +6 -0
  22. mrok/cli/commands/agent/run/__init__.py +7 -0
  23. mrok/cli/commands/agent/run/asgi.py +49 -0
  24. mrok/cli/commands/agent/run/sidecar.py +54 -0
  25. mrok/cli/commands/controller/__init__.py +7 -0
  26. mrok/cli/commands/controller/openapi.py +47 -0
  27. mrok/cli/commands/controller/run.py +87 -0
  28. mrok/cli/main.py +97 -0
  29. mrok/cli/rich.py +18 -0
  30. mrok/conf.py +32 -0
  31. mrok/controller/__init__.py +0 -0
  32. mrok/controller/app.py +62 -0
  33. mrok/controller/auth.py +87 -0
  34. mrok/controller/dependencies/__init__.py +4 -0
  35. mrok/controller/dependencies/conf.py +7 -0
  36. mrok/controller/dependencies/ziti.py +27 -0
  37. mrok/controller/openapi/__init__.py +3 -0
  38. mrok/controller/openapi/examples.py +44 -0
  39. mrok/controller/openapi/utils.py +35 -0
  40. mrok/controller/pagination.py +79 -0
  41. mrok/controller/routes.py +294 -0
  42. mrok/controller/schemas.py +67 -0
  43. mrok/errors.py +2 -0
  44. mrok/http/__init__.py +0 -0
  45. mrok/http/config.py +65 -0
  46. mrok/http/forwarder.py +299 -0
  47. mrok/http/lifespan.py +10 -0
  48. mrok/http/master.py +90 -0
  49. mrok/http/protocol.py +11 -0
  50. mrok/http/server.py +14 -0
  51. mrok/logging.py +76 -0
  52. mrok/ziti/__init__.py +15 -0
  53. mrok/ziti/api.py +481 -0
  54. mrok/ziti/bootstrap.py +71 -0
  55. mrok/ziti/constants.py +9 -0
  56. mrok/ziti/errors.py +25 -0
  57. mrok/ziti/identities.py +169 -0
  58. mrok/ziti/pki.py +52 -0
  59. mrok/ziti/services.py +87 -0
  60. {mrok-0.1.6.dist-info → mrok-0.1.8.dist-info}/METADATA +7 -9
  61. mrok-0.1.8.dist-info/RECORD +64 -0
  62. {mrok-0.1.6.dist-info → mrok-0.1.8.dist-info}/WHEEL +1 -2
  63. mrok-0.1.6.dist-info/RECORD +0 -6
  64. mrok-0.1.6.dist-info/top_level.txt +0 -1
  65. {mrok-0.1.6.dist-info → mrok-0.1.8.dist-info}/entry_points.txt +0 -0
  66. {mrok-0.1.6.dist-info → mrok-0.1.8.dist-info}/licenses/LICENSE.txt +0 -0
mrok/http/forwarder.py ADDED
@@ -0,0 +1,299 @@
+ import abc
+ import asyncio
+ import logging
+ from collections.abc import Awaitable, Callable
+ from typing import Any
+
+ logger = logging.getLogger("mrok.proxy")
+
+ Scope = dict[str, Any]
+ ASGIReceive = Callable[[], Awaitable[dict[str, Any]]]
+ ASGISend = Callable[[dict[str, Any]], Awaitable[None]]
+
+
+ class BackendNotFoundError(Exception):
+     pass
+
+
+ class ForwardAppBase(abc.ABC):
+     """Generic HTTP forwarder base class.
+
+     Subclasses must implement `select_backend(scope)` to return an
+     (asyncio.StreamReader, asyncio.StreamWriter) pair connected to the
+     desired backend. The base class implements the HTTP/1.1 framing
+     and streaming logic (requests and responses).
+     """
+
+     def __init__(self, read_chunk_size: int = 65536) -> None:
+         # number of bytes to read per iteration when streaming bodies
+         self._read_chunk_size: int = int(read_chunk_size)
+
+     @abc.abstractmethod
+     async def select_backend(
+         self,
+         scope: Scope,
+         headers: dict[str, str],
+     ) -> tuple[asyncio.StreamReader, asyncio.StreamWriter] | tuple[None, None]:
+         """Return (reader, writer) connected to the target backend."""
+
+     async def __call__(self, scope: Scope, receive: ASGIReceive, send: ASGISend) -> None:
+         """ASGI callable entry point.
+
+         Delegates to smaller helper methods for readability. Subclasses only
+         need to implement backend selection.
+         """
+         if scope.get("type") != "http":
+             await send({"type": "http.response.start", "status": 500, "headers": []})
+             await send({"type": "http.response.body", "body": b"Unsupported"})
+             return
+
+         method = scope.get("method", "GET")
+         path_qs = self.format_path(scope)
+
+         headers = list(scope.get("headers", []))
+         headers = self.ensure_host_header(headers, scope)
+         reader, writer = await self.select_backend(
+             scope, {k[0].decode().lower(): k[1].decode() for k in headers}
+         )
+
+         if not (reader and writer):
+             await send({"type": "http.response.start", "status": 502, "headers": []})
+             await send({"type": "http.response.body", "body": b"Bad Gateway"})
+             return
+
+         use_chunked = self.ensure_chunked_if_needed(headers)
+
+         await self.write_request_line_and_headers(writer, method, path_qs, headers)
+
+         await self.stream_request_body(receive, writer, use_chunked)
+
+         status_line = await reader.readline()
+         if not status_line:
+             await send({"type": "http.response.start", "status": 502, "headers": []})
+             await send({"type": "http.response.body", "body": b"Bad Gateway"})
+             writer.close()
+             await writer.wait_closed()
+             return
+
+         status, headers_out, raw_headers = await self.read_status_and_headers(reader, status_line)
+
+         await send({"type": "http.response.start", "status": status, "headers": headers_out})
+
+         await self.stream_response_body(reader, send, raw_headers)
+
+         writer.close()
+         await writer.wait_closed()
+
+     def format_path(self, scope: Scope) -> str:
+         raw_path = scope.get("raw_path")
+         if raw_path:
+             return raw_path.decode()
+         q = scope.get("query_string", b"")
+         path = scope.get("path", "/")
+         path_qs = path
+         if q:
+             path_qs += "?" + q.decode()
+         return path_qs
+
+     def ensure_host_header(
+         self, headers: list[tuple[bytes, bytes]], scope: Scope
+     ) -> list[tuple[bytes, bytes]]:
+         if any(n.lower() == b"host" for n, _ in headers):
+             return headers
+         server = scope.get("server")
+         if server:
+             host = f"{server[0]}:{server[1]}" if server[1] else server[0]
+             headers.append((b"host", host.encode()))
+         return headers
+
+     def ensure_chunked_if_needed(self, headers: list[tuple[bytes, bytes]]) -> bool:
+         has_content_length = any(n.lower() == b"content-length" for n, _ in headers)
+         has_transfer_encoding = any(n.lower() == b"transfer-encoding" for n, _ in headers)
+         if not has_content_length and not has_transfer_encoding:
+             headers.append((b"transfer-encoding", b"chunked"))
+             return True
+         return False
+
+     async def write_request_line_and_headers(
+         self,
+         writer: asyncio.StreamWriter,
+         method: str,
+         path_qs: str,
+         headers: list[tuple[bytes, bytes]],
+     ) -> None:
+         writer.write(f"{method} {path_qs} HTTP/1.1\r\n".encode())
+         for name, value in headers:
+             if name.lower() == b"expect":
+                 continue
+             writer.write(name + b": " + value + b"\r\n")
+         writer.write(b"\r\n")
+         await writer.drain()
+
+     async def stream_request_body(
+         self, receive: ASGIReceive, writer: asyncio.StreamWriter, use_chunked: bool
+     ) -> None:
+         if use_chunked:
+             await self.stream_request_chunked(receive, writer)
+             return
+
+         await self.stream_request_until_end(receive, writer)
+
+     async def stream_request_chunked(
+         self, receive: ASGIReceive, writer: asyncio.StreamWriter
+     ) -> None:
+         """Send request body to backend using HTTP/1.1 chunked encoding."""
+         while True:
+             event = await receive()
+             if event["type"] == "http.request":
+                 body = event.get("body", b"") or b""
+                 if body:
+                     writer.write(f"{len(body):X}\r\n".encode())
+                     writer.write(body)
+                     writer.write(b"\r\n")
+                     await writer.drain()
+                 if not event.get("more_body", False):
+                     break
+             elif event["type"] == "http.disconnect":
+                 writer.close()
+                 return
+
+         writer.write(b"0\r\n\r\n")
+         await writer.drain()
+
+     async def stream_request_until_end(
+         self, receive: ASGIReceive, writer: asyncio.StreamWriter
+     ) -> None:
+         """Send request body to backend when content length/transfer-encoding
+         already provided (no chunking).
+         """
+         while True:
+             event = await receive()
+             if event["type"] == "http.request":
+                 body = event.get("body", b"") or b""
+                 if body:
+                     writer.write(body)
+                     await writer.drain()
+                 if not event.get("more_body", False):
+                     break
+             elif event["type"] == "http.disconnect":
+                 writer.close()
+                 return
+
+     async def read_status_and_headers(
+         self, reader: asyncio.StreamReader, first_line: bytes
+     ) -> tuple[int, list[tuple[bytes, bytes]], dict[bytes, bytes]]:
+         parts = first_line.decode(errors="ignore").split(" ", 2)
+         status = int(parts[1]) if len(parts) >= 2 and parts[1].isdigit() else 502
+         headers: list[tuple[bytes, bytes]] = []
+         raw_headers: dict[bytes, bytes] = {}
+         while True:
+             line = await reader.readline()
+             if line in (b"\r\n", b"\n", b""):
+                 break
+             i = line.find(b":")
+             if i == -1:
+                 continue
+             name = line[:i].strip().lower()
+             value = line[i + 1 :].strip()
+             headers.append((name, value))
+             raw_headers[name] = value
+
+         return status, headers, raw_headers
+
+     def is_chunked(self, te_value: bytes) -> bool:
+         """Return True if transfer-encoding header tokens include 'chunked'."""
+         if not te_value:
+             return False
+         # split on commas, strip spaces and check tokens
+         tokens = [t.strip() for t in te_value.split(b",")]
+         return any(t.lower() == b"chunked" for t in tokens)
+
+     def parse_content_length(self, cl_value: bytes | None) -> int | None:
+         """Parse Content-Length header value to int, or return None if invalid."""
+         if cl_value is None:
+             return None
+         try:
+             return int(cl_value)
+         except Exception:
+             return None
+
+     async def drain_trailers(self, reader: asyncio.StreamReader) -> None:
+         """Consume trailer header lines until an empty line is encountered."""
+         while True:
+             trailer = await reader.readline()
+             if trailer in (b"\r\n", b"\n", b""):
+                 break
+
+     async def stream_response_chunked(self, reader: asyncio.StreamReader, send: ASGISend) -> None:
+         """Read chunked-encoded response from reader, decode and forward to ASGI send."""
+         while True:
+             size_line = await reader.readline()
+             if not size_line:
+                 break
+             size_str = size_line.split(b";", 1)[0].strip()
+             try:
+                 size = int(size_str, 16)
+             except Exception:
+                 break
+             if size == 0:
+                 # consume trailers
+                 await self.drain_trailers(reader)
+                 break
+             try:
+                 chunk = await reader.readexactly(size)
+             except Exception:
+                 break
+             # consume the CRLF after the chunk
+             try:
+                 await reader.readexactly(2)
+             except Exception:
+                 logger.warning("failed to read CRLF after chunk from backend")
+             await send({"type": "http.response.body", "body": chunk, "more_body": True})
+
+         await send({"type": "http.response.body", "body": b"", "more_body": False})
+
+     async def stream_response_with_content_length(
+         self, reader: asyncio.StreamReader, send: ASGISend, content_length: int
+     ) -> None:
+         """Read exactly content_length bytes and forward to ASGI send events."""
+         remaining = content_length
+         sent_final = False
+         while remaining > 0:
+             to_read = min(self._read_chunk_size, remaining)
+             chunk = await reader.read(to_read)
+             if not chunk:
+                 break
+             remaining -= len(chunk)
+             more = remaining > 0
+             await send({"type": "http.response.body", "body": chunk, "more_body": more})
+             if not more:
+                 sent_final = True
+
+         if not sent_final:
+             await send({"type": "http.response.body", "body": b"", "more_body": False})
+
+     async def stream_response_until_eof(self, reader: asyncio.StreamReader, send: ASGISend) -> None:
+         """Read from reader until EOF and forward chunks to ASGI send events."""
+         while True:
+             chunk = await reader.read(self._read_chunk_size)
+             if not chunk:
+                 break
+             await send({"type": "http.response.body", "body": chunk, "more_body": True})
+         await send({"type": "http.response.body", "body": b"", "more_body": False})
+
+     async def stream_response_body(
+         self, reader: asyncio.StreamReader, send: ASGISend, raw_headers: dict[bytes, bytes]
+     ) -> None:
+         te = raw_headers.get(b"transfer-encoding", b"").lower()
+         cl = raw_headers.get(b"content-length")
+
+         if self.is_chunked(te):
+             await self.stream_response_chunked(reader, send)
+             return
+
+         content_length = self.parse_content_length(cl)
+         if content_length is not None:
+             await self.stream_response_with_content_length(reader, send, content_length)
+             return
+
+         await self.stream_response_until_eof(reader, send)
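Note on usage: `ForwardAppBase` is abstract, so nothing in this file opens a connection by itself; backend selection is left to subclasses elsewhere in the package. A minimal sketch of a concrete subclass, assuming a plain TCP backend on a fixed address (the `FixedBackendForwarder` name and the `127.0.0.1:8080` target are illustrative only, not part of mrok):

```python
import asyncio

from mrok.http.forwarder import ForwardAppBase, Scope


class FixedBackendForwarder(ForwardAppBase):
    """Hypothetical forwarder that proxies every request to one backend."""

    async def select_backend(
        self, scope: Scope, headers: dict[str, str]
    ) -> tuple[asyncio.StreamReader, asyncio.StreamWriter] | tuple[None, None]:
        try:
            # Open a plain TCP connection to the backend; returning
            # (None, None) makes the base class answer 502 Bad Gateway.
            return await asyncio.open_connection("127.0.0.1", 8080)
        except OSError:
            return None, None


# The instance is a plain ASGI callable, so any ASGI server can host it.
app = FixedBackendForwarder()
```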
mrok/http/lifespan.py ADDED
@@ -0,0 +1,10 @@
+ import logging
+
+ from uvicorn.config import Config
+ from uvicorn.lifespan.on import LifespanOn
+
+
+ class MrokLifespan(LifespanOn):
+     def __init__(self, config: Config) -> None:
+         super().__init__(config)
+         self.logger = logging.getLogger("mrok.proxy")
mrok/http/master.py ADDED
@@ -0,0 +1,90 @@
+ import logging
+ import os
+ import signal
+ import threading
+ from collections.abc import Callable
+ from pathlib import Path
+
+ from watchfiles import watch
+ from watchfiles.filters import PythonFilter
+ from watchfiles.run import CombinedProcess, start_process
+
+ logger = logging.getLogger("mrok.agent")
+
+
+ def print_path(path):
+     try:
+         return f'"{path.relative_to(Path.cwd())}"'
+     except ValueError:
+         return f'"{path}"'
+
+
+ class Master:
+     def __init__(
+         self,
+         start_fn: Callable,
+         workers: int,
+         reload: bool,
+     ):
+         self.start_fn = start_fn
+         self.workers = workers
+         self.reload = reload
+         self.worker_processes: list[CombinedProcess] = []
+         self.stop_event = threading.Event()
+         self.watch_filter = PythonFilter(ignore_paths=None)
+         self.watcher = watch(
+             Path.cwd(),
+             watch_filter=self.watch_filter,
+             stop_event=self.stop_event,
+             yield_on_timeout=True,
+         )
+         self.setup_signals_handler()
+
+     def setup_signals_handler(self):
+         for sig in (signal.SIGINT, signal.SIGTERM):
+             signal.signal(sig, self.handle_signal)
+
+     def handle_signal(self, *args, **kwargs):
+         self.stop_event.set()
+
+     def start(self):
+         for _ in range(self.workers):
+             p = start_process(
+                 self.start_fn,
+                 "function",
+                 (),
+                 None,
+             )
+             logger.info(f"Worker [{p.pid}] started")
+             self.worker_processes.append(p)
+
+     def stop(self):
+         for process in self.worker_processes:
+             process.stop(sigint_timeout=5, sigkill_timeout=1)
+         self.worker_processes = []
+
+     def restart(self):
+         self.stop()
+         self.start()
+
+     def __iter__(self):
+         return self
+
+     def __next__(self):
+         changes = next(self.watcher)
+         if changes:
+             return list({Path(change[1]) for change in changes})
+         return None
+
+     def run(self):
+         self.start()
+         logger.info(f"Master process started: {os.getpid()}")
+         if self.reload:
+             for files_changed in self:
+                 if files_changed:
+                     logger.warning(
+                         f"{', '.join(map(print_path, files_changed))} changed, reloading...",
+                     )
+                     self.restart()
+         else:
+             self.stop_event.wait()
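For orientation, `Master` is a small pre-fork supervisor: `start()` spawns `workers` processes via watchfiles' `start_process`, and `run()` either restarts them when Python files under the current directory change (`reload=True`) or simply waits for SIGINT/SIGTERM. A rough usage sketch, where `serve` stands in for whatever worker entry point the CLI wires up (it is a placeholder, not an mrok function):

```python
import time

from mrok.http.master import Master


def serve() -> None:
    # Placeholder worker body; in mrok this would start the actual
    # ASGI worker. Each worker runs in its own process.
    while True:
        time.sleep(1)


if __name__ == "__main__":
    # Two worker processes, restarted whenever .py files under cwd change.
    Master(start_fn=serve, workers=2, reload=True).run()
```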
mrok/http/protocol.py ADDED
@@ -0,0 +1,11 @@
+ import logging
+
+ from uvicorn.protocols.http.httptools_impl import HttpToolsProtocol
+
+
+ class MrokHttpToolsProtocol(HttpToolsProtocol):
+     def __init__(self, *args, **kwargs):
+         super().__init__(*args, **kwargs)
+         self.logger = logging.getLogger("mrok.proxy")
+         self.access_logger = logging.getLogger("mrok.access")
+         self.access_log = self.access_logger.hasHandlers()
mrok/http/server.py ADDED
@@ -0,0 +1,14 @@
+ import logging
+ import socket
+
+ from uvicorn import server
+
+ server.logger = logging.getLogger("mrok.proxy")
+
+
+ class MrokServer(server.Server):
+     async def serve(self, sockets: list[socket.socket] | None = None) -> None:
+         if not sockets:
+             sockets = [self.config.bind_socket()]
+         with self.capture_signals():
+             await self._serve(sockets)
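`MrokHttpToolsProtocol` and `MrokServer` are thin uvicorn subclasses that mainly re-route logging to the `mrok.*` loggers; `MrokServer.serve()` also binds a socket from the config when none are passed in before delegating to uvicorn's internal `_serve`. A sketch of how they could be wired together, assuming uvicorn's `Config` accepts a protocol class for its `http` argument and with `"myapp:app"` as a placeholder ASGI import string (the actual wiring in mrok lives in modules not shown in this excerpt):

```python
import asyncio

from uvicorn.config import Config

from mrok.http.protocol import MrokHttpToolsProtocol
from mrok.http.server import MrokServer


async def main() -> None:
    # Assumption: Config(http=...) accepts a protocol class; "myapp:app"
    # is a placeholder import string for an ASGI application.
    config = Config("myapp:app", host="127.0.0.1", port=8000, http=MrokHttpToolsProtocol)
    # MrokServer binds a socket from the config and logs via "mrok.proxy".
    await MrokServer(config).serve()


if __name__ == "__main__":
    asyncio.run(main())
```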
mrok/logging.py ADDED
@@ -0,0 +1,76 @@
+ import logging.config
+
+ from mrok.conf import Settings
+
+
+ def get_logging_config(settings: Settings, cli_mode: bool = False) -> dict:
+     log_level = "DEBUG" if settings.logging.debug else "INFO"
+     handler = "rich" if settings.logging.rich else "console"
+
+     if cli_mode:
+         mrok_handler = "cli"
+     else:
+         mrok_handler = handler
+
+     logging_config = {
+         "version": 1,
+         "disable_existing_loggers": False,
+         "formatters": {
+             "verbose": {
+                 "format": "{asctime} {name} {levelname} (pid: {process}) {message}",
+                 "style": "{",
+             },
+             "rich": {
+                 "format": "{name} {message}",
+                 "style": "{",
+             },
+             "plain": {"format": "%(message)s"},
+         },
+         "handlers": {
+             "console": {
+                 "class": "logging.StreamHandler",
+                 "formatter": "verbose",
+                 "stream": "ext://sys.stderr",
+             },
+             "cli": {
+                 "class": "logging.StreamHandler",
+                 "formatter": "plain",
+                 "stream": "ext://sys.stdout",
+             },
+             "rich": {
+                 "class": "rich.logging.RichHandler",
+                 "level": log_level,
+                 "formatter": "rich",
+                 "log_time_format": lambda x: x.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3],
+                 "rich_tracebacks": True,
+             },
+         },
+         "root": {
+             "handlers": ["rich"],
+             "level": "WARNING",
+         },
+         "loggers": {
+             "gunicorn.access": {
+                 "handlers": [handler],
+                 "level": log_level,
+                 "propagate": False,
+             },
+             "gunicorn.error": {
+                 "handlers": [handler],
+                 "level": log_level,
+                 "propagate": False,
+             },
+             "mrok": {
+                 "handlers": [mrok_handler],
+                 "level": log_level,
+                 "propagate": False,
+             },
+         },
+     }
+
+     return logging_config
+
+
+ def setup_logging(settings: Settings, cli_mode: bool = False) -> None:
+     logging_config = get_logging_config(settings, cli_mode)
+     logging.config.dictConfig(logging_config)
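The logging setup routes everything under the `mrok` logger hierarchy (`mrok.proxy`, `mrok.access`, `mrok.agent`, ...) plus the gunicorn loggers through either a Rich handler or a plain stderr handler, depending on `settings.logging.rich` and `settings.logging.debug`. A hedged usage sketch; `mrok/conf.py` is not shown in this excerpt, so the `Settings()` construction below is only assumed to work with defaults:

```python
import logging

from mrok.conf import Settings
from mrok.logging import setup_logging

# Assumption: Settings can be instantiated from defaults/environment;
# its real constructor lives in mrok/conf.py, which this excerpt omits.
settings = Settings()
setup_logging(settings, cli_mode=False)

# Loggers under "mrok" now use the configured handler at DEBUG or INFO.
logging.getLogger("mrok.proxy").info("logging configured")
```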
mrok/ziti/__init__.py ADDED
@@ -0,0 +1,15 @@
+ from mrok.ziti.api import (
+     ZitiAPIError,
+     ZitiAuthError,
+     ZitiBadRequestError,
+     ZitiClientAPI,
+     ZitiManagementAPI,
+ )
+
+ __all__ = [
+     "ZitiAPIError",
+     "ZitiAuthError",
+     "ZitiBadRequestError",
+     "ZitiClientAPI",
+     "ZitiManagementAPI",
+ ]