mrok 0.4.1__py3-none-any.whl → 0.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mrok/proxy/app.py CHANGED
@@ -1,12 +1,12 @@
+ import asyncio
  import logging
- import os
  from pathlib import Path

  from mrok.conf import get_settings
  from mrok.http.forwarder import ForwardAppBase
  from mrok.http.types import Scope, StreamReader, StreamWriter
  from mrok.logging import setup_logging
- from mrok.proxy.ziti import ZitiConnectionManager
+ from mrok.proxy.ziti import ZitiSocketCache

  logger = logging.getLogger("mrok.proxy")

@@ -21,8 +21,6 @@ class ProxyApp(ForwardAppBase):
          identity_file: str | Path,
          *,
          read_chunk_size: int = 65536,
-         ziti_connection_ttl_seconds: float = 60,
-         ziti_conn_cache_purge_interval_seconds: float = 10,
      ) -> None:
          super().__init__(read_chunk_size=read_chunk_size)
          self._identity_file = identity_file
@@ -32,40 +30,28 @@ class ProxyApp(ForwardAppBase):
              if settings.proxy.domain[0] == "."
              else f".{settings.proxy.domain}"
          )
-         self._conn_manager = ZitiConnectionManager(
-             identity_file,
-             ttl_seconds=ziti_connection_ttl_seconds,
-             purge_interval=ziti_conn_cache_purge_interval_seconds,
-         )
-
-     def get_target_from_header(self, name: str, headers: dict[str, str]) -> str:
-         header_value = headers.get(name)
-         if not header_value:
-             raise ProxyError(
-                 f"Header {name} not found!",
-             )
-         if ":" in header_value:
-             header_value, _ = header_value.split(":", 1)
-         if not header_value.endswith(self._proxy_wildcard_domain):
-             raise ProxyError(f"Unexpected value for {name} header: `{header_value}`.")
+         self._ziti_socket_cache = ZitiSocketCache(self._identity_file)

-         return header_value[: -len(self._proxy_wildcard_domain)]
+     def get_target_from_header(self, headers: dict[str, str], name: str) -> str | None:
+         header_value = headers.get(name, "")
+         if self._proxy_wildcard_domain in header_value:
+             if ":" in header_value:
+                 header_value, _ = header_value.split(":", 1)
+             return header_value[: -len(self._proxy_wildcard_domain)]

      def get_target_name(self, headers: dict[str, str]) -> str:
-         try:
-             return self.get_target_from_header("x-forwared-for", headers)
-         except ProxyError as pe:
-             logger.warning(pe)
-             return self.get_target_from_header("host", headers)
+         target = self.get_target_from_header(headers, "x-forwarded-host")
+         if not target:
+             target = self.get_target_from_header(headers, "host")
+         if not target:
+             raise ProxyError("Neither Host nor X-Forwarded-Host contain a valid target name")
+         return target

      async def startup(self):
          setup_logging(get_settings())
-         await self._conn_manager.start()
-         logger.info(f"Proxy app startup completed: {os.getpid()}")

      async def shutdown(self):
-         await self._conn_manager.stop()
-         logger.info(f"Proxy app shutdown completed: {os.getpid()}")
+         await self._ziti_socket_cache.stop()

      async def select_backend(
          self,
@@ -73,5 +59,6 @@ class ProxyApp(ForwardAppBase):
          headers: dict[str, str],
      ) -> tuple[StreamReader, StreamWriter] | tuple[None, None]:
          target_name = self.get_target_name(headers)
-
-         return await self._conn_manager.get(target_name)
+         sock = self._ziti_socket_cache.get_or_create(target_name)
+         reader, writer = await asyncio.open_connection(sock=sock)
+         return reader, writer
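
For orientation, the new routing behavior can be exercised outside the proxy. The sketch below is illustrative only: it mirrors the suffix-stripping logic of the rewritten get_target_from_header, and the wildcard domain and hostnames are hypothetical values, not anything shipped by mrok.

# Illustrative sketch (not part of the package): standalone version of the
# suffix-stripping logic added to ProxyApp.get_target_from_header.
# The wildcard domain and hostnames below are hypothetical examples.
def target_from_header(headers: dict[str, str], name: str, wildcard_domain: str) -> str | None:
    header_value = headers.get(name, "")
    if wildcard_domain in header_value:
        if ":" in header_value:
            header_value, _ = header_value.split(":", 1)  # drop an explicit port
        return header_value[: -len(wildcard_domain)]
    return None


# "myext.proxy.example.com:443" with wildcard ".proxy.example.com" resolves to "myext";
# a non-matching Host yields None, which is why get_target_name falls back to "host"
# and only then raises ProxyError.
assert target_from_header(
    {"x-forwarded-host": "myext.proxy.example.com:443"}, "x-forwarded-host", ".proxy.example.com"
) == "myext"
assert target_from_header({"host": "example.org"}, "host", ".proxy.example.com") is None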
mrok/proxy/ziti.py CHANGED
@@ -1,173 +1,103 @@
- """Ziti-backed connection manager for the proxy.
-
- This manager owns creation of connections via an OpenZiti context, wraps
- streams to observe IO errors, evicts idle entries, and serializes creation
- per-key.
- """
-
  import asyncio
- import logging
- import time
+ import contextlib
+ from asyncio import Task
  from pathlib import Path

- # typing imports intentionally minimized
  import openziti
+ from aiocache import Cache
+ from openziti.context import ZitiContext
+ from openziti.zitisock import ZitiSocket

- from mrok.http.types import StreamReader, StreamWriter
- from mrok.proxy.dataclasses import CachedStreamEntry
- from mrok.proxy.streams import CachedStreamReader, CachedStreamWriter
- from mrok.proxy.types import CachedStream, ConnectionKey
-
- logger = logging.getLogger("mrok.proxy")

-
- class ZitiConnectionManager:
+ class ZitiSocketCache:
      def __init__(
          self,
          identity_file: str | Path,
-         ziti_timeout_ms: int = 10000,
+         ziti_ctx_timeout_ms: int = 10_000,
          ttl_seconds: float = 60.0,
-         purge_interval: float = 10.0,
-     ):
+         cleanup_interval: float = 10.0,
+     ) -> None:
          self._identity_file = identity_file
-         self._ziti_ctx = None
-         self._ziti_timeout_ms = ziti_timeout_ms
-         self._ttl = float(ttl_seconds)
-         self._purge_interval = float(purge_interval)
-         self._cache: dict[ConnectionKey, CachedStreamEntry] = {}
-         self._lock = asyncio.Lock()
-         self._in_progress: dict[ConnectionKey, asyncio.Lock] = {}
-         self._purge_task: asyncio.Task | None = None
-
-     async def get(self, target: str) -> tuple[StreamReader, StreamWriter] | tuple[None, None]:
-         head, _, tail = target.partition(".")
-         terminator = target if head and tail else ""
-         service = tail if tail else head
-         r, w = await self._get_or_create_key((service, terminator))
-         return r, w
-
-     async def invalidate(self, key: ConnectionKey) -> None:
-         async with self._lock:
-             item = self._cache.pop(key, None)
-         if item is None:
-             return
-         await self._close_writer(item.writer)
+         self._ziti_ctx_timeout_ms = ziti_ctx_timeout_ms
+         self._ttl_seconds = ttl_seconds
+         self._cleanup_interval = cleanup_interval

-     async def start(self) -> None:
+         self._ziti_ctx: ZitiContext | None = None
+         self._cache = Cache(Cache.MEMORY)
+         self._active_sockets: dict[str, ZitiSocket] = {}
+         self._cleanup_task: Task | None = None
+
+     def _get_ziti_ctx(self) -> ZitiContext:
          if self._ziti_ctx is None:
-             ctx, err = openziti.load(str(self._identity_file), timeout=self._ziti_timeout_ms)
+             ctx, err = openziti.load(str(self._identity_file), timeout=self._ziti_ctx_timeout_ms)
              if err != 0:
                  raise Exception(f"Cannot create a Ziti context from the identity file: {err}")
              self._ziti_ctx = ctx
-         if self._purge_task is None:
-             self._purge_task = asyncio.create_task(self._purge_loop())
-         logger.info("Ziti connection manager started")
-
-     async def stop(self) -> None:
-         if self._purge_task is not None:
-             self._purge_task.cancel()
-             try:
-                 await self._purge_task
-             except asyncio.CancelledError:
-                 logger.debug("Purge task was cancelled")
-             except Exception as e:
-                 logger.warning(f"An error occurred stopping the purge task: {e}")
-             self._purge_task = None
-         logger.info("Ziti connection manager stopped")
-
-         async with self._lock:
-             items = list(self._cache.items())
-             self._cache.clear()
-
-         for _, item in items:
-             await self._close_writer(item.writer)
-
-     async def _purge_loop(self) -> None:
+         return self._ziti_ctx
+
+     async def _create_socket(self, key: str):
+         return self._get_ziti_ctx().connect(key)
+
+     async def get_or_create(self, key: str):
+         sock = await self._cache.get(key)
+
+         if sock:
+             await self._cache.set(key, sock, ttl_seconds=self._ttl_seconds)
+             self._active_sockets[key] = sock
+             return sock
+
+         sock = await self._create_socket(key)
+         await self._cache.set(key, sock, ttl_seconds=self._ttl_seconds)
+         self._active_sockets[key] = sock
+         return sock
+
+     async def invalidate(self, key: str):
+         sock = await self._cache.get(key)
+         if sock:
+             await self._close_socket(sock)
+
+         await self._cache.delete(key)
+         self._active_sockets.pop(key, None)
+
+     async def start(self):
+         self._cleanup_task = asyncio.create_task(self._periodic_cleanup())
+         # Warmup ziti context
+         self._get_ziti_ctx()
+
+     async def stop(self):
+         """
+         Cleanup: stop background task + close all sockets.
+         """
+         self._cleanup_task.cancel()
+         with contextlib.suppress(Exception):
+             await self._cleanup_task
+
+         for sock in list(self._active_sockets.values()):
+             await self._close_socket(sock)
+
+         self._active_sockets.clear()
+         await self._cache.clear()
+
+     @staticmethod
+     async def _close_socket(sock: ZitiSocket):
+         with contextlib.suppress(Exception):
+             sock.close()
+
+     async def _periodic_cleanup(self):
          try:
              while True:
-                 await asyncio.sleep(self._purge_interval)
-                 await self._purge_once()
+                 await asyncio.sleep(self._cleanup_interval)
+                 await self._cleanup_once()
          except asyncio.CancelledError:
              return

-     async def _purge_once(self) -> None:
-         to_close: list[tuple[StreamReader, StreamWriter]] = []
-         async with self._lock:
-             now = time.time()
-             for key, item in list(self._cache.items()):
-                 if now - item.last_access > self._ttl:
-                     to_close.append((item.reader, item.writer))
-                     del self._cache[key]
+     async def _cleanup_once(self):
+         keys_now = set(await self._cache.keys())
+         known_keys = set(self._active_sockets.keys())

-         for _, writer in to_close:
-             writer.close()
-             await self._close_writer(writer)
+         expired = known_keys - keys_now

-     def _is_writer_closed(self, writer: StreamWriter) -> bool:
-         return writer.transport.is_closing()
-
-     async def _close_writer(self, writer: StreamWriter) -> None:
-         writer.close()
-         try:
-             await writer.wait_closed()
-         except Exception as e:
-             logger.debug(f"Error closing writer: {e}")
-
-     async def _get_or_create_key(self, key: ConnectionKey) -> CachedStream:
-         """Internal: create or return a cached wrapped pair for the concrete key."""
-         await self._purge_once()
-         to_close = None
-         async with self._lock:
-             if key in self._cache:
-                 now = time.time()
-                 item = self._cache[key]
-                 reader, writer = item.reader, item.writer
-                 if not self._is_writer_closed(writer) and not reader.at_eof():
-                     self._cache[key] = CachedStreamEntry(reader, writer, now)
-                     return reader, writer
-                 to_close = writer
-                 del self._cache[key]
-
-             lock = self._in_progress.get(key)
-             if lock is None:
-                 lock = asyncio.Lock()
-                 self._in_progress[key] = lock
-
-         if to_close:
-             await self._close_writer(to_close)
-
-         async with lock:
-             try:
-                 # # double-check cache after acquiring the per-key lock
-                 # async with self._lock:
-                 #     now = time.time()
-                 #     if key in self._cache:
-                 #         r, w, _ = self._cache[key]
-                 #         if not self._is_writer_closed(w) and not r.at_eof():
-                 #             self._cache[key] = (r, w, now)
-                 #             return r, w
-
-                 # perform creation via ziti context
-                 extension, instance = key
-                 logger.info(f"Create connection to {extension}: {instance}")
-                 # loop = asyncio.get_running_loop()
-                 # sock = await loop.run_in_executor(None, self._ziti_ctx.connect,
-                 #     extension, instance)
-                 if instance:
-                     sock = self._ziti_ctx.connect(
-                         extension, terminator=instance
-                     )  # , terminator=instance)
-                 else:
-                     sock = self._ziti_ctx.connect(extension)
-                 orig_reader, orig_writer = await asyncio.open_connection(sock=sock)
-
-                 reader = CachedStreamReader(orig_reader, key, self)
-                 writer = CachedStreamWriter(orig_writer, key, self)
-
-                 async with self._lock:
-                     self._cache[key] = CachedStreamEntry(reader, writer, time.time())
-
-                 return reader, writer
-             finally:
-                 async with self._lock:
-                     self._in_progress.pop(key, None)
+         for key in expired:
+             sock = self._active_sockets.pop(key, None)
+             if sock:
+                 await self._close_socket(sock)
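
Read together with the app.py changes above, the cache is intended to be driven roughly as follows. This is a minimal usage sketch, not code from the package: the identity-file path and service name are placeholders, and it only calls methods defined on ZitiSocketCache above (get_or_create is a coroutine, so it is awaited here).

import asyncio

from mrok.proxy.ziti import ZitiSocketCache


async def main() -> None:
    # "identity.json" is a placeholder path to an OpenZiti identity file.
    cache = ZitiSocketCache("identity.json", ttl_seconds=60.0, cleanup_interval=10.0)
    await cache.start()  # warm up the Ziti context and start the periodic cleanup task
    try:
        # "my-extension" is a hypothetical Ziti service name used as the cache key.
        sock = await cache.get_or_create("my-extension")
        reader, writer = await asyncio.open_connection(sock=sock)
        # ... forward bytes over reader/writer ...
        await cache.invalidate("my-extension")  # evict the socket if the backend misbehaves
    finally:
        await cache.stop()  # cancel the cleanup task and close any cached sockets


asyncio.run(main())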
mrok-0.4.1.dist-info/METADATA → mrok-0.4.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mrok
- Version: 0.4.1
+ Version: 0.4.3
  Summary: MPT Extensions OpenZiti Orchestrator
  Author: SoftwareOne AG
  License: Apache License
@@ -206,6 +206,7 @@ License: Apache License
  limitations under the License.
  License-File: LICENSE.txt
  Requires-Python: <4,>=3.12
+ Requires-Dist: aiocache<0.13.0,>=0.12.3
  Requires-Dist: asn1crypto<2.0.0,>=1.5.1
  Requires-Dist: cryptography<46.0.0,>=45.0.7
  Requires-Dist: dynaconf<4.0.0,>=3.2.11
mrok-0.4.1.dist-info/RECORD → mrok-0.4.3.dist-info/RECORD CHANGED
@@ -71,12 +71,9 @@ mrok/http/server.py,sha256=Mj7C85fc-DXp-WTBWaOd7ag808oliLmFBH5bf-G2FHg,370
  mrok/http/types.py,sha256=XpNrvbfpANKvmjOBYtLF1FmDHoJF3z_MIMQHXoJlvmE,1302
  mrok/http/utils.py,sha256=sOixYu3R9-nNoMFYdifrreYvcFRIHYVtb6AAmtVzaLE,2125
  mrok/proxy/__init__.py,sha256=vWXyImroqM1Eq8e_oFPBup8VJ3reyp8SVjFTbLzRkI8,51
- mrok/proxy/app.py,sha256=YP4hM3BIRf_MFw9YNEnsFrrBz76zVegrNFcvcsKTFvI,2579
- mrok/proxy/dataclasses.py,sha256=DtX-Yuma-uOECOPefJnoQJhZMEtT6Za_27cd-lJE9Iw,237
+ mrok/proxy/app.py,sha256=-xmHKSSqQhNdwISRXQCUVLI1n6HWs3d46cgKQ6_NC3A,2157
  mrok/proxy/main.py,sha256=ZXpticE6J4FABaslDB_8J5qklPsf3e7xIFSZmcPAAjQ,1588
- mrok/proxy/streams.py,sha256=6TMZwrQPbSyQqpqavsoTeyUmS2O026pJfiCnxLopPqg,3425
- mrok/proxy/types.py,sha256=dgWqAj6dFGVH_Q8-k8sU5h18yoUF_fTn-SRPIfEs_gA,308
- mrok/proxy/ziti.py,sha256=dKd6UzmEAFu9-gey871sPEDUZTkt4YVPyCYRzeA5mlA,6539
+ mrok/proxy/ziti.py,sha256=Ce02EimVJNffyluFr1nhOMXh6kZ0kY2nt6Shg58wkBw,3206
  mrok/ziti/__init__.py,sha256=20OWMiexRhOovZOX19zlX87-V78QyWnEnSZfyAftUdE,263
  mrok/ziti/api.py,sha256=KvGiT9d4oSgC3JbFWLDQyuHcLX2HuZJoJ8nHmWtCDkY,16154
  mrok/ziti/bootstrap.py,sha256=QIDhlkIxPW2QRuumFq2D1WDbD003P5f3z24pAUsyeBI,2696
@@ -85,8 +82,8 @@ mrok/ziti/errors.py,sha256=yYCbVDwktnR0AYduqtynIjo73K3HOhIrwA_vQimvEd4,368
  mrok/ziti/identities.py,sha256=1BcwfqAJHMBhc3vRaf0aLaIkoHskj5Xe2Lsq2lO9Vs8,6735
  mrok/ziti/pki.py,sha256=o2tySqHC8-7bvFuI2Tqxg9vX6H6ZSxWxfP_9x29e19M,1954
  mrok/ziti/services.py,sha256=zR1PEBYwXVou20iJK4euh0ZZFAo9UB8PZk8f6SDmiUE,3194
- mrok-0.4.1.dist-info/METADATA,sha256=PuydR9Z4yuDSDMuJrn1FqWGb18fJTA6TFChAj-LwWZE,15796
- mrok-0.4.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- mrok-0.4.1.dist-info/entry_points.txt,sha256=tloXwvU1uJicBJR2h-8HoVclPgwJWDwuREMHN8Zq-nU,38
- mrok-0.4.1.dist-info/licenses/LICENSE.txt,sha256=6PaICaoA3yNsZKLv5G6OKqSfLSoX7MakYqTDgJoTCBs,11346
- mrok-0.4.1.dist-info/RECORD,,
+ mrok-0.4.3.dist-info/METADATA,sha256=rfG5MT0fAz27oU7Jzr8RFD3LLA9Syci51df1gHFSgLI,15836
+ mrok-0.4.3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ mrok-0.4.3.dist-info/entry_points.txt,sha256=tloXwvU1uJicBJR2h-8HoVclPgwJWDwuREMHN8Zq-nU,38
+ mrok-0.4.3.dist-info/licenses/LICENSE.txt,sha256=6PaICaoA3yNsZKLv5G6OKqSfLSoX7MakYqTDgJoTCBs,11346
+ mrok-0.4.3.dist-info/RECORD,,
mrok/proxy/dataclasses.py DELETED
@@ -1,12 +0,0 @@
- from __future__ import annotations
-
- from dataclasses import dataclass
-
- from mrok.http.types import StreamReader, StreamWriter
-
-
- @dataclass
- class CachedStreamEntry:
-     reader: StreamReader
-     writer: StreamWriter
-     last_access: float
mrok/proxy/streams.py DELETED
@@ -1,124 +0,0 @@
- import asyncio
-
- from mrok.proxy.types import ConnectionCache, ConnectionKey
-
-
- class CachedStreamReader:
-     def __init__(
-         self,
-         reader: asyncio.StreamReader,
-         key: ConnectionKey,
-         manager: ConnectionCache,
-     ):
-         self._reader = reader
-         self._key = key
-         self._manager = manager
-
-     async def read(self, n: int = -1) -> bytes:
-         try:
-             return await self._reader.read(n)
-         except (
-             asyncio.CancelledError,
-             asyncio.IncompleteReadError,
-             asyncio.LimitOverrunError,
-             BrokenPipeError,
-             ConnectionAbortedError,
-             ConnectionResetError,
-             RuntimeError,
-             TimeoutError,
-             UnicodeDecodeError,
-         ):
-             asyncio.create_task(self._manager.invalidate(self._key))
-             raise
-
-     async def readexactly(self, n: int) -> bytes:
-         try:
-             return await self._reader.readexactly(n)
-         except (
-             asyncio.CancelledError,
-             asyncio.IncompleteReadError,
-             asyncio.LimitOverrunError,
-             BrokenPipeError,
-             ConnectionAbortedError,
-             ConnectionResetError,
-             RuntimeError,
-             TimeoutError,
-             UnicodeDecodeError,
-         ):
-             asyncio.create_task(self._manager.invalidate(self._key))
-             raise
-
-     async def readline(self) -> bytes:
-         try:
-             return await self._reader.readline()
-         except (
-             asyncio.CancelledError,
-             asyncio.IncompleteReadError,
-             asyncio.LimitOverrunError,
-             BrokenPipeError,
-             ConnectionAbortedError,
-             ConnectionResetError,
-             RuntimeError,
-             TimeoutError,
-             UnicodeDecodeError,
-         ):
-             asyncio.create_task(self._manager.invalidate(self._key))
-             raise
-
-     def at_eof(self) -> bool:
-         return self._reader.at_eof()
-
-     @property
-     def underlying(self) -> asyncio.StreamReader:
-         return self._reader
-
-
- class CachedStreamWriter:
-     def __init__(
-         self,
-         writer: asyncio.StreamWriter,
-         key: ConnectionKey,
-         manager: ConnectionCache,
-     ):
-         self._writer = writer
-         self._key = key
-         self._manager = manager
-
-     def write(self, data: bytes) -> None:
-         try:
-             return self._writer.write(data)
-         except (RuntimeError, TypeError):
-             asyncio.create_task(self._manager.invalidate(self._key))
-             raise
-
-     async def drain(self) -> None:
-         try:
-             return await self._writer.drain()
-         except (
-             asyncio.CancelledError,
-             BrokenPipeError,
-             ConnectionAbortedError,
-             ConnectionResetError,
-             RuntimeError,
-             TimeoutError,
-         ):
-             asyncio.create_task(self._manager.invalidate(self._key))
-             raise
-
-     def close(self) -> None:
-         return self._writer.close()
-
-     async def wait_closed(self) -> None:
-         try:
-             return await self._writer.wait_closed()
-         except (ConnectionResetError, BrokenPipeError):
-             asyncio.create_task(self._manager.invalidate(self._key))
-             raise
-
-     @property
-     def transport(self):
-         return self._writer.transport
-
-     @property
-     def underlying(self) -> asyncio.StreamWriter:
-         return self._writer
mrok/proxy/types.py DELETED
@@ -1,12 +0,0 @@
- from __future__ import annotations
-
- from typing import Protocol
-
- from mrok.http.types import StreamReader, StreamWriter
-
- ConnectionKey = tuple[str, str | None]
- CachedStream = tuple[StreamReader, StreamWriter]
-
-
- class ConnectionCache(Protocol):
-     async def invalidate(self, key: ConnectionKey) -> None: ...