mrok-0.4.2-py3-none-any.whl → mrok-0.4.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mrok/proxy/app.py CHANGED
@@ -1,3 +1,4 @@
+ import asyncio
  import logging
  from pathlib import Path

@@ -5,7 +6,7 @@ from mrok.conf import get_settings
  from mrok.http.forwarder import ForwardAppBase
  from mrok.http.types import Scope, StreamReader, StreamWriter
  from mrok.logging import setup_logging
- from mrok.proxy.ziti import ZitiConnectionManager
+ from mrok.proxy.ziti import ZitiSocketCache

  logger = logging.getLogger("mrok.proxy")

@@ -20,8 +21,6 @@ class ProxyApp(ForwardAppBase):
  identity_file: str | Path,
  *,
  read_chunk_size: int = 65536,
- ziti_connection_ttl_seconds: float = 60,
- ziti_conn_cache_purge_interval_seconds: float = 10,
  ) -> None:
  super().__init__(read_chunk_size=read_chunk_size)
  self._identity_file = identity_file
@@ -31,11 +30,7 @@ class ProxyApp(ForwardAppBase):
  if settings.proxy.domain[0] == "."
  else f".{settings.proxy.domain}"
  )
- self._conn_manager = ZitiConnectionManager(
- identity_file,
- ttl_seconds=ziti_connection_ttl_seconds,
- cleanup_interval=ziti_conn_cache_purge_interval_seconds,
- )
+ self._ziti_socket_cache = ZitiSocketCache(self._identity_file)

  def get_target_from_header(self, headers: dict[str, str], name: str) -> str | None:
  header_value = headers.get(name, "")
@@ -54,10 +49,9 @@ class ProxyApp(ForwardAppBase):

  async def startup(self):
  setup_logging(get_settings())
- await self._conn_manager.start()

  async def shutdown(self):
- await self._conn_manager.stop()
+ await self._ziti_socket_cache.stop()

  async def select_backend(
  self,
@@ -65,5 +59,6 @@ class ProxyApp(ForwardAppBase):
  headers: dict[str, str],
  ) -> tuple[StreamReader, StreamWriter] | tuple[None, None]:
  target_name = self.get_target_name(headers)
-
- return await self._conn_manager.get_or_create(target_name)
+ sock = await self._ziti_socket_cache.get_or_create(target_name)
+ reader, writer = await asyncio.open_connection(sock=sock)
+ return reader, writer
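The mechanism behind the new select_backend is that asyncio.open_connection(sock=...) adopts an already-connected socket and returns a fresh StreamReader/StreamWriter pair, so the cache can hold bare Ziti sockets instead of stream wrappers. A minimal sketch of that pattern, using a plain TCP socket as an illustrative stand-in for the cached ZitiSocket (the host and port are placeholders, not part of mrok):

import asyncio
import socket


async def wrap_existing_socket(host: str, port: int) -> tuple[asyncio.StreamReader, asyncio.StreamWriter]:
    # Connect a plain TCP socket first; in mrok this role is played by the
    # ZitiSocket returned from ZitiSocketCache.get_or_create().
    sock = socket.create_connection((host, port))
    # open_connection(sock=...) wraps the connected socket in asyncio streams,
    # which is what select_backend() now does per request.
    reader, writer = await asyncio.open_connection(sock=sock)
    return reader, writer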
mrok/proxy/ziti.py CHANGED
@@ -1,117 +1,102 @@
  import asyncio
  import contextlib
  import logging
+ from asyncio import Task
  from pathlib import Path

  import openziti
  from aiocache import Cache
-
- from mrok.proxy.streams import CachedStreamReader, CachedStreamWriter
- from mrok.proxy.types import StreamPair
+ from openziti.context import ZitiContext
+ from openziti.zitisock import ZitiSocket

  logger = logging.getLogger("mrok.proxy")


- class ZitiConnectionManager:
+ class ZitiSocketCache:
  def __init__(
  self,
  identity_file: str | Path,
- ziti_timeout_ms: int = 10000,
+ ziti_ctx_timeout_ms: int = 10_000,
  ttl_seconds: float = 60.0,
  cleanup_interval: float = 10.0,
- ):
- self.identity_file = identity_file
- self.ziti_timeout_ms = ziti_timeout_ms
- self.ttl_seconds = ttl_seconds
- self.cleanup_interval = cleanup_interval
-
- self.cache = Cache(Cache.MEMORY)
-
- self._active_pairs: dict[str, StreamPair] = {}
-
- self._cleanup_task: asyncio.Task | None = None
- self._ziti_ctx: openziti.context.ZitiContext | None = None
-
- async def create_stream_pair(self, key: str) -> StreamPair:
- if not self._ziti_ctx:
- raise Exception("ZitiConnectionManager is not started")
- sock = self._ziti_ctx.connect(key)
- orig_reader, orig_writer = await asyncio.open_connection(sock=sock)
-
- reader = CachedStreamReader(orig_reader, key, self)
- writer = CachedStreamWriter(orig_writer, key, self)
- return (reader, writer)
-
- async def get_or_create(self, key: str) -> StreamPair:
- pair = await self.cache.get(key)
-
- if pair:
- logger.info(f"return cached connection for {key}")
- await self.cache.set(key, pair, ttl=self.ttl_seconds)
- self._active_pairs[key] = pair
- return pair
-
- pair = await self.create_stream_pair(key)
- await self.cache.set(key, pair, ttl=self.ttl_seconds)
- self._active_pairs[key] = pair
- logger.info(f"return new connection for {key}")
- return pair
-
- async def invalidate(self, key: str) -> None:
- logger.info(f"invalidating connection for {key}")
- pair = await self.cache.get(key)
- if pair:
- await self._close_pair(pair)
-
- await self.cache.delete(key)
- self._active_pairs.pop(key, None)
-
- async def start(self) -> None:
- if self._cleanup_task is None:
- self._cleanup_task = asyncio.create_task(self._periodic_cleanup())
+ ) -> None:
+ self._identity_file = identity_file
+ self._ziti_ctx_timeout_ms = ziti_ctx_timeout_ms
+ self._ttl_seconds = ttl_seconds
+ self._cleanup_interval = cleanup_interval
+
+ self._ziti_ctx: ZitiContext | None = None
+ self._cache = Cache(Cache.MEMORY)
+ self._active_sockets: dict[str, ZitiSocket] = {}
+ self._cleanup_task: Task | None = None
+
+ def _get_ziti_ctx(self) -> ZitiContext:
  if self._ziti_ctx is None:
- ctx, err = openziti.load(str(self.identity_file), timeout=self.ziti_timeout_ms)
+ ctx, err = openziti.load(str(self._identity_file), timeout=self._ziti_ctx_timeout_ms)
  if err != 0:
  raise Exception(f"Cannot create a Ziti context from the identity file: {err}")
  self._ziti_ctx = ctx
+ return self._ziti_ctx
+
+ async def _create_socket(self, key: str):
+ return self._get_ziti_ctx().connect(key)
+
+ async def get_or_create(self, key: str):
+ sock = await self._cache.get(key)

- async def stop(self) -> None:
- if self._cleanup_task:
- self._cleanup_task.cancel()
- with contextlib.suppress(Exception):
- await self._cleanup_task
+ if sock:
+ await self._cache.expire(key, self._ttl_seconds)
+ self._active_sockets[key] = sock
+ logger.debug(f"Ziti socket found for service {key}")
+ return sock

- for pair in list(self._active_pairs.values()):
- await self._close_pair(pair)
+ sock = await self._create_socket(key)
+ await self._cache.set(key, sock, self._ttl_seconds)
+ self._active_sockets[key] = sock
+ logger.info(f"New Ziti socket created for service {key}")
+ return sock

- self._active_pairs.clear()
- await self.cache.clear()
- openziti.shutdown()
+ # async def invalidate(self, key: str):
+ # sock = await self._cache.get(key)
+ # if sock:
+ # await self._close_socket(sock)
+
+ # await self._cache.delete(key)
+ # self._active_sockets.pop(key, None)
+
+ async def start(self):
+ self._cleanup_task = asyncio.create_task(self._periodic_cleanup())
+ # Warmup ziti context
+ self._get_ziti_ctx()
+
+ async def stop(self):
+ self._cleanup_task.cancel()
+ with contextlib.suppress(Exception):
+ await self._cleanup_task
+
+ for sock in list(self._active_sockets.values()):
+ await self._close_socket(sock)
+
+ self._active_sockets.clear()
+ await self._cache.clear()

  @staticmethod
- async def _close_pair(pair: StreamPair) -> None:
- reader, writer = pair
- writer.close()
+ async def _close_socket(sock: ZitiSocket):
  with contextlib.suppress(Exception):
- await writer.wait_closed()
+ sock.close()

- async def _periodic_cleanup(self) -> None:
+ async def _periodic_cleanup(self):
  try:
  while True:
- await asyncio.sleep(self.cleanup_interval)
+ await asyncio.sleep(self._cleanup_interval)
  await self._cleanup_once()
  except asyncio.CancelledError:
  return

- async def _cleanup_once(self) -> None:
- # Keys currently stored in aiocache
- keys_in_cache = set(await self.cache.keys())
- # Keys we think are alive
- known_keys = set(self._active_pairs.keys())
-
- expired_keys = known_keys - keys_in_cache
-
- for key in expired_keys:
- pair = self._active_pairs.pop(key, None)
- if pair:
- await self._close_pair(pair)
+ async def _cleanup_once(self):
+ expired = {key for key in self._active_sockets.keys() if not self._cache.exists(key)}
+ for key in expired:
+ logger.debug(f"Cleaning up expired socket connection {key}")
+ sock = self._active_sockets.pop(key, None)
+ if sock:
+ await self._close_socket(sock)
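Taken together, the new ZitiSocketCache exposes a small lifecycle: start() spawns the periodic cleanup task and warms up the Ziti context, get_or_create() returns a per-service ZitiSocket with a sliding TTL, and stop() cancels cleanup, closes tracked sockets, and clears the cache. A minimal usage sketch based only on the methods visible in this diff; the identity file and service name are placeholders and a reachable Ziti network is assumed:

import asyncio

from mrok.proxy.ziti import ZitiSocketCache


async def main() -> None:
    # Placeholder identity file and service name, for illustration only.
    cache = ZitiSocketCache("identity.json", ttl_seconds=60.0, cleanup_interval=10.0)
    await cache.start()  # begin periodic cleanup and warm up the Ziti context
    try:
        sock = await cache.get_or_create("my-service")  # cached per service key
        reader, writer = await asyncio.open_connection(sock=sock)
        writer.write(b"GET / HTTP/1.1\r\nHost: my-service\r\n\r\n")
        await writer.drain()
        print(await reader.readline())
    finally:
        await cache.stop()  # cancel cleanup, close tracked sockets, clear the cache


if __name__ == "__main__":
    asyncio.run(main())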
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mrok
- Version: 0.4.2
+ Version: 0.4.4
  Summary: MPT Extensions OpenZiti Orchestrator
  Author: SoftwareOne AG
  License: Apache License
@@ -71,12 +71,9 @@ mrok/http/server.py,sha256=Mj7C85fc-DXp-WTBWaOd7ag808oliLmFBH5bf-G2FHg,370
  mrok/http/types.py,sha256=XpNrvbfpANKvmjOBYtLF1FmDHoJF3z_MIMQHXoJlvmE,1302
  mrok/http/utils.py,sha256=sOixYu3R9-nNoMFYdifrreYvcFRIHYVtb6AAmtVzaLE,2125
  mrok/proxy/__init__.py,sha256=vWXyImroqM1Eq8e_oFPBup8VJ3reyp8SVjFTbLzRkI8,51
- mrok/proxy/app.py,sha256=xNgT-lqXRe53HWzZFz0ceyttEbz__1PD8J07nme8L2s,2339
- mrok/proxy/dataclasses.py,sha256=DtX-Yuma-uOECOPefJnoQJhZMEtT6Za_27cd-lJE9Iw,237
+ mrok/proxy/app.py,sha256=yulfBdTdxesVxF1h2lli_5zjd5wP-jTx17FRdbkaV7A,2163
  mrok/proxy/main.py,sha256=ZXpticE6J4FABaslDB_8J5qklPsf3e7xIFSZmcPAAjQ,1588
- mrok/proxy/streams.py,sha256=a7EMKn3R7JB3iHKdmbs8QiEHd1xlT4N-vnrzuaiZSTU,3390
- mrok/proxy/types.py,sha256=XpAfTklmJfcQilyKVTkYbaFHvWZSTcr_6Rg_feiq9Mw,257
- mrok/proxy/ziti.py,sha256=kWnX1d-BaZcc0tdk_xwSp8rmQ3joZIxs7MlLScHPvMg,3879
+ mrok/proxy/ziti.py,sha256=rKgIXpOvtBeVopZkQlNUZa3Fdci9jgiog_i6egb17ps,3318
  mrok/ziti/__init__.py,sha256=20OWMiexRhOovZOX19zlX87-V78QyWnEnSZfyAftUdE,263
  mrok/ziti/api.py,sha256=KvGiT9d4oSgC3JbFWLDQyuHcLX2HuZJoJ8nHmWtCDkY,16154
  mrok/ziti/bootstrap.py,sha256=QIDhlkIxPW2QRuumFq2D1WDbD003P5f3z24pAUsyeBI,2696
@@ -85,8 +82,8 @@ mrok/ziti/errors.py,sha256=yYCbVDwktnR0AYduqtynIjo73K3HOhIrwA_vQimvEd4,368
  mrok/ziti/identities.py,sha256=1BcwfqAJHMBhc3vRaf0aLaIkoHskj5Xe2Lsq2lO9Vs8,6735
  mrok/ziti/pki.py,sha256=o2tySqHC8-7bvFuI2Tqxg9vX6H6ZSxWxfP_9x29e19M,1954
  mrok/ziti/services.py,sha256=zR1PEBYwXVou20iJK4euh0ZZFAo9UB8PZk8f6SDmiUE,3194
- mrok-0.4.2.dist-info/METADATA,sha256=b2BYs3KtydCUqoBQxxet4p7KJFJKECthQ6Vn_0M-uvM,15836
- mrok-0.4.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- mrok-0.4.2.dist-info/entry_points.txt,sha256=tloXwvU1uJicBJR2h-8HoVclPgwJWDwuREMHN8Zq-nU,38
- mrok-0.4.2.dist-info/licenses/LICENSE.txt,sha256=6PaICaoA3yNsZKLv5G6OKqSfLSoX7MakYqTDgJoTCBs,11346
- mrok-0.4.2.dist-info/RECORD,,
+ mrok-0.4.4.dist-info/METADATA,sha256=nzjalRGet1yhkJf1L4t022A-NTDG-xQ9a5cWZfbDkdg,15836
+ mrok-0.4.4.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ mrok-0.4.4.dist-info/entry_points.txt,sha256=tloXwvU1uJicBJR2h-8HoVclPgwJWDwuREMHN8Zq-nU,38
+ mrok-0.4.4.dist-info/licenses/LICENSE.txt,sha256=6PaICaoA3yNsZKLv5G6OKqSfLSoX7MakYqTDgJoTCBs,11346
+ mrok-0.4.4.dist-info/RECORD,,
mrok/proxy/dataclasses.py DELETED
@@ -1,12 +0,0 @@
- from __future__ import annotations
-
- from dataclasses import dataclass
-
- from mrok.http.types import StreamReader, StreamWriter
-
-
- @dataclass
- class CachedStreamEntry:
- reader: StreamReader
- writer: StreamWriter
- last_access: float
mrok/proxy/streams.py DELETED
@@ -1,124 +0,0 @@
- import asyncio
-
- from mrok.proxy.types import ConnectionCache
-
-
- class CachedStreamReader:
- def __init__(
- self,
- reader: asyncio.StreamReader,
- key: str,
- manager: ConnectionCache,
- ):
- self._reader = reader
- self._key = key
- self._manager = manager
-
- async def read(self, n: int = -1) -> bytes:
- try:
- return await self._reader.read(n)
- except (
- asyncio.CancelledError,
- asyncio.IncompleteReadError,
- asyncio.LimitOverrunError,
- BrokenPipeError,
- ConnectionAbortedError,
- ConnectionResetError,
- RuntimeError,
- TimeoutError,
- UnicodeDecodeError,
- ):
- asyncio.create_task(self._manager.invalidate(self._key))
- raise
-
- async def readexactly(self, n: int) -> bytes:
- try:
- return await self._reader.readexactly(n)
- except (
- asyncio.CancelledError,
- asyncio.IncompleteReadError,
- asyncio.LimitOverrunError,
- BrokenPipeError,
- ConnectionAbortedError,
- ConnectionResetError,
- RuntimeError,
- TimeoutError,
- UnicodeDecodeError,
- ):
- asyncio.create_task(self._manager.invalidate(self._key))
- raise
-
- async def readline(self) -> bytes:
- try:
- return await self._reader.readline()
- except (
- asyncio.CancelledError,
- asyncio.IncompleteReadError,
- asyncio.LimitOverrunError,
- BrokenPipeError,
- ConnectionAbortedError,
- ConnectionResetError,
- RuntimeError,
- TimeoutError,
- UnicodeDecodeError,
- ):
- asyncio.create_task(self._manager.invalidate(self._key))
- raise
-
- def at_eof(self) -> bool:
- return self._reader.at_eof()
-
- @property
- def underlying(self) -> asyncio.StreamReader:
- return self._reader
-
-
- class CachedStreamWriter:
- def __init__(
- self,
- writer: asyncio.StreamWriter,
- key: str,
- manager: ConnectionCache,
- ):
- self._writer = writer
- self._key = key
- self._manager = manager
-
- def write(self, data: bytes) -> None:
- try:
- return self._writer.write(data)
- except (RuntimeError, TypeError):
- asyncio.create_task(self._manager.invalidate(self._key))
- raise
-
- async def drain(self) -> None:
- try:
- return await self._writer.drain()
- except (
- asyncio.CancelledError,
- BrokenPipeError,
- ConnectionAbortedError,
- ConnectionResetError,
- RuntimeError,
- TimeoutError,
- ):
- asyncio.create_task(self._manager.invalidate(self._key))
- raise
-
- def close(self) -> None:
- return self._writer.close()
-
- async def wait_closed(self) -> None:
- try:
- return await self._writer.wait_closed()
- except (ConnectionResetError, BrokenPipeError):
- asyncio.create_task(self._manager.invalidate(self._key))
- raise
-
- @property
- def transport(self):
- return self._writer.transport
-
- @property
- def underlying(self) -> asyncio.StreamWriter:
- return self._writer
mrok/proxy/types.py DELETED
@@ -1,11 +0,0 @@
- from __future__ import annotations
-
- from typing import Protocol
-
- from mrok.http.types import StreamReader, StreamWriter
-
- StreamPair = tuple[StreamReader, StreamWriter]
-
-
- class ConnectionCache(Protocol):
- async def invalidate(self, key: str) -> None: ...