webmediator 0.9.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Leonid Salavatov
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,68 @@
1
+ Metadata-Version: 2.4
2
+ Name: webmediator
3
+ Version: 0.9.2
4
+ Summary: Python sync/async client for the WebMediator API
5
+ Home-page: https://github.com/mustaddon/webmediator.git
6
+ Author: Leonid Salavatov
7
+ Author-email: mustaddon@gmail.com
8
+ Keywords: WebMediator,CQRS,Mediator
9
+ Classifier: License :: OSI Approved :: MIT License
10
+ Classifier: Operating System :: OS Independent
11
+ Classifier: Programming Language :: Python
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.8
14
+ Classifier: Programming Language :: Python :: 3.9
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
19
+ Classifier: Programming Language :: Python :: 3.14
20
+ Requires-Python: >=3.8
21
+ Description-Content-Type: text/markdown
22
+ License-File: LICENSE
23
+ Requires-Dist: httpx
24
+ Dynamic: author
25
+ Dynamic: author-email
26
+ Dynamic: classifier
27
+ Dynamic: description
28
+ Dynamic: description-content-type
29
+ Dynamic: home-page
30
+ Dynamic: keywords
31
+ Dynamic: license-file
32
+ Dynamic: requires-dist
33
+ Dynamic: requires-python
34
+ Dynamic: summary
35
+
36
+ # webmediator
37
+ Python sync/async client for the WebMediator API.
38
+
39
+
40
+ *Sync*
41
+ ```python
42
+ import webmediator
43
+
44
+ client = webmediator.Client('https://localhost:7263/mediator')
45
+
46
+ response = client.send('Ping', {'Message':'EXAMPLE' })
47
+ print(response)
48
+ ```
49
+
50
+ *Async*
51
+ ```python
52
+ import webmediator
53
+ import asyncio
54
+
55
+ async def main():
56
+ client = webmediator.AsyncClient('http://localhost:5263/mediator')
57
+
58
+ response = await client.send('Ping', {'Message':'EXAMPLE' })
59
+ print(response)
60
+
61
+ if __name__ == "__main__":
62
+ asyncio.get_event_loop().run_until_complete(main())
63
+ ```
64
+
65
+ *Console output:*
66
+ ```
67
+ type: Pong, data: {'Message': 'EXAMPLE PONG'}
68
+ ```
@@ -0,0 +1,33 @@
1
+ # webmediator
2
+ Python sync/async client for the WebMediator API.
3
+
4
+
5
+ *Sync*
6
+ ```python
7
+ import webmediator
8
+
9
+ client = webmediator.Client('https://localhost:7263/mediator')
10
+
11
+ response = client.send('Ping', {'Message':'EXAMPLE' })
12
+ print(response)
13
+ ```
14
+
15
+ *Async*
16
+ ```python
17
+ import webmediator
18
+ import asyncio
19
+
20
+ async def main():
21
+ client = webmediator.AsyncClient('http://localhost:5263/mediator')
22
+
23
+ response = await client.send('Ping', {'Message':'EXAMPLE' })
24
+ print(response)
25
+
26
+ if __name__ == "__main__":
27
+ asyncio.get_event_loop().run_until_complete(main())
28
+ ```
29
+
30
+ *Console output:*
31
+ ```
32
+ type: Pong, data: {'Message': 'EXAMPLE PONG'}
33
+ ```
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,42 @@
1
# Build & publish:
#   pip install --upgrade setuptools wheel twine
#   python setup.py sdist bdist_wheel
#   python -m twine upload dist/*


import setuptools


# Read the long description with an explicit encoding so the build does not
# depend on the platform default (e.g. cp1252 on Windows).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="webmediator",
    version="0.9.2",
    author="Leonid Salavatov",
    author_email="mustaddon@gmail.com",
    description="Python sync/async client for the WebMediator API",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/mustaddon/webmediator.git",
    keywords=["WebMediator", "CQRS", "Mediator"],
    # src-layout: packages live under src/.
    package_dir={'': 'src'},
    packages=setuptools.find_packages(where='src'),
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: 3.13",
        "Programming Language :: Python :: 3.14",
    ],
    install_requires=['httpx'],
    python_requires='>=3.8',
)
@@ -0,0 +1,20 @@
1
+ from ._client import *
2
+ from ._client_async import *
3
+ from ._client_base import *
4
+ from ._stream import *
5
+ from ._stream_async import *
6
+
7
# Public API of the ``webmediator`` package; everything else is private.
__all__ = [
    "Client",
    "HttpStreamIO",

    "AsyncClient",
    "AsyncHttpStreamIO",

    "Response"
]

# Re-brand the re-exported classes so their ``__module__`` reads
# "webmediator" rather than the private submodule each was defined in
# (cleaner repr, pickling, and generated docs).
__locals = locals()
for __name in __all__:
    if not __name.startswith("__"):
        setattr(__locals[__name], "__module__", "webmediator")  # noqa
@@ -0,0 +1,125 @@
1
+ import typing
2
+ import ssl
3
+
4
+ import httpx
5
+ from httpx._types import AuthTypes, HeaderTypes, CookieTypes, CertTypes, ProxyTypes,TimeoutTypes
6
+ from httpx._transports import BaseTransport
7
+ from httpx._config import Limits, DEFAULT_LIMITS, DEFAULT_TIMEOUT_CONFIG, DEFAULT_MAX_REDIRECTS
8
+ from httpx._client import EventHook
9
+
10
+ from ._client_base import BaseClient, Response
11
+ from ._stream import HttpStreamIO
12
+
13
+
14
class Client(BaseClient):
    """Synchronous client for a WebMediator HTTP endpoint.

    Thin wrapper around ``httpx.Client`` that POSTs mediator requests and
    unwraps the custom response envelope (``data-type`` /
    ``data-stream-property`` headers) into :class:`Response` objects.

    NOTE: the httpx-related annotations are written as strings so they are
    not evaluated eagerly; bare ``X | None`` unions and ``list[...]`` in a
    signature raise at import time on Python 3.8/3.9, which setup.py
    declares as supported.
    """

    def __init__(self, endpoint_url: str,
                 auth: "AuthTypes | None" = None,
                 headers: "HeaderTypes | None" = None,
                 cookies: "CookieTypes | None" = None,
                 # SECURITY NOTE(review): certificate verification is OFF by
                 # default; pass verify=True (or an SSLContext) for production.
                 verify: "ssl.SSLContext | str | bool" = False,
                 cert: "CertTypes | None" = None,
                 trust_env: bool = True,
                 http1: bool = True,
                 http2: bool = False,
                 proxy: "ProxyTypes | None" = None,
                 mounts: "typing.Mapping[str, BaseTransport | None] | None" = None,
                 timeout: "TimeoutTypes" = DEFAULT_TIMEOUT_CONFIG,
                 follow_redirects: bool = False,
                 limits: "Limits" = DEFAULT_LIMITS,
                 max_redirects: int = DEFAULT_MAX_REDIRECTS,
                 event_hooks: "typing.Mapping[str, list[EventHook]] | None" = None,
                 transport: "BaseTransport | None" = None,
                 default_encoding: "str | typing.Callable[[bytes], str]" = "utf-8",
                 response_stream_as_bytes: bool = False
                 ):
        super().__init__(endpoint_url)

        # When True, stream responses are fully buffered into bytes and the
        # connection released immediately; otherwise a lazy HttpStreamIO is
        # handed to the caller, who must close it (or the Response).
        self._response_stream_as_bytes = response_stream_as_bytes
        self._client = httpx.Client(
            base_url=self._endpoint_url,
            auth=auth,
            headers=headers,
            cookies=cookies,
            verify=verify,
            cert=cert,
            trust_env=trust_env,
            http1=http1,
            http2=http2,
            proxy=proxy,
            mounts=mounts,
            timeout=timeout,
            follow_redirects=follow_redirects,
            limits=limits,
            max_redirects=max_redirects,
            event_hooks=event_hooks,
            transport=transport,
            default_encoding=default_encoding)

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, exception_traceback):
        self.close()

    def close(self):
        """Close the underlying httpx client and its connection pool."""
        self._client.close()

    def _get_result_stream(self, res: "httpx.Response"):
        """Return the response body: lazy HttpStreamIO, or bytes when buffering."""
        if not self._response_stream_as_bytes:
            return HttpStreamIO(res)

        content = res.read()
        res.close()
        return content

    def _get_result(self, res: "httpx.Response") -> "Response":
        """Translate an httpx response into a :class:`Response` envelope.

        Raises ``httpx.HTTPStatusError`` for non-2xx statuses.
        """
        res.raise_for_status()

        data_type = self._get_data_type(res)

        # 204 No Content: a typed but data-less response.
        if res.status_code == 204:
            res.close()
            return Response(data_type)

        if not self._is_json_ctype(res):
            # Non-JSON body: the payload is a stream, either standalone or
            # attached to one property of a JSON object carried in headers.
            streamProp = self._get_data_stream_property(res)

            if streamProp:
                data = self._get_data(res)
                data[streamProp] = self._get_result_stream(res)

                return Response(data_type,
                                data=data,
                                stream_ref=data[streamProp])

            data = self._get_result_stream(res)
            return Response(data_type,
                            data=data,
                            stream_ref=data)

        # Plain JSON payload: buffer, decode, release the connection.
        res.read()
        result = Response(data_type, res.json())
        res.close()
        return result

    def send(self, type: str, data=None):
        """POST a mediator request of *type* with optional *data*.

        *data* may be a JSON-serializable object, a raw stream/bytes body,
        or a dict with one stream-valued property (the stream is sent as
        the body while the rest of the dict travels in the query string).
        """
        if data is None:
            return self._get_result(self._post(type))

        if self._is_stream(data):
            return self._get_result(self._post(type, content=self._stream_content(data)))

        io_prop = self._get_stream_prop(data)

        if io_prop:
            copy = data.copy()
            del copy[io_prop]
            return self._get_result(self._post(
                url=f"{type}?data={self._encode_json(copy)}",
                content=self._stream_content(data[io_prop])))

        return self._get_result(self._post(type, json=data))

    def _post(self, url, json=None, content=None):
        """Build and send a streaming POST relative to the endpoint base URL."""
        req = self._client.build_request('POST', url, json=json, content=content)
        return self._client.send(req, stream=True)
@@ -0,0 +1,145 @@
1
+ import asyncio
2
+ import typing
3
+ import ssl
4
+
5
+ import httpx
6
+ from httpx._types import AuthTypes, HeaderTypes, CookieTypes, CertTypes, ProxyTypes,TimeoutTypes
7
+ from httpx._transports import AsyncBaseTransport
8
+ from httpx._config import Limits, DEFAULT_LIMITS, DEFAULT_TIMEOUT_CONFIG, DEFAULT_MAX_REDIRECTS
9
+ from httpx._client import EventHook
10
+
11
+ from ._client_base import BaseClient, Response
12
+ from ._stream_async import AsyncHttpStreamIO
13
+
14
+
15
class AsyncClient(BaseClient):
    """Asynchronous client for a WebMediator HTTP endpoint.

    Mirror of :class:`Client` built on ``httpx.AsyncClient``; supports
    ``async with`` (preferred) as well as explicit ``aclose()``.

    NOTE: the httpx-related annotations are written as strings so they are
    not evaluated eagerly; bare ``X | None`` unions and ``list[...]`` in a
    signature raise at import time on Python 3.8/3.9, which setup.py
    declares as supported.
    """

    def __init__(self, endpoint_url: str,
                 auth: "AuthTypes | None" = None,
                 headers: "HeaderTypes | None" = None,
                 cookies: "CookieTypes | None" = None,
                 # SECURITY NOTE(review): certificate verification is OFF by
                 # default; pass verify=True (or an SSLContext) for production.
                 verify: "ssl.SSLContext | str | bool" = False,
                 cert: "CertTypes | None" = None,
                 trust_env: bool = True,
                 http1: bool = True,
                 http2: bool = False,
                 proxy: "ProxyTypes | None" = None,
                 mounts: "typing.Mapping[str, AsyncBaseTransport | None] | None" = None,
                 timeout: "TimeoutTypes" = DEFAULT_TIMEOUT_CONFIG,
                 follow_redirects: bool = False,
                 limits: "Limits" = DEFAULT_LIMITS,
                 max_redirects: int = DEFAULT_MAX_REDIRECTS,
                 event_hooks: "typing.Mapping[str, list[EventHook]] | None" = None,
                 transport: "AsyncBaseTransport | None" = None,
                 default_encoding: "str | typing.Callable[[bytes], str]" = "utf-8",
                 response_stream_as_bytes: bool = False
                 ):
        super().__init__(endpoint_url)

        # See Client: when True, stream responses are buffered into bytes.
        self._response_stream_as_bytes = response_stream_as_bytes
        self._client = httpx.AsyncClient(
            base_url=self._endpoint_url,
            auth=auth,
            headers=headers,
            cookies=cookies,
            verify=verify,
            cert=cert,
            trust_env=trust_env,
            http1=http1,
            http2=http2,
            proxy=proxy,
            mounts=mounts,
            timeout=timeout,
            follow_redirects=follow_redirects,
            limits=limits,
            max_redirects=max_redirects,
            event_hooks=event_hooks,
            transport=transport,
            default_encoding=default_encoding)

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, exception_traceback):
        self.close()

    async def __aenter__(self):
        return self

    async def __aexit__(self, exception_type, exception_value, exception_traceback):
        await self.aclose()

    def close(self):
        """Synchronously dispose the underlying async client.

        BUGFIX: ``httpx.AsyncClient`` exposes only the awaitable
        ``aclose()``; the previous ``self._client.close()`` raised
        AttributeError. Drive ``aclose()`` to completion instead (same
        approach as ``AsyncHttpStreamIO.close``).
        """
        from ._utils_async import run_coroutine_sync
        run_coroutine_sync(self._client.aclose())

    async def aclose(self):
        """Close the underlying httpx client and its connection pool."""
        await self._client.aclose()

    async def _get_result_stream(self, res: "httpx.Response"):
        """Return the response body: lazy AsyncHttpStreamIO, or bytes when buffering."""
        if not self._response_stream_as_bytes:
            return AsyncHttpStreamIO(res)

        content = await res.aread()
        await res.aclose()
        return content

    async def _get_result(self, res: "httpx.Response") -> "Response":
        """Translate an httpx response into a :class:`Response` envelope.

        Raises ``httpx.HTTPStatusError`` for non-2xx statuses.
        """
        res.raise_for_status()

        data_type = self._get_data_type(res)

        # 204 No Content: a typed but data-less response.
        if res.status_code == 204:
            await res.aclose()
            return Response(data_type)

        if not self._is_json_ctype(res):
            # Non-JSON body: the payload is a stream, either standalone or
            # attached to one property of a JSON object carried in headers.
            streamProp = self._get_data_stream_property(res)
            if streamProp:
                data = self._get_data(res)
                data[streamProp] = await self._get_result_stream(res)
                return Response(data_type,
                                data=data,
                                stream_ref=data[streamProp])

            data = await self._get_result_stream(res)
            return Response(data_type,
                            data=data,
                            stream_ref=data)

        # Plain JSON payload: buffer, decode, release the connection.
        await res.aread()
        result = Response(data_type, res.json())
        await res.aclose()
        return result

    async def _stream_content_async(self, data):
        """Adapt the synchronous chunk generator to an async iterator.

        Each blocking read runs on the default executor so the event loop
        is never stalled. BUGFIX: uses ``run_in_executor`` rather than
        ``asyncio.to_thread`` (added in 3.9) to honor the declared 3.8
        support.
        """
        it = iter(self._stream_content(data))
        done = object()
        loop = asyncio.get_running_loop()

        while True:
            # next(it, done) never raises StopIteration; *done* marks the end.
            value = await loop.run_in_executor(None, next, it, done)
            if value is done:
                break
            yield value

    async def send(self, type: str, data=None):
        """POST a mediator request of *type* with optional *data* (see Client.send)."""
        if data is None:
            return await self._get_result(await self._post(type))

        if self._is_stream(data):
            return await self._get_result(
                await self._post(type, content=self._stream_content_async(data)))

        io_prop = self._get_stream_prop(data)

        if io_prop:
            copy = data.copy()
            del copy[io_prop]
            return await self._get_result(
                await self._post(f"{type}?data={self._encode_json(copy)}",
                                 content=self._stream_content_async(data[io_prop])))

        return await self._get_result(
            await self._post(type, json=data))

    async def _post(self, url, json=None, content=None):
        """Build and send a streaming POST relative to the endpoint base URL."""
        req = self._client.build_request('POST', url, json=json, content=content)
        return await self._client.send(req, stream=True)
@@ -0,0 +1,101 @@
1
+ import io
2
+ import json
3
+ from urllib.parse import quote
4
+ import base64
5
+ import httpx
6
+
7
+ from ._stream import CHUNK_SIZE, HttpStreamIO
8
+ from ._stream_async import AsyncHttpStreamIO
9
+
10
class BaseClient:
    """Shared plumbing for the sync and async WebMediator clients.

    NOTE: annotations referencing httpx or using ``str | None`` unions are
    written as strings so they are not evaluated at definition time —
    bare unions raise TypeError on Python 3.8/3.9, which setup.py declares
    as supported.
    """

    def __init__(self, endpoint_url: str):
        # Normalize with a trailing slash so relative request paths can be
        # appended directly. endswith() also tolerates an empty string,
        # where the original ``endpoint_url[-1]`` raised IndexError.
        self._endpoint_url = endpoint_url if endpoint_url.endswith('/') else endpoint_url + '/'

    def _encode_json(self, obj):
        """JSON-encode *obj* and percent-encode every character for a query string."""
        return quote(json.dumps(obj), safe='')

    def _is_stream(self, obj):
        """True when *obj* should be transmitted as a raw body (file-like or bytes)."""
        return isinstance(obj, (io.IOBase, bytes))

    def _get_stream_prop(self, obj):
        """Return the key of the first stream-valued entry of a dict, else None."""
        if isinstance(obj, dict):
            for key, value in obj.items():
                if value is not None and self._is_stream(value):
                    return key
        return None

    def _stream_content(self, data):
        """Yield *data* as byte chunks: bytes as-is, text streams encoded, binary raw."""
        if isinstance(data, bytes):
            yield data
            return

        if isinstance(data, io.TextIOBase):
            # Text must be encoded before going on the wire.
            # NOTE(review): data.encoding is None for StringIO — confirm callers
            # only pass encoded text files here.
            while chunk := data.read(CHUNK_SIZE):
                yield chunk.encode(data.encoding)
            return

        while chunk := data.read(CHUNK_SIZE):
            yield chunk

    def _is_json_ctype(self, res: "httpx.Response") -> bool:
        """True when the response declares an application/json content type."""
        ctype = res.headers.get('content-type')
        return ctype is not None and 'application/json' in ctype

    def _get_data(self, res: "httpx.Response"):
        """Decode the base64-encoded JSON object carried in the 'data' header."""
        return json.loads(base64.b64decode(res.headers.get('data')))

    def _get_data_type(self, res: "httpx.Response") -> "str | None":
        """Return the 'data-type' header, or None when absent."""
        return res.headers.get('data-type')

    def _get_data_stream_property(self, res: "httpx.Response") -> "str | None":
        """Return the 'data-stream-property' header, or None when absent."""
        return res.headers.get('data-stream-property')

    def get_link(self, type: str, data=None):
        """Build a shareable GET URL for a request of *type* with optional *data*.

        Raises TypeError when *data* is (or contains) a stream, since a
        stream body cannot be represented in a URL.
        """
        if data is None:
            return self._endpoint_url + type

        if self._is_stream(data) or self._get_stream_prop(data):
            raise TypeError("The data contains stream property (IOBase or bytes) and cannot be presented as a link.")

        return f"{self._endpoint_url}{type}?data={self._encode_json(data)}"
61
+
62
+
63
+
64
class Response:
    """Result envelope of a mediator call: a type tag plus optional payload.

    The ``stream_ref`` annotation is a string so it is not evaluated at
    class-definition time: ``X | Y | None`` unions raise on Python 3.8/3.9,
    which the package declares as supported.
    """

    def __init__(self,
                 type: str,
                 data=None,
                 stream_ref: "HttpStreamIO | AsyncHttpStreamIO | None" = None
                 ):
        # NOTE(review): *type* can actually be None — BaseClient._get_data_type
        # returns headers.get('data-type'), which is None when absent.
        self.type = type
        self.data = data
        # Keep the handle only when it really is a managed stream, so
        # close()/aclose() are no-ops for fully buffered payloads.
        self.__stream_ref = stream_ref if isinstance(stream_ref, (HttpStreamIO, AsyncHttpStreamIO)) else None

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, exception_traceback):
        self.close()

    async def __aenter__(self):
        return self

    async def __aexit__(self, exception_type, exception_value, exception_traceback):
        await self.aclose()

    def __str__(self):
        return f"type: {self.type}, data: {self.data}"

    def close(self):
        """Synchronously release the attached stream, if any."""
        if self.__stream_ref is not None:
            self.__stream_ref.close()

    async def aclose(self):
        """Release the attached stream, preferring its async close."""
        if self.__stream_ref is None:
            return

        if isinstance(self.__stream_ref, AsyncHttpStreamIO):
            await self.__stream_ref.aclose()
        else:
            self.__stream_ref.close()
101
+
@@ -0,0 +1,51 @@
1
+ import io
2
+ import httpx
3
+
4
# Chunk size used for pulling HTTP response bodies off the wire.
CHUNK_SIZE = 4096


class HttpStreamIO(io.RawIOBase):
    """File-like read-only adapter over a streamed ``httpx.Response`` body.

    Buffers raw chunks from ``response.iter_raw`` so callers can do
    arbitrary-sized reads; the response (and its connection) is closed as
    soon as the body is exhausted, or on close()/error.

    The annotation is a string so the class does not eagerly evaluate
    ``httpx.Response`` at definition time.
    """

    def __init__(self, response: "httpx.Response"):
        self._response = response
        self._generator = response.iter_raw(CHUNK_SIZE)
        self._buffer = b""

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, exception_traceback):
        self.close()

    def readable(self):
        return True

    def close(self):
        """Close the underlying response once, then mark this stream closed."""
        if not self.closed:
            self._response.close()
            super().close()

    def read(self, size=-1):
        """Read up to *size* bytes; all remaining bytes when *size* < 0."""
        if size == 0:
            return b""

        # Pull chunks until enough is buffered, or the body is exhausted.
        while size < 0 or len(self._buffer) < size:
            try:
                chunk = next(self._generator)
                if not isinstance(chunk, bytes):
                    raise TypeError("Generator must yield bytes objects")
                self._buffer += chunk
            except StopIteration:
                self.close()  # body fully consumed: release the connection
                break
            except BaseException:
                # Idiom fix: was a bare `except:` (same semantics, explicit).
                self.close()
                raise

        if size < 0:
            result, self._buffer = self._buffer, b""
        else:
            result, self._buffer = self._buffer[:size], self._buffer[size:]

        return result
@@ -0,0 +1,67 @@
1
+ import io
2
+ import httpx
3
+ from ._utils_async import run_coroutine_sync
4
+ from ._stream import CHUNK_SIZE
5
+
6
+
7
class AsyncHttpStreamIO(io.RawIOBase):
    """File-like adapter over an asynchronously streamed ``httpx.Response``.

    Exposes both async (``aread``/``aclose``) and blocking (``read``/
    ``close``) APIs; the blocking ones drive the coroutine to completion
    via ``run_coroutine_sync``.

    The annotation is a string so the class does not eagerly evaluate
    ``httpx.Response`` at definition time.
    """

    def __init__(self, response: "httpx.Response"):
        self._response = response
        self._generator = response.aiter_raw(CHUNK_SIZE)
        self._buffer = b""

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, exception_traceback):
        self.close()

    async def __aenter__(self):
        return self

    async def __aexit__(self, exception_type, exception_value, exception_traceback):
        await self.aclose()

    def readable(self):
        return True

    def close(self):
        """Blocking close; prefer aclose() when inside a running event loop."""
        if not self.closed:
            run_coroutine_sync(self.aclose())
            super().close()

    async def aclose(self):
        """Release the underlying response once, then mark this stream closed."""
        if not self.closed:
            await self._response.aclose()
            super().close()

    def read(self, size=-1):
        """Blocking read; prefer aread() when inside a running event loop."""
        return run_coroutine_sync(self.aread(size))

    async def aread(self, size=-1):
        """Read up to *size* bytes; all remaining bytes when *size* < 0."""
        if size == 0:
            return b""

        # Pull chunks until enough is buffered, or the body is exhausted.
        while size < 0 or len(self._buffer) < size:
            try:
                # BUGFIX: the builtin anext() only exists on Python 3.10+,
                # but setup.py declares 3.8 support; call __anext__() directly.
                chunk = await self._generator.__anext__()
                if not isinstance(chunk, bytes):
                    raise TypeError("Generator must yield bytes objects")
                self._buffer += chunk
            except StopAsyncIteration:
                await self.aclose()  # body fully consumed: release the connection
                break
            except BaseException:
                # Idiom fix: was a bare `except:` (same semantics, explicit).
                await self.aclose()
                raise

        if size < 0:
            result, self._buffer = self._buffer, b""
        else:
            result, self._buffer = self._buffer[:size], self._buffer[size:]

        return result
67
+
@@ -0,0 +1,30 @@
1
+ import asyncio
2
+ import threading
3
+ from concurrent.futures import ThreadPoolExecutor
4
+ from typing import Any, Coroutine, TypeVar
5
+
6
T = TypeVar("T")


def run_coroutine_sync(coroutine: "Coroutine[Any, Any, T]", timeout: float = 30) -> "T":
    """Run *coroutine* to completion from synchronous code and return its result.

    Strategy:
      * no running event loop          -> ``asyncio.run``
      * running loop, main thread      -> execute in a fresh loop on a worker
                                          thread (bounded by *timeout*), since
                                          the current loop cannot be re-entered
      * running loop, other thread     -> schedule onto that loop and wait

    NOTE(review): calling this from inside a callback of a loop running on a
    *non-main* thread will deadlock (the original had the same limitation).
    """

    def run_in_new_loop() -> "T":
        # Dedicated loop on a throwaway worker thread; closed on completion.
        new_loop = asyncio.new_event_loop()
        asyncio.set_event_loop(new_loop)
        try:
            return new_loop.run_until_complete(coroutine)
        finally:
            new_loop.close()

    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        return asyncio.run(coroutine)

    if threading.current_thread() is threading.main_thread():
        # get_running_loop() only ever returns a *running* loop, so the
        # original `if not loop.is_running()` branch was dead code — the
        # worker-thread path is always taken here.
        with ThreadPoolExecutor(max_workers=1) as pool:
            return pool.submit(run_in_new_loop).result(timeout=timeout)

    return asyncio.run_coroutine_threadsafe(coroutine, loop).result()
@@ -0,0 +1,68 @@
1
+ Metadata-Version: 2.4
2
+ Name: webmediator
3
+ Version: 0.9.2
4
+ Summary: Python sync/async client for the WebMediator API
5
+ Home-page: https://github.com/mustaddon/webmediator.git
6
+ Author: Leonid Salavatov
7
+ Author-email: mustaddon@gmail.com
8
+ Keywords: WebMediator,CQRS,Mediator
9
+ Classifier: License :: OSI Approved :: MIT License
10
+ Classifier: Operating System :: OS Independent
11
+ Classifier: Programming Language :: Python
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.8
14
+ Classifier: Programming Language :: Python :: 3.9
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
19
+ Classifier: Programming Language :: Python :: 3.14
20
+ Requires-Python: >=3.8
21
+ Description-Content-Type: text/markdown
22
+ License-File: LICENSE
23
+ Requires-Dist: httpx
24
+ Dynamic: author
25
+ Dynamic: author-email
26
+ Dynamic: classifier
27
+ Dynamic: description
28
+ Dynamic: description-content-type
29
+ Dynamic: home-page
30
+ Dynamic: keywords
31
+ Dynamic: license-file
32
+ Dynamic: requires-dist
33
+ Dynamic: requires-python
34
+ Dynamic: summary
35
+
36
+ # webmediator
37
+ Python sync/async client for the WebMediator API.
38
+
39
+
40
+ *Sync*
41
+ ```python
42
+ import webmediator
43
+
44
+ client = webmediator.Client('https://localhost:7263/mediator')
45
+
46
+ response = client.send('Ping', {'Message':'EXAMPLE' })
47
+ print(response)
48
+ ```
49
+
50
+ *Async*
51
+ ```python
52
+ import webmediator
53
+ import asyncio
54
+
55
+ async def main():
56
+ client = webmediator.AsyncClient('http://localhost:5263/mediator')
57
+
58
+ response = await client.send('Ping', {'Message':'EXAMPLE' })
59
+ print(response)
60
+
61
+ if __name__ == "__main__":
62
+ asyncio.get_event_loop().run_until_complete(main())
63
+ ```
64
+
65
+ *Console output:*
66
+ ```
67
+ type: Pong, data: {'Message': 'EXAMPLE PONG'}
68
+ ```
@@ -0,0 +1,15 @@
1
+ LICENSE
2
+ README.md
3
+ setup.py
4
+ src/webmediator/__init__.py
5
+ src/webmediator/_client.py
6
+ src/webmediator/_client_async.py
7
+ src/webmediator/_client_base.py
8
+ src/webmediator/_stream.py
9
+ src/webmediator/_stream_async.py
10
+ src/webmediator/_utils_async.py
11
+ src/webmediator.egg-info/PKG-INFO
12
+ src/webmediator.egg-info/SOURCES.txt
13
+ src/webmediator.egg-info/dependency_links.txt
14
+ src/webmediator.egg-info/requires.txt
15
+ src/webmediator.egg-info/top_level.txt
@@ -0,0 +1 @@
1
+ webmediator