aiohttp-msal 0.6.3__tar.gz → 0.6.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/PKG-INFO +3 -13
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/README.md +2 -12
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal/__init__.py +1 -1
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal/msal_async.py +15 -10
- aiohttp_msal-0.6.5/aiohttp_msal/redis_tools.py +112 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal.egg-info/PKG-INFO +3 -13
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal.egg-info/SOURCES.txt +1 -0
- aiohttp_msal-0.6.5/pyproject.toml +4 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/setup.cfg +0 -10
- aiohttp_msal-0.6.3/aiohttp_msal/redis_tools.py +0 -83
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/LICENSE +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal/routes.py +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal/settings.py +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal/settings_base.py +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal/user_info.py +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal.egg-info/dependency_links.txt +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal.egg-info/requires.txt +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal.egg-info/top_level.txt +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal.egg-info/zip-safe +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/setup.py +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/tests/__init__.py +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/tests/test_init.py +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/tests/test_msal_async.py +0 -0
- {aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/tests/test_settings.py +0 -0

{aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: aiohttp_msal
-Version: 0.6.3
+Version: 0.6.5
 Summary: Helper Library to use the Microsoft Authentication Library (MSAL) with aiohttp
 Home-page: https://github.com/kellerza/aiohttp_msal
 Author: Johann Kellerman
@@ -125,23 +125,13 @@ async def user_authorized(request: web.Request) -> web.Response:
 
 ```python
 from aiohttp_msal import ENV, AsyncMSAL
-from aiohttp_msal.redis_tools import get_redis, get_session
-
-async def get_async_msal(email: str) -> AsyncMSAL:
-    """Clean redis and get a session."""
-    red = get_redis()
-    try:
-        return await get_session(red, email)
-    finally:
-        await red.close()
-
+from aiohttp_msal.redis_tools import get_session
 
 def main()
     # Uses the redis.asyncio driver to retrieve the current token
     # Will update the token_cache if a RefreshToken was used
-    ases = asyncio.run(get_async_msal(MYEMAIL))
+    ases = asyncio.run(get_session(MYEMAIL))
     client = GraphClient(ases.get_token)
     # ...
     # use the Graphclient
-    # ...
 ```

{aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/README.md

@@ -90,23 +90,13 @@ async def user_authorized(request: web.Request) -> web.Response:
 
 ```python
 from aiohttp_msal import ENV, AsyncMSAL
-from aiohttp_msal.redis_tools import get_redis, get_session
-
-async def get_async_msal(email: str) -> AsyncMSAL:
-    """Clean redis and get a session."""
-    red = get_redis()
-    try:
-        return await get_session(red, email)
-    finally:
-        await red.close()
-
+from aiohttp_msal.redis_tools import get_session
 
 def main()
     # Uses the redis.asyncio driver to retrieve the current token
    # Will update the token_cache if a RefreshToken was used
-    ases = asyncio.run(get_async_msal(MYEMAIL))
+    ases = asyncio.run(get_session(MYEMAIL))
     client = GraphClient(ases.get_token)
     # ...
     # use the Graphclient
-    # ...
 ```
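
For context, here is a standalone sketch of the new 0.6.5 pattern shown in this hunk. It assumes ENV.REDIS points at the Redis session store and uses a placeholder e-mail; the README's GraphClient comes from a separate Graph SDK and is omitted here.

```python
import asyncio
from typing import Any, Optional

from aiohttp_msal.redis_tools import get_session

MYEMAIL = "user@example.com"  # placeholder: e-mail of a previously stored session


async def fetch_token() -> Optional[dict[str, Any]]:
    # In 0.6.5 get_session() opens and closes its own Redis connection (ENV.REDIS),
    # finds the stored session whose "mail" matches, and returns an AsyncMSAL
    # instance that writes refreshed tokens back to Redis.
    ases = await get_session(MYEMAIL)
    # Silent token acquisition against the cached account; None if nothing is cached.
    return ases.get_token()


if __name__ == "__main__":
    print(asyncio.run(fetch_token()))
```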

{aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal/msal_async.py

@@ -23,7 +23,7 @@ HTTP_PATCH = "patch"
 HTTP_DELETE = "delete"
 HTTP_ALLOWED = [HTTP_GET, HTTP_POST, HTTP_PUT, HTTP_PATCH, HTTP_DELETE]
 
-
+DEFAULT_SCOPES = ["User.Read", "User.Read.All"]
 
 
 def async_wrap(func: Callable) -> Callable:
@@ -71,11 +71,12 @@ class AsyncMSAL:
     https://msal-python.readthedocs.io/en/latest/#msal.ClientApplication.initiate_auth_code_flow
 
     The caller is expected to:
-    1.somehow store this content, typically inside the current session of the
-
-
-
-
+    1. somehow store this content, typically inside the current session of the
+       server,
+    2. guide the end user (i.e. resource owner) to visit that auth_uri,
+       typically with a redirect
+    3. and then relay this dict and subsequent auth response to
+       acquire_token_by_auth_code_flow().
 
     [1. and part of 3.] is stored by this class in the aiohttp_session
 

@@ -147,12 +148,14 @@ class AsyncMSAL:
         if hasattr(self, "save_token_cache"):
             self.save_token_cache(self.token_cache)
 
-    def build_auth_code_flow(self, redirect_uri: str) -> str:
+    def build_auth_code_flow(
+        self, redirect_uri: str, scopes: Optional[list[str]] = None
+    ) -> str:
         """First step - Start the flow."""
         self.session[TOKEN_CACHE] = None  # type: ignore
         self.session[USER_EMAIL] = None  # type: ignore
         self.session[FLOW_CACHE] = res = self.app.initiate_auth_code_flow(
-
+            scopes or DEFAULT_SCOPES,
             redirect_uri=redirect_uri,
             response_mode="form_post"
             # max_age=1209600,

@@ -182,11 +185,13 @@ class AsyncMSAL:
             None, self.acquire_token_by_auth_code_flow, auth_response
         )
 
-    def get_token(self) -> Optional[dict[str, Any]]:
+    def get_token(self, scopes: Optional[list[str]] = None) -> Optional[dict[str, Any]]:
         """Acquire a token based on username."""
         accounts = self.app.get_accounts()
         if accounts:
-            result = self.app.acquire_token_silent(
+            result = self.app.acquire_token_silent(
+                scopes=scopes or DEFAULT_SCOPES, account=accounts[0]
+            )
             self._save_token_cache()
             return result
         return None
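
Both build_auth_code_flow and get_token now accept an optional scopes list, falling back to the new DEFAULT_SCOPES. Below is a minimal sketch of how a caller might pass custom scopes; it assumes an aiohttp app already wired up with aiohttp_session and the package's ENV settings, that build_auth_code_flow returns the authorization URL (its annotated return type is str), and uses placeholder scopes and redirect URI.

```python
from typing import Any, Optional

from aiohttp import web
from aiohttp_session import get_session as get_http_session

from aiohttp_msal import AsyncMSAL

# Hypothetical application-specific scopes; omitting the argument falls back to
# DEFAULT_SCOPES = ["User.Read", "User.Read.All"].
MAIL_SCOPES = ["Mail.Read"]


async def login(request: web.Request) -> web.Response:
    """Step 1: start the auth code flow with non-default scopes."""
    session = await get_http_session(request)
    auth_uri = AsyncMSAL(session).build_auth_code_flow(
        redirect_uri="https://example.com/user/authorized",  # placeholder
        scopes=MAIL_SCOPES,
    )
    # Step 2: send the end user to the Microsoft login page.
    return web.HTTPFound(location=auth_uri)


def read_mail_token(msal: AsyncMSAL) -> Optional[dict[str, Any]]:
    """Silent acquisition for the same scopes once the flow has completed."""
    return msal.get_token(scopes=MAIL_SCOPES)
```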

aiohttp_msal-0.6.5/aiohttp_msal/redis_tools.py

@@ -0,0 +1,112 @@
+"""Redis tools for sessions."""
+import asyncio
+import json
+import logging
+import time
+from contextlib import AsyncExitStack, asynccontextmanager
+from typing import Any, AsyncGenerator, Optional
+
+from redis.asyncio import Redis, from_url
+
+from aiohttp_msal.msal_async import AsyncMSAL
+from aiohttp_msal.settings import ENV
+
+_LOGGER = logging.getLogger(__name__)
+
+SES_KEYS = ("mail", "name", "m_mail", "m_name")
+
+
+@asynccontextmanager
+async def get_redis() -> AsyncGenerator[Redis, None]:
+    """Get a Redis connection."""
+    if ENV.database:
+        _LOGGER.debug("Using redis from environment")
+        yield ENV.database
+        return
+    _LOGGER.info("Connect to Redis %s", ENV.REDIS)
+    redis = from_url(ENV.REDIS)
+    try:
+        yield redis
+    finally:
+        await redis.close()
+
+
+async def session_iter(
+    redis: Redis,
+    *,
+    match: Optional[dict[str, str]] = None,
+    key_match: Optional[str] = None,
+) -> AsyncGenerator[tuple[str, int, dict[str, Any]], None]:
+    """Iterate over the Redis keys to find a specific session.
+
+    match: Filter based on session content (i.e. mail/name)
+    key_match: Filter the Redis keys. Defaults to ENV.cookie_name
+    """
+    async for key in redis.scan_iter(
+        count=100, match=key_match or f"{ENV.COOKIE_NAME}*"
+    ):
+        sval = await redis.get(key)
+        created, ses = 0, {}
+        try:
+            val = json.loads(sval)  # type: ignore
+            created = int(val["created"])
+            ses = val["session"]
+        except Exception:  # pylint: disable=broad-except
+            pass
+        if match:
+            # Ensure we match all the supplied terms
+            if not all(k in ses and v in ses[k] for k, v in match.items()):
+                continue
+        yield key, created, ses
+
+
+async def session_clean(
+    redis: Redis, *, max_age: int = 90, expected_keys: Optional[dict] = None
+) -> None:
+    """Clear session entries older than max_age days."""
+    rem, keep = 0, 0
+    expire = int(time.time() - max_age * 24 * 60 * 60)
+    try:
+        async for key, created, ses in session_iter(redis):
+            all_keys = all(sk in ses for sk in (expected_keys or SES_KEYS))
+            if created < expire or not all_keys:
+                rem += 1
+                await redis.delete(key)
+            else:
+                keep += 1
+    finally:
+        if rem:
+            _LOGGER.info("Sessions removed: %s (%s total)", rem, keep)
+        else:
+            _LOGGER.debug("No sessions removed (%s total)", keep)
+
+
+def _session_factory(key: str, created: str, session: dict) -> AsyncMSAL:
+    """Create a AsyncMSAL session.
+
+    When get_token refreshes the token retrieved from Redis, the save_cache callback
+    will be responsible to update the cache in Redis."""
+
+    async def async_save_cache(_: dict) -> None:
+        """Save the token cache to Redis."""
+        async with get_redis() as rd2:
+            await rd2.set(key, json.dumps({"created": created, "session": session}))
+
+    def save_cache(*args: Any) -> None:
+        """Save the token cache to Redis."""
+        try:
+            asyncio.get_event_loop().create_task(async_save_cache(*args))
+        except RuntimeError:
+            asyncio.run(async_save_cache(*args))
+
+    return AsyncMSAL(session, save_cache=save_cache)
+
+
+async def get_session(email: str, *, redis: Optional[Redis] = None) -> AsyncMSAL:
+    """Get a session from Redis."""
+    async with AsyncExitStack() as stack:
+        if redis is None:
+            redis = await stack.enter_async_context(get_redis())
+        async for key, created, session in session_iter(redis, match={"mail": email}):
+            return _session_factory(key, str(created), session)
+    raise ValueError(f"Session for {email} not found")
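
Beyond get_session, the new module exposes get_redis as an async context manager plus session_iter and session_clean. Here is a short housekeeping sketch built on those helpers, assuming ENV.REDIS is configured; the e-mail filter and 30-day cut-off are illustrative.

```python
import asyncio

from aiohttp_msal.redis_tools import get_redis, session_clean, session_iter


async def housekeeping() -> None:
    # get_redis() yields the shared ENV.database connection if present,
    # otherwise connects to ENV.REDIS and closes the connection on exit.
    async with get_redis() as redis:
        # Remove sessions older than 30 days or missing the expected keys.
        await session_clean(redis, max_age=30)
        # Inspect what remains for one user; `match` filters on session content.
        async for key, created, ses in session_iter(redis, match={"mail": "user@example.com"}):
            print(key, created, ses.get("name"))


asyncio.run(housekeeping())
```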

{aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/aiohttp_msal.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: aiohttp-msal
-Version: 0.6.3
+Version: 0.6.5
 Summary: Helper Library to use the Microsoft Authentication Library (MSAL) with aiohttp
 Home-page: https://github.com/kellerza/aiohttp_msal
 Author: Johann Kellerman
@@ -125,23 +125,13 @@ async def user_authorized(request: web.Request) -> web.Response:
 
 ```python
 from aiohttp_msal import ENV, AsyncMSAL
-from aiohttp_msal.redis_tools import get_redis, get_session
-
-async def get_async_msal(email: str) -> AsyncMSAL:
-    """Clean redis and get a session."""
-    red = get_redis()
-    try:
-        return await get_session(red, email)
-    finally:
-        await red.close()
-
+from aiohttp_msal.redis_tools import get_session
 
 def main()
     # Uses the redis.asyncio driver to retrieve the current token
     # Will update the token_cache if a RefreshToken was used
-    ases = asyncio.run(get_async_msal(MYEMAIL))
+    ases = asyncio.run(get_session(MYEMAIL))
     client = GraphClient(ases.get_token)
     # ...
     # use the Graphclient
-    # ...
 ```

{aiohttp_msal-0.6.3 → aiohttp_msal-0.6.5}/setup.cfg

@@ -44,22 +44,12 @@ tests =
     pytest-asyncio
     pytest-env
 
-[isort]
-profile = black
-
-[flake8]
-extend-ignore = E203, E501, W503
-
 [mypy]
 disallow_untyped_defs = True
 
 [mypy-msal.*]
 ignore_missing_imports = True
 
-[pydocstyle]
-match_dir = aiohttp_msal
-convention = google
-
 [tool:pytest]
 filterwarnings =
     ignore:.+@coroutine.+deprecated.+

aiohttp_msal-0.6.3/aiohttp_msal/redis_tools.py

@@ -1,83 +0,0 @@
-"""Redis tools for sessions."""
-import asyncio
-import json
-import logging
-import time
-from typing import Any, AsyncGenerator, Optional
-
-from redis.asyncio import Redis, from_url
-
-from aiohttp_msal.msal_async import AsyncMSAL
-from aiohttp_msal.settings import ENV
-
-_LOGGER = logging.getLogger(__name__)
-
-SES_KEYS = ("mail", "name", "m_mail", "m_name")
-
-
-def get_redis() -> Redis:
-    """Get a Redis connection."""
-    _LOGGER.info("Connect to Redis %s", ENV.REDIS)
-    ENV.database = from_url(ENV.REDIS)  # pylint: disable=no-member
-    return ENV.database
-
-
-async def iter_redis(
-    redis: Redis, *, clean: bool = False, match: Optional[dict[str, str]] = None
-) -> AsyncGenerator[tuple[str, str, dict], None]:
-    """Iterate over the Redis keys to find a specific session."""
-    async for key in redis.scan_iter(count=100, match=f"{ENV.COOKIE_NAME}*"):
-        sval = await redis.get(key)
-        if not isinstance(sval, (str, bytes, bytearray)):
-            if clean:
-                await redis.delete(key)
-            continue
-        val = json.loads(sval)
-        ses = val.get("session") or {}
-        created = val.get("created")
-        if clean and not ses or not created:
-            await redis.delete(key)
-            continue
-        if match and not all(v in ses[k] for k, v in match.items()):
-            continue
-        yield key, created or "0", ses
-
-
-async def clean_redis(redis: Redis, max_age: int = 90) -> None:
-    """Clear session entries older than max_age days."""
-    expire = int(time.time() - max_age * 24 * 60 * 60)
-    async for key, created, ses in iter_redis(redis, clean=True):
-        for key in SES_KEYS:
-            if not ses.get(key):
-                await redis.delete(key)
-                continue
-        if int(created) < expire:
-            await redis.delete(key)
-
-
-def _session_factory(key: str, created: str, session: dict) -> AsyncMSAL:
-    """Create a session with a save callback."""
-
-    async def async_save_cache(_: dict) -> None:
-        """Save the token cache to Redis."""
-        rd2 = get_redis()
-        try:
-            await rd2.set(key, json.dumps({"created": created, "session": session}))
-        finally:
-            await rd2.close()
-
-    def save_cache(*args: Any) -> None:
-        """Save the token cache to Redis."""
-        try:
-            asyncio.get_event_loop().create_task(async_save_cache(*args))
-        except RuntimeError:
-            asyncio.run(async_save_cache(*args))
-
-    return AsyncMSAL(session, save_cache=save_cache)
-
-
-async def get_session(red: Redis, email: str) -> AsyncMSAL:
-    """Get a session from Redis."""
-    async for key, created, session in iter_redis(red, match={"mail": email}):
-        return _session_factory(key, created, session)
-    raise ValueError(f"Session for {email} not found")
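
Callers that previously managed the connection themselves (the removed 0.6.3 helpers above returned a raw Redis client) can reuse one connection through the new context manager and the keyword-only redis parameter of get_session. A hedged migration sketch, with an illustrative batch helper and e-mail list:

```python
from aiohttp_msal import AsyncMSAL
from aiohttp_msal.redis_tools import get_redis, get_session


async def sessions_for(emails: list[str]) -> list[AsyncMSAL]:
    """Look up several stored sessions over a single Redis connection (sketch)."""
    # Replaces the 0.6.3 pattern: red = get_redis(); ...; await red.close()
    async with get_redis() as red:
        return [await get_session(email, redis=red) for email in emails]
```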

File without changes: all other files listed above (+0 -0) are identical in 0.6.3 and 0.6.5.