skoll 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- skoll/__init__.py +0 -0
- skoll/application/__init__.py +2 -0
- skoll/application/protocols.py +46 -0
- skoll/application/types.py +123 -0
- skoll/config.py +34 -0
- skoll/domain/__init__.py +4 -0
- skoll/domain/base.py +157 -0
- skoll/domain/enums.py +17 -0
- skoll/domain/objects.py +176 -0
- skoll/domain/primitives.py +319 -0
- skoll/errors.py +141 -0
- skoll/infras/__init__.py +3 -0
- skoll/infras/mediator/__init__.py +2 -0
- skoll/infras/mediator/basic.py +60 -0
- skoll/infras/mediator/nats.py +51 -0
- skoll/infras/mediator/utils.py +71 -0
- skoll/infras/postgresql.py +154 -0
- skoll/infras/spicedb.py +183 -0
- skoll/result.py +58 -0
- skoll/utils/__init__.py +2 -0
- skoll/utils/dep_injection.py +89 -0
- skoll/utils/functional.py +169 -0
- skoll-0.0.1.dist-info/METADATA +32 -0
- skoll-0.0.1.dist-info/RECORD +27 -0
- skoll-0.0.1.dist-info/WHEEL +4 -0
- skoll-0.0.1.dist-info/entry_points.txt +2 -0
- skoll-0.0.1.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
import typing as t
|
|
2
|
+
from json import dumps
|
|
3
|
+
from attrs import define
|
|
4
|
+
import collections.abc as c
|
|
5
|
+
from asyncpg.pool import Pool, PoolConnectionProxy
|
|
6
|
+
from asyncpg import Record, create_pool, UniqueViolationError
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
from skoll.utils import from_json
|
|
10
|
+
from skoll.domain import EntityState
|
|
11
|
+
from skoll.result import Result, is_fail
|
|
12
|
+
from skoll.errors import InternalError, NotFound, Conflict
|
|
13
|
+
from skoll.application import DB, Repository, Criteria, ListCriteria, ListPage
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
__all__ = ["PostgresDB", "PostgresRepo", "parse_pg_row"]
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def parse_pg_row(row: t.Any, errors_hints: dict[str, t.Any] | None = None) -> dict[str, t.Any]:
    """Convert an asyncpg ``Record`` into a plain dict.

    String column values holding valid JSON are decoded via ``from_json``;
    every other value is passed through untouched.

    Args:
        row: the value returned by ``fetchrow``/iteration over ``fetch``.
        errors_hints: extra context attached to the raised errors.

    Raises:
        NotFound: when ``row`` is None (no matching row).
        InternalError: when ``row`` is not an asyncpg ``Record``.
    """
    if row is None:
        raise NotFound(hints=errors_hints or {})
    if not isinstance(row, Record):
        raise InternalError(debug={"row": row, "message": "Invalid row PG data", "errors_hints": errors_hints})
    raw: dict[str, t.Any] = {}
    for key, value in row.items():
        # Only string values can carry serialized JSON; skip the decode attempt
        # for every other type instead of round-tripping it through from_json.
        if isinstance(value, str) and (json_value := from_json(value)) is not None:
            raw[key] = json_value
        else:
            raw[key] = value
    return raw
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class PostgresDB(DB[PoolConnectionProxy]):
    """asyncpg-backed implementation of the ``DB`` port.

    Holds a lazily created connection pool; ``session`` and ``transaction``
    are async generators that yield a pooled connection.
    """

    # NOTE(review): the default looks like an env-var *name*, not a DSN —
    # presumably callers pass the resolved value; confirm.
    dsn: str
    __pool: Pool | None

    def __init__(self, dsn: str = "PG_DB_DSN") -> None:
        self.dsn = dsn
        self.__pool = None

    @t.override
    async def connect(self) -> None:
        """Create the pool on first call; subsequent calls are no-ops."""
        if self.__pool is None:
            try:
                self.__pool = await create_pool(dsn=self.dsn, min_size=1, max_size=10)
            except Exception as exc:
                raise InternalError.from_exception(exc)

    @t.override
    async def close(self) -> None:
        """Gracefully close the pool (if any) and forget it so connect() can re-create it."""
        if self.__pool is not None:
            await self.__pool.close()
            self.__pool = None

    @t.override
    async def session(self) -> c.AsyncGenerator[PoolConnectionProxy]:
        """Yield a pooled connection without starting a transaction.

        Raises RuntimeError when connect() has not been called yet.
        """
        if self.__pool is None:
            raise RuntimeError("Database pool is not initialized.")
        async with self.__pool.acquire() as conn:
            yield conn

    @t.override
    async def transaction(self) -> c.AsyncGenerator[PoolConnectionProxy]:
        """Yield a pooled connection wrapped in a transaction.

        Per asyncpg semantics, the transaction commits on normal exit of the
        block and rolls back when the body raises.
        Raises RuntimeError when connect() has not been called yet.
        """
        if self.__pool is None:
            raise RuntimeError("Database pool is not initialized.")
        async with self.__pool.acquire() as conn:
            async with conn.transaction():
                yield conn
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
@define(kw_only=True, frozen=True, slots=True)
|
|
71
|
+
class PostgresRepo[T: EntityState](Repository[T]):
|
|
72
|
+
|
|
73
|
+
table: str
|
|
74
|
+
conn: PoolConnectionProxy
|
|
75
|
+
restore_func: t.Callable[[dict[str, t.Any]], Result[T]]
|
|
76
|
+
|
|
77
|
+
@t.override
|
|
78
|
+
async def get(self, criteria: Criteria) -> T | None:
|
|
79
|
+
try:
|
|
80
|
+
qry, params = criteria.as_sql
|
|
81
|
+
record = await self.conn.fetchrow(qry, *params)
|
|
82
|
+
if not isinstance(record, Record):
|
|
83
|
+
return None
|
|
84
|
+
res = self.restore_func(parse_pg_row(record))
|
|
85
|
+
if is_fail(res):
|
|
86
|
+
raise ValueError("Entity Parsing failed")
|
|
87
|
+
return res.value
|
|
88
|
+
except Exception as exc:
|
|
89
|
+
raise InternalError.from_exception(exc, extra={"criteria": criteria.as_sql})
|
|
90
|
+
|
|
91
|
+
@t.override
|
|
92
|
+
async def exists(self, criteria: Criteria) -> bool:
|
|
93
|
+
try:
|
|
94
|
+
qry, params = criteria.as_sql
|
|
95
|
+
record = await self.conn.fetchrow(qry, *params)
|
|
96
|
+
return record is not None
|
|
97
|
+
except Exception as exc:
|
|
98
|
+
raise InternalError.from_exception(exc, extra={"criteria": criteria.as_sql})
|
|
99
|
+
|
|
100
|
+
@t.override
|
|
101
|
+
async def delete(self, criteria: Criteria) -> None:
|
|
102
|
+
try:
|
|
103
|
+
qry, params = criteria.as_sql
|
|
104
|
+
await self.conn.execute(qry.replace("SELECT *", "DELETE"), *params)
|
|
105
|
+
except Exception as exc:
|
|
106
|
+
raise InternalError.from_exception(exc, extra={"criteria": criteria.as_sql})
|
|
107
|
+
|
|
108
|
+
@t.override
|
|
109
|
+
async def list(self, criteria: ListCriteria) -> ListPage[T]:
|
|
110
|
+
try:
|
|
111
|
+
qry, params = criteria.as_sql
|
|
112
|
+
rows = await self.conn.fetch(qry, *params)
|
|
113
|
+
items: list[T] = []
|
|
114
|
+
for row in rows:
|
|
115
|
+
if not isinstance(row, Record):
|
|
116
|
+
raise ValueError("Invalid row type")
|
|
117
|
+
res = self.restore_func(parse_pg_row(row))
|
|
118
|
+
if is_fail(res):
|
|
119
|
+
raise ValueError("Entity Parsing failed")
|
|
120
|
+
items.append(res.value)
|
|
121
|
+
return ListPage(cursor="NOOP", items=items) # TODO: Implement cursor
|
|
122
|
+
except Exception as exc:
|
|
123
|
+
raise InternalError.from_exception(exc, extra={"criteria": criteria.as_sql})
|
|
124
|
+
|
|
125
|
+
@t.override
|
|
126
|
+
async def save(self, state: T) -> None:
|
|
127
|
+
try:
|
|
128
|
+
raw = state.serialize()
|
|
129
|
+
sql_stm, params = self.__prepare_insert(raw) if state.version.value == 0 else self.__prepare_update(raw)
|
|
130
|
+
_ = await self.conn.execute(sql_stm, *params)
|
|
131
|
+
except UniqueViolationError as exc:
|
|
132
|
+
raise Conflict(debug={"raw": state.serialize(), "table": self.table})
|
|
133
|
+
except Exception as exc:
|
|
134
|
+
raise InternalError.from_exception(exc, extra={"raw": state.serialize(), "table": self.table})
|
|
135
|
+
|
|
136
|
+
def __prepare_insert(self, raw: dict[str, t.Any]):
|
|
137
|
+
params: list[t.Any] = []
|
|
138
|
+
attrs: list[str] = []
|
|
139
|
+
keys: list[str] = []
|
|
140
|
+
for idx, kv in enumerate(raw.items()):
|
|
141
|
+
attrs.append(kv[0])
|
|
142
|
+
keys.append(f"${idx + 1}")
|
|
143
|
+
params.append(dumps(kv[1]) if isinstance(kv[1], (dict, list)) else kv[1])
|
|
144
|
+
sql_stm = f"INSERT INTO {self.table}({", ".join(attrs)}) VALUES({", ".join(keys)})"
|
|
145
|
+
return sql_stm, params
|
|
146
|
+
|
|
147
|
+
def __prepare_update(self, raw: dict[str, t.Any]):
|
|
148
|
+
params = [raw["uid"], raw["version"] - 1]
|
|
149
|
+
changes: list[str] = []
|
|
150
|
+
for idx, kv in enumerate(raw.items()):
|
|
151
|
+
changes.append(f"{kv[0]} = ${idx + 3}")
|
|
152
|
+
params.append(dumps(kv[1]) if isinstance(kv[1], (dict, list)) else kv[1])
|
|
153
|
+
sql_stm = f"UPDATE {self.table} SET {", ".join(changes)} WHERE uid = $1 AND version = $2"
|
|
154
|
+
return sql_stm, params
|
skoll/infras/spicedb.py
ADDED
|
@@ -0,0 +1,183 @@
|
|
|
1
|
+
import typing as t
|
|
2
|
+
from json import loads
|
|
3
|
+
from attrs import define
|
|
4
|
+
from re import match as re_match
|
|
5
|
+
from aiohttp import ClientSession
|
|
6
|
+
|
|
7
|
+
from skoll.utils import sanitize_dict
|
|
8
|
+
from skoll.config import SpiceDBConfig
|
|
9
|
+
from skoll.errors import Forbidden, InternalError
|
|
10
|
+
from skoll.application import AuthzWriteChange, AuthzPrecondition, AuthzLookupResult, Authz
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
__all__ = ["SpiceDBAuthz"]
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
# Relationship-tuple grammar: resource:resource_id#relation@subject:subject_id
TUPLE_PATTERN = r"(?P<resource>.+?):(?P<resource_id>.*?)#(?P<relation>.*?)@(?P<subject>.*?):(?P<subject_id>.*)"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@define(frozen=True, kw_only=True)
class TupleObject:
    """Parsed form of a relationship tuple ``resource:id#relation@subject:id``."""

    resource: str
    subject: str | None = None
    relation: str | None = None
    subject_id: str | None = None
    resource_id: str | None = None
    subject_relation: str | None = None  # optional "#relation" suffix on the subject id
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class SpiceDBAuthz(Authz):
    """``Authz`` implementation talking to SpiceDB's HTTP (REST) gateway."""

    url: str    # base URL of the SpiceDB gateway
    token: str  # bearer token sent with every request

    def __init__(self, config: SpiceDBConfig):
        # Fail fast when the config is incomplete.
        if config.uri is None or config.token is None:
            raise InternalError(debug={"message": "Invalid SpiceDB config", "config": config})
        self.url = config.uri
        self.token = config.token

    def make_headers(self) -> dict[str, str]:
        """Authorization header attached to every SpiceDB request."""
        return {"Authorization": f"Bearer {self.token}"}

    @t.override
    async def write(self, changes: list[AuthzWriteChange], preconditions: list[AuthzPrecondition] | None = None) -> str:
        """Apply relationship updates; returns the resulting write token ("" if absent).

        Raises ValueError on a non-200 response.
        """
        uri = self.url + "/v1/relationships/write"
        data = {"updates": get_changes(changes), "optionalAuthzPreconditions": get_preconditions(preconditions or [])}
        async with ClientSession() as session:
            # NOTE(review): ssl=False disables TLS verification — confirm intended.
            async with session.post(uri, json=sanitize_dict(data), ssl=False, headers=self.make_headers()) as response:
                if response.status != 200:
                    # NOTE(review): debug print left in — consider structured logging.
                    print(await response.text())
                    raise ValueError(f"Failed to write changes {changes} with preconditions {preconditions}")
                return (await response.json()).get("writtenAt", {}).get("token", "")

    @t.override
    async def lookup(
        self, filter: str, cxt: dict[str, t.Any] | None = None, limit: int | None = None, cursor: str | None = None
    ) -> AuthzLookupResult:
        """Run a resource-lookup or subject-lookup depending on *filter*.

        A tuple with a concrete subject id triggers LookupResources;
        otherwise LookupSubjects. Raises ValueError for an unparsable tuple
        and a plain Exception on a non-200 response.
        """
        tuple_obj = tuple_from(filter)
        if not tuple_obj:
            raise ValueError(f"Invalid tuple: {filter}")

        data: dict[str, t.Any] = {
            "context": cxt,
            "consistency": {"minimizeLatency": True},
            "optionalCursor": {"token": cursor} if cursor else None,
        }
        uri = self.url + "/v1/permissions"
        if tuple_obj.subject_id is not None:  # ResourceLookup
            uri += "/resources"
            data["optionalLimit"] = limit
            data["permission"] = tuple_obj.relation
            data["resourceObjectType"] = tuple_obj.resource
            data["subject"] = {
                "optionalRelation": tuple_obj.subject_relation,
                "object": {"objectType": tuple_obj.subject, "objectId": tuple_obj.subject_id},
            }
        else:  # SubjectLookup
            uri += "/subjects"
            data["optionalConcreteLimit"] = limit
            data["permission"] = tuple_obj.relation
            data["subjectObjectType"] = tuple_obj.subject
            data["optionalSubjectRelation"] = tuple_obj.subject_relation
            data["resource"] = {"objectType": tuple_obj.resource, "objectId": tuple_obj.resource_id}

        async with ClientSession() as session:
            async with session.post(uri, json=sanitize_dict(data), ssl=False, headers=self.make_headers()) as response:
                if response.status != 200:
                    raise Exception(
                        f"Failed to lookup {filter} with context {cxt} and limit {limit} and cursor {cursor}. status code: {response.status}"
                    )
                # The endpoint streams newline-delimited JSON; the trailing
                # "\n" leaves an empty final chunk, hence the [:-1].
                lines = [loads(line) for line in (await response.text()).split("\n")[:-1]]
                id_key = "resourceObjectId" if tuple_obj.subject_id is not None else "subjectObjectId"
                return AuthzLookupResult(
                    uids=[line.get("result", {}).get(id_key, "") for line in lines],
                    # NOTE(review): lines[-1] raises IndexError on an empty
                    # result set — confirm the server always sends at least one line.
                    cursor=lines[-1].get("result", {}).get("afterResultCursor", {}).get("token"),
                )

    @t.override
    async def check(self, tuple: str, cxt: dict[str, t.Any] | None = None) -> None:
        """Raise Forbidden unless the subject holds the permission encoded in *tuple*.

        Raises ValueError for an unparsable tuple.
        """
        uri = self.url + "/v1/permissions/check"
        tuple_obj = tuple_from(tuple)
        if not tuple_obj:
            raise ValueError(f"Invalid tuple: {tuple}")

        data: dict[str, t.Any] = {
            "context": cxt,
            "permission": tuple_obj.relation,
            "consistency": {"minimizeLatency": True},
            "resource": {"objectType": tuple_obj.resource, "objectId": tuple_obj.resource_id},
            "subject": {
                "optionalRelation": tuple_obj.subject_relation,
                "object": {"objectType": tuple_obj.subject, "objectId": tuple_obj.subject_id},
            },
        }

        async with ClientSession() as session:
            async with session.post(uri, json=sanitize_dict(data), ssl=False, headers=self.make_headers()) as response:
                # NOTE(review): the HTTP status is never checked here — an error
                # body without the expected field also surfaces as Forbidden.
                res = await response.json()
                if res.get("permissionship", "") != "PERMISSIONSHIP_HAS_PERMISSION":
                    raise Forbidden(debug={"tuple": tuple, "context": cxt})
                return None
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def tuple_from(tuple_str: str) -> TupleObject | None:
    """Parse a ``res:id#rel@subj:subj_id`` tuple string into a TupleObject.

    Returns None when the string does not match TUPLE_PATTERN. Empty
    captured components are normalized to None (except the subject relation,
    which keeps whatever follows the first "#" in the subject id).
    """
    matched = re_match(TUPLE_PATTERN, tuple_str)
    if matched is None:
        return None

    parts = matched.groupdict()
    # The subject id may carry an optional "#relation" suffix.
    pieces = parts["subject_id"].split("#")
    subj_id = pieces[0]
    subj_rel = pieces[1] if len(pieces) > 1 else None

    return TupleObject(
        resource=parts["resource"],
        subject_relation=subj_rel,
        subject=parts["subject"] or None,
        relation=parts["relation"] or None,
        subject_id=subj_id or None,
        resource_id=parts["resource_id"] or None,
    )
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def get_changes(changes: list[AuthzWriteChange]) -> list[dict[str, t.Any]]:
    """Translate write changes into SpiceDB relationship-update payloads.

    Raises ValueError when a tuple string does not parse.
    """
    updates = []
    for ch in changes:
        # ch layout (by index): [0]=operation, [1]=tuple string,
        # [2]=(caveat name, caveat context) or None, [3]=expiry or None.
        tuple_obj = tuple_from(ch[1])
        if not tuple_obj:
            raise ValueError(f"Invalid tuple: {ch[1]}")
        update: dict[str, t.Any] = {
            # Anything other than DELETE is written as an upsert (TOUCH).
            "operation": "OPERATION_DELETE" if ch[0] == "DELETE" else "OPERATION_TOUCH",
            "relationship": {
                "relation": tuple_obj.relation,
                "resource": {"objectType": tuple_obj.resource, "objectId": tuple_obj.resource_id},
                "subject": {
                    "optionalRelation": tuple_obj.subject_relation,
                    "object": {"objectType": tuple_obj.subject, "objectId": tuple_obj.subject_id},
                },
                # NOTE(review): ``iso_format`` is read as an attribute —
                # confirm it is a property rather than a method.
                "optionalExpiresAt": ch[3].iso_format if ch[3] else None,
                "optionalCaveat": {"caveatName": ch[2][0], "context": ch[2][1]} if ch[2] else None,
            },
        }
        updates.append(update)
    return updates
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def get_preconditions(precondition: list[AuthzPrecondition]) -> list[dict[str, t.Any]]:
    """Map each (operation, tuple) precondition onto the SpiceDB wire format."""
    payloads: list[dict[str, t.Any]] = []
    for pr in precondition:
        payloads.append({"operation": f"OPERATION_{pr[0]}", "filter": relation_filter_from(pr[1])})
    return payloads
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def relation_filter_from(tuple: str) -> dict[str, t.Any]:
    """Build a SpiceDB RelationshipFilter payload from a tuple string.

    Raises ValueError when the tuple does not parse.
    """
    tuple_obj = tuple_from(tuple)
    if not tuple_obj:
        raise ValueError(f"Invalid tuple: {tuple}")
    return {
        "resourceType": tuple_obj.resource,
        "optionalResourceId": tuple_obj.resource_id,
        # NOTE(review): both the exact id and the id *prefix* are set to the
        # same value — SpiceDB treats these as mutually exclusive; confirm.
        "optionalResourceIdPrefix": tuple_obj.resource_id,
        "optionalRelation": tuple_obj.relation,
        "optionalSubjectFilter": {
            "subjectType": tuple_obj.subject,
            "optionalSubjectId": tuple_obj.subject_id,
            "optionalRelation": {"relation": tuple_obj.subject_relation},
        },
    }
|
skoll/result.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import typing as t
|
|
2
|
+
from attrs import define
|
|
3
|
+
|
|
4
|
+
from .errors import Error, InvalidField
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
__all__ = ["Result", "ok", "fail", "is_ok", "is_fail", "combine"]
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
type Result[T] = _Ok[T] | _Fail
|
|
11
|
+
V = t.TypeVar("V", covariant=True)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@define(frozen=True, slots=True)
class _Ok(t.Generic[V]):
    """Successful Result variant carrying the produced value."""

    value: V
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@define(frozen=True, slots=True)
class _Fail:
    """Failed Result variant carrying the causing Error."""

    err: Error
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def ok(value: V) -> Result[V]:
    """Wrap *value* in a successful Result."""
    return _Ok(value)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def fail(err: Error) -> Result[t.Any]:
    """Wrap *err* in a failed Result."""
    return _Fail(err)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def is_ok[T](res: Result[T]) -> t.TypeIs[_Ok[T]]:
    """Type-narrowing predicate: True when *res* is a success."""
    return isinstance(res, _Ok)
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def is_fail[T](res: Result[T]) -> t.TypeIs[_Fail]:
    """Type-narrowing predicate: True when *res* is a failure."""
    return isinstance(res, _Fail)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@t.overload
def combine(results: list[Result[t.Any]]) -> Result[list[t.Any]]: ...


@t.overload
def combine(results: dict[str, Result[t.Any]]) -> Result[dict[str, t.Any]]: ...


def combine(results: list[Result[t.Any]] | dict[str, Result[t.Any]]):
    """Fold many Results into one: Ok of all values, or a Fail aggregating
    every error into a single InvalidField."""
    if isinstance(results, dict):
        ok_values: dict[str, t.Any] = {}
        errors: list[Error] = []
        for key, res in results.items():
            if isinstance(res, _Ok):
                ok_values[key] = res.value
            elif isinstance(res, _Fail):
                errors.append(res.err)
        if errors:
            return _Fail(InvalidField(errors=errors))
        return _Ok(ok_values)

    ok_list = [res.value for res in results if isinstance(res, _Ok)]
    errors = [res.err for res in results if isinstance(res, _Fail)]
    if errors:
        return _Fail(InvalidField(errors=errors))
    return _Ok(ok_list)
|
skoll/utils/__init__.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
import inspect
|
|
2
|
+
import typing as t
|
|
3
|
+
from attrs import define
|
|
4
|
+
import collections.abc as c
|
|
5
|
+
from contextlib import AsyncExitStack, AbstractAsyncContextManager
|
|
6
|
+
|
|
7
|
+
from .functional import to_context_manager, get_signature
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
type Cache = dict[t.Any, t.Any]
|
|
11
|
+
type Context = dict[str, t.Any]
|
|
12
|
+
type DepFn = t.Callable[..., t.Any]
|
|
13
|
+
type BaseFn[T] = t.Callable[..., c.Coroutine[t.Any, t.Any, T]]
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
__all__ = [
|
|
17
|
+
"depend",
|
|
18
|
+
"resolve",
|
|
19
|
+
"Dependent",
|
|
20
|
+
"get_dependant",
|
|
21
|
+
"call_with_dependencies",
|
|
22
|
+
]
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@define(slots=True, kw_only=True)
class Dependent:
    """Annotation marker wrapping a provider callable for injection."""

    call: t.Callable[..., t.Any]
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def depend(call: t.Callable[..., t.Any]) -> Dependent:
    """Mark *call* as a dependency provider for use inside ``t.Annotated``."""
    return Dependent(call=call)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
async def call_with_dependencies[T](fn: BaseFn[T], context: Context | None = None) -> T:
    """Resolve *fn*'s dependency-annotated parameters, then await it.

    The exit stack keeps generator/context-manager dependencies open for the
    full duration of the call and tears them down afterwards.
    """
    async with AsyncExitStack() as stack:
        # no_call=True: resolve only the kwargs; fn itself is awaited below.
        kwargs = await resolve(fn, cache={}, context=context or {}, exit_stack=stack, no_call=True)
        return await fn(**kwargs)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def get_dependant(annotation: t.Any) -> Dependent | None:
|
|
42
|
+
if t.get_origin(annotation) is not t.Annotated:
|
|
43
|
+
return None
|
|
44
|
+
|
|
45
|
+
_, *metadata = t.get_args(annotation)
|
|
46
|
+
for meta in metadata:
|
|
47
|
+
if isinstance(meta, Dependent):
|
|
48
|
+
return meta
|
|
49
|
+
return None
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
async def resolve(
    fn: DepFn, cache: Cache, context: Context, exit_stack: AsyncExitStack, no_call: bool = False
) -> t.Any:
    """Recursively resolve *fn*'s parameters, then (unless *no_call*) invoke it.

    Resolution order per parameter: explicit *context* value, nested
    ``Dependent`` annotation, declared default — otherwise TypeError.
    Async generators / async context managers are entered on *exit_stack* so
    they stay alive until the stack closes; results are memoized in *cache*.
    """
    if fn in cache:
        return cache[fn]

    kwargs: dict[str, t.Any] = {}

    for param in get_signature(fn):
        # Context wins over everything else (None values count as absent).
        if context.get(param.name) is not None:
            kwargs[param.name] = context[param.name]
            continue

        dep = get_dependant(param.annotation)
        if dep is not None:
            kwargs[param.name] = await resolve(dep.call, cache=cache, context=context, exit_stack=exit_stack)
            continue

        if param.default is inspect.Parameter.empty:
            raise TypeError(f"Unresolvable dependency parameter: {param.name}")

        kwargs[param.name] = param.default

    if no_call:
        # Caller only wanted the resolved kwargs (see call_with_dependencies).
        return kwargs

    result: t.Any = fn(**kwargs)
    # Async generators become context managers so teardown runs at stack close.
    result = to_context_manager(result) if inspect.isasyncgen(result) else result

    if isinstance(result, AbstractAsyncContextManager):
        value = await exit_stack.enter_async_context(result)
    elif inspect.isawaitable(result):
        value = await result
    else:
        value = result

    cache[fn] = value
    return value
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import os
|
|
3
|
+
import inspect
|
|
4
|
+
import functools
|
|
5
|
+
import typing as t
|
|
6
|
+
from ulid import ulid
|
|
7
|
+
from json import loads
|
|
8
|
+
import collections.abc as c
|
|
9
|
+
from zoneinfo import ZoneInfo
|
|
10
|
+
from datetime import datetime, timezone
|
|
11
|
+
from contextlib import asynccontextmanager
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
__all__ = [
|
|
15
|
+
"to_tz",
|
|
16
|
+
"new_ulid",
|
|
17
|
+
"safe_call",
|
|
18
|
+
"from_json",
|
|
19
|
+
"serialize",
|
|
20
|
+
"impartial",
|
|
21
|
+
"sanitize_dict",
|
|
22
|
+
"to_camel_case",
|
|
23
|
+
"to_snake_case",
|
|
24
|
+
"get_signature",
|
|
25
|
+
"unwrapped_call",
|
|
26
|
+
"get_config_var",
|
|
27
|
+
"to_context_manager",
|
|
28
|
+
]
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# Generate a fresh lowercase ULID string.
new_ulid: t.Callable[[], str] = lambda: ulid().lower()
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def from_json(val: t.Any) -> t.Any:
    """Best-effort JSON decode: the parsed value, or None when *val* is not
    a valid JSON document (or not a string/bytes at all)."""
    try:
        return loads(val)
    except (ValueError, TypeError):
        # ValueError covers JSONDecodeError; TypeError covers non-str input.
        # The bare ``except:`` this replaces also swallowed KeyboardInterrupt.
        return None
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def sanitize_dict(obj: dict[str, t.Any]) -> dict[str, t.Any]:
    """Copy *obj* without None values, recursing into nested dicts.

    Lists are kept as-is (including any None items they contain).
    """
    cleaned: dict[str, t.Any] = {}
    for key, value in obj.items():
        if value is None:
            continue
        if isinstance(value, dict):
            value = sanitize_dict(t.cast(dict[str, t.Any], value))
        cleaned[key] = value
    return cleaned
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def to_snake_case(val: t.Any) -> t.Any:
    """Recursively convert camelCase strings (including dict keys/values and
    list items) to snake_case; other leaves pass through unchanged."""
    if isinstance(val, dict):
        return {to_snake_case(key): to_snake_case(value) for key, value in val.items()}
    if isinstance(val, list):
        return [to_snake_case(item) for item in val]
    if not isinstance(val, str):
        return val
    # Break before a capital starting a new word, then before any
    # lower/digit -> upper boundary, and lowercase the result.
    step = re.sub(r"(.)([A-Z][a-z]+)", r"\1_\2", val)
    return re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", step).lower()
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def to_camel_case(val: t.Any) -> t.Any:
    """Recursively convert snake_case strings (including dict keys/values and
    list items) to camelCase; other leaves pass through unchanged."""
    if isinstance(val, dict):
        return {to_camel_case(key): to_camel_case(value) for key, value in val.items()}
    if isinstance(val, list):
        return [to_camel_case(item) for item in val]
    if not isinstance(val, str):
        return val
    head, *tail = val.split("_")
    return head + "".join(part.title() for part in tail)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def serialize(data: t.Any) -> t.Any:
    """Recursively serialize: objects exposing a callable ``serialize`` are
    collapsed via it; lists/tuples and dicts are walked; leaves pass through.

    Note that tuples come back as lists.
    """
    serializer = getattr(data, "serialize", None)
    if callable(serializer):
        return serializer()
    if isinstance(data, (list, tuple)):
        return [serialize(item) for item in data]
    if isinstance(data, dict):
        return {key: serialize(value) for key, value in data.items()}
    return data
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def iso_to_timestamp(date_string: str, format: str = "%Y-%m-%dT%H:%M:%SZ"):
    """Parse *date_string* (interpreted as UTC) and return epoch milliseconds."""
    parsed = datetime.strptime(date_string, format).replace(tzinfo=timezone.utc)
    return int(parsed.timestamp() * 1000)
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def timestamp_to_iso(timestamp: int, format: str = "%Y-%m-%dT%H:%M:%SZ") -> str:
    """Render epoch milliseconds as a UTC timestamp string."""
    return datetime.fromtimestamp(timestamp / 1000, tz=timezone.utc).strftime(format)
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def find_item[T](match_with: t.Callable[[T], bool], items: list[T]) -> T | None:
|
|
97
|
+
found_item: T | None = None
|
|
98
|
+
for item in items:
|
|
99
|
+
if match_with(item):
|
|
100
|
+
found_item = item
|
|
101
|
+
break
|
|
102
|
+
return found_item
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
def to_tz(tz_str: str) -> ZoneInfo:
    """Resolve *tz_str* to a ZoneInfo, falling back to UTC for unknown or
    malformed keys."""
    try:
        return ZoneInfo(key=tz_str)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate; lookup failures of any kind fall back to UTC.
        return ZoneInfo("UTC")
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def names_from_email(email: str) -> tuple[str, str]:
    """Split the local part of *email* on dots into (first names, last name).

    A single-segment local part yields ("segment", ""); multiple segments
    yield all-but-last joined with spaces, then the last segment.
    """
    *leading, last = email.split("@")[0].split(".")
    if not leading:
        return last, ""
    return " ".join(leading), last
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def safe_call[T](func: t.Callable[..., T], *args: t.Any, **kwargs: t.Any) -> T | None:
|
|
122
|
+
try:
|
|
123
|
+
return func(*args, **kwargs)
|
|
124
|
+
except:
|
|
125
|
+
return None
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
@asynccontextmanager
async def to_context_manager(gen: c.AsyncGenerator[t.Any, t.Any]):
    """Adapt an already-created async generator into an async context manager.

    Yields the generator's first value and guarantees the generator is
    closed on exit. NOTE(review): unlike applying ``asynccontextmanager``
    to the generator function directly, exceptions raised in the ``with``
    body are not thrown back into *gen* — it is simply closed.
    """
    try:
        value = await gen.__anext__()
        yield value
    finally:
        await gen.aclose()
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def get_signature(fn: t.Callable[..., t.Any]) -> list[inspect.Parameter]:
    """Parameters of *fn* after stripping partial/decorator wrappers.

    Returns an empty list when nothing callable remains.
    """
    target = unwrapped_call(fn)
    if target is None:
        return []
    return list(inspect.signature(target).parameters.values())
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def unwrapped_call(call: t.Callable[..., t.Any] | None) -> t.Callable[..., t.Any] | None:
    """Strip functools.partial layers, then follow ``__wrapped__`` chains.

    None passes straight through.
    """
    if call is None:
        return None
    return inspect.unwrap(impartial(call))
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def impartial(func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]:
    """Peel nested functools.partial layers down to the underlying callable."""
    unwrapped = func
    while isinstance(unwrapped, functools.partial):
        unwrapped = unwrapped.func
    return unwrapped
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
def get_config_var(keys: list[str], default: str | None = None) -> t.Callable[[], str | None]:
|
|
158
|
+
|
|
159
|
+
def get_var() -> str | None:
|
|
160
|
+
for key in keys:
|
|
161
|
+
if "/" in key:
|
|
162
|
+
with open(key, "r") as f:
|
|
163
|
+
return f.read().strip()
|
|
164
|
+
value = os.getenv(key)
|
|
165
|
+
if value:
|
|
166
|
+
return value
|
|
167
|
+
return default
|
|
168
|
+
|
|
169
|
+
return get_var
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: skoll
|
|
3
|
+
Version: 0.0.1
|
|
4
|
+
Summary: A simple package that provide a basic API python framework based on starlette and some domain driven design concepts
|
|
5
|
+
Author-email: Monzon Diarra <diarramonzon4@gmail.com>
|
|
6
|
+
License-File: LICENSE
|
|
7
|
+
Requires-Python: >=3.13
|
|
8
|
+
Requires-Dist: aiohttp>=3.13.3
|
|
9
|
+
Requires-Dist: asyncpg>=0.31.0
|
|
10
|
+
Requires-Dist: attrs>=25.4.0
|
|
11
|
+
Requires-Dist: certifi>=2026.1.4
|
|
12
|
+
Requires-Dist: starlette>=0.49.3
|
|
13
|
+
Requires-Dist: ulid>=1.1
|
|
14
|
+
Description-Content-Type: text/markdown
|
|
15
|
+
|
|
16
|
+
# Skoll
|
|
17
|
+
|
|
18
|
+
A simple package that provides a basic Python API framework based on Starlette and some domain-driven design concepts.
|
|
19
|
+
|
|
20
|
+
## Installation
|
|
21
|
+
|
|
22
|
+
```bash
|
|
23
|
+
pip install skoll
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
## Usage
|
|
27
|
+
|
|
28
|
+
Coming soon...
|
|
29
|
+
|
|
30
|
+
## License
|
|
31
|
+
|
|
32
|
+
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
|