omlish 0.0.0.dev304__py3-none-any.whl → 0.0.0.dev306__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. omlish/.manifests.json +14 -0
  2. omlish/__about__.py +3 -3
  3. omlish/dataclasses/__init__.py +5 -0
  4. omlish/dataclasses/api/__init__.py +1 -0
  5. omlish/dataclasses/api/fields/metadata.py +8 -0
  6. omlish/dataclasses/tools/only_.py +45 -0
  7. omlish/diag/_pycharm/runhack.py +4 -1
  8. omlish/formats/edn/LICENSE +16 -0
  9. omlish/formats/edn/__init__.py +15 -0
  10. omlish/formats/edn/codec.py +26 -0
  11. omlish/formats/edn/parsing.py +359 -0
  12. omlish/formats/edn/values.py +162 -0
  13. omlish/graphs/trees.py +6 -0
  14. omlish/lang/__init__.py +7 -0
  15. omlish/lang/cached/function.py +7 -2
  16. omlish/lang/objects.py +28 -0
  17. omlish/lang/recursion.py +109 -0
  18. omlish/marshal/__init__.py +1 -1
  19. omlish/marshal/objects/helpers.py +1 -1
  20. omlish/sql/api/__init__.py +8 -1
  21. omlish/sql/api/base.py +1 -1
  22. omlish/sql/api/funcs.py +70 -11
  23. omlish/sql/qualifiedname.py +20 -12
  24. omlish/sql/queries/__init__.py +3 -0
  25. omlish/sql/queries/base.py +16 -0
  26. omlish/sql/queries/idents.py +12 -1
  27. omlish/sql/queries/names.py +8 -1
  28. omlish/sql/queries/relations.py +0 -12
  29. omlish/sql/queries/rendering.py +0 -3
  30. {omlish-0.0.0.dev304.dist-info → omlish-0.0.0.dev306.dist-info}/METADATA +3 -3
  31. {omlish-0.0.0.dev304.dist-info → omlish-0.0.0.dev306.dist-info}/RECORD +35 -28
  32. {omlish-0.0.0.dev304.dist-info → omlish-0.0.0.dev306.dist-info}/WHEEL +0 -0
  33. {omlish-0.0.0.dev304.dist-info → omlish-0.0.0.dev306.dist-info}/entry_points.txt +0 -0
  34. {omlish-0.0.0.dev304.dist-info → omlish-0.0.0.dev306.dist-info}/licenses/LICENSE +0 -0
  35. {omlish-0.0.0.dev304.dist-info → omlish-0.0.0.dev306.dist-info}/top_level.txt +0 -0
omlish/formats/edn/values.py ADDED
@@ -0,0 +1,162 @@
+ import dataclasses as dc
+ import typing as ta
+
+ from ... import check
+ from ... import lang
+ from ...lite.dataclasses import dataclass_cache_hash
+
+
+ ##
+
+
+ _DEBUG = __debug__
+ # _DEBUG = True
+
+
+ @dc.dataclass(frozen=True)
+ class Value(lang.Abstract, lang.Sealed):
+     pass
+
+
+ #
+
+
+ @dc.dataclass(frozen=True)
+ class Scalar(Value, lang.Abstract):
+     pass
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Keyword(Scalar, lang.Final):
+     s: str
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.s!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.s, str)
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Char(Scalar, lang.Final):
+     c: str
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.c!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.c, str)
+             check.equal(len(self.c), 1)
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Symbol(Scalar, lang.Final):
+     n: str
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.n!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.non_empty_str(self.n)
+
+
+ #
+
+
+ @dc.dataclass(frozen=True)
+ class Collection(Value, lang.Abstract):
+     pass
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class List(Collection, lang.Final):
+     items: ta.Sequence[ta.Any]
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.items!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.items, tuple)
+
+     @classmethod
+     def new(cls, items: ta.Iterable[ta.Any]) -> 'List':
+         return cls(tuple(items))
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Vector(Collection, lang.Final):
+     items: ta.Sequence[ta.Any]
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.items!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.items, tuple)
+
+     @classmethod
+     def new(cls, items: ta.Iterable[ta.Any]) -> 'Vector':
+         return cls(tuple(items))
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Set(Collection, lang.Final):
+     items: ta.Sequence[ta.Any]
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.items!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.items, tuple)
+
+     @classmethod
+     def new(cls, items: ta.Iterable[ta.Any]) -> 'Set':
+         return cls(tuple(items))
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Map(Collection, lang.Final):
+     items: ta.Sequence[tuple[ta.Any, ta.Any]]
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.items!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.items, tuple)
+             for t in self.items:
+                 check.isinstance(t, tuple)
+                 check.equal(len(t), 2)
+
+     @classmethod
+     def new(cls, items: ta.Iterable[ta.Iterable[ta.Any]]) -> 'Map':
+         return cls(tuple((k, v) for k, v in items))
+
+
+ #
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class TaggedVal(Value, lang.Final):
+     t: str
+     v: ta.Any
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.t!r}, {self.v!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.non_empty_str(self.t)
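
The module above defines the EDN value model as frozen, hash-caching dataclasses whose `.new()` classmethods coerce arbitrary iterables to tuples. A minimal usage sketch, assuming only the module path from the file list (`omlish/formats/edn/values.py`); the literal values are illustrative:

    # Hedged sketch: constructing EDN values from the classes added above.
    from omlish.formats.edn import values as edn

    kw = edn.Keyword('db/id')        # repr -> Keyword('db/id')
    vec = edn.Vector.new([1, 2, 3])  # items stored as a tuple
    m = edn.Map.new([(kw, 'x')])     # pairs coerced to 2-tuples

    d = {kw: vec}                    # frozen + cached hash, usable as dict keys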
omlish/graphs/trees.py CHANGED
@@ -19,6 +19,9 @@ NodeWalker = ta.Callable[[NodeT], ta.Iterable[NodeT]]
  NodeGenerator = ta.Generator[NodeT, None, None]


+ ##
+
+
  class NodeError(ta.Generic[NodeT], Exception):
      def __init__(self, node: NodeT, msg: str, *args, **kwargs) -> None:
          super().__init__(msg, *args, **kwargs)  # noqa
@@ -39,6 +42,9 @@ class UnknownNodeError(NodeError[NodeT]):
          super().__init__(node, f'Unknown node: {node!r}', *args, **kwargs)


+ #
+
+
  class BasicTreeAnalysis(ta.Generic[NodeT]):
      def __init__(
              self,
omlish/lang/__init__.py CHANGED
@@ -231,6 +231,7 @@ from .maybes import ( # noqa
  )

  from .objects import (  # noqa
+     Identity,
      SimpleProxy,
      anon_object,
      arg_repr,
@@ -269,6 +270,12 @@ from .params import ( # noqa
      param_render,
  )

+ from .recursion import (  # noqa
+     LimitedRecursionError,
+     recursion_limiting,
+     recursion_limiting_context,
+ )
+
  from .resolving import (  # noqa
      Resolvable,
      ResolvableClassNameError,
omlish/lang/cached/function.py CHANGED
@@ -15,6 +15,8 @@ TODO:
   - and must be transient?
   - use __transient_dict__ to support common state nuking
   - use __set_name__ ?
+  - on_compute
+  - max_recursion?
  """
  import dataclasses as dc
  import functools
@@ -201,6 +203,9 @@ class _CachedFunction(ta.Generic[T], Abstract):
          except KeyError:
              pass

+         def call_value_fn():
+             return self._value_fn(*args, **kwargs)
+
          if self._lock is not None:
              with self._lock:
                  try:
@@ -208,10 +213,10 @@ class _CachedFunction(ta.Generic[T], Abstract):
                  except KeyError:
                      pass

-                 value = self._value_fn(*args, **kwargs)
+                 value = call_value_fn()

          else:
-             value = self._value_fn(*args, **kwargs)
+             value = call_value_fn()

          self._values[k] = value
          return value
omlish/lang/objects.py CHANGED
@@ -222,3 +222,31 @@ class _AnonObject:

  def anon_object(**attrs: ta.Any) -> ta.Any:
      return _AnonObject(**attrs)
+
+
+ ##
+
+
+ class Identity(ta.Generic[T]):
+     def __init__(self, obj: T) -> None:
+         super().__init__()
+
+         self._obj = obj
+
+     def __bool__(self):
+         raise TypeError
+
+     @property
+     def obj(self) -> T:
+         return self._obj
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self._obj!r})'
+
+     def __hash__(self) -> int:
+         return id(self._obj)
+
+     def __eq__(self, other):
+         if type(other) is not type(self):
+             return NotImplemented
+         return self._obj is other._obj  # noqa
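
`Identity` (exported from `omlish.lang` per the `__init__.py` hunk above) hashes and compares by object identity rather than value, and deliberately refuses truthiness. A minimal sketch of what that enables, with illustrative values:

    from omlish import lang

    a, b = [1, 2], [1, 2]                    # equal but distinct, and unhashable
    seen = {lang.Identity(a), lang.Identity(b)}
    assert len(seen) == 2                    # keyed by identity, so both survive
    assert lang.Identity(a) == lang.Identity(a)
    assert lang.Identity(a) != lang.Identity(b)

    # bool(lang.Identity(a))                 # would raise TypeError by design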
omlish/lang/recursion.py ADDED
@@ -0,0 +1,109 @@
+ import contextlib
+ import dataclasses as dc
+ import functools
+ import threading
+ import typing as ta
+
+
+ T = ta.TypeVar('T')
+ P = ta.ParamSpec('P')
+
+
+ ##
+
+
+ _LOCK = threading.RLock()
+
+ _LOCAL: threading.local
+
+
+ def _local() -> threading.local:
+     global _LOCAL
+
+     try:
+         return _LOCAL
+     except NameError:
+         pass
+
+     with _LOCK:
+         try:
+             return _LOCAL
+         except NameError:
+             pass
+
+         _LOCAL = threading.local()
+         return _LOCAL
+
+
+ def _depth_map() -> dict[ta.Any, int]:
+     lo = _local()
+     try:
+         return lo.depth_map
+     except AttributeError:
+         dm = lo.depth_map = {}
+         return dm
+
+
+ ##
+
+
+ @dc.dataclass()
+ class LimitedRecursionError(RecursionError):
+     key: ta.Any
+     depth: int
+
+
+ @contextlib.contextmanager
+ def recursion_limiting_context(key: ta.Any, limit: int | None) -> ta.Iterator[int | None]:
+     if limit is None:
+         yield None
+         return
+
+     dm = _depth_map()
+
+     try:
+         pd: int | None = dm[key]
+     except KeyError:
+         pd = None
+     else:
+         if not isinstance(pd, int) and pd > 0:  # type: ignore[operator]
+             raise RuntimeError
+
+     if pd is not None and pd >= limit:
+         raise LimitedRecursionError(key, pd)
+
+     nd = (pd or 0) + 1
+     dm[key] = nd
+
+     try:
+         yield nd
+
+     finally:
+         if dm.get(key) != nd:
+             raise RuntimeError
+
+         if pd is not None:
+             dm[key] = pd
+         else:
+             del dm[key]
+
+
+ ##
+
+
+ def recursion_limiting(limit: int | None) -> ta.Callable[[ta.Callable[P, T]], ta.Callable[P, T]]:
+     def outer(fn):
+         if not callable(fn):
+             raise TypeError(fn)
+
+         if limit is None:
+             return fn
+
+         @functools.wraps(fn)
+         def inner(*args, **kwargs):
+             with recursion_limiting_context(fn, limit):
+                 return fn(*args, **kwargs)
+
+         return inner
+
+     return outer
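
The new `recursion_limiting` decorator (re-exported from `omlish.lang` above) tracks nesting depth per key in a thread-local map and raises `LimitedRecursionError` once the limit is reached; `limit=None` disables the check. A minimal sketch, assuming only the behavior shown in the code above:

    from omlish import lang

    @lang.recursion_limiting(8)
    def countdown(n: int) -> int:
        # No base case: the limiter cuts this off before Python's own recursion limit.
        return countdown(n - 1)

    try:
        countdown(100)
    except lang.LimitedRecursionError as e:
        print(e.key, e.depth)  # the wrapped function object and the depth reached (8)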
omlish/marshal/__init__.py CHANGED
@@ -82,9 +82,9 @@ from .objects.dataclasses import ( # noqa
  )

  from .objects.helpers import (  # noqa
-     update_field_metadata,
      update_fields_metadata,
      update_object_metadata,
+     with_field_metadata,
  )

  from .objects.marshal import (  # noqa
omlish/marshal/objects/helpers.py CHANGED
@@ -11,7 +11,7 @@ T = ta.TypeVar('T')
  ##


- def update_field_metadata(**kwargs: ta.Any) -> dc.field_modifier:
+ def with_field_metadata(**kwargs: ta.Any) -> dc.field_modifier:
      @dc.field_modifier
      def inner(f: dc.Field) -> dc.Field:
          return dc.set_field_metadata(f, {
omlish/sql/api/__init__.py CHANGED
@@ -35,9 +35,16 @@ from .errors import ( # noqa
  )

  from .funcs import (  # noqa
+     exec,  # noqa
+
      query,
      query_all,
-     exec,  # noqa
+     query_first,
+     query_opt_first,
+     query_one,
+     query_opt_one,
+     query_scalar,
+     query_maybe_scalar,
  )

  from .queries import (  # noqa
omlish/sql/api/base.py CHANGED
@@ -30,7 +30,7 @@ class Querier(ContextCloser, lang.Abstract):
  ##


- class Rows(ContextCloser, lang.Abstract):
+ class Rows(ContextCloser, ta.Iterator[Row], lang.Abstract):
      @property
      @abc.abstractmethod
      def columns(self) -> Columns:
omlish/sql/api/funcs.py CHANGED
@@ -1,5 +1,7 @@
  import typing as ta

+ from ... import check
+ from ... import lang
  from .asquery import as_query
  from .base import Querier
  from .base import Rows
@@ -10,6 +12,25 @@ from .rows import Row
  ##


+ def exec(  # noqa
+     querier: Querier,
+     obj: ta.Any,
+     *args: ta.Any,
+ ) -> None:
+     q = as_query(
+         obj,
+         *args,
+         mode=QueryMode.EXEC,
+         querier=querier,
+     )
+
+     with querier.query(q):
+         pass
+
+
+ ##
+
+
  def query(
      querier: Querier,
      obj: ta.Any,
@@ -37,20 +58,58 @@ def query_all(
      return list(rows)


- ##
+ def query_first(
+     querier: Querier,
+     obj: ta.Any,
+     *args: ta.Any,
+ ) -> Row:
+     with query(querier, obj, *args) as rows:
+         return next(rows)


- def exec(  # noqa
+ def query_opt_first(
      querier: Querier,
      obj: ta.Any,
      *args: ta.Any,
- ) -> None:
-     q = as_query(
-         obj,
-         *args,
-         mode=QueryMode.EXEC,
-         querier=querier,
-     )
+ ) -> Row | None:
+     with query(querier, obj, *args) as rows:
+         return next(rows, None)

-     with querier.query(q):
-         pass
+
+ def query_one(
+     querier: Querier,
+     obj: ta.Any,
+     *args: ta.Any,
+ ) -> Row:
+     with query(querier, obj, *args) as rows:
+         return check.single(rows)
+
+
+ def query_opt_one(
+     querier: Querier,
+     obj: ta.Any,
+     *args: ta.Any,
+ ) -> Row | None:
+     with query(querier, obj, *args) as rows:
+         return check.opt_single(rows)
+
+
+ def query_scalar(
+     querier: Querier,
+     obj: ta.Any,
+     *args: ta.Any,
+ ) -> ta.Any:
+     row = query_one(querier, obj, *args)
+     return check.single(row.values)
+
+
+ def query_maybe_scalar(
+     querier: Querier,
+     obj: ta.Any,
+     *args: ta.Any,
+ ) -> lang.Maybe[ta.Any]:
+     row = query_opt_one(querier, obj, *args)
+     if row is not None:
+         return lang.just(check.single(row.values))
+     else:
+         return lang.empty()
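
The new helpers split along how many rows and columns the caller expects: `query_first`/`query_opt_first` take the first row, `query_one`/`query_opt_one` also require at most one row, and `query_scalar`/`query_maybe_scalar` further unwrap a single-column row (the latter into a `lang.Maybe`). A hedged sketch; the `querier` instance and the `users` table are assumptions, not part of this diff:

    from omlish.sql import api
    from omlish.sql.api.base import Querier

    def user_count(querier: Querier) -> int:
        # Exactly one row with exactly one column, unwrapped to its value.
        return api.query_scalar(querier, 'select count(*) from users')

    def latest_user(querier: Querier):
        # First row if any, else None.
        return api.query_opt_first(querier, 'select * from users order by id desc')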
omlish/sql/qualifiedname.py CHANGED
@@ -1,23 +1,31 @@
  import collections.abc
- import dataclasses as dc
  import typing as ta

+ from .. import dataclasses as dc
+ from .. import lang
+

  ##


+ def coerce_parts(parts: ta.Sequence[str]) -> tuple[str, ...]:
+     if not parts:
+         raise ValueError
+     if isinstance(parts, str):
+         raise TypeError(parts)
+     if not isinstance(parts, tuple):
+         parts = tuple(parts)
+     if not all(parts) and all(isinstance(p, str) for p in parts):
+         raise ValueError(parts)
+     return parts
+
+
+ #
+
+
  @dc.dataclass(frozen=True)
- class QualifiedName(ta.Sequence[str]):
-     parts: ta.Sequence[str]
-
-     def __post_init__(self) -> None:
-         if not (
-             self.parts and
-             not isinstance(self.parts, str) and
-             all(self.parts) and
-             all(isinstance(p, str) for p in self.parts)
-         ):
-             raise ValueError(self)
+ class QualifiedName(ta.Sequence[str], lang.Final):
+     parts: ta.Sequence[str] = dc.field() | dc.with_extra_field_params(coerce=coerce_parts)

      def __repr__(self) -> str:
          return f'{self.__class__.__name__}([{", ".join(map(repr, self.parts))}])'
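
The rewrite above replaces `__post_init__` validation with a `coerce_parts` hook attached through `dc.with_extra_field_params`, so parts are normalized to a tuple at construction. A small sketch of the intended behavior (illustrative names only):

    from omlish.sql.qualifiedname import QualifiedName

    qn = QualifiedName(['my_schema', 'my_table'])  # non-tuple sequence coerced to a tuple
    assert qn.parts == ('my_schema', 'my_table')
    print(qn)                                      # -> QualifiedName(['my_schema', 'my_table'])

    # QualifiedName(())      # would raise ValueError: empty parts are rejected
    # QualifiedName('a.b')   # would raise TypeError: a bare str is not accepted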
omlish/sql/queries/__init__.py CHANGED
@@ -1,5 +1,6 @@
  from .base import (  # noqa
      Builder,
+     HasQn,
      Node,
      NodeComparisonTypeError,
      Value,
@@ -56,6 +57,8 @@ from .params import ( # noqa
  from .relations import (  # noqa
      CanRelation,
      CanTable,
+     Join,
+     JoinKind,
      Relation,
      RelationBuilder,
      Table,
omlish/sql/queries/base.py CHANGED
@@ -1,8 +1,10 @@
+ import abc
  import types
  import typing as ta

  from ... import dataclasses as dc
  from ... import lang
+ from ..qualifiedname import QualifiedName


  ##
@@ -45,6 +47,10 @@ class Node(
      def __ne__(self, other) -> ta.NoReturn:
          raise NodeComparisonTypeError(type(self))

+     @ta.final
+     def __bool__(self) -> ta.NoReturn:
+         raise TypeError
+
      #

      @dc.dataclass(frozen=True)
@@ -130,3 +136,13 @@ class Node(

  class Builder(lang.Abstract):
      pass
+
+
+ ##
+
+
+ class HasQn(lang.Abstract):
+     @property
+     @abc.abstractmethod
+     def qn(self) -> QualifiedName:
+         raise NotImplementedError
omlish/sql/queries/idents.py CHANGED
@@ -1,9 +1,16 @@
+ """
+ TODO:
+  - clamp down on as_ident / CanIdent - no strs allowed
+ """
  import abc
  import functools
  import typing as ta

+ from ... import cached
  from ... import lang
+ from ..qualifiedname import QualifiedName
  from .base import Builder
+ from .base import HasQn
  from .base import Node


@@ -17,9 +24,13 @@ class IdentLike(abc.ABC): # noqa
  ##


- class Ident(Node, IdentLike, lang.Final):
+ class Ident(Node, IdentLike, HasQn, lang.Final):
      s: str

+     @cached.property
+     def qn(self) -> QualifiedName:
+         return QualifiedName((self.s,))
+

  ##

omlish/sql/queries/names.py CHANGED
@@ -2,9 +2,12 @@ import abc
  import functools
  import typing as ta

+ from ... import cached
  from ... import check
  from ... import dataclasses as dc
  from ... import lang
+ from ..qualifiedname import QualifiedName
+ from .base import HasQn
  from .base import Node
  from .idents import CanIdent
  from .idents import Ident
@@ -27,9 +30,13 @@ def _coerce_name_parts(o: ta.Iterable[Ident]) -> ta.Sequence[Ident]:
      return check.not_empty(tuple(check.isinstance(e, Ident) for e in o))


- class Name(Node, NameLike, lang.Final):
+ class Name(Node, NameLike, HasQn, lang.Final):
      ps: ta.Sequence[Ident] = dc.xfield(coerce=_coerce_name_parts)

+     @cached.property
+     def qn(self) -> QualifiedName:
+         return QualifiedName(tuple(p.s for p in self.ps))
+

  ##

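
With `HasQn` exported from `omlish.sql.queries` and both `Ident.qn` and `Name.qn` returning a `QualifiedName`, downstream code can treat either node kind uniformly. A minimal, hedged sketch:

    from omlish.sql.queries import HasQn

    def render_qualified(node: HasQn) -> str:
        # Works for any node exposing the new cached `qn` property (Ident, Name).
        return '.'.join(node.qn.parts)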