omlish 0.0.0.dev19__py3-none-any.whl → 0.0.0.dev21__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. omlish/__about__.py +3 -3
  2. omlish/asyncs/anyio.py +13 -6
  3. omlish/dataclasses/__init__.py +2 -0
  4. omlish/dataclasses/impl/fields.py +51 -0
  5. omlish/dataclasses/impl/frozen.py +38 -0
  6. omlish/dataclasses/impl/main.py +24 -88
  7. omlish/dataclasses/impl/metadata.py +2 -1
  8. omlish/dataclasses/impl/processing.py +10 -2
  9. omlish/dataclasses/utils.py +45 -0
  10. omlish/fnpairs.py +1 -1
  11. omlish/formats/json.py +1 -0
  12. omlish/lang/classes/simple.py +3 -0
  13. omlish/lang/clsdct.py +2 -0
  14. omlish/lang/contextmanagers.py +41 -0
  15. omlish/lang/descriptors.py +8 -0
  16. omlish/lang/objects.py +4 -0
  17. omlish/lang/resolving.py +9 -0
  18. omlish/lite/logs.py +2 -2
  19. omlish/lite/marshal.py +4 -2
  20. omlish/marshal/__init__.py +4 -0
  21. omlish/marshal/dataclasses.py +16 -3
  22. omlish/marshal/helpers.py +22 -0
  23. omlish/marshal/objects.py +33 -14
  24. omlish/multiprocessing.py +36 -4
  25. omlish/specs/__init__.py +0 -0
  26. omlish/specs/jsonschema/__init__.py +0 -0
  27. omlish/specs/jsonschema/keywords/__init__.py +42 -0
  28. omlish/specs/jsonschema/keywords/base.py +86 -0
  29. omlish/specs/jsonschema/keywords/core.py +26 -0
  30. omlish/specs/jsonschema/keywords/metadata.py +22 -0
  31. omlish/specs/jsonschema/keywords/parse.py +69 -0
  32. omlish/specs/jsonschema/keywords/render.py +47 -0
  33. omlish/specs/jsonschema/keywords/validation.py +68 -0
  34. omlish/specs/jsonschema/schemas/__init__.py +0 -0
  35. omlish/specs/jsonschema/schemas/draft202012/__init__.py +0 -0
  36. omlish/specs/jsonschema/schemas/draft202012/vocabularies/__init__.py +0 -0
  37. omlish/specs/jsonschema/types.py +21 -0
  38. {omlish-0.0.0.dev19.dist-info → omlish-0.0.0.dev21.dist-info}/METADATA +3 -3
  39. {omlish-0.0.0.dev19.dist-info → omlish-0.0.0.dev21.dist-info}/RECORD +42 -28
  40. {omlish-0.0.0.dev19.dist-info → omlish-0.0.0.dev21.dist-info}/LICENSE +0 -0
  41. {omlish-0.0.0.dev19.dist-info → omlish-0.0.0.dev21.dist-info}/WHEEL +0 -0
  42. {omlish-0.0.0.dev19.dist-info → omlish-0.0.0.dev21.dist-info}/top_level.txt +0 -0
omlish/__about__.py CHANGED
@@ -1,5 +1,5 @@
1
- __version__ = '0.0.0.dev19'
2
- __revision__ = '509bb451054d2d955e55cbcf1f271a9166a904eb'
1
+ __version__ = '0.0.0.dev21'
2
+ __revision__ = '4cb4af9e253f312d5f301449f08dfa7168234e07'
3
3
 
4
4
 
5
5
  #
@@ -88,7 +88,7 @@ class Project(ProjectBase):
88
88
  'sqlx': [
89
89
  'sqlean.py ~= 3.45; python_version < "3.13"',
90
90
 
91
- 'duckdb ~= 1.0',
91
+ 'duckdb ~= 1.1',
92
92
  ],
93
93
 
94
94
  'testing': [
omlish/asyncs/anyio.py CHANGED
@@ -23,6 +23,7 @@ async def killer(shutdown: anyio.Event, sleep_s: float) -> None:
23
23
  shutdown.set()
24
24
 
25
25
  """ # noqa
26
+ import dataclasses as dc
26
27
  import signal
27
28
  import typing as ta
28
29
 
@@ -44,6 +45,12 @@ StapledByteStream: ta.TypeAlias = anyio.streams.stapled.StapledByteStream
44
45
  StapledObjectStream: ta.TypeAlias = anyio.streams.stapled.StapledObjectStream
45
46
 
46
47
 
48
+ @dc.dataclass(eq=False)
49
+ class MemoryStapledObjectStream(StapledObjectStream[T]):
50
+ send_stream: MemoryObjectSendStream[T]
51
+ receive_stream: MemoryObjectReceiveStream[T]
52
+
53
+
47
54
  ##
48
55
 
49
56
 
@@ -143,8 +150,8 @@ def split_memory_object_streams(
143
150
  return tup
144
151
 
145
152
 
146
- def create_stapled_memory_object_stream(max_buffer_size: float = 0) -> StapledObjectStream:
147
- return StapledObjectStream(*anyio.create_memory_object_stream(max_buffer_size))
153
+ def create_stapled_memory_object_stream(max_buffer_size: float = 0) -> MemoryStapledObjectStream:
154
+ return MemoryStapledObjectStream(*anyio.create_memory_object_stream(max_buffer_size))
148
155
 
149
156
 
150
157
  # FIXME: https://github.com/python/mypy/issues/15238
@@ -158,9 +165,9 @@ def create_memory_object_stream[T](max_buffer_size: float = 0) -> tuple[
158
165
 
159
166
  def staple_memory_object_stream(
160
167
  *args: anyio.create_memory_object_stream[T],
161
- ) -> StapledObjectStream[T]:
168
+ ) -> MemoryStapledObjectStream[T]:
162
169
  send, receive = args
163
- return StapledObjectStream(
170
+ return MemoryStapledObjectStream(
164
171
  check.isinstance(send, MemoryObjectSendStream), # type: ignore
165
172
  check.isinstance(receive, MemoryObjectReceiveStream), # type: ignore
166
173
  )
@@ -168,9 +175,9 @@ def staple_memory_object_stream(
168
175
 
169
176
  # FIXME: https://github.com/python/mypy/issues/15238
170
177
  # FIXME: https://youtrack.jetbrains.com/issues?q=tag:%20%7BPEP%20695%7D
171
- def staple_memory_object_stream2[T](max_buffer_size: float = 0) -> StapledObjectStream[T]:
178
+ def staple_memory_object_stream2[T](max_buffer_size: float = 0) -> MemoryStapledObjectStream[T]:
172
179
  send, receive = anyio.create_memory_object_stream[T](max_buffer_size)
173
- return StapledObjectStream(
180
+ return MemoryStapledObjectStream(
174
181
  check.isinstance(send, MemoryObjectSendStream), # type: ignore
175
182
  check.isinstance(receive, MemoryObjectReceiveStream), # type: ignore
176
183
  )
@@ -96,4 +96,6 @@ from .utils import ( # noqa
96
96
  opt_repr,
97
97
  update_field_extras,
98
98
  update_field_metadata,
99
+ update_fields,
100
+ update_fields_metadata,
99
101
  )
@@ -4,10 +4,13 @@ import typing as ta
4
4
 
5
5
  from ... import check as check_
6
6
  from ... import lang
7
+ from .internals import FIELDS_ATTR
7
8
  from .internals import FieldType
8
9
  from .internals import is_classvar
9
10
  from .internals import is_initvar
11
+ from .internals import is_kw_only
10
12
  from .params import get_field_extras
13
+ from .processing import Processor
11
14
 
12
15
 
13
16
  if ta.TYPE_CHECKING:
@@ -19,6 +22,9 @@ else:
19
22
  MISSING = dc.MISSING
20
23
 
21
24
 
25
+ ##
26
+
27
+
22
28
  def field_type(f: dc.Field) -> FieldType:
23
29
  if (ft := getattr(f, '_field_type')) is not None:
24
30
  return FieldType(ft)
@@ -30,6 +36,51 @@ def has_default(f: dc.Field) -> bool:
30
36
  return not (f.default is MISSING and f.default_factory is MISSING)
31
37
 
32
38
 
39
+ ##
40
+
41
+
42
+ class FieldsProcessor(Processor):
43
+ def _process(self) -> None:
44
+ cls = self._info.cls
45
+ fields: dict[str, dc.Field] = {}
46
+
47
+ for b in cls.__mro__[-1:0:-1]:
48
+ base_fields = getattr(b, FIELDS_ATTR, None)
49
+ if base_fields is not None:
50
+ for f in base_fields.values():
51
+ fields[f.name] = f
52
+
53
+ cls_fields: list[dc.Field] = []
54
+
55
+ kw_only = self._info.params.kw_only
56
+ kw_only_seen = False
57
+ for name, ann in self._info.cls_annotations.items():
58
+ if is_kw_only(cls, ann):
59
+ if kw_only_seen:
60
+ raise TypeError(f'{name!r} is KW_ONLY, but KW_ONLY has already been specified')
61
+ kw_only_seen = True
62
+ kw_only = True
63
+ else:
64
+ cls_fields.append(preprocess_field(cls, name, ann, kw_only))
65
+
66
+ for f in cls_fields:
67
+ fields[f.name] = f
68
+ if isinstance(getattr(cls, f.name, None), dc.Field):
69
+ if f.default is MISSING:
70
+ delattr(cls, f.name)
71
+ else:
72
+ setattr(cls, f.name, f.default)
73
+
74
+ for name, value in cls.__dict__.items():
75
+ if isinstance(value, dc.Field) and name not in self._info.cls_annotations:
76
+ raise TypeError(f'{name!r} is a field but has no type annotation')
77
+
78
+ setattr(cls, FIELDS_ATTR, fields)
79
+
80
+
81
+ ##
82
+
83
+
33
84
  def preprocess_field(
34
85
  cls: type,
35
86
  a_name: str,
@@ -1,12 +1,47 @@
1
1
  import dataclasses as dc
2
2
  import typing as ta
3
3
 
4
+ from ... import lang
5
+ from .internals import FIELDS_ATTR
6
+ from .internals import PARAMS_ATTR
4
7
  from .processing import Processor
8
+ from .reflect import ClassInfo
5
9
  from .utils import Namespace
6
10
  from .utils import create_fn
7
11
  from .utils import set_new_attribute
8
12
 
9
13
 
14
+ if ta.TYPE_CHECKING:
15
+ from . import metaclass
16
+ else:
17
+ metaclass = lang.proxy_import('.metaclass', __package__)
18
+
19
+
20
+ def check_frozen_bases(info: ClassInfo) -> None:
21
+ mc_base = getattr(metaclass, 'Data', None)
22
+ all_frozen_bases = None
23
+ any_frozen_base = False
24
+ has_dataclass_bases = False
25
+ for b in info.cls.__mro__[-1:0:-1]:
26
+ if b is mc_base:
27
+ continue
28
+ base_fields = getattr(b, FIELDS_ATTR, None)
29
+ if base_fields is not None:
30
+ has_dataclass_bases = True
31
+ if all_frozen_bases is None:
32
+ all_frozen_bases = True
33
+ current_frozen = getattr(b, PARAMS_ATTR).frozen
34
+ all_frozen_bases = all_frozen_bases and current_frozen
35
+ any_frozen_base = any_frozen_base or current_frozen
36
+
37
+ if has_dataclass_bases:
38
+ if any_frozen_base and not info.params.frozen:
39
+ raise TypeError('cannot inherit non-frozen dataclass from a frozen one')
40
+
41
+ if all_frozen_bases is False and info.params.frozen:
42
+ raise TypeError('cannot inherit frozen dataclass from a non-frozen one')
43
+
44
+
10
45
  def frozen_get_del_attr(
11
46
  cls: type,
12
47
  fields: ta.Sequence[dc.Field],
@@ -46,6 +81,9 @@ def frozen_get_del_attr(
46
81
 
47
82
 
48
83
  class FrozenProcessor(Processor):
84
+ def check(self) -> None:
85
+ check_frozen_bases(self._info)
86
+
49
87
  def _process(self) -> None:
50
88
  if not self._info.params.frozen:
51
89
  return
@@ -5,14 +5,13 @@ import typing as ta
5
5
  from ... import check
6
6
  from ... import lang
7
7
  from .copy import CopyProcessor
8
- from .fields import preprocess_field
8
+ from .fields import FieldsProcessor
9
9
  from .frozen import FrozenProcessor
10
10
  from .hashing import HashProcessor
11
11
  from .init import InitProcessor
12
12
  from .internals import FIELDS_ATTR
13
13
  from .internals import PARAMS_ATTR
14
14
  from .internals import Params
15
- from .internals import is_kw_only
16
15
  from .order import OrderProcessor
17
16
  from .params import ParamsExtras
18
17
  from .processing import Processor
@@ -26,12 +25,6 @@ from .simple import OverridesProcessor
26
25
  from .slots import add_slots
27
26
 
28
27
 
29
- if ta.TYPE_CHECKING:
30
- from . import metaclass
31
- else:
32
- metaclass = lang.proxy_import('.metaclass', __package__)
33
-
34
-
35
28
  MISSING = dc.MISSING
36
29
 
37
30
 
@@ -50,67 +43,6 @@ class MainProcessor:
50
43
  if self._info.params.order and not self._info.params.eq:
51
44
  raise ValueError('eq must be true if order is true')
52
45
 
53
- def _check_frozen_bases(self) -> None:
54
- mc_base = getattr(metaclass, 'Data', None)
55
- all_frozen_bases = None
56
- any_frozen_base = False
57
- has_dataclass_bases = False
58
- for b in self._cls.__mro__[-1:0:-1]:
59
- if b is mc_base:
60
- continue
61
- base_fields = getattr(b, FIELDS_ATTR, None)
62
- if base_fields is not None:
63
- has_dataclass_bases = True
64
- if all_frozen_bases is None:
65
- all_frozen_bases = True
66
- current_frozen = getattr(b, PARAMS_ATTR).frozen
67
- all_frozen_bases = all_frozen_bases and current_frozen
68
- any_frozen_base = any_frozen_base or current_frozen
69
-
70
- if has_dataclass_bases:
71
- if any_frozen_base and not self._info.params.frozen:
72
- raise TypeError('cannot inherit non-frozen dataclass from a frozen one')
73
-
74
- if all_frozen_bases is False and self._info.params.frozen:
75
- raise TypeError('cannot inherit frozen dataclass from a non-frozen one')
76
-
77
- @lang.cached_function
78
- def _process_fields(self) -> None:
79
- fields: dict[str, dc.Field] = {}
80
-
81
- for b in self._cls.__mro__[-1:0:-1]:
82
- base_fields = getattr(b, FIELDS_ATTR, None)
83
- if base_fields is not None:
84
- for f in base_fields.values():
85
- fields[f.name] = f
86
-
87
- cls_fields: list[dc.Field] = []
88
-
89
- kw_only = self._info.params.kw_only
90
- kw_only_seen = False
91
- for name, ann in self._info.cls_annotations.items():
92
- if is_kw_only(self._cls, ann):
93
- if kw_only_seen:
94
- raise TypeError(f'{name!r} is KW_ONLY, but KW_ONLY has already been specified')
95
- kw_only_seen = True
96
- kw_only = True
97
- else:
98
- cls_fields.append(preprocess_field(self._cls, name, ann, kw_only))
99
-
100
- for f in cls_fields:
101
- fields[f.name] = f
102
- if isinstance(getattr(self._cls, f.name, None), dc.Field):
103
- if f.default is MISSING:
104
- delattr(self._cls, f.name)
105
- else:
106
- setattr(self._cls, f.name, f.default)
107
-
108
- for name, value in self._cls.__dict__.items():
109
- if isinstance(value, dc.Field) and name not in self._info.cls_annotations:
110
- raise TypeError(f'{name!r} is a field but has no type annotation')
111
-
112
- setattr(self._cls, FIELDS_ATTR, fields)
113
-
114
46
  @lang.cached_function
115
47
  def _transform_slots(self) -> None:
116
48
  if self._info.params.weakref_slot and not self._info.params.slots:
@@ -119,28 +51,32 @@ class MainProcessor:
119
51
  return
120
52
  self._cls = add_slots(self._cls, self._info.params.frozen, self._info.params.weakref_slot)
121
53
 
54
+ PROCESSOR_TYPES: ta.ClassVar[ta.Sequence[type[Processor]]] = [
55
+ FieldsProcessor,
56
+ InitProcessor,
57
+ OverridesProcessor,
58
+ ReprProcessor,
59
+ EqProcessor,
60
+ OrderProcessor,
61
+ FrozenProcessor,
62
+ HashProcessor,
63
+ DocProcessor,
64
+ MatchArgsProcessor,
65
+ ReplaceProcessor,
66
+ CopyProcessor,
67
+ ]
68
+
122
69
  @lang.cached_function
123
70
  def process(self) -> type:
124
71
  self._check_params()
125
- self._check_frozen_bases()
126
-
127
- self._process_fields()
128
-
129
- pcls: type[Processor]
130
- for pcls in [
131
- InitProcessor,
132
- OverridesProcessor,
133
- ReprProcessor,
134
- EqProcessor,
135
- OrderProcessor,
136
- FrozenProcessor,
137
- HashProcessor,
138
- DocProcessor,
139
- MatchArgsProcessor,
140
- ReplaceProcessor,
141
- CopyProcessor,
142
- ]:
143
- pcls(self._info).process()
72
+
73
+ ps = [pcls(self._info) for pcls in self.PROCESSOR_TYPES]
74
+
75
+ for p in ps:
76
+ p.check()
77
+
78
+ for p in ps:
79
+ p.process()
144
80
 
145
81
  self._transform_slots()
146
82
 
@@ -70,5 +70,6 @@ class Init(lang.Marker):
70
70
  pass
71
71
 
72
72
 
73
- def init(fn: ta.Callable[..., None]) -> None:
73
+ def init(fn: ta.Callable):
74
74
  _append_cls_md(Init, fn)
75
+ return fn
@@ -1,13 +1,21 @@
1
+ import typing as ta
2
+
1
3
  from ... import lang
2
- from .reflect import ClassInfo
4
+
5
+
6
+ if ta.TYPE_CHECKING:
7
+ from .reflect import ClassInfo
3
8
 
4
9
 
5
10
  class Processor(lang.Abstract):
6
- def __init__(self, info: ClassInfo) -> None:
11
+ def __init__(self, info: 'ClassInfo') -> None:
7
12
  super().__init__()
8
13
  self._cls = info.cls
9
14
  self._info = info
10
15
 
16
+ def check(self) -> None:
17
+ pass
18
+
11
19
  @lang.cached_function
12
20
  def process(self) -> None:
13
21
  self._process()
@@ -12,6 +12,9 @@ from .impl.params import get_field_extras
12
12
  T = ta.TypeVar('T')
13
13
 
14
14
 
15
+ #
16
+
17
+
15
18
  def maybe_post_init(sup: ta.Any) -> bool:
16
19
  try:
17
20
  fn = sup.__post_init__
@@ -21,10 +24,16 @@ def maybe_post_init(sup: ta.Any) -> bool:
21
24
  return True
22
25
 
23
26
 
27
+ #
28
+
29
+
24
30
  def opt_repr(o: ta.Any) -> str | None:
25
31
  return repr(o) if o is not None else None
26
32
 
27
33
 
34
+ #
35
+
36
+
28
37
  class field_modifier: # noqa
29
38
  def __init__(self, fn: ta.Callable[[dc.Field], dc.Field]) -> None:
30
39
  super().__init__()
@@ -58,6 +67,42 @@ def update_field_extras(f: dc.Field, *, unless_non_default: bool = False, **kwar
58
67
  })
59
68
 
60
69
 
70
+ def update_fields(
71
+ fn: ta.Callable[[str, dc.Field], dc.Field],
72
+ fields: ta.Iterable[str] | None = None,
73
+ ) -> ta.Callable[[type[T]], type[T]]:
74
+ def inner(cls):
75
+ if fields is None:
76
+ for a, v in list(cls.__dict__.items()):
77
+ if isinstance(v, dc.Field):
78
+ setattr(cls, a, fn(a, v))
79
+
80
+ else:
81
+ for a in fields:
82
+ v = cls.__dict__[a]
83
+ if not isinstance(v, dc.Field):
84
+ v = dc.field(default=v)
85
+ setattr(cls, a, fn(a, v))
86
+
87
+ return cls
88
+
89
+ check.not_isinstance(fields, str)
90
+ return inner
91
+
92
+
93
+ def update_fields_metadata(
94
+ nmd: ta.Mapping,
95
+ fields: ta.Iterable[str] | None = None,
96
+ ) -> ta.Callable[[type[T]], type[T]]:
97
+ def inner(a: str, f: dc.Field) -> dc.Field:
98
+ return update_field_metadata(f, nmd)
99
+
100
+ return update_fields(inner, fields)
101
+
102
+
103
+ #
104
+
105
+
61
106
  def deep_replace(o: T, *args: str | ta.Callable[[ta.Any], ta.Mapping[str, ta.Any]]) -> T:
62
107
  if not args:
63
108
  return o
omlish/fnpairs.py CHANGED
@@ -449,7 +449,7 @@ class Toml(ObjectStr_):
449
449
  #
450
450
 
451
451
 
452
- @_register_extension('cpkl')
452
+ @_register_extension('clpkl')
453
453
  @dc.dataclass(frozen=True)
454
454
  class Cloudpickle(ObjectBytes_):
455
455
  protocol: int | None = None
omlish/formats/json.py CHANGED
@@ -13,6 +13,7 @@ from .. import lang
13
13
  if ta.TYPE_CHECKING:
14
14
  import orjson as _orjson
15
15
  import ujson as _ujson
16
+
16
17
  else:
17
18
  _orjson = lang.proxy_import('orjson')
18
19
  _ujson = lang.proxy_import('ujson')
@@ -45,15 +45,18 @@ class _MarkerMeta(abc.ABCMeta):
45
45
 
46
46
  def __new__(mcls, name, bases, namespace):
47
47
  global _MARKER_NAMESPACE_KEYS
48
+
48
49
  if _MARKER_NAMESPACE_KEYS is None:
49
50
  if not (namespace.get('__module__') == __name__ and name == 'Marker'):
50
51
  raise RuntimeError
51
52
  _MARKER_NAMESPACE_KEYS = set(namespace)
53
+
52
54
  else:
53
55
  if set(namespace) - _MARKER_NAMESPACE_KEYS:
54
56
  raise TypeError('Markers must not include contents. Did you mean to use Namespace?')
55
57
  if Final not in bases:
56
58
  bases += (Final,)
59
+
57
60
  return super().__new__(mcls, name, bases, namespace)
58
61
 
59
62
  def __instancecheck__(self, instance):
omlish/lang/clsdct.py CHANGED
@@ -55,8 +55,10 @@ class ClassDctFn:
55
55
  except KeyError:
56
56
  f = sys._getframe(self._offset) # noqa
57
57
  cls_dct = _skip_cls_dct_frames(f).f_locals
58
+
58
59
  if not is_possibly_cls_dct(cls_dct):
59
60
  raise TypeError(cls_dct)
61
+
60
62
  return self._fn(cls_dct, *args, **kwargs)
61
63
 
62
64
 
@@ -227,6 +227,47 @@ class ExitStacked:
227
227
  return superfn(exc_type, exc_val, exc_tb)
228
228
 
229
229
 
230
+ class AsyncExitStacked:
231
+
232
+ @property
233
+ def _exit_stack(self) -> contextlib.AsyncExitStack:
234
+ try:
235
+ return self.__exit_stack # type: ignore
236
+ except AttributeError:
237
+ es = self.__exit_stack = contextlib.AsyncExitStack()
238
+ return es
239
+
240
+ async def _enter_async_context(self, context_manager: ta.AsyncContextManager[T]) -> T:
241
+ return await self._exit_stack.enter_async_context(ta.cast(ta.AsyncContextManager, context_manager))
242
+
243
+ def _enter_context(self, context_manager: ta.ContextManager[T]) -> T:
244
+ return self._exit_stack.enter_context(ta.cast(ta.ContextManager, context_manager))
245
+
246
+ async def __aenter__(self) -> ta.Self:
247
+ try:
248
+ superfn = super().__aenter__ # type: ignore
249
+ except AttributeError:
250
+ ret = self
251
+ else:
252
+ ret = await superfn()
253
+ await self._exit_stack.__aenter__()
254
+ return ret
255
+
256
+ async def __aexit__(
257
+ self,
258
+ exc_type: type[BaseException] | None,
259
+ exc_val: BaseException | None,
260
+ exc_tb: types.TracebackType | None,
261
+ ) -> bool | None:
262
+ await self._exit_stack.__aexit__(exc_type, exc_val, exc_tb)
263
+ try:
264
+ superfn = super().__aexit__ # type: ignore
265
+ except AttributeError:
266
+ return None
267
+ else:
268
+ return await superfn(exc_type, exc_val, exc_tb)
269
+
270
+
230
271
  ##
231
272
 
232
273
 
@@ -37,8 +37,10 @@ def _has_method_descriptor(obj: ta.Any) -> bool:
37
37
  while True:
38
38
  if is_method_descriptor(obj):
39
39
  return True
40
+
40
41
  elif isinstance(obj, functools.partial):
41
42
  obj = obj.func
43
+
42
44
  else:
43
45
  try:
44
46
  obj = getattr(obj, '__wrapped__')
@@ -65,6 +67,7 @@ def unwrap_func_with_partials(fn: ta.Callable) -> tuple[ta.Callable, list[functo
65
67
  while True:
66
68
  if is_method_descriptor(fn) or isinstance(fn, types.MethodType):
67
69
  fn = fn.__func__ # type: ignore
70
+
68
71
  elif hasattr(fn, '__wrapped__'):
69
72
  nxt = fn.__wrapped__
70
73
  if not callable(nxt):
@@ -72,13 +75,16 @@ def unwrap_func_with_partials(fn: ta.Callable) -> tuple[ta.Callable, list[functo
72
75
  if nxt is fn:
73
76
  raise TypeError(fn)
74
77
  fn = nxt
78
+
75
79
  # NOTE: __wrapped__ takes precedence - a partial might point to a bound Method when the important information is
76
80
  # still the unbound func. see _decorator_descriptor for an example of this.
77
81
  elif isinstance(fn, functools.partial):
78
82
  ps.append(fn)
79
83
  fn = fn.func
84
+
80
85
  else:
81
86
  break
87
+
82
88
  return fn, ps
83
89
 
84
90
 
@@ -125,11 +131,13 @@ class _decorator_descriptor: # noqa
125
131
 
126
132
  def __get__(self, instance, owner=None):
127
133
  fn = self._fn.__get__(instance, owner)
134
+
128
135
  if self._md or instance is not None:
129
136
  @functools.wraps(fn)
130
137
  def inner(*args, **kwargs):
131
138
  return self._wrapper(fn, *args, **kwargs)
132
139
  return inner
140
+
133
141
  else:
134
142
  @functools.wraps(fn)
135
143
  def outer(this, *args, **kwargs):
omlish/lang/objects.py CHANGED
@@ -105,20 +105,24 @@ def build_mro_dict(
105
105
  ) -> ta.Mapping[str, ta.Any]:
106
106
  if owner_cls is None:
107
107
  owner_cls = instance_cls
108
+
108
109
  mro = instance_cls.__mro__[-2::-1]
109
110
  try:
110
111
  pos = mro.index(owner_cls)
111
112
  except ValueError:
112
113
  raise TypeError(f'Owner class {owner_cls} not in mro of instance class {instance_cls}') from None
114
+
113
115
  dct: dict[str, ta.Any] = {}
114
116
  if not bottom_up_key_order:
115
117
  for cur_cls in mro[:pos + 1][::-1]:
116
118
  for k, v in cur_cls.__dict__.items():
117
119
  if k not in dct:
118
120
  dct[k] = v
121
+
119
122
  else:
120
123
  for cur_cls in mro[:pos + 1]:
121
124
  dct.update(cur_cls.__dict__)
125
+
122
126
  return dct
123
127
 
124
128
 
omlish/lang/resolving.py CHANGED
@@ -10,37 +10,46 @@ class ResolvableClassNameError(NameError):
10
10
  def get_cls_fqcn(cls: type, *, nocheck: bool = False) -> str:
11
11
  if not isinstance(cls, type):
12
12
  raise TypeError(cls)
13
+
13
14
  mn = cls.__module__
14
15
  if set(mn) - set(string.ascii_lowercase + string.digits + '_.'):
15
16
  raise ResolvableClassNameError(cls)
17
+
16
18
  qn = cls.__qualname__
17
19
  if not all(qp[0].isupper() for qp in qn.split('.')) or (set(qn) - set(string.ascii_letters + string.digits + '.')):
18
20
  raise ResolvableClassNameError(cls)
21
+
19
22
  fqcn = '.'.join([cls.__module__, cls.__qualname__])
20
23
  if not nocheck:
21
24
  if get_fqcn_cls(fqcn, nocheck=True) is not cls:
22
25
  raise ResolvableClassNameError(cls, fqcn)
26
+
23
27
  return fqcn
24
28
 
25
29
 
26
30
  def get_fqcn_cls(fqcn: str, *, nocheck: bool = False) -> type:
27
31
  if not isinstance(fqcn, str) or not fqcn:
28
32
  raise TypeError(fqcn)
33
+
29
34
  parts = fqcn.split('.')
30
35
  pos = next(i for i, p in enumerate(parts) if p[0].isupper())
31
36
  mps, qps = parts[:pos], parts[pos:]
32
37
  mod = importlib.import_module('.'.join(mps))
38
+
33
39
  o: ta.Any = mod
34
40
  for qp in qps:
35
41
  o = getattr(o, qp)
36
42
  if not isinstance(o, type):
37
43
  raise TypeError(o)
44
+
38
45
  cls = o
39
46
  if not isinstance(cls, type):
40
47
  raise TypeError(cls)
48
+
41
49
  if not nocheck:
42
50
  if not get_cls_fqcn(cls, nocheck=True) == fqcn:
43
51
  raise ResolvableClassNameError(cls, fqcn)
52
+
44
53
  return o
45
54
 
46
55