omlish 0.0.0.dev425__py3-none-any.whl → 0.0.0.dev427__py3-none-any.whl

This diff compares publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
Files changed (55)
  1. omlish/__about__.py +2 -2
  2. omlish/c3.py +4 -1
  3. omlish/configs/processing/flattening.py +1 -1
  4. omlish/configs/processing/merging.py +8 -6
  5. omlish/dataclasses/impl/concerns/doc.py +1 -1
  6. omlish/diag/_pycharm/runhack.py +1 -1
  7. omlish/diag/procfs.py +2 -2
  8. omlish/formats/json/stream/lexing.py +63 -16
  9. omlish/formats/json/stream/parsing.py +1 -1
  10. omlish/formats/json/stream/utils.py +2 -2
  11. omlish/formats/logfmt.py +8 -2
  12. omlish/funcs/genmachine.py +1 -1
  13. omlish/http/sse.py +1 -1
  14. omlish/inject/impl/injector.py +1 -1
  15. omlish/inject/impl/multis.py +2 -2
  16. omlish/inject/impl/providers.py +0 -4
  17. omlish/inject/impl/proxy.py +0 -2
  18. omlish/inject/scopes.py +0 -4
  19. omlish/io/buffers.py +1 -1
  20. omlish/lang/__init__.py +23 -13
  21. omlish/lang/{attrs.py → attrstorage.py} +15 -15
  22. omlish/lang/cached/property.py +2 -2
  23. omlish/lang/classes/simple.py +26 -4
  24. omlish/lang/collections.py +1 -1
  25. omlish/lang/iterables.py +2 -2
  26. omlish/lifecycles/contextmanagers.py +1 -1
  27. omlish/lifecycles/controller.py +1 -1
  28. omlish/lite/asyncs.py +5 -0
  29. omlish/lite/attrops.py +332 -0
  30. omlish/lite/cached.py +1 -1
  31. omlish/lite/maybes.py +2 -0
  32. omlish/lite/strings.py +0 -7
  33. omlish/lite/timing.py +4 -1
  34. omlish/logs/all.py +4 -0
  35. omlish/logs/base.py +138 -152
  36. omlish/logs/callers.py +3 -3
  37. omlish/logs/contexts.py +250 -0
  38. omlish/logs/infos.py +16 -5
  39. omlish/logs/modules.py +10 -0
  40. omlish/logs/protocols.py +7 -7
  41. omlish/logs/std/adapters.py +9 -5
  42. omlish/logs/std/records.py +26 -11
  43. omlish/logs/times.py +4 -6
  44. omlish/manifests/loading.py +6 -0
  45. omlish/os/atomics.py +1 -1
  46. omlish/reflect/types.py +22 -0
  47. omlish/sockets/server/server.py +1 -1
  48. {omlish-0.0.0.dev425.dist-info → omlish-0.0.0.dev427.dist-info}/METADATA +2 -2
  49. {omlish-0.0.0.dev425.dist-info → omlish-0.0.0.dev427.dist-info}/RECORD +53 -52
  50. omlish/lite/logs.py +0 -4
  51. omlish/lite/reprs.py +0 -85
  52. {omlish-0.0.0.dev425.dist-info → omlish-0.0.0.dev427.dist-info}/WHEEL +0 -0
  53. {omlish-0.0.0.dev425.dist-info → omlish-0.0.0.dev427.dist-info}/entry_points.txt +0 -0
  54. {omlish-0.0.0.dev425.dist-info → omlish-0.0.0.dev427.dist-info}/licenses/LICENSE +0 -0
  55. {omlish-0.0.0.dev425.dist-info → omlish-0.0.0.dev427.dist-info}/top_level.txt +0 -0
omlish/__about__.py CHANGED
@@ -1,5 +1,5 @@
-__version__ = '0.0.0.dev425'
-__revision__ = '92376f181ba36a2d1eda16643d70fd1077e309f1'
+__version__ = '0.0.0.dev427'
+__revision__ = '13723ced41ac43696831e7a640c46f91663c9a46'
 
 
 #
omlish/c3.py CHANGED
@@ -33,6 +33,7 @@
 #
 # 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this
 # License Agreement.
+import abc
 import functools
 import operator
 import typing as ta
@@ -75,7 +76,9 @@ def merge(seqs: ta.MutableSequence[list[T]]) -> list[T]:
 
 
 def _default_is_abstract(obj: ta.Any) -> bool:
-    return hasattr(obj, '__abstractmethods__')
+    # .lite.abstracts.Abstract has '__abstractmethods__' but is not an ABCMeta, and thus for the purposes of the c3
+    # mro is not considered 'abstract'. What 'abstract' means in this context is 'can have virtual subclasses'.
+    return hasattr(obj, '__abstractmethods__') and isinstance(obj, abc.ABCMeta)
 
 
 def mro(
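To see what the tightened check distinguishes, here is a minimal self-contained sketch (class names hypothetical, not from the package):

    import abc

    class FakeAbstract:
        # Has the attribute, but its metaclass is plain `type`: it cannot
        # register virtual subclasses, so c3 should not treat it as 'abstract'.
        __abstractmethods__ = frozenset()

    class RealAbstract(abc.ABC):
        @abc.abstractmethod
        def f(self): ...

    def is_abstract(obj) -> bool:
        return hasattr(obj, '__abstractmethods__') and isinstance(obj, abc.ABCMeta)

    assert not is_abstract(FakeAbstract)  # the old hasattr-only check returned True here
    assert is_abstract(RealAbstract)      # a true ABC, which supports .register()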
omlish/configs/processing/flattening.py CHANGED
@@ -119,7 +119,7 @@ class ConfigFlattening:
     def unflatten(self, flattened: ta.Mapping[str, ta.Any]) -> ta.Mapping[str, ta.Any]:
         root = ConfigFlattening.UnflattenDict()
 
-        def split_keys(fkey: str) -> ta.Iterable[ta.Union[str, int]]:
+        def split_keys(fkey: str) -> ta.Iterator[ta.Union[str, int]]:
            for part in fkey.split(self._delimiter):
                if self._index_open in part:
                    check.state(part.endswith(self._index_close))
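This Iterable → Iterator return-annotation tightening recurs throughout the diff (procfs.py, sse.py, genmachine.py, multis.py, collections.py, iterables.py). The rationale, in a minimal sketch: a generator function's return value is an iterator, which is single-pass and supports next(), neither of which a bare Iterable promises.

    import typing as ta

    def gen() -> ta.Iterator[int]:
        yield 1
        yield 2

    it = gen()
    assert next(it) == 1    # an Iterator guarantees next(); an Iterable does not
    assert list(it) == [2]  # and it is single-pass, unlike e.g. a list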
omlish/configs/processing/merging.py CHANGED
@@ -3,6 +3,7 @@
 """
 TODO:
  - smarter merging than just dumb dict-squashing
+ - at least sequence merging
 """
 import typing as ta
 
@@ -19,13 +20,14 @@ def merge_configs(*ms: ConfigMap) -> ConfigMap:
                 e = o[k]
             except KeyError:
                 o[k] = v
+                continue
+
+            if isinstance(e, ta.Mapping) and isinstance(v, ta.Mapping):
+                rec(e, v)  # noqa
             else:
-                if isinstance(e, ta.Mapping) and isinstance(v, ta.Mapping):
-                    rec(e, v)  # noqa
-                else:
-                    if isinstance(v, ta.Mapping):
-                        v = dict(v)
-                    o[k] = v
+                if isinstance(v, ta.Mapping):
+                    v = dict(v)
+                o[k] = v
 
     o: dict = {}
     for i in ms:
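A sketch of the resulting merge semantics (treating the ConfigMap inputs as plain dicts; later arguments win, and the assertions are illustrative):

    from omlish.configs.processing.merging import merge_configs

    a = {'db': {'host': 'localhost', 'port': 5432}, 'tags': ['a']}
    b = {'db': {'port': 5433}, 'tags': ['b']}

    merged = merge_configs(a, b)
    assert merged['db'] == {'host': 'localhost', 'port': 5433}  # nested mappings merge key-by-key
    assert merged['tags'] == ['b']  # sequences are still squashed wholesale, hence the new TODO item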
omlish/dataclasses/impl/concerns/doc.py CHANGED
@@ -36,7 +36,7 @@ def _build_cls_doc(cls: type) -> str:
 
 
 class _LazyClsDocDescriptor:
-    def __get__(self, instance, owner):
+    def __get__(self, instance, owner=None):
         if instance is not None:
             owner = instance.__class__
         if not owner:
omlish/diag/_pycharm/runhack.py CHANGED
@@ -51,7 +51,7 @@ class _cached_nullary:  # noqa
 
         return self._value
 
-    def __get__(self, instance, owner):  # noqa
+    def __get__(self, instance, owner=None):  # noqa
         bound = instance.__dict__[self._fn.__name__] = self.__class__(self._fn.__get__(instance, owner))
         return bound
 
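Both of these hunks make the same signature change; per the descriptor protocol, owner is optional when __get__ is invoked directly rather than via attribute access. A minimal sketch (names hypothetical):

    class Boxed:
        def __init__(self, value):
            self._value = value

        def __get__(self, instance, owner=None):  # owner may be omitted on direct calls
            return self._value

    class C:
        x = Boxed(42)

    assert C().x == 42  # normal attribute access passes both (instance, owner)
    assert C.__dict__['x'].__get__(object()) == 42  # a direct call omitting owner now also works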
omlish/diag/procfs.py CHANGED
@@ -235,7 +235,7 @@ PAGEMAP_KEYS = (
 )
 
 
-def get_process_range_pagemaps(start: int, end: int, pid: PidLike = 'self') -> ta.Iterable[dict[str, int]]:
+def get_process_range_pagemaps(start: int, end: int, pid: PidLike = 'self') -> ta.Iterator[dict[str, int]]:
     """https://www.kernel.org/doc/Documentation/vm/pagemap.txt"""
 
     _check_linux()
@@ -262,7 +262,7 @@ def get_process_range_pagemaps(start: int, end: int, pid: PidLike = 'self') -> ta.Iterable[dict[str, int]]:
     }
 
 
-def get_process_pagemaps(pid: PidLike = 'self') -> ta.Iterable[dict[str, int]]:
+def get_process_pagemaps(pid: PidLike = 'self') -> ta.Iterator[dict[str, int]]:
     _check_linux()
     for m in get_process_maps(pid):
         yield from get_process_range_pagemaps(m['address'], m['end_address'], pid)
omlish/formats/json/stream/lexing.py CHANGED
@@ -42,10 +42,13 @@ ControlTokenKind: ta.TypeAlias = ta.Literal[
 
 SpaceTokenKind: ta.TypeAlias = ta.Literal['SPACE']
 
+CommentTokenKind: ta.TypeAlias = ta.Literal['COMMENT']
+
 TokenKind: ta.TypeAlias = ta.Union[  # noqa
     ValueTokenKind,
     ControlTokenKind,
     SpaceTokenKind,
+    CommentTokenKind,
 ]
 
 
@@ -118,10 +121,12 @@ class JsonStreamLexer(GenMachine[str, Token]):
             include_raw: bool = False,
             include_space: bool = False,
             allow_comments: bool = False,
+            include_comments: bool = False,
     ) -> None:
         self._include_raw = include_raw
         self._include_space = include_space
         self._allow_comments = allow_comments
+        self._include_comments = include_comments
 
         self._ofs = 0
         self._line = 1
@@ -157,7 +162,7 @@ class JsonStreamLexer(GenMachine[str, Token]):
             self,
             kind: TokenKind,
             value: ScalarValue,
-            raw: str,
+            raw: str | None,
             pos: Position,
     ) -> ta.Sequence[Token]:
         tok = Token(
@@ -177,9 +182,13 @@ class JsonStreamLexer(GenMachine[str, Token]):
     def _raise(self, msg: str, src: Exception | None = None) -> ta.NoReturn:
         raise JsonStreamLexError(msg, self.pos) from src
 
-    def _do_main(self):
+    def _do_main(self, peek: str | None = None):
         while True:
-            c = self._char_in((yield None))  # noqa
+            if peek is not None:
+                c = peek
+                peek = None
+            else:
+                c = self._char_in((yield None))  # noqa
 
             if not c:
                 return None
@@ -203,7 +212,8 @@ class JsonStreamLexer(GenMachine[str, Token]):
                 return self._do_const(c)
 
             if self._allow_comments and c == '/':
-                return self._do_comment()
+                yield from self._do_comment()
+                continue
 
             self._raise(f'Unexpected character: {c}')
 
@@ -295,17 +305,7 @@ class JsonStreamLexer(GenMachine[str, Token]):
         if not c:
             return None
 
-        if c in CONTROL_TOKENS:
-            yield self._make_tok(CONTROL_TOKENS[c], c, c, pos)
-
-        elif c.isspace():
-            if self._include_space:
-                yield self._make_tok('SPACE', c, c, self.pos)
-
-        else:
-            self._raise(f'Unexpected character after number: {c}')
-
-        return self._do_main()
+        return self._do_main(c)
 
     def _do_const(self, c: str):
         pos = self.pos
@@ -328,4 +328,51 @@ class JsonStreamLexer(GenMachine[str, Token]):
         return self._do_main()
 
     def _do_comment(self):
-        raise NotImplementedError
+        check.state(self._buf.tell() == 0)
+
+        pos = self.pos
+        try:
+            oc = self._char_in((yield None))  # noqa
+        except GeneratorExit:
+            self._raise('Unexpected end of input')
+
+        if oc == '/':
+            while True:
+                try:
+                    ic = self._char_in((yield None))  # noqa
+                except GeneratorExit:
+                    self._raise('Unexpected end of input')
+
+                if ic == '\n':
+                    break
+
+                if self._include_comments:
+                    self._buf.write(ic)
+
+            if self._include_comments:
+                cmt = self._flip_buf()
+                raw = f'//{cmt}\n' if self._include_raw else None
+                yield self._make_tok('COMMENT', cmt, raw, pos)
+
+        elif oc == '*':
+            lc: str | None = None
+            while True:
+                try:
+                    ic = self._char_in((yield None))  # noqa
+                except GeneratorExit:
+                    self._raise('Unexpected end of input')
+
+                if lc == '*' and ic == '/':
+                    break
+
+                if lc is not None and self._include_comments:
+                    self._buf.write(lc)
+                lc = ic
+
+            if self._include_comments:
+                cmt = self._flip_buf()
+                raw = f'/*{cmt}*/' if self._include_raw else None
+                yield self._make_tok('COMMENT', cmt, raw, pos)
+
+        else:
+            self._raise(f'Unexpected character after comment start: {oc}')
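Putting the new flags together — a usage sketch, assuming the GenMachine protocol of feeding input one character at a time via __call__ and draining any tokens emitted:

    from omlish.formats.json.stream.lexing import JsonStreamLexer

    lex = JsonStreamLexer(allow_comments=True, include_comments=True)

    for c in '// a line comment\n{"a": 1}  /* a block comment */':
        for tok in lex(c):
            print(tok.kind, tok.value)

With include_comments=False (the default), comments still lex when allow_comments is set, but COMMENT tokens are swallowed rather than emitted.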
omlish/formats/json/stream/parsing.py CHANGED
@@ -106,7 +106,7 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):
         while True:
             tok = yield None
 
-            if tok.kind != 'SPACE':
+            if tok.kind != 'SPACE' and tok.kind != 'COMMENT':
                 return tok
 
     #
omlish/formats/json/stream/utils.py CHANGED
@@ -15,7 +15,7 @@ class JsonStreamValueParser(lang.ExitStacked):
     include_raw: bool = False
     yield_object_lists: bool = False
 
-    json5: bool = False
+    allow_comments: bool = False
 
     #
 
@@ -26,7 +26,7 @@ class JsonStreamValueParser(lang.ExitStacked):
     def _enter_contexts(self) -> None:
         self._lex = JsonStreamLexer(
             include_raw=self.include_raw,
-            allow_comments=self.json5,
+            allow_comments=self.allow_comments,
         )
 
         self._parse = JsonStreamParser()
omlish/formats/logfmt.py CHANGED
@@ -20,7 +20,10 @@ def logfmt_encode(
 ) -> str:
     def encode(s: str) -> str:
         if _LOGFMT_ENCODE_WS_PAT.search(s) is not None:
-            return '"' + s.replace('\\', '\\\\').replace('"', '\\"') + '"'
+            s = s.replace('\\', '\\\\')
+            s = s.replace('\n', '\\n')
+            s = s.replace('"', '\\"')
+            return f'"{s}"'
         else:
             return s
 
@@ -67,7 +70,10 @@ def logfmt_decode(
         if s.startswith('"'):
             if len(s) < 2 or not s.endswith('"'):
                 raise ValueError(s)
-            s = s[1:-1].replace('\\"', '"').replace('\\\\', '\\')
+            s = s[1:-1]
+            s = s.replace('\\"', '"')
+            s = s.replace('\\n', '\n')
+            s = s.replace('\\\\', '\\')
             return s
 
     if value_decoder is None:
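A round-trip sketch of the new newline escaping (assuming a dict-in / dict-out shape for logfmt_encode and logfmt_decode):

    from omlish.formats.logfmt import logfmt_decode, logfmt_encode

    rec = {'level': 'info', 'msg': 'first line\nsecond line'}
    line = logfmt_encode(rec)
    assert '\n' not in line  # the value is quoted and the newline escaped as \n
    assert logfmt_decode(line) == rec  # decoding reverses all three escapes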
omlish/funcs/genmachine.py CHANGED
@@ -88,7 +88,7 @@ class GenMachine(ta.Generic[I, O]):
 
     #
 
-    def __call__(self, i: I) -> ta.Iterable[O]:
+    def __call__(self, i: I) -> ta.Iterator[O]:
         if self._gen is None:
             raise GenMachine.ClosedError
 
omlish/http/sse.py CHANGED
@@ -82,7 +82,7 @@ class SseDecoder:
 
         return e
 
-    def process_line(self, line: bytes) -> ta.Iterable[SseDecoderOutput]:
+    def process_line(self, line: bytes) -> ta.Iterator[SseDecoderOutput]:
         if b'\r' in line or b'\n' in line:
             raise ValueError(line)
 
omlish/inject/impl/injector.py CHANGED
@@ -202,7 +202,7 @@ class InjectorImpl(Injector, lang.Final):
     #
 
     def try_provide(self, key: ta.Any) -> lang.Maybe[ta.Any]:
-        return self.try_provide(key)
+        return self._try_provide(key)
 
     def provide(self, key: ta.Any) -> ta.Any:
         v = self._try_provide(key)
omlish/inject/impl/multis.py CHANGED
@@ -22,7 +22,7 @@ class SetProviderImpl(ProviderImpl, lang.Final):
     ps: ta.Sequence[ProviderImpl]
 
     @property
-    def providers(self) -> ta.Iterable[Provider]:
+    def providers(self) -> ta.Iterator[Provider]:
         for p in self.ps:
             yield from p.providers
 
@@ -43,7 +43,7 @@ class MapProviderImpl(ProviderImpl, lang.Final):
     es: ta.Sequence[Entry]
 
     @property
-    def providers(self) -> ta.Iterable[Provider]:
+    def providers(self) -> ta.Iterator[Provider]:
         for e in self.es:
             yield from e.v.providers
 
omlish/inject/impl/providers.py CHANGED
@@ -7,7 +7,6 @@ import typing as ta
 
 from ... import dataclasses as dc
 from ... import lang
-from ... import reflect as rfl
 from ..injector import Injector
 from ..inspect import KwargsTarget
 from ..providers import ConstProvider
@@ -39,9 +38,6 @@ class ProviderImpl(lang.Abstract):
 class InternalProvider(Provider):
     impl: ProviderImpl
 
-    def provided_ty(self) -> rfl.Type | None:
-        raise TypeError
-
 
 ##
 
omlish/inject/impl/proxy.py CHANGED
@@ -12,7 +12,6 @@ def _cyclic_dependency_proxy() -> tuple[type, ta.Callable[[ta.Any, ta.Any], None
     import wrapt  # noqa
 
     class _CyclicDependencyPlaceholder(lang.Final):
-
         def __init__(self, cls: ta.Any) -> None:
             super().__init__()
 
@@ -22,7 +21,6 @@ def _cyclic_dependency_proxy() -> tuple[type, ta.Callable[[ta.Any, ta.Any], None
             return f'{type(self).__name__}({self.__cls!r})'
 
     class _CyclicDependencyProxy(wrapt.ObjectProxy, lang.Final):  # noqa
-
         def __init__(self, cls):
             super().__init__(_CyclicDependencyPlaceholder(cls))
 
omlish/inject/scopes.py CHANGED
@@ -5,7 +5,6 @@ import typing as ta
 from .. import check
 from .. import dataclasses as dc
 from .. import lang
-from .. import reflect as rfl
 from .bindings import Binding
 from .elements import Element
 from .keys import Key
@@ -76,9 +75,6 @@ class ScopeSeededProvider(Provider):
     ss: SeededScope = dc.xfield(coerce=check.of_isinstance(SeededScope))
     key: Key = dc.xfield(coerce=check.of_isinstance(Key))
 
-    def provided_ty(self) -> rfl.Type | None:
-        return self.key.ty
-
 
 def bind_scope_seed(k: ta.Any, ss: SeededScope) -> Element:
     k = as_key(k)
omlish/io/buffers.py CHANGED
@@ -3,8 +3,8 @@
 import io
 import typing as ta
 
+from ..lite.attrops import attr_repr
 from ..lite.check import check
-from ..lite.strings import attr_repr
 
 
 ##
omlish/lang/__init__.py CHANGED
@@ -17,17 +17,21 @@ with _auto_proxy_init(
         sync_async_list,
     )
 
-    from .attrs import (  # noqa
-        AttrOps,
+    from .attrstorage import (  # noqa
         AttributePresentError,
-        DictAttrOps,
-        STD_ATTR_OPS,
         SetAttrIfPresent,
-        StdAttrOps,
-        TRANSIENT_ATTR_OPS,
-        TransientAttrOps,
-        TransientDict,
         set_attr,
+
+        AttrStorage,
+
+        StdAttrStorage,
+        STD_ATTR_STORAGE,
+
+        DictAttrStorage,
+
+        TransientDict,
+        TransientAttrStorage,
+        TRANSIENT_ATTR_STORAGE,
         transient_delattr,
         transient_getattr,
         transient_setattr,
@@ -445,6 +449,17 @@ with _auto_proxy_init(
         Args,
     )
 
+    from ..lite.asyncs import (  # noqa
+        opt_await,
+    )
+
+    from ..lite.attrops import (  # noqa
+        AttrOps,
+        attr_ops,
+
+        attr_repr,
+    )
+
     from ..lite.contextmanagers import (  # noqa
         AsyncExitStacked,
         ExitStacked,
@@ -489,11 +504,6 @@ with _auto_proxy_init(
         dir_dict,
     )
 
-    from ..lite.reprs import (  # noqa
-        AttrRepr,
-        attr_repr,
-    )
-
     from ..lite.timeouts import (  # noqa
         DeadlineTimeout,
         InfiniteTimeout,
omlish/lang/{attrs.py → attrstorage.py} RENAMED
@@ -76,7 +76,7 @@ def set_attr(
 ##
 
 
-class AttrOps(Abstract):
+class AttrStorage(Abstract):
     class NOT_SET:  # noqa
         def __new__(cls, *args, **kwargs):  # noqa
             raise TypeError
@@ -97,9 +97,9 @@ class AttrOps(Abstract):
 #
 
 
-class StdAttrOps(AttrOps):
-    def getattr(self, obj: ta.Any, name: str, default: ta.Any = AttrOps.NOT_SET) -> ta.Any:
-        if default is AttrOps.NOT_SET:
+class StdAttrStorage(AttrStorage):
+    def getattr(self, obj: ta.Any, name: str, default: ta.Any = AttrStorage.NOT_SET) -> ta.Any:
+        if default is AttrStorage.NOT_SET:
             return getattr(obj, name)
         else:
             return getattr(obj, name, default)
@@ -111,13 +111,13 @@ class StdAttrOps(AttrOps):
         delattr(obj, name)
 
 
-STD_ATTR_OPS = StdAttrOps()
+STD_ATTR_STORAGE = StdAttrStorage()
 
 
 #
 
 
-class DictAttrOps(AttrOps):
+class DictAttrStorage(AttrStorage):
     def __init__(self, dct: ta.MutableMapping[str, ta.Any] | None = None) -> None:
         super().__init__()
 
@@ -125,11 +125,11 @@ class DictAttrOps(AttrOps):
             dct = {}
         self._dct = dct
 
-    def getattr(self, obj: ta.Any, name: str, default: ta.Any = AttrOps.NOT_SET) -> ta.Any:
+    def getattr(self, obj: ta.Any, name: str, default: ta.Any = AttrStorage.NOT_SET) -> ta.Any:
         try:
             return self._dct[name]
         except KeyError:
-            if default is not AttrOps.NOT_SET:
+            if default is not AttrStorage.NOT_SET:
                 return default
             raise AttributeError(name) from None
 
@@ -205,13 +205,13 @@ def _get_object_transient_dict(obj: ta.Any) -> TransientDict:
     return obj.__dict__.setdefault(_TRANSIENT_DICT_ATTR, TransientDict())
 
 
-class TransientAttrOps(AttrOps):
-    def getattr(self, obj: ta.Any, name: str, default: ta.Any = AttrOps.NOT_SET) -> ta.Any:
+class TransientAttrStorage(AttrStorage):
+    def getattr(self, obj: ta.Any, name: str, default: ta.Any = AttrStorage.NOT_SET) -> ta.Any:
         td = _get_object_transient_dict(obj)
         try:
             return td[name]
         except KeyError:
-            if default is not AttrOps.NOT_SET:
+            if default is not AttrStorage.NOT_SET:
                 return default
             raise AttributeError(name) from None
 
@@ -227,8 +227,8 @@ class TransientAttrOps(AttrOps):
         raise AttributeError(name) from None
 
 
-TRANSIENT_ATTR_OPS = TransientAttrOps()
+TRANSIENT_ATTR_STORAGE = TransientAttrStorage()
 
-transient_getattr = TRANSIENT_ATTR_OPS.getattr
-transient_setattr = TRANSIENT_ATTR_OPS.setattr
-transient_delattr = TRANSIENT_ATTR_OPS.delattr
+transient_getattr = TRANSIENT_ATTR_STORAGE.getattr
+transient_setattr = TRANSIENT_ATTR_STORAGE.setattr
+transient_delattr = TRANSIENT_ATTR_STORAGE.delattr
omlish/lang/cached/property.py CHANGED
@@ -2,8 +2,8 @@ import abc
 import functools
 import typing as ta
 
-from ..attrs import transient_getattr
-from ..attrs import transient_setattr
+from ..attrstorage import transient_getattr
+from ..attrstorage import transient_setattr
 from ..classes.abstract import Abstract
 
 
omlish/lang/classes/simple.py CHANGED
@@ -63,7 +63,6 @@ class Marker(NotInstantiable, metaclass=_MarkerMeta):
 
 
 _SINGLETON_INSTANCE_ATTR = '__singleton_instance__'
-_SINGLETON_LOCK = threading.RLock()
 
 
 def _set_singleton_instance(inst):
@@ -85,7 +84,24 @@ def _set_singleton_instance(inst):
     return inst
 
 
-class Singleton:
+class _AnySingleton:
+    def __init__(self) -> None:
+        try:
+            type(self).__dict__[_SINGLETON_INSTANCE_ATTR]
+        except KeyError:
+            pass
+        else:
+            raise TypeError(f'Must not re-instantiate singleton {type(self)}')
+
+        super().__init__()
+
+    @ta.final
+    def __reduce__(self):
+        return (type(self), ())
+
+
+class Singleton(_AnySingleton):
+    @ta.final
     def __new__(cls):
         return cls.__dict__[_SINGLETON_INSTANCE_ATTR]
 
@@ -95,15 +111,21 @@ class Singleton:
         _set_singleton_instance(super().__new__(cls))  # noqa
 
 
-class LazySingleton:
+_LAZY_SINGLETON_LOCK = threading.RLock()
+
+
+class LazySingleton(_AnySingleton):
+    @ta.final
     def __new__(cls):
         try:
             return cls.__dict__[_SINGLETON_INSTANCE_ATTR]
         except KeyError:
             pass
-        with _SINGLETON_LOCK:
+
+        with _LAZY_SINGLETON_LOCK:
             try:
                 return cls.__dict__[_SINGLETON_INSTANCE_ATTR]
             except KeyError:
                 pass
+
         return _set_singleton_instance(super().__new__(cls))
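LazySingleton.__new__ is the classic double-checked locking idiom: an unlocked fast path, then a re-check under the lock so concurrent first calls construct only one instance. A generic self-contained sketch of the pattern (names hypothetical):

    import threading

    _LOCK = threading.RLock()

    class Lazy:
        def __new__(cls):
            try:
                return cls.__dict__['_instance']  # fast path, no lock once created
            except KeyError:
                pass
            with _LOCK:
                try:  # re-check: another thread may have constructed it meanwhile
                    return cls.__dict__['_instance']
                except KeyError:
                    pass
                inst = super().__new__(cls)
                cls._instance = inst
                return inst

    assert Lazy() is Lazy()

The new __reduce__ on _AnySingleton complements this: pickled singletons deserialize by calling the class again, so unpickling yields the one shared instance.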
omlish/lang/collections.py CHANGED
@@ -9,7 +9,7 @@ V = ta.TypeVar('V')
 ##
 
 
-def yield_dict_init(*args: ta.Any, **kwargs: ta.Any) -> ta.Iterable[tuple[ta.Any, ta.Any]]:
+def yield_dict_init(*args: ta.Any, **kwargs: ta.Any) -> ta.Iterator[tuple[ta.Any, ta.Any]]:
     if len(args) > 1:
         raise TypeError
 
omlish/lang/iterables.py CHANGED
@@ -46,14 +46,14 @@ def chunk(n: int, iterable: ta.Iterable[T], strict: bool = False) -> ta.Iterator
     return iterator
 
 
-def interleave(vs: ta.Iterable[T], d: T) -> ta.Iterable[T]:
+def interleave(vs: ta.Iterable[T], d: T) -> ta.Iterator[T]:
     for i, v in enumerate(vs):
         if i:
             yield d
         yield v
 
 
-def renumerate(it: ta.Iterable[T]) -> ta.Iterable[tuple[T, int]]:
+def renumerate(it: ta.Iterable[T]) -> ta.Iterator[tuple[T, int]]:
     return ((e, i) for i, e in enumerate(it))
 
 
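For reference, quick uses of the two re-annotated helpers:

    from omlish.lang.iterables import interleave, renumerate

    assert list(interleave('abc', '-')) == ['a', '-', 'b', '-', 'c']
    assert list(renumerate('ab')) == [('a', 0), ('b', 1)]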
omlish/lifecycles/contextmanagers.py CHANGED
@@ -36,7 +36,7 @@ class LifecycleContextManager(ta.Generic[LifecycleT]):
         self._lifecycle = lifecycle
         self._controller = lifecycle if isinstance(lifecycle, LifecycleController) else LifecycleController(lifecycle)
 
-    __repr__ = lang.AttrRepr(['lifecycle', 'state'])
+    __repr__ = lang.attr_ops('lifecycle', 'state').repr
 
     @property
     def lifecycle(self) -> LifecycleT:
omlish/lifecycles/controller.py CHANGED
@@ -47,7 +47,7 @@ class AnyLifecycleController(AnyLifecycle[R], lang.Abstract, ta.Generic[AnyLifecycleT, R]):
         self._state = LifecycleStates.NEW
         self._listeners: list[AnyLifecycleListener[AnyLifecycleT, R]] = []
 
-    __repr__ = lang.AttrRepr(['lifecycle', 'state'])
+    __repr__ = lang.attr_ops('lifecycle', 'state').repr
 
     @property
     def lifecycle(self) -> AnyLifecycleT:
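These last two hunks show the migration off the removed lang.AttrRepr: the new attrops-based spelling builds a __repr__ from attribute names. A sketch with a hypothetical class (exact repr formatting assumed):

    from omlish import lang

    class Point:
        def __init__(self, x: int, y: int) -> None:
            self.x = x
            self.y = y

        # previously: __repr__ = lang.AttrRepr(['x', 'y'])
        __repr__ = lang.attr_ops('x', 'y').repr

    print(Point(1, 2))  # e.g. Point(x=1, y=2)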