omlish 0.0.0.dev6__py3-none-any.whl → 0.0.0.dev8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. omlish/__about__.py +109 -5
  2. omlish/__init__.py +0 -8
  3. omlish/asyncs/__init__.py +0 -9
  4. omlish/asyncs/anyio.py +40 -0
  5. omlish/bootstrap.py +737 -0
  6. omlish/check.py +1 -1
  7. omlish/collections/__init__.py +4 -0
  8. omlish/collections/exceptions.py +2 -0
  9. omlish/collections/utils.py +38 -9
  10. omlish/configs/strings.py +2 -0
  11. omlish/dataclasses/__init__.py +7 -0
  12. omlish/dataclasses/impl/descriptors.py +95 -0
  13. omlish/dataclasses/impl/reflect.py +1 -1
  14. omlish/dataclasses/utils.py +23 -0
  15. omlish/{lang/datetimes.py → datetimes.py} +8 -4
  16. omlish/diag/procfs.py +1 -1
  17. omlish/diag/threads.py +131 -48
  18. omlish/docker.py +16 -1
  19. omlish/fnpairs.py +0 -4
  20. omlish/{serde → formats}/dotenv.py +3 -0
  21. omlish/{serde → formats}/yaml.py +2 -2
  22. omlish/graphs/trees.py +1 -1
  23. omlish/http/consts.py +6 -0
  24. omlish/http/sessions.py +2 -2
  25. omlish/inject/__init__.py +4 -0
  26. omlish/inject/binder.py +3 -3
  27. omlish/inject/elements.py +1 -1
  28. omlish/inject/impl/injector.py +57 -27
  29. omlish/inject/impl/origins.py +2 -0
  30. omlish/inject/origins.py +3 -0
  31. omlish/inject/utils.py +18 -0
  32. omlish/iterators.py +69 -2
  33. omlish/lang/__init__.py +16 -7
  34. omlish/lang/classes/restrict.py +10 -0
  35. omlish/lang/contextmanagers.py +1 -1
  36. omlish/lang/descriptors.py +3 -3
  37. omlish/lang/imports.py +67 -0
  38. omlish/lang/iterables.py +40 -0
  39. omlish/lang/maybes.py +3 -0
  40. omlish/lang/objects.py +38 -0
  41. omlish/lang/strings.py +25 -0
  42. omlish/lang/sys.py +9 -0
  43. omlish/lang/typing.py +37 -0
  44. omlish/lite/__init__.py +1 -0
  45. omlish/lite/cached.py +18 -0
  46. omlish/lite/check.py +29 -0
  47. omlish/lite/contextmanagers.py +18 -0
  48. omlish/lite/json.py +30 -0
  49. omlish/lite/logs.py +121 -0
  50. omlish/lite/marshal.py +318 -0
  51. omlish/lite/reflect.py +49 -0
  52. omlish/lite/runtime.py +18 -0
  53. omlish/lite/secrets.py +19 -0
  54. omlish/lite/strings.py +25 -0
  55. omlish/lite/subprocesses.py +112 -0
  56. omlish/logs/__init__.py +13 -9
  57. omlish/logs/configs.py +17 -22
  58. omlish/logs/formatters.py +3 -48
  59. omlish/marshal/__init__.py +28 -0
  60. omlish/marshal/any.py +5 -5
  61. omlish/marshal/base.py +27 -11
  62. omlish/marshal/base64.py +24 -9
  63. omlish/marshal/dataclasses.py +34 -28
  64. omlish/marshal/datetimes.py +74 -18
  65. omlish/marshal/enums.py +14 -8
  66. omlish/marshal/exceptions.py +11 -1
  67. omlish/marshal/factories.py +59 -74
  68. omlish/marshal/forbidden.py +35 -0
  69. omlish/marshal/global_.py +11 -4
  70. omlish/marshal/iterables.py +21 -24
  71. omlish/marshal/mappings.py +23 -26
  72. omlish/marshal/numbers.py +51 -0
  73. omlish/marshal/optionals.py +11 -12
  74. omlish/marshal/polymorphism.py +86 -21
  75. omlish/marshal/primitives.py +4 -5
  76. omlish/marshal/standard.py +13 -8
  77. omlish/marshal/uuids.py +4 -5
  78. omlish/matchfns.py +218 -0
  79. omlish/os.py +64 -0
  80. omlish/reflect/__init__.py +39 -0
  81. omlish/reflect/isinstance.py +38 -0
  82. omlish/reflect/ops.py +84 -0
  83. omlish/reflect/subst.py +110 -0
  84. omlish/reflect/types.py +275 -0
  85. omlish/secrets/__init__.py +18 -2
  86. omlish/secrets/crypto.py +132 -0
  87. omlish/secrets/marshal.py +36 -7
  88. omlish/secrets/openssl.py +207 -0
  89. omlish/secrets/secrets.py +260 -8
  90. omlish/secrets/subprocesses.py +42 -0
  91. omlish/sql/dbs.py +6 -5
  92. omlish/sql/exprs.py +12 -0
  93. omlish/sql/secrets.py +10 -0
  94. omlish/term.py +1 -1
  95. omlish/testing/pytest/plugins/switches.py +54 -19
  96. omlish/text/glyphsplit.py +5 -0
  97. omlish-0.0.0.dev8.dist-info/METADATA +50 -0
  98. {omlish-0.0.0.dev6.dist-info → omlish-0.0.0.dev8.dist-info}/RECORD +105 -78
  99. {omlish-0.0.0.dev6.dist-info → omlish-0.0.0.dev8.dist-info}/WHEEL +1 -1
  100. omlish/logs/filters.py +0 -11
  101. omlish/reflect.py +0 -470
  102. omlish-0.0.0.dev6.dist-info/METADATA +0 -34
  103. /omlish/{asyncs/futures.py → concurrent.py} +0 -0
  104. /omlish/{serde → formats}/__init__.py +0 -0
  105. /omlish/{serde → formats}/json.py +0 -0
  106. /omlish/{serde → formats}/props.py +0 -0
  107. {omlish-0.0.0.dev6.dist-info → omlish-0.0.0.dev8.dist-info}/LICENSE +0 -0
  108. {omlish-0.0.0.dev6.dist-info → omlish-0.0.0.dev8.dist-info}/top_level.txt +0 -0
omlish/check.py CHANGED
@@ -58,7 +58,7 @@ def _unpack_isinstance_spec(spec: ta.Any) -> tuple:
58
58
  return spec
59
59
 
60
60
 
61
- def isinstance(v: ta.Any, spec: ta.Union[type[T], tuple], msg: Message = None) -> T: # noqa
61
+ def isinstance(v: ta.Any, spec: type[T] | tuple, msg: Message = None) -> T: # noqa
62
62
  if not _isinstance(v, _unpack_isinstance_spec(spec)):
63
63
  _raise(TypeError, 'Must be instance', msg, v, spec)
64
64
  return v
@@ -25,6 +25,10 @@ from .coerce import ( # noqa
25
25
  seq_or_none,
26
26
  )
27
27
 
28
+ from .exceptions import ( # noqa
29
+ DuplicateKeyError,
30
+ )
31
+
28
32
  from .frozen import ( # noqa
29
33
  Frozen,
30
34
  FrozenDict,
@@ -0,0 +1,2 @@
1
+ class DuplicateKeyError(KeyError):
2
+ pass
@@ -3,6 +3,8 @@ import itertools
3
3
  import typing as ta
4
4
 
5
5
  from .. import check
6
+ from .. import lang
7
+ from .exceptions import DuplicateKeyError
6
8
  from .identity import IdentityKeyDict
7
9
  from .identity import IdentitySet
8
10
 
@@ -48,29 +50,56 @@ def partition(items: ta.Iterable[T], pred: ta.Callable[[T], bool]) -> tuple[list
48
50
  return t, f
49
51
 
50
52
 
51
- def unique(it: ta.Iterable[T], *, identity: bool = False) -> list[T]:
53
+ def unique(
54
+ it: ta.Iterable[T],
55
+ *,
56
+ key: ta.Callable[[T], ta.Any] = lang.identity,
57
+ identity: bool = False,
58
+ strict: bool = False,
59
+ ) -> list[T]:
52
60
  if isinstance(it, str):
53
61
  raise TypeError(it)
54
62
  ret: list[T] = []
55
- seen: ta.MutableSet[T] = IdentitySet() if identity else set()
63
+ seen: ta.MutableSet = IdentitySet() if identity else set()
56
64
  for e in it:
57
- if e not in seen:
58
- seen.add(e)
65
+ k = key(e)
66
+ if k in seen:
67
+ if strict:
68
+ raise DuplicateKeyError(k, e)
69
+ else:
70
+ seen.add(k)
59
71
  ret.append(e)
60
72
  return ret
61
73
 
62
74
 
63
- def unique_map(kvs: ta.Iterable[tuple[K, V]], *, identity: bool = False) -> ta.MutableMapping[K, V]:
75
+ def unique_map(
76
+ kvs: ta.Iterable[tuple[K, V]],
77
+ *,
78
+ identity: bool = False,
79
+ strict: bool = False,
80
+ ) -> ta.MutableMapping[K, V]:
64
81
  d: ta.MutableMapping[K, V] = IdentityKeyDict() if identity else {}
65
82
  for k, v in kvs:
66
83
  if k in d:
67
- raise KeyError(k)
68
- d[k] = v
84
+ if strict:
85
+ raise DuplicateKeyError(k)
86
+ else:
87
+ d[k] = v
69
88
  return d
70
89
 
71
90
 
72
- def unique_map_by(fn: ta.Callable[[V], K], vs: ta.Iterable[V], *, identity: bool = False) -> ta.MutableMapping[K, V]:
73
- return unique_map(((fn(v), v) for v in vs), identity=identity)
91
+ def unique_map_by(
92
+ fn: ta.Callable[[V], K],
93
+ vs: ta.Iterable[V],
94
+ *,
95
+ identity: bool = False,
96
+ strict: bool = False,
97
+ ) -> ta.MutableMapping[K, V]:
98
+ return unique_map(
99
+ ((fn(v), v) for v in vs),
100
+ identity=identity,
101
+ strict=strict,
102
+ )
74
103
 
75
104
 
76
105
  def multi_map(kvs: ta.Iterable[tuple[K, V]], *, identity: bool = False) -> ta.MutableMapping[K, list[V]]:
omlish/configs/strings.py CHANGED
@@ -1,6 +1,8 @@
1
1
  """
2
2
  TODO:
3
3
  - reflecty generalized rewriter, obviously..
4
+ - env vars
5
+ - coalescing - {$FOO|$BAR|baz}
4
6
  """
5
7
  import collections.abc
6
8
  import typing as ta
@@ -36,6 +36,11 @@ from .impl.as_ import ( # noqa
36
36
  astuple,
37
37
  )
38
38
 
39
+ from .impl.params import ( # noqa
40
+ FieldExtras,
41
+ get_field_extras,
42
+ )
43
+
39
44
  from .impl.replace import ( # noqa
40
45
  replace,
41
46
  )
@@ -85,8 +90,10 @@ from .impl.reflect import ( # noqa
85
90
 
86
91
  from .utils import ( # noqa
87
92
  chain_metadata,
93
+ deep_replace,
88
94
  field_modifier,
89
95
  maybe_post_init,
90
96
  opt_repr,
97
+ update_field_extras,
91
98
  update_field_metadata,
92
99
  )
@@ -0,0 +1,95 @@
1
+ import abc
2
+ import dataclasses as dc
3
+ import typing as ta
4
+
5
+ from ... import defs
6
+
7
+
8
+ class AbstractFieldDescriptor(abc.ABC):
9
+
10
+ def __init__(
11
+ self,
12
+ *,
13
+ default: ta.Any = dc.MISSING,
14
+ frozen: bool = False,
15
+ name: str | None = None,
16
+ pre_set: ta.Callable[[ta.Any, ta.Any], ta.Any] | None = None,
17
+ post_set: ta.Callable[[ta.Any, ta.Any], None] | None = None,
18
+ ) -> None:
19
+ super().__init__()
20
+
21
+ self._default = default
22
+ self._frozen = frozen
23
+ self._name = name
24
+ self._pre_set = pre_set
25
+ self._post_set = post_set
26
+
27
+ defs.repr('name')
28
+ defs.getter('default', 'frozen', 'name', 'pre_set', 'post_set')
29
+
30
+ def __set_name__(self, owner, name):
31
+ if self._name is None:
32
+ self._name = name
33
+
34
+ def __get__(self, instance, owner=None):
35
+ if instance is not None:
36
+ try:
37
+ return self._get(instance)
38
+ except AttributeError:
39
+ pass
40
+ if self._default is not dc.MISSING:
41
+ return self._default
42
+ raise AttributeError(self._name)
43
+
44
+ @abc.abstractmethod
45
+ def _get(self, instance):
46
+ raise NotImplementedError
47
+
48
+ def __set__(self, instance, value):
49
+ if self._frozen:
50
+ raise dc.FrozenInstanceError(f'cannot assign to field {self._name!r}')
51
+ if self._pre_set is not None:
52
+ value = self._pre_set(instance, value)
53
+ self._set(instance, value)
54
+ if self._post_set is not None:
55
+ self._post_set(instance, value)
56
+
57
+ @abc.abstractmethod
58
+ def _set(self, instance, value):
59
+ raise NotImplementedError
60
+
61
+ def __delete__(self, instance):
62
+ if self._frozen:
63
+ raise dc.FrozenInstanceError(f'cannot delete field {self._name!r}')
64
+ self._del(instance)
65
+
66
+ @abc.abstractmethod
67
+ def _del(self, instance):
68
+ raise NotImplementedError
69
+
70
+
71
+ class PyFieldDescriptor(AbstractFieldDescriptor):
72
+
73
+ def __init__(
74
+ self,
75
+ attr: str,
76
+ **kwargs: ta.Any,
77
+ ) -> None:
78
+ super().__init__(**kwargs)
79
+
80
+ self._attr = attr
81
+
82
+ defs.repr('attr', 'name')
83
+ defs.getter('attr')
84
+
85
+ def _get(self, instance):
86
+ return getattr(instance, self._attr)
87
+
88
+ def _set(self, instance, value):
89
+ setattr(instance, self._attr, value)
90
+
91
+ def _del(self, instance):
92
+ delattr(instance, self._attr)
93
+
94
+
95
+ FieldDescriptor = PyFieldDescriptor
@@ -149,7 +149,7 @@ class ClassInfo:
149
149
 
150
150
  @cached.property
151
151
  def generic_mro_lookup(self) -> ta.Mapping[type, rfl.Type]:
152
- return col.unique_map((check.not_none(rfl.get_concrete_type(g)), g) for g in self.generic_mro)
152
+ return col.unique_map(((check.not_none(rfl.get_concrete_type(g)), g) for g in self.generic_mro), strict=True)
153
153
 
154
154
  @cached.property
155
155
  def generic_replaced_field_types(self) -> ta.Mapping[str, rfl.Type]:
@@ -4,6 +4,9 @@ import types
4
4
  import typing as ta
5
5
 
6
6
  from .. import check
7
+ from .impl.params import DEFAULT_FIELD_EXTRAS
8
+ from .impl.params import FieldExtras
9
+ from .impl.params import get_field_extras
7
10
 
8
11
 
9
12
  T = ta.TypeVar('T')
@@ -42,3 +45,23 @@ def update_field_metadata(f: dc.Field, nmd: ta.Mapping) -> dc.Field:
42
45
  check.isinstance(f, dc.Field)
43
46
  f.metadata = chain_metadata(nmd, f.metadata)
44
47
  return f
48
+
49
+
50
+ def update_field_extras(f: dc.Field, *, unless_non_default: bool = False, **kwargs: ta.Any) -> dc.Field:
51
+ fe = get_field_extras(f)
52
+ return update_field_metadata(f, {
53
+ FieldExtras: dc.replace(fe, **{
54
+ k: v
55
+ for k, v in kwargs.items()
56
+ if not unless_non_default or v != getattr(DEFAULT_FIELD_EXTRAS, k)
57
+ }),
58
+ })
59
+
60
+
61
+ def deep_replace(o: T, *args: str | ta.Callable[[ta.Any], ta.Mapping[str, ta.Any]]) -> T:
62
+ if not args:
63
+ return o
64
+ elif len(args) == 1:
65
+ return dc.replace(o, **args[0](o)) # type: ignore
66
+ else:
67
+ return dc.replace(o, **{args[0]: deep_replace(getattr(o, args[0]), *args[1:])}) # type: ignore
@@ -1,5 +1,8 @@
1
+ # ruff: noqa: UP007
2
+ # @omlish-lite
1
3
  import datetime
2
4
  import re
5
+ import typing as ta
3
6
 
4
7
 
5
8
  def to_seconds(value: datetime.timedelta) -> float:
@@ -18,8 +21,7 @@ def months_ago(date: datetime.date, num: int) -> datetime.date:
18
21
  return datetime.date(ago_year, ago_month, 1)
19
22
 
20
23
 
21
- def parse_date(s: str, tz: datetime.timezone | None = None) -> datetime.date:
22
-
24
+ def parse_date(s: str, tz: ta.Optional[datetime.timezone] = None) -> datetime.date:
23
25
  if s.lower() in ['today', 'now']:
24
26
  return datetime.datetime.now(tz=tz)
25
27
  elif s.lower() == 'yesterday':
@@ -39,7 +41,8 @@ _TIMEDELTA_STR_RE = re.compile(
39
41
  r'((?P<days>-?\d+)\s*days?,\s*)?'
40
42
  r'(?P<hours>\d?\d):(?P<minutes>\d\d)'
41
43
  r':(?P<seconds>\d\d+(\.\d+)?)'
42
- r'\s*$')
44
+ r'\s*$',
45
+ )
43
46
 
44
47
 
45
48
  _TIMEDELTA_DHMS_RE = re.compile(
@@ -49,7 +52,8 @@ _TIMEDELTA_DHMS_RE = re.compile(
49
52
  r',?\s*((?P<hours>\d+(\.\d+)?)\s*(h|hours?))?'
50
53
  r',?\s*((?P<minutes>\d+(\.\d+)?)\s*(m|minutes?))?'
51
54
  r',?\s*((?P<seconds>\d+(\.\d+)?)\s*(s|secs?|seconds?))?'
52
- r'\s*$')
55
+ r'\s*$',
56
+ )
53
57
 
54
58
 
55
59
  def parse_timedelta(s: str) -> datetime.timedelta:
omlish/diag/procfs.py CHANGED
@@ -14,7 +14,7 @@ import typing as ta
14
14
  from .. import iterators as it
15
15
  from .. import lang
16
16
  from .. import os as oos
17
- from ..serde import json
17
+ from ..formats import json
18
18
  from .procstats import ProcStats
19
19
 
20
20
 
omlish/diag/threads.py CHANGED
@@ -9,6 +9,90 @@ import traceback
9
9
  import typing as ta
10
10
 
11
11
 
12
+ ##
13
+
14
+
15
+ def dump_threads(out: ta.IO) -> None:
16
+ out.write('\n\n')
17
+
18
+ thrs_by_tid = {t.ident: t for t in threading.enumerate()}
19
+
20
+ for tid, fr in sys._current_frames().items(): # noqa
21
+ try:
22
+ thr = thrs_by_tid[tid]
23
+ except KeyError:
24
+ thr_rpr = repr(tid)
25
+ else:
26
+ thr_rpr = repr(thr)
27
+
28
+ tb = traceback.format_stack(fr)
29
+
30
+ out.write(f'{thr_rpr}\n')
31
+ out.write('\n'.join(l.strip() for l in tb))
32
+ out.write('\n\n')
33
+
34
+
35
+ def dump_threads_str() -> str:
36
+ out = io.StringIO()
37
+ dump_threads(out)
38
+ return out.getvalue()
39
+
40
+
41
+ ##
42
+
43
+
44
+ class StoppableThread:
45
+ def __init__(
46
+ self,
47
+ fn: ta.Callable[[], None],
48
+ interval_s: float,
49
+ *,
50
+ tick_immediately: bool = False,
51
+ start: bool = False,
52
+ **kwargs: ta.Any,
53
+ ) -> None:
54
+ super().__init__()
55
+ self._fn = fn
56
+ self._interval_s = interval_s
57
+ self._tick_immediately = tick_immediately
58
+ self._thread = threading.Thread(target=self._loop, **kwargs)
59
+ self._stop_event = threading.Event()
60
+ if start:
61
+ self.start()
62
+
63
+ @property
64
+ def thread(self) -> threading.Thread:
65
+ return self._thread
66
+
67
+ @property
68
+ def ident(self) -> int | None:
69
+ return self._thread.ident
70
+
71
+ def start(self) -> None:
72
+ return self._thread.start()
73
+
74
+ def stop_nowait(self) -> None:
75
+ self._stop_event.set()
76
+
77
+ def stop_wait(self, timeout: float | None = None) -> None:
78
+ self.stop_nowait()
79
+ self._thread.join(timeout)
80
+
81
+ def _loop(self) -> None:
82
+ if self._tick_immediately:
83
+ self._fn()
84
+
85
+ while True:
86
+ self._stop_event.wait(self._interval_s)
87
+ if self._stop_event.is_set():
88
+ return
89
+
90
+ self._fn()
91
+
92
+
93
+ ##
94
+
95
+
12
96
  _DEBUG_THREAD_COUNTER = itertools.count()
13
97
 
14
98
 
@@ -18,47 +102,23 @@ def create_thread_dump_thread(
18
102
  out: ta.TextIO = sys.stderr,
19
103
  start: bool = False,
20
104
  nodaemon: bool = False,
21
- ) -> threading.Thread:
22
- def dump():
23
- cthr = threading.current_thread()
24
- thrs_by_tid = {t.ident: t for t in threading.enumerate()}
25
-
26
- buf = io.StringIO()
27
- for tid, fr in sys._current_frames().items(): # noqa
28
- if tid == cthr.ident:
29
- continue
30
-
31
- try:
32
- thr = thrs_by_tid[tid]
33
- except KeyError:
34
- thr_rpr = repr(tid)
35
- else:
36
- thr_rpr = repr(thr)
37
-
38
- tb = traceback.format_stack(fr)
39
-
40
- buf.write(f'{thr_rpr}\n')
41
- buf.write('\n'.join(l.strip() for l in tb))
42
- buf.write('\n\n')
105
+ ) -> StoppableThread:
106
+ def proc() -> None:
107
+ try:
108
+ out.write(dump_threads_str())
109
+ except Exception as e: # noqa
110
+ out.write(repr(e) + '\n\n')
43
111
 
44
- out.write(buf.getvalue())
45
-
46
- def proc():
47
- while True:
48
- time.sleep(interval_s)
49
- try:
50
- dump()
51
- except Exception as e: # noqa
52
- out.write(repr(e) + '\n\n')
53
-
54
- dthr = threading.Thread(
55
- target=proc,
112
+ return StoppableThread(
113
+ proc,
114
+ interval_s,
56
115
  daemon=not nodaemon,
57
116
  name=f'thread-dump-thread-{next(_DEBUG_THREAD_COUNTER)}',
117
+ start=start,
58
118
  )
59
- if start:
60
- dthr.start()
61
- return dthr
119
+
120
+
121
+ ##
62
122
 
63
123
 
64
124
  def create_suicide_thread(
@@ -67,20 +127,43 @@ def create_suicide_thread(
67
127
  interval_s: float = 1.,
68
128
  parent_thread: threading.Thread | None = None,
69
129
  start: bool = False,
70
- ) -> threading.Thread:
130
+ ) -> StoppableThread:
131
+ """Kills process when parent_thread dies."""
132
+
71
133
  if parent_thread is None:
72
134
  parent_thread = threading.current_thread()
73
135
 
74
- def proc():
75
- while True:
76
- parent_thread.join(interval_s)
77
- if not parent_thread.is_alive():
78
- os.kill(os.getpid(), sig)
136
+ def proc() -> None:
137
+ if not parent_thread.is_alive():
138
+ os.kill(os.getpid(), sig)
79
139
 
80
- dthr = threading.Thread(
81
- target=proc,
140
+ return StoppableThread(
141
+ proc,
142
+ interval_s,
82
143
  name=f'suicide-thread-{next(_DEBUG_THREAD_COUNTER)}',
144
+ start=start,
145
+ )
146
+
147
+
148
+ ##
149
+
150
+
151
+ def create_timebomb_thread(
152
+ delay_s: float,
153
+ *,
154
+ sig: int = signal.SIGKILL,
155
+ interval_s: float = 1.,
156
+ start: bool = False,
157
+ ) -> StoppableThread:
158
+ def proc() -> None:
159
+ if time.time() >= deadline:
160
+ os.kill(os.getpid(), sig)
161
+
162
+ deadline = time.time() + delay_s
163
+
164
+ return StoppableThread(
165
+ proc,
166
+ interval_s,
167
+ name=f'timebomb-thread-{next(_DEBUG_THREAD_COUNTER)}',
168
+ start=start,
83
169
  )
84
- if start:
85
- dthr.start()
86
- return dthr
omlish/docker.py CHANGED
@@ -18,13 +18,14 @@ import datetime
18
18
  import re
19
19
  import shlex
20
20
  import subprocess
21
+ import sys
21
22
  import typing as ta
22
23
 
23
24
  from . import check
24
25
  from . import dataclasses as dc
25
26
  from . import lang
26
27
  from . import marshal as msh
27
- from .serde import json
28
+ from .formats import json
28
29
 
29
30
 
30
31
  if ta.TYPE_CHECKING:
@@ -168,3 +169,17 @@ def timebomb_payload(delay_s: float, name: str = 'omlish-docker-timebomb') -> st
168
169
  'sh -c \'killall5 -9 -o $PPID -o $$ ; kill 1\''
169
170
  ') &'
170
171
  )
172
+
173
+
174
+ ##
175
+
176
+
177
+ _LIKELY_IN_DOCKER_PATTERN = re.compile(r'^overlay / .*/docker/')
178
+
179
+
180
+ def is_likely_in_docker() -> bool:
181
+ if sys.platform != 'linux':
182
+ return False
183
+ with open('/proc/mounts') as f: # type: ignore
184
+ ls = f.readlines()
185
+ return any(_LIKELY_IN_DOCKER_PATTERN.match(l) for l in ls)
omlish/fnpairs.py CHANGED
@@ -4,11 +4,7 @@ TODO:
4
4
  - csv
5
5
  - csvloader
6
6
  - cbor
7
- - cloudpickle
8
7
  - alt json backends
9
- - compression
10
- - snappy
11
- - lz4
12
8
  - wrapped (wait for usecase)
13
9
  """
14
10
  import abc
@@ -20,6 +20,7 @@
20
20
  # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
21
21
  # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
22
22
  # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
23
+ # https://github.com/theskumar/python-dotenv/tree/4d505f2c9bc3569791e64bca0f2e4300f43df0e0/src/dotenv
23
24
  import abc
24
25
  import codecs
25
26
  import contextlib
@@ -544,6 +545,7 @@ def dotenv_values(
544
545
  verbose: bool = False,
545
546
  interpolate: bool = True,
546
547
  encoding: str | None = 'utf-8',
548
+ env: ta.Mapping[str, str] | None = None,
547
549
  ) -> dict[str, str | None]:
548
550
  """
549
551
  Parse a .env file and return its content as a dict.
@@ -571,4 +573,5 @@ def dotenv_values(
571
573
  interpolate=interpolate,
572
574
  override=True,
573
575
  encoding=encoding,
576
+ env=env,
574
577
  ).dict()
@@ -136,7 +136,7 @@ class _cached_class_property: # noqa
136
136
  def __set_name__(self, owner, name):
137
137
  self._attr = '_' + name
138
138
 
139
- def __get__(self, instance, owner):
139
+ def __get__(self, instance, owner=None):
140
140
  if owner is None:
141
141
  if instance is None:
142
142
  raise RuntimeError
@@ -152,7 +152,7 @@ class _cached_class_property: # noqa
152
152
  class WrappedLoaders(lang.Namespace):
153
153
 
154
154
  @staticmethod
155
- def _wrap(cls):
155
+ def _wrap(cls): # noqa
156
156
  return type('NodeWrapping$' + cls.__name__, (NodeWrappingConstructorMixin, cls), {})
157
157
 
158
158
  Base: type['yaml.BaseLoader'] = _cached_class_property(lambda cls: cls._wrap(yaml.BaseLoader)) # type: ignore
omlish/graphs/trees.py CHANGED
@@ -190,7 +190,7 @@ class BasicTreeAnalysis(ta.Generic[NodeT]):
190
190
  e: ta.Any
191
191
  d: ta.Any
192
192
  if identity:
193
- e, d = id, col.unique_map((id(n), n) for n, _ in pairs)
193
+ e, d = id, col.unique_map(((id(n), n) for n, _ in pairs), strict=True)
194
194
  else:
195
195
  e, d = lang.identity, lang.identity
196
196
  tsd = {e(n): {e(p)} for n, p in parents_by_node.items()}
omlish/http/consts.py CHANGED
@@ -1,3 +1,7 @@
1
+ """
2
+ TODO:
3
+ - import mimetypes lol
4
+ """
1
5
  import base64
2
6
  import http
3
7
 
@@ -46,6 +50,8 @@ CONTENT_TYPE_ICON = b'image/x-icon'
46
50
  CONTENT_TYPE_JSON = b'application/json'
47
51
  CONTENT_TYPE_JSON_UTF8 = b'; '.join([CONTENT_TYPE_JSON, CONTENT_CHARSET_UTF8])
48
52
 
53
+ CONTENT_TYPE_PNG = b'image/png'
54
+
49
55
  CONTENT_TYPE_TEXT = b'text/plain'
50
56
  CONTENT_TYPE_TEXT_UTF8 = b'; '.join([CONTENT_TYPE_TEXT, CONTENT_CHARSET_UTF8])
51
57
 
omlish/http/sessions.py CHANGED
@@ -45,7 +45,7 @@ def bytes_to_int(bytestr: bytes) -> int:
45
45
  class Signer:
46
46
  @dc.dataclass(frozen=True)
47
47
  class Config:
48
- secret_key: str | sec.Secret = dc.field()
48
+ secret_key: str | sec.SecretRef = dc.field()
49
49
  salt: str = 'cookie-session'
50
50
 
51
51
  def __init__(
@@ -65,7 +65,7 @@ class Signer:
65
65
 
66
66
  @lang.cached_function
67
67
  def derive_key(self) -> bytes:
68
- mac = hmac.new(self._secrets.fix(self._config.secret_key).encode(), digestmod=self.digest())
68
+ mac = hmac.new(self._secrets.fix(self._config.secret_key).reveal().encode(), digestmod=self.digest())
69
69
  mac.update(self._config.salt.encode())
70
70
  return mac.digest()
71
71
 
omlish/inject/__init__.py CHANGED
@@ -117,3 +117,7 @@ from .types import ( # noqa
117
117
  Tag,
118
118
  Unscoped,
119
119
  )
120
+
121
+ from .utils import ( # noqa
122
+ ConstFn,
123
+ )
omlish/inject/binder.py CHANGED
@@ -87,14 +87,14 @@ def bind(
87
87
  *,
88
88
  tag: ta.Any = None,
89
89
 
90
- in_: Scope | None = None,
91
- singleton: bool = False,
92
-
93
90
  to_fn: ta.Any = None,
94
91
  to_ctor: ta.Any = None,
95
92
  to_const: ta.Any = None,
96
93
  to_key: ta.Any = None,
97
94
 
95
+ in_: Scope | None = None,
96
+ singleton: bool = False,
97
+
98
98
  eager: bool = False,
99
99
  expose: bool = False,
100
100
  ) -> Element | Elements: