omlish 0.0.0.dev3__py3-none-any.whl → 0.0.0.dev5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of omlish might be problematic. See the package registry's advisory page for more details.

Files changed (91)
  1. omlish/__about__.py +1 -1
  2. omlish/__init__.py +8 -0
  3. omlish/asyncs/__init__.py +18 -0
  4. omlish/asyncs/anyio.py +66 -0
  5. omlish/asyncs/flavors.py +227 -0
  6. omlish/asyncs/trio_asyncio.py +47 -0
  7. omlish/c3.py +1 -1
  8. omlish/cached.py +1 -2
  9. omlish/collections/__init__.py +4 -1
  10. omlish/collections/cache/impl.py +1 -1
  11. omlish/collections/indexed.py +1 -1
  12. omlish/collections/utils.py +38 -6
  13. omlish/configs/__init__.py +5 -0
  14. omlish/configs/classes.py +53 -0
  15. omlish/configs/dotenv.py +586 -0
  16. omlish/configs/props.py +589 -49
  17. omlish/dataclasses/impl/api.py +1 -1
  18. omlish/dataclasses/impl/as_.py +1 -1
  19. omlish/dataclasses/impl/fields.py +1 -0
  20. omlish/dataclasses/impl/init.py +1 -1
  21. omlish/dataclasses/impl/main.py +1 -0
  22. omlish/dataclasses/impl/metaclass.py +6 -1
  23. omlish/dataclasses/impl/order.py +1 -1
  24. omlish/dataclasses/impl/reflect.py +15 -2
  25. omlish/defs.py +1 -1
  26. omlish/diag/procfs.py +29 -1
  27. omlish/diag/procstats.py +32 -0
  28. omlish/diag/replserver/console.py +3 -3
  29. omlish/diag/replserver/server.py +6 -5
  30. omlish/diag/threads.py +86 -0
  31. omlish/docker.py +19 -0
  32. omlish/dynamic.py +2 -2
  33. omlish/fnpairs.py +121 -24
  34. omlish/graphs/dags.py +113 -0
  35. omlish/graphs/domination.py +268 -0
  36. omlish/graphs/trees.py +2 -2
  37. omlish/http/__init__.py +25 -0
  38. omlish/http/asgi.py +131 -0
  39. omlish/http/consts.py +31 -4
  40. omlish/http/cookies.py +194 -0
  41. omlish/http/dates.py +70 -0
  42. omlish/http/encodings.py +6 -0
  43. omlish/http/json.py +273 -0
  44. omlish/http/sessions.py +197 -0
  45. omlish/inject/__init__.py +8 -2
  46. omlish/inject/bindings.py +3 -3
  47. omlish/inject/exceptions.py +3 -3
  48. omlish/inject/impl/elements.py +46 -25
  49. omlish/inject/impl/injector.py +8 -5
  50. omlish/inject/impl/multis.py +74 -0
  51. omlish/inject/impl/providers.py +19 -39
  52. omlish/inject/{proxy.py → impl/proxy.py} +2 -2
  53. omlish/inject/impl/scopes.py +4 -2
  54. omlish/inject/injector.py +1 -0
  55. omlish/inject/keys.py +3 -9
  56. omlish/inject/multis.py +70 -0
  57. omlish/inject/providers.py +23 -23
  58. omlish/inject/scopes.py +7 -3
  59. omlish/inject/types.py +0 -8
  60. omlish/iterators.py +13 -0
  61. omlish/json.py +138 -1
  62. omlish/lang/__init__.py +8 -0
  63. omlish/lang/classes/restrict.py +1 -1
  64. omlish/lang/classes/virtual.py +2 -2
  65. omlish/lang/contextmanagers.py +64 -0
  66. omlish/lang/datetimes.py +6 -5
  67. omlish/lang/functions.py +10 -0
  68. omlish/lang/imports.py +11 -2
  69. omlish/lang/sys.py +7 -0
  70. omlish/lang/typing.py +1 -0
  71. omlish/logs/utils.py +1 -1
  72. omlish/marshal/datetimes.py +1 -1
  73. omlish/reflect.py +8 -2
  74. omlish/sql/__init__.py +9 -0
  75. omlish/sql/asyncs.py +148 -0
  76. omlish/sync.py +70 -0
  77. omlish/term.py +6 -1
  78. omlish/testing/pydevd.py +2 -0
  79. omlish/testing/pytest/__init__.py +5 -0
  80. omlish/testing/pytest/helpers.py +0 -24
  81. omlish/testing/pytest/inject/harness.py +1 -1
  82. omlish/testing/pytest/marks.py +48 -0
  83. omlish/testing/pytest/plugins/__init__.py +2 -0
  84. omlish/testing/pytest/plugins/managermarks.py +60 -0
  85. omlish/testing/testing.py +10 -0
  86. omlish/text/delimit.py +4 -0
  87. {omlish-0.0.0.dev3.dist-info → omlish-0.0.0.dev5.dist-info}/METADATA +4 -1
  88. {omlish-0.0.0.dev3.dist-info → omlish-0.0.0.dev5.dist-info}/RECORD +91 -70
  89. {omlish-0.0.0.dev3.dist-info → omlish-0.0.0.dev5.dist-info}/WHEEL +1 -1
  90. {omlish-0.0.0.dev3.dist-info → omlish-0.0.0.dev5.dist-info}/LICENSE +0 -0
  91. {omlish-0.0.0.dev3.dist-info → omlish-0.0.0.dev5.dist-info}/top_level.txt +0 -0
@@ -24,6 +24,7 @@ from .simple import MatchArgsProcessor
24
24
  from .simple import OverridesProcessor
25
25
  from .slots import add_slots
26
26
 
27
+
27
28
  if ta.TYPE_CHECKING:
28
29
  from . import metaclass
29
30
  else:
@@ -39,7 +39,12 @@ def confer_kwargs(
39
39
  for ck in bmp.confer:
40
40
  if ck in kwargs:
41
41
  continue
42
- if ck in ('frozen', 'generic_init', 'kw_only'):
42
+ if ck in (
43
+ 'frozen',
44
+ 'generic_init',
45
+ 'kw_only',
46
+ 'reorder',
47
+ ):
43
48
  confer_kwarg(out, ck, get_params(base).frozen)
44
49
  elif ck == 'confer':
45
50
  confer_kwarg(out, 'confer', bmp.confer)
@@ -1,7 +1,7 @@
1
1
  import typing as ta
2
2
 
3
- from .utils import Namespace
4
3
  from .processing import Processor
4
+ from .utils import Namespace
5
5
  from .utils import create_fn
6
6
  from .utils import set_new_attribute
7
7
  from .utils import tuple_str
@@ -28,9 +28,22 @@ from .params import get_params_extras
28
28
  from .utils import Namespace
29
29
 
30
30
 
31
+ try:
32
+ import annotationlib # noqa
33
+ except ImportError:
34
+ annotationlib = None
35
+
36
+
31
37
  MISSING = dc.MISSING
32
38
 
33
39
 
40
+ def _get_annotations(obj):
41
+ if annotationlib is not None:
42
+ return annotationlib.get_annotations(obj, format=annotationlib.Format.FORWARDREF) # noqa
43
+ else:
44
+ return inspect.get_annotations(obj)
45
+
46
+
34
47
  class ClassInfo:
35
48
 
36
49
  def __init__(self, cls: type, *, _constructing: bool = False) -> None:
@@ -54,7 +67,7 @@ class ClassInfo:
54
67
 
55
68
  @cached.property
56
69
  def cls_annotations(self) -> ta.Mapping[str, ta.Any]:
57
- return inspect.get_annotations(self._cls)
70
+ return _get_annotations(self._cls)
58
71
 
59
72
  ##
60
73
 
@@ -136,7 +149,7 @@ class ClassInfo:
136
149
 
137
150
  @cached.property
138
151
  def generic_mro_lookup(self) -> ta.Mapping[type, rfl.Type]:
139
- return col.unique_dict((check.not_none(rfl.get_concrete_type(g)), g) for g in self.generic_mro)
152
+ return col.unique_map((check.not_none(rfl.get_concrete_type(g)), g) for g in self.generic_mro)
140
153
 
141
154
  @cached.property
142
155
  def generic_replaced_field_types(self) -> ta.Mapping[str, rfl.Type]:
omlish/defs.py CHANGED
@@ -141,7 +141,7 @@ def hash_eq(cls_dct, *attrs):
141
141
  def __eq__(self, other): # noqa
142
142
  if type(other) is not type(self):
143
143
  return False
144
- for attr in attrs:
144
+ for attr in attrs: # noqa
145
145
  if getattr(self, attr) != getattr(other, attr):
146
146
  return False
147
147
  return True
omlish/diag/procfs.py CHANGED
@@ -15,6 +15,7 @@ from .. import iterators as it
15
15
  from .. import json
16
16
  from .. import lang
17
17
  from .. import os as oos
18
+ from .procstats import ProcStats
18
19
 
19
20
 
20
21
  log = logging.getLogger(__name__)
@@ -23,6 +24,9 @@ log = logging.getLogger(__name__)
23
24
  PidLike = int | str
24
25
 
25
26
 
27
+ ##
28
+
29
+
26
30
  RLIMIT_RESOURCES = {
27
31
  getattr(resource, k): k
28
32
  for k in dir(resource)
@@ -98,6 +102,9 @@ def _check_linux() -> None:
98
102
  raise OSError
99
103
 
100
104
 
105
+ ##
106
+
107
+
101
108
  def get_process_stats(pid: PidLike = 'self') -> list[str]:
102
109
  """http://man7.org/linux/man-pages/man5/proc.5.html -> /proc/[pid]/stat"""
103
110
 
@@ -109,6 +116,18 @@ def get_process_stats(pid: PidLike = 'self') -> list[str]:
109
116
  return [pid.strip(), comm, *r.strip().split(' ')]
110
117
 
111
118
 
119
+ def get_process_procstats(pid: int | None = None) -> ProcStats:
120
+ st = get_process_stats('self' if pid is None else pid)
121
+ return ProcStats(
122
+ pid=int(st[ProcStat.PID]),
123
+
124
+ rss=int(st[ProcStat.RSS]),
125
+ )
126
+
127
+
128
+ ##
129
+
130
+
112
131
  def get_process_chain(pid: PidLike = 'self') -> list[tuple[int, str]]:
113
132
  _check_linux()
114
133
  lst = []
@@ -148,6 +167,9 @@ def set_process_oom_score_adj(score: str, pid: PidLike = 'self') -> None:
148
167
  f.write(str(score))
149
168
 
150
169
 
170
+ ##
171
+
172
+
151
173
  MAP_LINE_RX = re.compile(
152
174
  r'^'
153
175
  r'(?P<address>[A-Fa-f0-9]+)-(?P<end_address>[A-Fa-f0-9]+)\s+'
@@ -156,7 +178,7 @@ MAP_LINE_RX = re.compile(
156
178
  r'(?P<device>\S+)\s+'
157
179
  r'(?P<inode>\d+)\s+'
158
180
  r'(?P<path>.*)'
159
- r'$'
181
+ r'$',
160
182
  )
161
183
 
162
184
 
@@ -198,6 +220,9 @@ def get_process_maps(pid: PidLike = 'self', sharing: bool = False) -> ta.Iterato
198
220
  yield d
199
221
 
200
222
 
223
+ ##
224
+
225
+
201
226
  PAGEMAP_KEYS = (
202
227
  'address',
203
228
  'pfn',
@@ -243,6 +268,9 @@ def get_process_pagemaps(pid: PidLike = 'self') -> ta.Iterable[dict[str, int]]:
243
268
  yield from get_process_range_pagemaps(m['address'], m['end_address'], pid)
244
269
 
245
270
 
271
+ ##
272
+
273
+
246
274
  def _dump_cmd(args: ta.Any) -> None:
247
275
  total = 0
248
276
  dirty_total = 0
@@ -0,0 +1,32 @@
1
+ import dataclasses as dc
2
+ import os
3
+ import typing as ta
4
+
5
+ from .. import lang
6
+
7
+
8
+ if ta.TYPE_CHECKING:
9
+ import psutil as _psutil
10
+ else:
11
+ _psutil = lang.proxy_import('psutil')
12
+
13
+
14
+ @dc.dataclass(frozen=True, kw_only=True)
15
+ class ProcStats:
16
+ pid: int
17
+
18
+ rss: int
19
+
20
+
21
+ def get_psutil_procstats(pid: int | None = None) -> ProcStats:
22
+ if pid is None:
23
+ pid = os.getpid()
24
+
25
+ proc = _psutil.Process(pid)
26
+ mi = proc.memory_info()
27
+
28
+ return ProcStats(
29
+ pid=pid,
30
+
31
+ rss=mi.rss,
32
+ )
@@ -74,7 +74,7 @@ class InteractiveSocketConsole:
74
74
  CPRT = 'Type "help", "copyright", "credits" or "license" for more information.'
75
75
 
76
76
  def interact(self, banner: str | None = None, exitmsg: str | None = None) -> None:
77
- log.info(f'Console {id(self)} on thread {threading.current_thread().ident} interacting')
77
+ log.info('Console %x on thread %r interacting', id(self), threading.current_thread().ident)
78
78
 
79
79
  try:
80
80
  ps1 = getattr(sys, 'ps1', '>>> ')
@@ -115,7 +115,7 @@ class InteractiveSocketConsole:
115
115
  pass
116
116
 
117
117
  finally:
118
- log.info(f'Console {id(self)} on thread {threading.current_thread().ident} finished')
118
+ log.info('Console %x on thread %r finished', id(self), threading.current_thread().ident)
119
119
 
120
120
  def push_line(self, line: str) -> bool:
121
121
  self._buffer.append(line)
@@ -208,7 +208,7 @@ class InteractiveSocketConsole:
208
208
  exec(code, self._locals)
209
209
  except SystemExit:
210
210
  raise
211
- except Exception:
211
+ except Exception: # noqa
212
212
  self.show_traceback()
213
213
  else:
214
214
  if self._count == self._write_count:
@@ -84,7 +84,7 @@ class ReplServer:
84
84
  with contextlib.closing(self._socket):
85
85
  self._socket.listen(1)
86
86
 
87
- log.info(f'Repl server listening on file {self._config.path}')
87
+ log.info('Repl server listening on file %s', self._config.path)
88
88
 
89
89
  self._is_running = True
90
90
  try:
@@ -94,7 +94,7 @@ class ReplServer:
94
94
  except TimeoutError:
95
95
  continue
96
96
 
97
- log.info(f'Got repl server connection on file {self._config.path}')
97
+ log.info('Got repl server connection on file %s', self._config.path)
98
98
 
99
99
  def run(conn):
100
100
  with contextlib.closing(conn):
@@ -104,9 +104,10 @@ class ReplServer:
104
104
  variables['__console__'] = console
105
105
 
106
106
  log.info(
107
- f'Starting console {id(console)} repl server connection '
108
- f'on file {self._config.path} '
109
- f'on thread {threading.current_thread().ident}',
107
+ 'Starting console %x repl server connection on file %s on thread %r',
108
+ id(console),
109
+ self._config.path,
110
+ threading.current_thread().ident,
110
111
  )
111
112
  self._consoles_by_threads[threading.current_thread()] = console
112
113
  console.interact()
omlish/diag/threads.py ADDED
@@ -0,0 +1,86 @@
1
+ import io
2
+ import itertools
3
+ import os
4
+ import signal
5
+ import sys
6
+ import threading
7
+ import time
8
+ import traceback
9
+ import typing as ta
10
+
11
+
12
+ _DEBUG_THREAD_COUNTER = itertools.count()
13
+
14
+
15
+ def create_thread_dump_thread(
16
+ *,
17
+ interval_s: float = 5.,
18
+ out: ta.TextIO = sys.stderr,
19
+ start: bool = False,
20
+ nodaemon: bool = False,
21
+ ) -> threading.Thread:
22
+ def dump():
23
+ cthr = threading.current_thread()
24
+ thrs_by_tid = {t.ident: t for t in threading.enumerate()}
25
+
26
+ buf = io.StringIO()
27
+ for tid, fr in sys._current_frames().items(): # noqa
28
+ if tid == cthr.ident:
29
+ continue
30
+
31
+ try:
32
+ thr = thrs_by_tid[tid]
33
+ except KeyError:
34
+ thr_rpr = repr(tid)
35
+ else:
36
+ thr_rpr = repr(thr)
37
+
38
+ tb = traceback.format_stack(fr)
39
+
40
+ buf.write(f'{thr_rpr}\n')
41
+ buf.write('\n'.join(l.strip() for l in tb))
42
+ buf.write('\n\n')
43
+
44
+ out.write(buf.getvalue())
45
+
46
+ def proc():
47
+ while True:
48
+ time.sleep(interval_s)
49
+ try:
50
+ dump()
51
+ except Exception as e: # noqa
52
+ out.write(repr(e) + '\n\n')
53
+
54
+ dthr = threading.Thread(
55
+ target=proc,
56
+ daemon=not nodaemon,
57
+ name=f'thread-dump-thread-{next(_DEBUG_THREAD_COUNTER)}',
58
+ )
59
+ if start:
60
+ dthr.start()
61
+ return dthr
62
+
63
+
64
+ def create_suicide_thread(
65
+ *,
66
+ sig: int = signal.SIGKILL,
67
+ interval_s: float = 1.,
68
+ parent_thread: threading.Thread | None = None,
69
+ start: bool = False,
70
+ ) -> threading.Thread:
71
+ if parent_thread is None:
72
+ parent_thread = threading.current_thread()
73
+
74
+ def proc():
75
+ while True:
76
+ parent_thread.join(interval_s)
77
+ if not parent_thread.is_alive():
78
+ os.kill(os.getpid(), sig)
79
+
80
+ dthr = threading.Thread(
81
+ target=proc,
82
+ name=f'suicide-thread-{next(_DEBUG_THREAD_COUNTER)}',
83
+ )
84
+ if start:
85
+ dthr.start()
86
+ return dthr
omlish/docker.py CHANGED
@@ -26,12 +26,16 @@ from . import json
26
26
  from . import lang
27
27
  from . import marshal as msh
28
28
 
29
+
29
30
  if ta.TYPE_CHECKING:
30
31
  import yaml
31
32
  else:
32
33
  yaml = lang.proxy_import('yaml')
33
34
 
34
35
 
36
+ ##
37
+
38
+
35
39
  @dc.dataclass(frozen=True)
36
40
  class PsItem(lang.Final):
37
41
  dc.metadata(msh.ObjectMetadata(
@@ -113,6 +117,9 @@ def cli_inspect(ids: list[str]) -> list[Inspect]:
113
117
  return msh.unmarshal(json.loads(o.decode()), list[Inspect])
114
118
 
115
119
 
120
+ ##
121
+
122
+
116
123
  class ComposeConfig:
117
124
  def __init__(
118
125
  self,
@@ -141,6 +148,18 @@ class ComposeConfig:
141
148
  return ret
142
149
 
143
150
 
151
+ def get_compose_port(cfg: ta.Mapping[str, ta.Any], default: int) -> int:
152
+ return check.single(
153
+ int(l)
154
+ for p in cfg['ports']
155
+ for l, r in [p.split(':')]
156
+ if int(r) == default
157
+ )
158
+
159
+
160
+ ##
161
+
162
+
144
163
  def timebomb_payload(delay_s: float, name: str = 'omlish-docker-timebomb') -> str:
145
164
  return (
146
165
  '('
omlish/dynamic.py CHANGED
@@ -49,7 +49,7 @@ class Var(ta.Generic[T]):
49
49
 
50
50
  def __init__(
51
51
  self,
52
- default: type[MISSING] | T = MISSING, # type: ignore
52
+ default: type[MISSING] | T = MISSING,
53
53
  *,
54
54
  new: ta.Callable[[], T] | type[MISSING] = MISSING,
55
55
  validate: ta.Callable[[T], None] | None = None,
@@ -138,7 +138,7 @@ class Var(ta.Generic[T]):
138
138
  frame = frame.f_back
139
139
 
140
140
  if self._new is not MISSING:
141
- yield self._new()
141
+ yield self._new() # type: ignore
142
142
 
143
143
  def __iter__(self) -> ta.Iterator[T]:
144
144
  return self.values
omlish/fnpairs.py CHANGED
@@ -1,3 +1,16 @@
1
+ """
2
+ TODO:
3
+ - objects
4
+ - csv
5
+ - csvloader
6
+ - cbor
7
+ - cloudpickle
8
+ - alt json backends
9
+ - compression
10
+ - snappy
11
+ - lz4
12
+ - wrapped (wait for usecase)
13
+ """
1
14
  import abc
2
15
  import codecs
3
16
  import dataclasses as dc
@@ -5,8 +18,9 @@ import typing as ta
5
18
 
6
19
  from . import lang
7
20
 
21
+
8
22
  if ta.TYPE_CHECKING:
9
- import bzip2 as _bzip2
23
+ import bz2 as _bz2
10
24
  import gzip as _gzip
11
25
  import json as _json
12
26
  import lzma as _lzma
@@ -14,8 +28,14 @@ if ta.TYPE_CHECKING:
14
28
  import struct as _struct
15
29
  import tomllib as _tomllib
16
30
 
31
+ import cloudpickle as _cloudpickle
32
+ import lz4.frame as _lz4_frame
33
+ import snappy as _snappy
34
+ import yaml as _yaml
35
+ import zstd as _zstd
36
+
17
37
  else:
18
- _bzip2 = lang.proxy_import('bzip2')
38
+ _bz2 = lang.proxy_import('bz2')
19
39
  _gzip = lang.proxy_import('gzip')
20
40
  _json = lang.proxy_import('json')
21
41
  _lzma = lang.proxy_import('lzma')
@@ -23,15 +43,20 @@ else:
23
43
  _struct = lang.proxy_import('struct')
24
44
  _tomllib = lang.proxy_import('tomllib')
25
45
 
26
- _zstd = lang.proxy_import('zstd')
27
- _yaml = lang.proxy_import('yaml')
46
+ _cloudpickle = lang.proxy_import('cloudpickle')
47
+ _lz4_frame = lang.proxy_import('lz4.frame')
48
+ _snappy = lang.proxy_import('snappy')
49
+ _yaml = lang.proxy_import('yaml')
50
+ _zstd = lang.proxy_import('zstd')
28
51
 
29
52
 
30
53
  ##
31
54
 
32
55
 
33
56
  F = ta.TypeVar('F')
57
+ F2 = ta.TypeVar('F2')
34
58
  T = ta.TypeVar('T')
59
+ T2 = ta.TypeVar('T2')
35
60
  U = ta.TypeVar('U')
36
61
 
37
62
 
@@ -44,6 +69,11 @@ class FnPair(ta.Generic[F, T], abc.ABC):
44
69
  def backward(self, t: T) -> F:
45
70
  raise NotImplementedError
46
71
 
72
+ ##
73
+
74
+ def __call__(self, f: F) -> T:
75
+ return self.forward(f)
76
+
47
77
  def invert(self) -> 'FnPair[T, F]':
48
78
  if isinstance(self, Inverted):
49
79
  return self.fp
@@ -76,6 +106,8 @@ Simple.__abstractmethods__ = frozenset() # noqa
76
106
 
77
107
  of = Simple
78
108
 
109
+ NOP: FnPair[ta.Any, ta.Any] = of(lang.identity, lang.identity)
110
+
79
111
 
80
112
  ##
81
113
 
@@ -91,9 +123,6 @@ class Inverted(FnPair[F, T]):
91
123
  return self.fp.forward(t)
92
124
 
93
125
 
94
- ##
95
-
96
-
97
126
  @dc.dataclass(frozen=True)
98
127
  class Composite(FnPair[F, T]):
99
128
  children: ta.Sequence[FnPair]
@@ -109,6 +138,14 @@ class Composite(FnPair[F, T]):
109
138
  return ta.cast(F, t)
110
139
 
111
140
 
141
+ def compose(*ps: FnPair) -> FnPair:
142
+ if not ps:
143
+ return NOP
144
+ if len(ps) == 1:
145
+ return ps[0]
146
+ return Composite(ps)
147
+
148
+
112
149
  ##
113
150
 
114
151
 
@@ -143,13 +180,13 @@ UTF8 = text('utf-8')
143
180
 
144
181
 
145
182
  @dc.dataclass(frozen=True)
146
- class Optional(FnPair[ta.Optional[F], ta.Optional[T]]):
183
+ class Optional(FnPair[F | None, T | None]):
147
184
  fp: FnPair[F, T]
148
185
 
149
- def forward(self, f: ta.Optional[F]) -> ta.Optional[T]:
186
+ def forward(self, f: F | None) -> T | None:
150
187
  return None if f is None else self.fp.forward(f)
151
188
 
152
- def backward(self, t: ta.Optional[T]) -> ta.Optional[F]:
189
+ def backward(self, t: T | None) -> F | None:
153
190
  return None if t is None else self.fp.backward(t)
154
191
 
155
192
 
@@ -177,6 +214,10 @@ def _register_extension(*ss):
177
214
  return inner
178
215
 
179
216
 
217
+ def get_for_extension(ext: str) -> FnPair:
218
+ return compose(*[_EXTENSION_REGISTRY[p]() for p in ext.split('.')])
219
+
220
+
180
221
  ##
181
222
 
182
223
 
@@ -184,28 +225,28 @@ class Compression(FnPair[bytes, bytes], abc.ABC):
184
225
  pass
185
226
 
186
227
 
187
- @_register_extension('gz')
228
+ @_register_extension('bz2')
188
229
  @dc.dataclass(frozen=True)
189
- class Gzip(Compression):
230
+ class Bz2(Compression):
190
231
  compresslevel: int = 9
191
232
 
192
233
  def forward(self, f: bytes) -> bytes:
193
- return _gzip.compress(f, compresslevel=self.compresslevel)
234
+ return _bz2.compress(f, compresslevel=self.compresslevel)
194
235
 
195
236
  def backward(self, t: bytes) -> bytes:
196
- return _gzip.decompress(t)
237
+ return _bz2.decompress(t)
197
238
 
198
239
 
199
- @_register_extension('bz2')
240
+ @_register_extension('gz')
200
241
  @dc.dataclass(frozen=True)
201
- class Bzip2(Compression):
242
+ class Gzip(Compression):
202
243
  compresslevel: int = 9
203
244
 
204
245
  def forward(self, f: bytes) -> bytes:
205
- return _bzip2.compress(f, compresslevel=self.compresslevel)
246
+ return _gzip.compress(f, compresslevel=self.compresslevel)
206
247
 
207
248
  def backward(self, t: bytes) -> bytes:
208
- return _bzip2.decompress(t)
249
+ return _gzip.decompress(t)
209
250
 
210
251
 
211
252
  @_register_extension('lzma')
@@ -220,6 +261,27 @@ class Lzma(Compression):
220
261
  #
221
262
 
222
263
 
264
+ @_register_extension('lz4')
265
+ @dc.dataclass(frozen=True)
266
+ class Lz4(Compression):
267
+ compression_level: int = 0
268
+
269
+ def forward(self, f: bytes) -> bytes:
270
+ return _lz4_frame.compress(f, compression_level=self.compression_level)
271
+
272
+ def backward(self, t: bytes) -> bytes:
273
+ return _lz4_frame.decompress(t)
274
+
275
+
276
+ @_register_extension('snappy')
277
+ class Snappy(Compression):
278
+ def forward(self, f: bytes) -> bytes:
279
+ return _snappy.compress(f)
280
+
281
+ def backward(self, t: bytes) -> bytes:
282
+ return _snappy.decompress(t)
283
+
284
+
223
285
  @_register_extension('zstd')
224
286
  class Zstd(Compression):
225
287
  def forward(self, f: bytes) -> bytes:
@@ -243,9 +305,32 @@ class Struct(FnPair[tuple, bytes]):
243
305
  return _struct.unpack(self.fmt, t)
244
306
 
245
307
 
308
+ ##
309
+
310
+
311
+ Object: ta.TypeAlias = FnPair[ta.Any, T]
312
+ ObjectStr: ta.TypeAlias = Object[str]
313
+ ObjectBytes: ta.TypeAlias = Object[bytes]
314
+
315
+
316
+ class Object_(FnPair[ta.Any, T], lang.Abstract): # noqa
317
+ pass
318
+
319
+
320
+ class ObjectStr_(Object_[str], lang.Abstract): # noqa
321
+ pass
322
+
323
+
324
+ class ObjectBytes_(Object_[bytes], lang.Abstract): # noqa
325
+ pass
326
+
327
+
328
+ #
329
+
330
+
246
331
  @_register_extension('pkl')
247
332
  @dc.dataclass(frozen=True)
248
- class Pickle(FnPair[ta.Any, bytes]):
333
+ class Pickle(ObjectBytes_):
249
334
  protocol: int | None = None
250
335
 
251
336
  def forward(self, f: ta.Any) -> bytes:
@@ -257,7 +342,7 @@ class Pickle(FnPair[ta.Any, bytes]):
257
342
 
258
343
  @_register_extension('json')
259
344
  @dc.dataclass(frozen=True)
260
- class Json(FnPair[ta.Any, str]):
345
+ class Json(ObjectStr_):
261
346
  indent: int | str | None = dc.field(default=None, kw_only=True)
262
347
  separators: tuple[str, str] | None = dc.field(default=None, kw_only=True)
263
348
 
@@ -283,7 +368,7 @@ class JsonLines(FnPair[ta.Sequence[ta.Any], str]):
283
368
 
284
369
 
285
370
  @_register_extension('toml')
286
- class Toml(FnPair[ta.Any, str]):
371
+ class Toml(ObjectStr_):
287
372
  def forward(self, f: ta.Any) -> str:
288
373
  raise NotImplementedError
289
374
 
@@ -294,8 +379,20 @@ class Toml(FnPair[ta.Any, str]):
294
379
  #
295
380
 
296
381
 
382
+ @_register_extension('cpkl')
383
+ @dc.dataclass(frozen=True)
384
+ class Cloudpickle(ObjectBytes_):
385
+ protocol: int | None = None
386
+
387
+ def forward(self, f: ta.Any) -> bytes:
388
+ return _cloudpickle.dumps(f, protocol=self.protocol)
389
+
390
+ def backward(self, t: bytes) -> ta.Any:
391
+ return _cloudpickle.loads(t)
392
+
393
+
297
394
  @_register_extension('yml', 'yaml')
298
- class Yaml(FnPair[ta.Any, str]):
395
+ class Yaml(ObjectStr_):
299
396
  def forward(self, f: ta.Any) -> str:
300
397
  return _yaml.dump(f)
301
398
 
@@ -303,9 +400,9 @@ class Yaml(FnPair[ta.Any, str]):
303
400
  return _yaml.safe_load(t)
304
401
 
305
402
 
306
- class UnsafeYaml(FnPair[ta.Any, str]):
403
+ class YamlUnsafe(ObjectStr_):
307
404
  def forward(self, f: ta.Any) -> str:
308
405
  return _yaml.dump(f)
309
406
 
310
407
  def backward(self, t: str) -> ta.Any:
311
- return _yaml.safe_load(t, loader=_yaml.FullLoader)
408
+ return _yaml.load(t, _yaml.FullLoader)