omlish 0.0.0.dev223__py3-none-any.whl → 0.0.0.dev225__py3-none-any.whl
- omlish/__about__.py +2 -2
- omlish/asyncs/asyncio/subprocesses.py +15 -15
- omlish/asyncs/asyncs.py +0 -1
- omlish/bootstrap/sys.py +2 -2
- omlish/dataclasses/impl/metaclass.py +5 -0
- omlish/http/coro/server.py +5 -54
- omlish/http/coro/simple.py +1 -1
- omlish/http/coro/sockets.py +59 -0
- omlish/http/handlers.py +52 -1
- omlish/lang/__init__.py +1 -0
- omlish/lang/imports.py +22 -0
- omlish/libc.py +10 -0
- omlish/lite/timing.py +8 -0
- omlish/logs/timing.py +58 -0
- omlish/multiprocessing/__init__.py +0 -7
- omlish/os/pidfiles/__init__.py +0 -0
- omlish/os/pidfiles/manager.py +97 -0
- omlish/os/pidfiles/pidfile.py +142 -0
- omlish/secrets/crypto.py +1 -2
- omlish/secrets/openssl.py +1 -1
- omlish/secrets/tempssl.py +40 -21
- omlish/sockets/handlers.py +4 -0
- omlish/sockets/server/handlers.py +22 -0
- omlish/subprocesses/__init__.py +0 -0
- omlish/subprocesses/async_.py +96 -0
- omlish/subprocesses/base.py +215 -0
- omlish/subprocesses/run.py +98 -0
- omlish/subprocesses/sync.py +147 -0
- omlish/subprocesses/utils.py +22 -0
- omlish/subprocesses/wrap.py +23 -0
- {omlish-0.0.0.dev223.dist-info → omlish-0.0.0.dev225.dist-info}/METADATA +1 -1
- {omlish-0.0.0.dev223.dist-info → omlish-0.0.0.dev225.dist-info}/RECORD +37 -26
- omlish/os/pidfile.py +0 -69
- omlish/subprocesses.py +0 -491
- /omlish/{multiprocessing → os}/death.py +0 -0
- {omlish-0.0.0.dev223.dist-info → omlish-0.0.0.dev225.dist-info}/LICENSE +0 -0
- {omlish-0.0.0.dev223.dist-info → omlish-0.0.0.dev225.dist-info}/WHEEL +0 -0
- {omlish-0.0.0.dev223.dist-info → omlish-0.0.0.dev225.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev223.dist-info → omlish-0.0.0.dev225.dist-info}/top_level.txt +0 -0
omlish/os/pidfiles/pidfile.py
ADDED
@@ -0,0 +1,142 @@
+# ruff: noqa: UP007
+# @omlish-lite
+"""
+TODO:
+ - reliable pid retrieval
+  - contents are *ignored*, just advisory
+  - check double-check:
+   - 1) get pid of flock holder
+   - 2) get pidfd to that
+   - 3) recheck current pid of flock holder == that pid
+  - racy as to if it's a different actual process as initial check, just with same pid, but due to 'identity' / semantic
+    meaning of the named pidfile the processes are considered equivalent
+"""
+import fcntl
+import os
+import signal
+import typing as ta
+
+
+##
+
+
+class Pidfile:
+    def __init__(
+            self,
+            path: str,
+            *,
+            inheritable: bool = True,
+    ) -> None:
+        super().__init__()
+
+        self._path = path
+        self._inheritable = inheritable
+
+    def __repr__(self) -> str:
+        return f'{self.__class__.__name__}({self._path!r})'
+
+    #
+
+    _f: ta.TextIO
+
+    def fileno(self) -> ta.Optional[int]:
+        if hasattr(self, '_f'):
+            return self._f.fileno()
+        else:
+            return None
+
+    #
+
+    def __enter__(self) -> 'Pidfile':
+        fd = os.open(self._path, os.O_RDWR | os.O_CREAT, 0o600)
+
+        try:
+            if self._inheritable:
+                os.set_inheritable(fd, True)
+
+            f = os.fdopen(fd, 'r+')
+
+        except Exception:
+            try:
+                os.close(fd)
+            except Exception:  # noqa
+                pass
+            raise
+
+        self._f = f
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+    #
+
+    def __getstate__(self):
+        state = self.__dict__.copy()
+
+        if '_f' in state:
+            if os.get_inheritable(fd := state.pop('_f').fileno()):
+                state['__fd'] = fd
+
+        return state
+
+    def __setstate__(self, state):
+        if '_f' in state:
+            raise RuntimeError
+
+        if '__fd' in state:
+            state['_f'] = os.fdopen(state.pop('__fd'), 'r+')
+
+        self.__dict__.update(state)
+
+    #
+
+    def close(self) -> bool:
+        if not hasattr(self, '_f'):
+            return False
+
+        self._f.close()
+        del self._f
+        return True
+
+    def try_lock(self) -> bool:
+        try:
+            fcntl.flock(self._f, fcntl.LOCK_EX | fcntl.LOCK_NB)
+            return True
+
+        except OSError:
+            return False
+
+    def ensure_locked(self) -> None:
+        if not self.try_lock():
+            raise RuntimeError('Could not get lock')
+
+    #
+
+    def write(self, pid: ta.Optional[int] = None) -> None:
+        self.ensure_locked()
+
+        if pid is None:
+            pid = os.getpid()
+
+        self._f.seek(0)
+        self._f.truncate()
+        self._f.write(f'{pid}\n')
+        self._f.flush()
+
+    def clear(self) -> None:
+        self.ensure_locked()
+
+        self._f.seek(0)
+        self._f.truncate()
+
+    def read(self) -> int:
+        if self.try_lock():
+            raise RuntimeError('Got lock')
+
+        self._f.seek(0)
+        return int(self._f.read())  # FIXME: could be empty or hold old value, race w proc start
+
+    def kill(self, sig: int = signal.SIGTERM) -> None:
+        pid = self.read()
+        os.kill(pid, sig)  # FIXME: Still racy - pidfd_send_signal?
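The new Pidfile class combines an O_RDWR|O_CREAT descriptor with a non-blocking exclusive flock: write()/clear() require taking the lock, while read()/kill() require that someone else already holds it. A minimal usage sketch based only on the methods above (the pidfile path and the daemon body are placeholders):

    from omlish.os.pidfiles.pidfile import Pidfile

    # Owning process: take the lock and publish our pid.
    with Pidfile('/tmp/mydaemon.pid') as pf:
        pf.write()      # ensure_locked() then writes f'{os.getpid()}\n'
        ...             # keep running; the flock is held while the file stays open

    # Controlling process (while the owner is alive): its own lock attempt fails,
    # so read() succeeds and kill() can signal the recorded pid.
    with Pidfile('/tmp/mydaemon.pid') as pf:
        pf.kill()       # equivalent to os.kill(pf.read(), signal.SIGTERM)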
omlish/secrets/crypto.py
CHANGED
@@ -76,7 +76,6 @@ class Crypto(abc.ABC):
 
 
 class FernetCrypto(Crypto):
-
     def generate_key(self) -> bytes:
         return cry_fernet.Fernet.generate_key()
 
@@ -98,7 +97,7 @@ class FernetCrypto(Crypto):
             raise DecryptionError from e
 
 
-class
+class AesgcmCrypto(Crypto):
     """https://stackoverflow.com/a/59835994"""
 
     def generate_key(self) -> bytes:
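Only the class statement changes here (the dev223 side of that line is truncated by the diff viewer). A minimal sketch touching the renamed class; only generate_key() appears in this hunk, so nothing else about its surface is assumed:

    from omlish.secrets.crypto import AesgcmCrypto

    crypto = AesgcmCrypto()
    key = crypto.generate_key()  # key material for the AES-GCM recipe linked in its docstring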
omlish/secrets/openssl.py
CHANGED
@@ -33,7 +33,7 @@ else:
 DEFAULT_KEY_SIZE = 64
 
 
-def generate_key(
+def generate_key(sz: int = DEFAULT_KEY_SIZE) -> bytes:
     # !! https://docs.openssl.org/3.0/man7/passphrase-encoding/
     # Must not contain null bytes!
     return secrets.token_hex(sz).encode('ascii')
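The new signature makes the key size an explicit argument with a module-level default. Because the body is secrets.token_hex(sz).encode('ascii'), the result is hex text, which is why the passphrase-encoding caveat about null bytes is satisfied; a small illustration restating only what the hunk shows:

    from omlish.secrets.openssl import DEFAULT_KEY_SIZE
    from omlish.secrets.openssl import generate_key

    key = generate_key()
    assert len(key) == 2 * DEFAULT_KEY_SIZE  # token_hex emits two hex chars per byte
    assert b'\x00' not in key                # no null bytes, per the OpenSSL passphrase note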
omlish/secrets/tempssl.py
CHANGED
@@ -1,10 +1,14 @@
 # @omlish-lite
 # ruff: noqa: UP006 UP007
+import dataclasses as dc
 import os.path
-import subprocess
 import tempfile
 import typing as ta
 
+from ..lite.cached import cached_nullary
+from ..subprocesses.run import SubprocessRun
+from ..subprocesses.run import SubprocessRunnable
+from ..subprocesses.run import SubprocessRunOutput
 from .ssl import SslCert
 
 
@@ -13,11 +17,15 @@ class TempSslCert(ta.NamedTuple):
     temp_dir: str
 
 
-
-
+@dc.dataclass(frozen=True)
+class TempSslCertGenerator(SubprocessRunnable[TempSslCert]):
+    @cached_nullary
+    def temp_dir(self) -> str:
+        return tempfile.mkdtemp()
 
-
-
+    @cached_nullary
+    def make_run(self) -> SubprocessRun:
+        return SubprocessRun.of(
             'openssl',
             'req',
             '-x509',
@@ -32,19 +40,30 @@ def generate_temp_localhost_ssl_cert() -> TempSslCert:
 
             '-subj', '/CN=localhost',
             '-addext', 'subjectAltName = DNS:localhost,IP:127.0.0.1',
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+            cwd=self.temp_dir(),
+            capture_output=True,
+            check=False,
+        )
+
+    def handle_run_output(self, proc: SubprocessRunOutput) -> TempSslCert:
+        if proc.returncode:
+            raise RuntimeError(f'Failed to generate temp ssl cert: {proc.stderr=}')
+
+        key_file = os.path.join(self.temp_dir(), 'key.pem')
+        cert_file = os.path.join(self.temp_dir(), 'cert.pem')
+        for file in [key_file, cert_file]:
+            if not os.path.isfile(file):
+                raise RuntimeError(f'Failed to generate temp ssl cert (file not found): {file}')
+
+        return TempSslCert(
+            SslCert(
+                key_file=key_file,
+                cert_file=cert_file,
+            ),
+            temp_dir=self.temp_dir(),
+        )
+
+
+def generate_temp_localhost_ssl_cert() -> TempSslCert:
+    return TempSslCertGenerator().run()
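A sketch of consuming the rewritten helper; the ssl wiring is plain standard library, and the surrounding test server is assumed:

    import ssl

    from omlish.secrets.tempssl import generate_temp_localhost_ssl_cert

    ssl_cert, temp_dir = generate_temp_localhost_ssl_cert()  # NamedTuple: (SslCert, temp_dir)

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    ctx.load_cert_chain(certfile=ssl_cert.cert_file, keyfile=ssl_cert.key_file)
    # temp_dir holds both PEM files; remove it once the test server is done.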
omlish/sockets/server/handlers.py
CHANGED
@@ -3,6 +3,7 @@
 import abc
 import concurrent.futures as cf
 import dataclasses as dc
+import logging
 import socket
 import typing as ta
 
@@ -132,3 +133,24 @@ class ExecutorSocketServerHandler(SocketServerHandler_):
 
     def __call__(self, conn: SocketAndAddress) -> None:
         self.executor.submit(self.handler, conn)
+
+
+#
+
+
+@dc.dataclass(frozen=True)
+class ExceptionLoggingSocketServerHandler(SocketServerHandler_):
+    handler: SocketServerHandler
+    log: logging.Logger
+
+    ignored: ta.Optional[ta.Container[ta.Type[Exception]]] = None
+
+    def __call__(self, conn: SocketAndAddress) -> None:
+        try:
+            return self.handler(conn)
+
+        except Exception as e:  # noqa
+            if (ignored := self.ignored) is None or type(e) not in ignored:
+                self.log.exception('Error in handler %r for conn %r', self.handler, conn)
+
+            raise
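A sketch of composing the new wrapper around an existing handler; the inner handler and logger here are placeholders, not part of the change:

    import logging

    def my_handler(conn):  # any SocketServerHandler-compatible callable
        ...  # handle the accepted connection

    handler = ExceptionLoggingSocketServerHandler(
        handler=my_handler,
        log=logging.getLogger(__name__),
        ignored=(ConnectionResetError,),  # still re-raised, just not logged
    )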
omlish/subprocesses/async_.py
ADDED
@@ -0,0 +1,96 @@
+# ruff: noqa: UP006 UP007
+# @omlish-lite
+import abc
+import sys
+import typing as ta
+
+from .base import BaseSubprocesses
+from .run import SubprocessRun
+from .run import SubprocessRunOutput
+
+
+##
+
+
+class AbstractAsyncSubprocesses(BaseSubprocesses):
+    @abc.abstractmethod
+    async def run_(self, run: SubprocessRun) -> SubprocessRunOutput:
+        raise NotImplementedError
+
+    def run(
+            self,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> ta.Awaitable[SubprocessRunOutput]:
+        return self.run_(SubprocessRun(
+            cmd=cmd,
+            input=input,
+            timeout=timeout,
+            check=check,
+            capture_output=capture_output,
+            kwargs=kwargs,
+        ))
+
+    #
+
+    @abc.abstractmethod
+    async def check_call(
+            self,
+            *cmd: str,
+            stdout: ta.Any = sys.stderr,
+            **kwargs: ta.Any,
+    ) -> None:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    async def check_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bytes:
+        raise NotImplementedError
+
+    #
+
+    async def check_output_str(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> str:
+        return (await self.check_output(*cmd, **kwargs)).decode().strip()
+
+    #
+
+    async def try_call(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bool:
+        if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
+            return False
+        else:
+            return True
+
+    async def try_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[bytes]:
+        if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
+            return None
+        else:
+            return ret
+
+    async def try_output_str(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[str]:
+        if (ret := await self.try_output(*cmd, **kwargs)) is None:
+            return None
+        else:
+            return ret.decode().strip()
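The try_* helpers route the checked calls through async_try_fn(), so the default try-exceptions (FileNotFoundError, CalledProcessError) come back as None rather than raising. A usage sketch, assuming some concrete implementation (such as the asyncio-backed one touched in omlish/asyncs/asyncio/subprocesses.py) is in hand:

    async def git_head(subprocesses: AbstractAsyncSubprocesses) -> str:
        # A missing git binary or a non-zero exit degrades to '<unknown>'.
        rev = await subprocesses.try_output_str('git', 'rev-parse', 'HEAD')
        return rev if rev is not None else '<unknown>'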
omlish/subprocesses/base.py
ADDED
@@ -0,0 +1,215 @@
+# ruff: noqa: UP006 UP007
+# @omlish-lite
+import abc
+import contextlib
+import logging
+import os
+import subprocess
+import time
+import typing as ta
+
+from .wrap import subprocess_maybe_shell_wrap_exec
+
+
+T = ta.TypeVar('T')
+SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias
+
+
+##
+
+
+# Valid channel type kwarg values:
+# - A special flag negative int
+# - A positive fd int
+# - A file-like object
+# - None
+
+SUBPROCESS_CHANNEL_OPTION_VALUES: ta.Mapping[SubprocessChannelOption, int] = {
+    'pipe': subprocess.PIPE,
+    'stdout': subprocess.STDOUT,
+    'devnull': subprocess.DEVNULL,
+}
+
+
+##
+
+
+class VerboseCalledProcessError(subprocess.CalledProcessError):
+    @classmethod
+    def from_std(cls, e: subprocess.CalledProcessError) -> 'VerboseCalledProcessError':
+        return cls(
+            e.returncode,
+            e.cmd,
+            output=e.output,
+            stderr=e.stderr,
+        )
+
+    def __str__(self) -> str:
+        msg = super().__str__()
+        if self.output is not None:
+            msg += f' Output: {self.output!r}'
+        if self.stderr is not None:
+            msg += f' Stderr: {self.stderr!r}'
+        return msg
+
+
+class BaseSubprocesses(abc.ABC):  # noqa
+    DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = None
+
+    def __init__(
+            self,
+            *,
+            log: ta.Optional[logging.Logger] = None,
+            try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
+    ) -> None:
+        super().__init__()
+
+        self._log = log if log is not None else self.DEFAULT_LOGGER
+        self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS
+
+    def set_logger(self, log: ta.Optional[logging.Logger]) -> None:
+        self._log = log
+
+    #
+
+    def prepare_args(
+            self,
+            *cmd: str,
+            env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
+            extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
+            quiet: bool = False,
+            shell: bool = False,
+            **kwargs: ta.Any,
+    ) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
+        if self._log:
+            self._log.debug('Subprocesses.prepare_args: cmd=%r', cmd)
+            if extra_env:
+                self._log.debug('Subprocesses.prepare_args: extra_env=%r', extra_env)
+
+        #
+
+        if extra_env:
+            env = {**(env if env is not None else os.environ), **extra_env}
+
+        #
+
+        if quiet and 'stderr' not in kwargs:
+            if self._log and not self._log.isEnabledFor(logging.DEBUG):
+                kwargs['stderr'] = subprocess.DEVNULL
+
+        for chk in ('stdout', 'stderr'):
+            try:
+                chv = kwargs[chk]
+            except KeyError:
+                continue
+            kwargs[chk] = SUBPROCESS_CHANNEL_OPTION_VALUES.get(chv, chv)
+
+        #
+
+        if not shell:
+            cmd = subprocess_maybe_shell_wrap_exec(*cmd)
+
+        #
+
+        return cmd, dict(
+            env=env,
+            shell=shell,
+            **kwargs,
+        )
+
+    @contextlib.contextmanager
+    def wrap_call(
+            self,
+            *cmd: ta.Any,
+            raise_verbose: bool = False,
+            **kwargs: ta.Any,
+    ) -> ta.Iterator[None]:
+        start_time = time.time()
+        try:
+            if self._log:
+                self._log.debug('Subprocesses.wrap_call.try: cmd=%r', cmd)
+
+            yield
+
+        except Exception as exc:  # noqa
+            if self._log:
+                self._log.debug('Subprocesses.wrap_call.except: exc=%r', exc)
+
+            if (
+                    raise_verbose and
+                    isinstance(exc, subprocess.CalledProcessError) and
+                    not isinstance(exc, VerboseCalledProcessError) and
+                    (exc.output is not None or exc.stderr is not None)
+            ):
+                raise VerboseCalledProcessError.from_std(exc) from exc
+
+            raise
+
+        finally:
+            end_time = time.time()
+            elapsed_s = end_time - start_time
+
+            if self._log:
+                self._log.debug('Subprocesses.wrap_call.finally: elapsed_s=%f cmd=%r', elapsed_s, cmd)
+
+    @contextlib.contextmanager
+    def prepare_and_wrap(
+            self,
+            *cmd: ta.Any,
+            raise_verbose: bool = False,
+            **kwargs: ta.Any,
+    ) -> ta.Iterator[ta.Tuple[
+        ta.Tuple[ta.Any, ...],
+        ta.Dict[str, ta.Any],
+    ]]:
+        cmd, kwargs = self.prepare_args(*cmd, **kwargs)
+
+        with self.wrap_call(
+                *cmd,
+                raise_verbose=raise_verbose,
+                **kwargs,
+        ):
+            yield cmd, kwargs
+
+    #
+
+    DEFAULT_TRY_EXCEPTIONS: ta.Tuple[ta.Type[Exception], ...] = (
+        FileNotFoundError,
+        subprocess.CalledProcessError,
+    )
+
+    def try_fn(
+            self,
+            fn: ta.Callable[..., T],
+            *cmd: str,
+            try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
+            **kwargs: ta.Any,
+    ) -> ta.Union[T, Exception]:
+        if try_exceptions is None:
+            try_exceptions = self._try_exceptions
+
+        try:
+            return fn(*cmd, **kwargs)
+
+        except try_exceptions as e:  # noqa
+            if self._log and self._log.isEnabledFor(logging.DEBUG):
+                self._log.exception('command failed')
+            return e
+
+    async def async_try_fn(
+            self,
+            fn: ta.Callable[..., ta.Awaitable[T]],
+            *cmd: ta.Any,
+            try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
+            **kwargs: ta.Any,
+    ) -> ta.Union[T, Exception]:
+        if try_exceptions is None:
+            try_exceptions = self._try_exceptions
+
+        try:
+            return await fn(*cmd, **kwargs)
+
+        except try_exceptions as e:  # noqa
+            if self._log and self._log.isEnabledFor(logging.DEBUG):
+                self._log.exception('command failed')
+            return e
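prepare_and_wrap() composes the two pieces above: prepare_args() normalizes env/extra_env, maps the 'pipe'/'stdout'/'devnull' channel names, and shell-wraps the command, while wrap_call() adds debug logging, timing, and the optional VerboseCalledProcessError upgrade. A plausible sketch of how a concrete synchronous subclass might build on it (the real one lives in omlish/subprocesses/sync.py, which is not shown here):

    import subprocess
    import sys
    import typing as ta


    class SketchSubprocesses(BaseSubprocesses):
        # Minimal check_call: argument preparation, logging, timing, and
        # verbose error upgrading all come from the base context manager.
        def check_call(self, *cmd: str, stdout: ta.Any = sys.stderr, **kwargs: ta.Any) -> None:
            with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs):
                subprocess.check_call(cmd, **kwargs)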
omlish/subprocesses/run.py
ADDED
@@ -0,0 +1,98 @@
+# ruff: noqa: UP006 UP007
+# @omlish-lite
+import abc
+import dataclasses as dc
+import typing as ta
+
+from ..lite.check import check
+
+
+T = ta.TypeVar('T')
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class SubprocessRunOutput(ta.Generic[T]):
+    proc: T
+
+    returncode: int  # noqa
+
+    stdout: ta.Optional[bytes] = None
+    stderr: ta.Optional[bytes] = None
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class SubprocessRun:
+    cmd: ta.Sequence[str]
+    input: ta.Any = None
+    timeout: ta.Optional[float] = None
+    check: bool = False
+    capture_output: ta.Optional[bool] = None
+    kwargs: ta.Optional[ta.Mapping[str, ta.Any]] = None
+
+    @classmethod
+    def of(
+            cls,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,  # noqa
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> 'SubprocessRun':
+        return cls(
+            cmd=cmd,
+            input=input,
+            timeout=timeout,
+            check=check,
+            capture_output=capture_output,
+            kwargs=kwargs,
+        )
+
+    #
+
+    _DEFAULT_SUBPROCESSES: ta.ClassVar[ta.Optional[ta.Any]] = None  # AbstractSubprocesses
+
+    def run(
+            self,
+            subprocesses: ta.Optional[ta.Any] = None,  # AbstractSubprocesses
+    ) -> SubprocessRunOutput:
+        if subprocesses is None:
+            subprocesses = self._DEFAULT_SUBPROCESSES
+        return check.not_none(subprocesses).run_(self)  # type: ignore[attr-defined]
+
+    _DEFAULT_ASYNC_SUBPROCESSES: ta.ClassVar[ta.Optional[ta.Any]] = None  # AbstractAsyncSubprocesses
+
+    async def async_run(
+            self,
+            async_subprocesses: ta.Optional[ta.Any] = None,  # AbstractAsyncSubprocesses
+    ) -> SubprocessRunOutput:
+        if async_subprocesses is None:
+            async_subprocesses = self._DEFAULT_ASYNC_SUBPROCESSES
+        return await check.not_none(async_subprocesses).run_(self)  # type: ignore[attr-defined]
+
+
+##
+
+
+class SubprocessRunnable(abc.ABC, ta.Generic[T]):
+    @abc.abstractmethod
+    def make_run(self) -> SubprocessRun:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def handle_run_output(self, output: SubprocessRunOutput) -> T:
+        raise NotImplementedError
+
+    #
+
+    def run(self, subprocesses: ta.Optional[ta.Any] = None) -> T:  # AbstractSubprocesses
+        return self.handle_run_output(self.make_run().run(subprocesses))
+
+    async def async_run(self, async_subprocesses: ta.Optional[ta.Any] = None) -> T:  # AbstractAsyncSubprocesses
+        return self.handle_run_output(await self.make_run().async_run(async_subprocesses))
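SubprocessRunnable is the pattern TempSslCertGenerator follows earlier in this diff: make_run() declares the command, handle_run_output() interprets it, and the same object can then be driven synchronously or asynchronously. A small hypothetical runner as a second illustration:

    import dataclasses as dc


    @dc.dataclass(frozen=True)
    class UnameRunner(SubprocessRunnable[str]):
        # Hypothetical: capture `uname -a` and return it as text.
        def make_run(self) -> SubprocessRun:
            return SubprocessRun.of('uname', '-a', capture_output=True, check=True)

        def handle_run_output(self, output: SubprocessRunOutput) -> str:
            return (output.stdout or b'').decode().strip()


    # UnameRunner().run() uses SubprocessRun._DEFAULT_SUBPROCESSES;
    # await UnameRunner().async_run() uses the async default.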