ominfra-0.0.0.dev157-py3-none-any.whl → ominfra-0.0.0.dev159-py3-none-any.whl
- ominfra/clouds/aws/journald2aws/main.py +1 -1
- ominfra/journald/tailer.py +2 -2
- ominfra/manage/bootstrap_.py +1 -1
- ominfra/manage/commands/subprocess.py +4 -4
- ominfra/manage/deploy/apps.py +23 -21
- ominfra/manage/deploy/atomics.py +207 -0
- ominfra/manage/deploy/config.py +3 -0
- ominfra/manage/deploy/git.py +27 -47
- ominfra/manage/deploy/inject.py +11 -0
- ominfra/manage/deploy/paths.py +89 -51
- ominfra/manage/deploy/specs.py +42 -0
- ominfra/manage/deploy/tmp.py +46 -0
- ominfra/manage/deploy/types.py +1 -0
- ominfra/manage/deploy/venvs.py +16 -6
- ominfra/manage/remote/spawning.py +3 -3
- ominfra/manage/system/packages.py +1 -1
- ominfra/pyremote.py +26 -26
- ominfra/scripts/journald2aws.py +467 -354
- ominfra/scripts/manage.py +1426 -1037
- ominfra/scripts/supervisor.py +359 -336
- ominfra/supervisor/http.py +1 -1
- ominfra/supervisor/main.py +2 -2
- {ominfra-0.0.0.dev157.dist-info → ominfra-0.0.0.dev159.dist-info}/METADATA +3 -3
- {ominfra-0.0.0.dev157.dist-info → ominfra-0.0.0.dev159.dist-info}/RECORD +28 -25
- {ominfra-0.0.0.dev157.dist-info → ominfra-0.0.0.dev159.dist-info}/LICENSE +0 -0
- {ominfra-0.0.0.dev157.dist-info → ominfra-0.0.0.dev159.dist-info}/WHEEL +0 -0
- {ominfra-0.0.0.dev157.dist-info → ominfra-0.0.0.dev159.dist-info}/entry_points.txt +0 -0
- {ominfra-0.0.0.dev157.dist-info → ominfra-0.0.0.dev159.dist-info}/top_level.txt +0 -0
ominfra/scripts/manage.py
CHANGED
@@ -24,6 +24,7 @@ import decimal
 import enum
 import fractions
 import functools
+import hashlib
 import inspect
 import itertools
 import json
@@ -41,6 +42,7 @@ import string
 import struct
 import subprocess
 import sys
+import tempfile
 import threading
 import time
 import traceback
@@ -98,9 +100,13 @@ CallableVersionOperator = ta.Callable[['Version', str], bool]
 CommandT = ta.TypeVar('CommandT', bound='Command')
 CommandOutputT = ta.TypeVar('CommandOutputT', bound='Command.Output')
 
+# deploy/atomics.py
+DeployAtomicPathSwapKind = ta.Literal['dir', 'file']
+DeployAtomicPathSwapState = ta.Literal['open', 'committed', 'aborted']  # ta.TypeAlias
+
 # deploy/paths.py
 DeployPathKind = ta.Literal['dir', 'file']  # ta.TypeAlias
-
+DeployPathPlaceholder = ta.Literal['app', 'tag']  # ta.TypeAlias
 
 # ../../omlish/argparse/cli.py
 ArgparseCommandFn = ta.Callable[[], ta.Optional[int]]  # ta.TypeAlias
@@ -118,7 +124,7 @@ InjectorBindingOrBindings = ta.Union['InjectorBinding', 'InjectorBindings']
 # ../configs.py
 ConfigMapping = ta.Mapping[str, ta.Any]
 
-# ../../omlish/
+# ../../omlish/subprocesses.py
 SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias
 
 # system/packages.py
@@ -1365,6 +1371,9 @@ class MainConfig:
 # ../deploy/config.py
 
 
+##
+
+
 @dc.dataclass(frozen=True)
 class DeployConfig:
     deploy_home: ta.Optional[str] = None
@@ -1379,6 +1388,7 @@ DeployHome = ta.NewType('DeployHome', str)
 DeployApp = ta.NewType('DeployApp', str)
 DeployTag = ta.NewType('DeployTag', str)
 DeployRev = ta.NewType('DeployRev', str)
+DeployKey = ta.NewType('DeployKey', str)
 
 
 class DeployAppTag(ta.NamedTuple):
@@ -1539,7 +1549,7 @@ def _pyremote_bootstrap_main(context_name: str) -> None:
     # Get pid
     pid = os.getpid()
 
-    # Two copies of
+    # Two copies of payload src to be sent to parent
     r0, w0 = os.pipe()
     r1, w1 = os.pipe()
 
@@ -1578,17 +1588,17 @@ def _pyremote_bootstrap_main(context_name: str) -> None:
     # Write pid
     os.write(1, struct.pack('<Q', pid))
 
-    # Read
-
-    if len(
+    # Read payload src from stdin
+    payload_z_len = struct.unpack('<I', os.read(0, 4))[0]
+    if len(payload_z := os.fdopen(0, 'rb').read(payload_z_len)) != payload_z_len:
         raise EOFError
-
+    payload_src = zlib.decompress(payload_z)
 
-    # Write both copies of
-    # and block and need to be drained by pyremote_bootstrap_finalize running in parent.
+    # Write both copies of payload src. Must write to w0 (parent stdin) before w1 (copy pipe) as pipe will likely
+    # fill and block and need to be drained by pyremote_bootstrap_finalize running in parent.
     for w in [w0, w1]:
         fp = os.fdopen(w, 'wb', 0)
-        fp.write(
+        fp.write(payload_src)
         fp.close()
 
     # Write second ack
@@ -1652,7 +1662,7 @@ class PyremotePayloadRuntime:
     input: ta.BinaryIO
    output: ta.BinaryIO
     context_name: str
-
+    payload_src: str
     options: PyremoteBootstrapOptions
     env_info: PyremoteEnvInfo
 
@@ -1660,9 +1670,9 @@ class PyremotePayloadRuntime:
 def pyremote_bootstrap_finalize() -> PyremotePayloadRuntime:
     # If src file var is not present we need to do initial finalization
     if _PYREMOTE_BOOTSTRAP_SRC_FILE_VAR not in os.environ:
-        # Read second copy of
+        # Read second copy of payload src
         r1 = os.fdopen(_PYREMOTE_BOOTSTRAP_SRC_FD, 'rb', 0)
-
+        payload_src = r1.read().decode('utf-8')
         r1.close()
 
         # Reap boostrap child. Must be done after reading second copy of source because source may be too big to fit in
@@ -1680,7 +1690,7 @@ def pyremote_bootstrap_finalize() -> PyremotePayloadRuntime:
         # Write temp source file
         import tempfile
         tfd, tfn = tempfile.mkstemp('-pyremote.py')
-        os.write(tfd,
+        os.write(tfd, payload_src.encode('utf-8'))
         os.close(tfd)
 
         # Set vars
@@ -1699,7 +1709,7 @@ def pyremote_bootstrap_finalize() -> PyremotePayloadRuntime:
 
         # Read temp source file
         with open(os.environ.pop(_PYREMOTE_BOOTSTRAP_SRC_FILE_VAR)) as sf:
-
+            payload_src = sf.read()
 
         # Restore vars
         sys.executable = os.environ.pop(_PYREMOTE_BOOTSTRAP_ARGV0_VAR)
@@ -1732,7 +1742,7 @@ def pyremote_bootstrap_finalize() -> PyremotePayloadRuntime:
         input=input,
         output=output,
         context_name=context_name,
-
+        payload_src=payload_src,
         options=options,
         env_info=env_info,
     )
@@ -1744,31 +1754,31 @@ def pyremote_bootstrap_finalize() -> PyremotePayloadRuntime:
 class PyremoteBootstrapDriver:
     def __init__(
             self,
-
+            payload_src: ta.Union[str, ta.Sequence[str]],
             options: PyremoteBootstrapOptions = PyremoteBootstrapOptions(),
     ) -> None:
         super().__init__()
 
-        self.
+        self._payload_src = payload_src
         self._options = options
 
-        self.
-        self.
+        self._prepared_payload_src = self._prepare_payload_src(payload_src, options)
+        self._payload_z = zlib.compress(self._prepared_payload_src.encode('utf-8'))
 
         self._options_json = json.dumps(dc.asdict(options), indent=None, separators=(',', ':')).encode('utf-8')  # noqa
    #
 
     @classmethod
-    def
+    def _prepare_payload_src(
             cls,
-
+            payload_src: ta.Union[str, ta.Sequence[str]],
             options: PyremoteBootstrapOptions,
     ) -> str:
         parts: ta.List[str]
-        if isinstance(
-            parts = [
+        if isinstance(payload_src, str):
+            parts = [payload_src]
         else:
-            parts = list(
+            parts = list(payload_src)
 
         if (mn := options.main_name_override) is not None:
             parts.insert(0, f'__name__ = {mn!r}')
@@ -1804,9 +1814,9 @@ class PyremoteBootstrapDriver:
         d = yield from self._read(8)
         pid = struct.unpack('<Q', d)[0]
 
-        # Write
-        yield from self._write(struct.pack('<I', len(self.
-        yield from self._write(self.
+        # Write payload src
+        yield from self._write(struct.pack('<I', len(self._payload_z)))
+        yield from self._write(self._payload_z)
 
         # Read second ack (after writing src copies)
         yield from self._expect(_PYREMOTE_BOOTSTRAP_ACK1)
@@ -2540,6 +2550,13 @@ json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON
 json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
 
 
+########################################
+# ../../../omlish/lite/logs.py
+
+
+log = logging.getLogger(__name__)
+
+
 ########################################
 # ../../../omlish/lite/maybes.py
 
@@ -2754,6 +2771,116 @@ def format_num_bytes(num_bytes: int) -> str:
     return f'{num_bytes / 1024 ** (len(FORMAT_NUM_BYTES_SUFFIXES) - 1):.2f}{FORMAT_NUM_BYTES_SUFFIXES[-1]}'
 
 
+########################################
+# ../../../omlish/logs/filters.py
+
+
+class TidLogFilter(logging.Filter):
+    def filter(self, record):
+        record.tid = threading.get_native_id()
+        return True
+
+
+########################################
+# ../../../omlish/logs/proxy.py
+
+
+class ProxyLogFilterer(logging.Filterer):
+    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
+        self._underlying = underlying
+
+    @property
+    def underlying(self) -> logging.Filterer:
+        return self._underlying
+
+    @property
+    def filters(self):
+        return self._underlying.filters
+
+    @filters.setter
+    def filters(self, filters):
+        self._underlying.filters = filters
+
+    def addFilter(self, filter):  # noqa
+        self._underlying.addFilter(filter)
+
+    def removeFilter(self, filter):  # noqa
+        self._underlying.removeFilter(filter)
+
+    def filter(self, record):
+        return self._underlying.filter(record)
+
+
+class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
+    def __init__(self, underlying: logging.Handler) -> None:  # noqa
+        ProxyLogFilterer.__init__(self, underlying)
+
+    _underlying: logging.Handler
+
+    @property
+    def underlying(self) -> logging.Handler:
+        return self._underlying
+
+    def get_name(self):
+        return self._underlying.get_name()
+
+    def set_name(self, name):
+        self._underlying.set_name(name)
+
+    @property
+    def name(self):
+        return self._underlying.name
+
+    @property
+    def level(self):
+        return self._underlying.level
+
+    @level.setter
+    def level(self, level):
+        self._underlying.level = level
+
+    @property
+    def formatter(self):
+        return self._underlying.formatter
+
+    @formatter.setter
+    def formatter(self, formatter):
+        self._underlying.formatter = formatter
+
+    def createLock(self):
+        self._underlying.createLock()
+
+    def acquire(self):
+        self._underlying.acquire()
+
+    def release(self):
+        self._underlying.release()
+
+    def setLevel(self, level):
+        self._underlying.setLevel(level)
+
+    def format(self, record):
+        return self._underlying.format(record)
+
+    def emit(self, record):
+        self._underlying.emit(record)
+
+    def handle(self, record):
+        return self._underlying.handle(record)
+
+    def setFormatter(self, fmt):
+        self._underlying.setFormatter(fmt)
+
+    def flush(self):
+        self._underlying.flush()
+
+    def close(self):
+        self._underlying.close()
+
+    def handleError(self, record):
+        self._underlying.handleError(record)
+
+
 ########################################
 # ../../../omlish/os/deathsig.py
 
@@ -3933,28 +4060,228 @@ def build_command_name_map(crs: CommandRegistrations) -> CommandNameMap:
     return CommandNameMap(dct)
 
 
+########################################
+# ../deploy/atomics.py
+
+
+##
+
+
+class DeployAtomicPathSwap(abc.ABC):
+    def __init__(
+            self,
+            kind: DeployAtomicPathSwapKind,
+            dst_path: str,
+            *,
+            auto_commit: bool = False,
+    ) -> None:
+        super().__init__()
+
+        self._kind = kind
+        self._dst_path = dst_path
+        self._auto_commit = auto_commit
+
+        self._state: DeployAtomicPathSwapState = 'open'
+
+    def __repr__(self) -> str:
+        return attr_repr(self, 'kind', 'dst_path', 'tmp_path')
+
+    @property
+    def kind(self) -> DeployAtomicPathSwapKind:
+        return self._kind
+
+    @property
+    def dst_path(self) -> str:
+        return self._dst_path
+
+    @property
+    @abc.abstractmethod
+    def tmp_path(self) -> str:
+        raise NotImplementedError
+
+    #
+
+    @property
+    def state(self) -> DeployAtomicPathSwapState:
+        return self._state
+
+    def _check_state(self, *states: DeployAtomicPathSwapState) -> None:
+        if self._state not in states:
+            raise RuntimeError(f'Atomic path swap not in correct state: {self._state}, {states}')
+
+    #
+
+    @abc.abstractmethod
+    def _commit(self) -> None:
+        raise NotImplementedError
+
+    def commit(self) -> None:
+        if self._state == 'committed':
+            return
+        self._check_state('open')
+        try:
+            self._commit()
+        except Exception:  # noqa
+            self._abort()
+            raise
+        else:
+            self._state = 'committed'
+
+    #
+
+    @abc.abstractmethod
+    def _abort(self) -> None:
+        raise NotImplementedError
+
+    def abort(self) -> None:
+        if self._state == 'aborted':
+            return
+        self._abort()
+        self._state = 'aborted'
+
+    #
+
+    def __enter__(self) -> 'DeployAtomicPathSwap':
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if (
+                exc_type is None and
+                self._auto_commit and
+                self._state == 'open'
+        ):
+            self.commit()
+        else:
+            self.abort()
+
+
+#
+
+
+class DeployAtomicPathSwapping(abc.ABC):
+    @abc.abstractmethod
+    def begin_atomic_path_swap(
+            self,
+            kind: DeployAtomicPathSwapKind,
+            dst_path: str,
+            *,
+            name_hint: ta.Optional[str] = None,
+            make_dirs: bool = False,
+            **kwargs: ta.Any,
+    ) -> DeployAtomicPathSwap:
+        raise NotImplementedError
+
+
+##
+
+
+class OsRenameDeployAtomicPathSwap(DeployAtomicPathSwap):
+    def __init__(
+            self,
+            kind: DeployAtomicPathSwapKind,
+            dst_path: str,
+            tmp_path: str,
+            **kwargs: ta.Any,
+    ) -> None:
+        if kind == 'dir':
+            check.state(os.path.isdir(tmp_path))
+        elif kind == 'file':
+            check.state(os.path.isfile(tmp_path))
+        else:
+            raise TypeError(kind)
+
+        super().__init__(
+            kind,
+            dst_path,
+            **kwargs,
+        )
+
+        self._tmp_path = tmp_path
+
+    @property
+    def tmp_path(self) -> str:
+        return self._tmp_path
+
+    def _commit(self) -> None:
+        os.rename(self._tmp_path, self._dst_path)
+
+    def _abort(self) -> None:
+        shutil.rmtree(self._tmp_path, ignore_errors=True)
+
+
+class TempDirDeployAtomicPathSwapping(DeployAtomicPathSwapping):
+    def __init__(
+            self,
+            *,
+            temp_dir: ta.Optional[str] = None,
+            root_dir: ta.Optional[str] = None,
+    ) -> None:
+        super().__init__()
+
+        if root_dir is not None:
+            root_dir = os.path.abspath(root_dir)
+        self._root_dir = root_dir
+        self._temp_dir = temp_dir
+
+    def begin_atomic_path_swap(
+            self,
+            kind: DeployAtomicPathSwapKind,
+            dst_path: str,
+            *,
+            name_hint: ta.Optional[str] = None,
+            make_dirs: bool = False,
+            **kwargs: ta.Any,
+    ) -> DeployAtomicPathSwap:
+        dst_path = os.path.abspath(dst_path)
+        if self._root_dir is not None and not dst_path.startswith(check.non_empty_str(self._root_dir)):
+            raise RuntimeError(f'Atomic path swap dst must be in root dir: {dst_path}, {self._root_dir}')
+
+        dst_dir = os.path.dirname(dst_path)
+        if make_dirs:
+            os.makedirs(dst_dir, exist_ok=True)
+        if not os.path.isdir(dst_dir):
+            raise RuntimeError(f'Atomic path swap dst dir does not exist: {dst_dir}')
+
+        if kind == 'dir':
+            tmp_path = tempfile.mkdtemp(prefix=name_hint, dir=self._temp_dir)
+        elif kind == 'file':
+            fd, tmp_path = tempfile.mkstemp(prefix=name_hint, dir=self._temp_dir)
+            os.close(fd)
+        else:
+            raise TypeError(kind)
+
+        return OsRenameDeployAtomicPathSwap(
+            kind,
+            dst_path,
+            tmp_path,
+            **kwargs,
+        )
+
+
 ########################################
 # ../deploy/paths.py
 """
 ~deploy
 deploy.pid (flock)
 /app
-/<
+/<appplaceholder> - shallow clone
 /conf
 /env
-<
+<appplaceholder>.env
 /nginx
-<
+<appplaceholder>.conf
 /supervisor
-<
+<appplaceholder>.conf
 /venv
-/<
+/<appplaceholder>
+
+/tmp
 
 ?
 /logs
-/wrmsr--omlish--<
+/wrmsr--omlish--<placeholder>
 
-
+placeholder = <name>--<rev>--<when>
 
 ==
 
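Note: the DeployAtomicPathSwap classes added in the hunk above are context managers. A minimal usage sketch follows (the root directory, destination path, and file contents are hypothetical and not part of the diff):

    # Hypothetical example of the atomic-swap API added above.
    swapping = TempDirDeployAtomicPathSwapping(root_dir='/opt/deploy')
    with swapping.begin_atomic_path_swap(
            'file',
            '/opt/deploy/conf/env/myapp.env',
            make_dirs=True,
            auto_commit=True,
    ) as swap:
        # Write the new content to the temp path; nothing touches dst_path yet.
        with open(swap.tmp_path, 'w') as f:
            f.write('PORT=8080\n')
    # On a clean exit the temp file is os.rename()'d over dst_path; on an exception the swap is aborted and the temp path removed.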
@@ -3975,10 +4302,10 @@ for dn in [
 ##
 
 
-
-
+DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER = '@'
+DEPLOY_PATH_PLACEHOLDER_SEPARATORS = '-.'
 
-
+DEPLOY_PATH_PLACEHOLDERS: ta.FrozenSet[str] = frozenset([
     'app',
     'tag',  # <rev>-<dt>
 ])
@@ -3996,7 +4323,7 @@ class DeployPathPart(abc.ABC):  # noqa
         raise NotImplementedError
 
     @abc.abstractmethod
-    def render(self,
+    def render(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
         raise NotImplementedError
 
 
@@ -4010,9 +4337,9 @@ class DirDeployPathPart(DeployPathPart, abc.ABC):
 
     @classmethod
     def parse(cls, s: str) -> 'DirDeployPathPart':
-        if
-            check.equal(s[0],
-            return
+        if DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER in s:
+            check.equal(s[0], DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER)
+            return PlaceholderDirDeployPathPart(s[1:])
         else:
             return ConstDirDeployPathPart(s)
 
@@ -4024,13 +4351,13 @@ class FileDeployPathPart(DeployPathPart, abc.ABC):
 
     @classmethod
     def parse(cls, s: str) -> 'FileDeployPathPart':
-        if
-            check.equal(s[0],
-            if not any(c in s for c in
-                return
+        if DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER in s:
+            check.equal(s[0], DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER)
+            if not any(c in s for c in DEPLOY_PATH_PLACEHOLDER_SEPARATORS):
+                return PlaceholderFileDeployPathPart(s[1:], '')
             else:
-                p = min(f for c in
-                return
+                p = min(f for c in DEPLOY_PATH_PLACEHOLDER_SEPARATORS if (f := s.find(c)) > 0)
+                return PlaceholderFileDeployPathPart(s[1:p], s[p:])
         else:
             return ConstFileDeployPathPart(s)
 
@@ -4045,9 +4372,9 @@ class ConstDeployPathPart(DeployPathPart, abc.ABC):
     def __post_init__(self) -> None:
         check.non_empty_str(self.name)
         check.not_in('/', self.name)
-        check.not_in(
+        check.not_in(DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER, self.name)
 
-    def render(self,
+    def render(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
         return self.name
 
 
@@ -4063,40 +4390,40 @@ class ConstFileDeployPathPart(ConstDeployPathPart, FileDeployPathPart):
 
 
 @dc.dataclass(frozen=True)
-class
-
+class PlaceholderDeployPathPart(DeployPathPart, abc.ABC):
+    placeholder: str  # DeployPathPlaceholder
 
     def __post_init__(self) -> None:
-        check.non_empty_str(self.
-        for c in [*
-            check.not_in(c, self.
-        check.in_(self.
-
-    def
-        if
-            return
+        check.non_empty_str(self.placeholder)
+        for c in [*DEPLOY_PATH_PLACEHOLDER_SEPARATORS, DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER, '/']:
+            check.not_in(c, self.placeholder)
+        check.in_(self.placeholder, DEPLOY_PATH_PLACEHOLDERS)
+
+    def _render_placeholder(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
+        if placeholders is not None:
+            return placeholders[self.placeholder]  # type: ignore
         else:
-            return
+            return DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER + self.placeholder
 
 
 @dc.dataclass(frozen=True)
-class
-    def render(self,
-        return self.
+class PlaceholderDirDeployPathPart(PlaceholderDeployPathPart, DirDeployPathPart):
+    def render(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
+        return self._render_placeholder(placeholders)
 
 
 @dc.dataclass(frozen=True)
-class
+class PlaceholderFileDeployPathPart(PlaceholderDeployPathPart, FileDeployPathPart):
     suffix: str
 
     def __post_init__(self) -> None:
         super().__post_init__()
         if self.suffix:
-            for c in [
+            for c in [DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER, '/']:
                 check.not_in(c, self.suffix)
 
-    def render(self,
-        return self.
+    def render(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
+        return self._render_placeholder(placeholders) + self.suffix
 
 
 ##
@@ -4107,28 +4434,30 @@ class DeployPath:
     parts: ta.Sequence[DeployPathPart]
 
     def __post_init__(self) -> None:
+        hash(self)
+
         check.not_empty(self.parts)
         for p in self.parts[:-1]:
             check.equal(p.kind, 'dir')
 
         pd = {}
         for i, p in enumerate(self.parts):
-            if isinstance(p,
-                if p.
-                    raise DeployPathError('Duplicate
-                pd[p.
+            if isinstance(p, PlaceholderDeployPathPart):
+                if p.placeholder in pd:
+                    raise DeployPathError('Duplicate placeholders in path', self)
+                pd[p.placeholder] = i
 
         if 'tag' in pd:
             if 'app' not in pd or pd['app'] >= pd['tag']:
-                raise DeployPathError('Tag
+                raise DeployPathError('Tag placeholder in path without preceding app', self)
 
     @property
     def kind(self) -> ta.Literal['file', 'dir']:
         return self.parts[-1].kind
 
-    def render(self,
+    def render(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
         return os.path.join(  # noqa
-            *[p.render(
+            *[p.render(placeholders) for p in self.parts],
             *([''] if self.kind == 'dir' else []),
         )
 
@@ -4141,10 +4470,10 @@ class DeployPath:
         else:
             tail_parse = FileDeployPathPart.parse
         ps = check.non_empty_str(s).split('/')
-        return cls(
+        return cls((
             *([DirDeployPathPart.parse(p) for p in ps[:-1]] if len(ps) > 1 else []),
             tail_parse(ps[-1]),
-
+        ))
 
 
 ##
@@ -4152,41 +4481,107 @@ class DeployPath:
 
 class DeployPathOwner(abc.ABC):
     @abc.abstractmethod
-    def
+    def get_owned_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
         raise NotImplementedError
 
 
-
-
-
-
-
-
-
+class SingleDirDeployPathOwner(DeployPathOwner, abc.ABC):
+    def __init__(
+            self,
+            *args: ta.Any,
+            owned_dir: str,
+            deploy_home: ta.Optional[DeployHome],
+            **kwargs: ta.Any,
+    ) -> None:
+        super().__init__(*args, **kwargs)
 
-
+        check.not_in('/', owned_dir)
+        self._owned_dir: str = check.non_empty_str(owned_dir)
 
-
+        self._deploy_home = deploy_home
 
-
+        self._owned_deploy_paths = frozenset([DeployPath.parse(self._owned_dir + '/')])
 
-
+    @cached_nullary
+    def _dir(self) -> str:
+        return os.path.join(check.non_empty_str(self._deploy_home), self._owned_dir)
 
-
+    @cached_nullary
+    def _make_dir(self) -> str:
+        if not os.path.isdir(d := self._dir()):
+            os.makedirs(d, exist_ok=True)
+        return d
 
-
+    def get_owned_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
+        return self._owned_deploy_paths
 
 
 ########################################
-# ../
-
+# ../deploy/specs.py
 
-RemoteExecutionPayloadFile = ta.NewType('RemoteExecutionPayloadFile', str)
 
+##
 
-
-
-
+
+@dc.dataclass(frozen=True)
+class DeployGitRepo:
+    host: ta.Optional[str] = None
+    username: ta.Optional[str] = None
+    path: ta.Optional[str] = None
+
+    def __post_init__(self) -> None:
+        check.not_in('..', check.non_empty_str(self.host))
+        check.not_in('.', check.non_empty_str(self.path))
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class DeploySpec:
+    app: DeployApp
+    repo: DeployGitRepo
+    rev: DeployRev
+
+    def __post_init__(self) -> None:
+        hash(self)
+
+    @cached_nullary
+    def key(self) -> DeployKey:
+        return DeployKey(hashlib.sha256(repr(self).encode('utf-8')).hexdigest()[:8])
+
+
+########################################
+# ../remote/config.py
+
+
+@dc.dataclass(frozen=True)
+class RemoteConfig:
+    payload_file: ta.Optional[str] = None
+
+    set_pgid: bool = True
+
+    deathsig: ta.Optional[str] = 'KILL'
+
+    pycharm_remote_debug: ta.Optional[PycharmRemoteDebug] = None
+
+    forward_logging: bool = True
+
+    timebomb_delay_s: ta.Optional[float] = 60 * 60.
+
+    heartbeat_interval_s: float = 3.
+
+
+########################################
+# ../remote/payload.py
+
+
+RemoteExecutionPayloadFile = ta.NewType('RemoteExecutionPayloadFile', str)
+
+
+@cached_nullary
+def _get_self_src() -> str:
+    return inspect.getsource(sys.modules[__name__])
 
 
 def _is_src_amalg(src: str) -> bool:
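Note: the DeploySpec added in the hunk above is a frozen, hashable dataclass whose key() is a truncated sha256 of its repr. A small illustrative sketch (all values are made up and not part of the diff):

    # Hypothetical values; only illustrates the DeployGitRepo/DeploySpec shapes added above.
    spec = DeploySpec(
        app=DeployApp('myapp'),
        repo=DeployGitRepo(host='github.com', path='example/myapp'),
        rev=DeployRev('4f1c9aa'),
    )
    print(spec.key())  # 8 hex chars derived from sha256(repr(spec)), cached after first call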
@@ -4216,6 +4611,75 @@ def get_remote_payload_src(
     return importlib.resources.files(__package__.split('.')[0] + '.scripts').joinpath('manage.py').read_text()
 
 
+########################################
+# ../system/platforms.py
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class Platform(abc.ABC):  # noqa
+    pass
+
+
+class LinuxPlatform(Platform, abc.ABC):
+    pass
+
+
+class UbuntuPlatform(LinuxPlatform):
+    pass
+
+
+class AmazonLinuxPlatform(LinuxPlatform):
+    pass
+
+
+class GenericLinuxPlatform(LinuxPlatform):
+    pass
+
+
+class DarwinPlatform(Platform):
+    pass
+
+
+class UnknownPlatform(Platform):
+    pass
+
+
+##
+
+
+def _detect_system_platform() -> Platform:
+    plat = sys.platform
+
+    if plat == 'linux':
+        if (osr := LinuxOsRelease.read()) is None:
+            return GenericLinuxPlatform()
+
+        if osr.id == 'amzn':
+            return AmazonLinuxPlatform()
+
+        elif osr.id == 'ubuntu':
+            return UbuntuPlatform()
+
+        else:
+            return GenericLinuxPlatform()
+
+    elif plat == 'darwin':
+        return DarwinPlatform()
+
+    else:
+        return UnknownPlatform()
+
+
+@cached_nullary
+def detect_system_platform() -> Platform:
+    platform = _detect_system_platform()
+    log.info('Detected platform: %r', platform)
+    return platform
+
+
 ########################################
 # ../targets/targets.py
 """
@@ -5540,367 +6004,97 @@ inj = Injection
 
 
 ########################################
-# ../../../omlish/lite/
+# ../../../omlish/lite/marshal.py
 """
 TODO:
--
--
+- pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
+- namedtuple
+- literals
+- newtypes?
 """
 
 
-
+##
 
 
-
+@dc.dataclass(frozen=True)
+class ObjMarshalOptions:
+    raw_bytes: bool = False
+    nonstrict_dataclasses: bool = False
 
 
-class
+class ObjMarshaler(abc.ABC):
+    @abc.abstractmethod
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        raise NotImplementedError
 
-
-
-
+    @abc.abstractmethod
+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        raise NotImplementedError
 
 
-
+class NopObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o
 
+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o
 
-class JsonLogFormatter(logging.Formatter):
 
-
-
-
-        'args': False,
-        'levelname': False,
-        'levelno': False,
-        'pathname': False,
-        'filename': False,
-        'module': False,
-        'exc_info': True,
-        'exc_text': True,
-        'stack_info': True,
-        'lineno': False,
-        'funcName': False,
-        'created': False,
-        'msecs': False,
-        'relativeCreated': False,
-        'thread': False,
-        'threadName': False,
-        'processName': False,
-        'process': False,
-    }
+@dc.dataclass()
+class ProxyObjMarshaler(ObjMarshaler):
+    m: ta.Optional[ObjMarshaler] = None
 
-    def
-
-            k: v
-            for k, o in self.KEYS.items()
-            for v in [getattr(record, k)]
-            if not (o and v is None)
-        }
-        return json_dumps_compact(dct)
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return check.not_none(self.m).marshal(o, ctx)
 
+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return check.not_none(self.m).unmarshal(o, ctx)
 
-##
 
+@dc.dataclass(frozen=True)
+class CastObjMarshaler(ObjMarshaler):
+    ty: type
 
-
-
-    ('process', 'pid=%(process)-6s'),
-    ('thread', 'tid=%(thread)x'),
-    ('levelname', '%(levelname)s'),
-    ('name', '%(name)s'),
-    ('separator', '::'),
-    ('message', '%(message)s'),
-]
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o
 
+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return self.ty(o)
 
-class StandardLogFormatter(logging.Formatter):
 
-
-    def
-        return
+class DynamicObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return ctx.manager.marshal_obj(o, opts=ctx.options)
 
-
+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o
 
-    def formatTime(self, record, datefmt=None):
-        ct = self.converter(record.created)  # type: ignore
-        if datefmt:
-            return ct.strftime(datefmt)  # noqa
-        else:
-            t = ct.strftime('%Y-%m-%d %H:%M:%S')
-            return '%s.%03d' % (t, record.msecs)  # noqa
 
+@dc.dataclass(frozen=True)
+class Base64ObjMarshaler(ObjMarshaler):
+    ty: type
 
-
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return base64.b64encode(o).decode('ascii')
 
+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return self.ty(base64.b64decode(o))
 
-class ProxyLogFilterer(logging.Filterer):
-    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
-        self._underlying = underlying
 
-
-
-
+@dc.dataclass(frozen=True)
+class BytesSwitchedObjMarshaler(ObjMarshaler):
+    m: ObjMarshaler
 
-
-
-
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        if ctx.options.raw_bytes:
+            return o
+        return self.m.marshal(o, ctx)
 
-
-
-
-
-    def addFilter(self, filter):  # noqa
-        self._underlying.addFilter(filter)
-
-    def removeFilter(self, filter):  # noqa
-        self._underlying.removeFilter(filter)
-
-    def filter(self, record):
-        return self._underlying.filter(record)
-
-
-class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
-    def __init__(self, underlying: logging.Handler) -> None:  # noqa
-        ProxyLogFilterer.__init__(self, underlying)
-
-    _underlying: logging.Handler
-
-    @property
-    def underlying(self) -> logging.Handler:
-        return self._underlying
-
-    def get_name(self):
-        return self._underlying.get_name()
-
-    def set_name(self, name):
-        self._underlying.set_name(name)
-
-    @property
-    def name(self):
-        return self._underlying.name
-
-    @property
-    def level(self):
-        return self._underlying.level
-
-    @level.setter
-    def level(self, level):
-        self._underlying.level = level
-
-    @property
-    def formatter(self):
-        return self._underlying.formatter
-
-    @formatter.setter
-    def formatter(self, formatter):
-        self._underlying.formatter = formatter
-
-    def createLock(self):
-        self._underlying.createLock()
-
-    def acquire(self):
-        self._underlying.acquire()
-
-    def release(self):
-        self._underlying.release()
-
-    def setLevel(self, level):
-        self._underlying.setLevel(level)
-
-    def format(self, record):
-        return self._underlying.format(record)
-
-    def emit(self, record):
-        self._underlying.emit(record)
-
-    def handle(self, record):
-        return self._underlying.handle(record)
-
-    def setFormatter(self, fmt):
-        self._underlying.setFormatter(fmt)
-
-    def flush(self):
-        self._underlying.flush()
-
-    def close(self):
-        self._underlying.close()
-
-    def handleError(self, record):
-        self._underlying.handleError(record)
-
-
-##
-
-
-class StandardLogHandler(ProxyLogHandler):
-    pass
-
-
-##
-
-
-@contextlib.contextmanager
-def _locking_logging_module_lock() -> ta.Iterator[None]:
-    if hasattr(logging, '_acquireLock'):
-        logging._acquireLock()  # noqa
-        try:
-            yield
-        finally:
-            logging._releaseLock()  # type: ignore  # noqa
-
-    elif hasattr(logging, '_lock'):
-        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
-        with logging._lock:  # noqa
-            yield
-
-    else:
-        raise Exception("Can't find lock in logging module")
-
-
-def configure_standard_logging(
-        level: ta.Union[int, str] = logging.INFO,
-        *,
-        json: bool = False,
-        target: ta.Optional[logging.Logger] = None,
-        force: bool = False,
-        handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
-) -> ta.Optional[StandardLogHandler]:
-    with _locking_logging_module_lock():
-        if target is None:
-            target = logging.root
-
-        #
-
-        if not force:
-            if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
-                return None
-
-        #
-
-        if handler_factory is not None:
-            handler = handler_factory()
-        else:
-            handler = logging.StreamHandler()
-
-        #
-
-        formatter: logging.Formatter
-        if json:
-            formatter = JsonLogFormatter()
-        else:
-            formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
-        handler.setFormatter(formatter)
-
-        #
-
-        handler.addFilter(TidLogFilter())
-
-        #
-
-        target.addHandler(handler)
-
-        #
-
-        if level is not None:
-            target.setLevel(level)
-
-        #
-
-        return StandardLogHandler(handler)
-
-
-########################################
-# ../../../omlish/lite/marshal.py
-"""
-TODO:
-- pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
-- namedtuple
-- literals
-- newtypes?
-"""
-
-
-##
-
-
-@dc.dataclass(frozen=True)
-class ObjMarshalOptions:
-    raw_bytes: bool = False
-    nonstrict_dataclasses: bool = False
-
-
-class ObjMarshaler(abc.ABC):
-    @abc.abstractmethod
-    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        raise NotImplementedError
-
-
-class NopObjMarshaler(ObjMarshaler):
-    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return o
-
-    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return o
-
-
-@dc.dataclass()
-class ProxyObjMarshaler(ObjMarshaler):
-    m: ta.Optional[ObjMarshaler] = None
-
-    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return check.not_none(self.m).marshal(o, ctx)
-
-    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return check.not_none(self.m).unmarshal(o, ctx)
-
-
-@dc.dataclass(frozen=True)
-class CastObjMarshaler(ObjMarshaler):
-    ty: type
-
-    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return o
-
-    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return self.ty(o)
-
-
-class DynamicObjMarshaler(ObjMarshaler):
-    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return ctx.manager.marshal_obj(o, opts=ctx.options)
-
-    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return o
-
-
-@dc.dataclass(frozen=True)
-class Base64ObjMarshaler(ObjMarshaler):
-    ty: type
-
-    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return base64.b64encode(o).decode('ascii')
-
-    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return self.ty(base64.b64decode(o))
-
-
-@dc.dataclass(frozen=True)
-class BytesSwitchedObjMarshaler(ObjMarshaler):
-    m: ObjMarshaler
-
-    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        if ctx.options.raw_bytes:
-            return o
-        return self.m.marshal(o, ctx)
-
-    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        if ctx.options.raw_bytes:
-            return o
-        return self.m.unmarshal(o, ctx)
+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        if ctx.options.raw_bytes:
+            return o
+        return self.m.unmarshal(o, ctx)
 
 
 @dc.dataclass(frozen=True)
@@ -6263,22 +6457,76 @@ def is_debugger_attached() -> bool:
     return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
 
 
-
+LITE_REQUIRED_PYTHON_VERSION = (3, 8)
 
 
-def
-    if sys.version_info <
-        raise OSError(f'Requires python {
+def check_lite_runtime_version() -> None:
+    if sys.version_info < LITE_REQUIRED_PYTHON_VERSION:
+        raise OSError(f'Requires python {LITE_REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
 
 
 ########################################
-# ../../../
+# ../../../omlish/logs/json.py
+"""
+TODO:
+- translate json keys
+"""
 
 
-
-
-
-
+class JsonLogFormatter(logging.Formatter):
+    KEYS: ta.Mapping[str, bool] = {
+        'name': False,
+        'msg': False,
+        'args': False,
+        'levelname': False,
+        'levelno': False,
+        'pathname': False,
+        'filename': False,
+        'module': False,
+        'exc_info': True,
+        'exc_text': True,
+        'stack_info': True,
+        'lineno': False,
+        'funcName': False,
+        'created': False,
+        'msecs': False,
+        'relativeCreated': False,
+        'thread': False,
+        'threadName': False,
+        'processName': False,
+        'process': False,
+    }
+
+    def __init__(
+            self,
+            *args: ta.Any,
+            json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
+            **kwargs: ta.Any,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+
+        if json_dumps is None:
+            json_dumps = json_dumps_compact
+        self._json_dumps = json_dumps
+
+    def format(self, record: logging.LogRecord) -> str:
+        dct = {
+            k: v
+            for k, o in self.KEYS.items()
+            for v in [getattr(record, k)]
+            if not (o and v is None)
+        }
+        return self._json_dumps(dct)
+
+
+########################################
+# ../../../omdev/interp/types.py
+
+
+# See https://peps.python.org/pep-3149/
+INTERP_OPT_GLYPHS_BY_ATTR: ta.Mapping[str, str] = collections.OrderedDict([
+    ('debug', 'd'),
+    ('threaded', 't'),
 ])
 
 INTERP_OPT_ATTRS_BY_GLYPH: ta.Mapping[str, str] = collections.OrderedDict(
@@ -6522,6 +6770,44 @@ class DeployCommandExecutor(CommandExecutor[DeployCommand, DeployCommand.Output]
         return DeployCommand.Output()
 
 
+########################################
+# ../deploy/tmp.py
+
+
+class DeployTmpManager(
+        SingleDirDeployPathOwner,
+        DeployAtomicPathSwapping,
+):
+    def __init__(
+            self,
+            *,
+            deploy_home: ta.Optional[DeployHome] = None,
+    ) -> None:
+        super().__init__(
+            owned_dir='tmp',
+            deploy_home=deploy_home,
+        )
+
+    @cached_nullary
+    def _swapping(self) -> DeployAtomicPathSwapping:
+        return TempDirDeployAtomicPathSwapping(
+            temp_dir=self._make_dir(),
+            root_dir=check.non_empty_str(self._deploy_home),
+        )
+
+    def begin_atomic_path_swap(
+            self,
+            kind: DeployAtomicPathSwapKind,
+            dst_path: str,
+            **kwargs: ta.Any,
+    ) -> DeployAtomicPathSwap:
+        return self._swapping().begin_atomic_path_swap(
+            kind,
+            dst_path,
+            **kwargs,
+        )
+
+
 ########################################
 # ../marshal.py
 
@@ -6614,76 +6900,138 @@ class RemoteChannelImpl(RemoteChannel):
 
 
 ########################################
-# ../system/
+# ../system/config.py
 
 
-
+@dc.dataclass(frozen=True)
+class SystemConfig:
+    platform: ta.Optional[Platform] = None
 
 
-
-
-
+########################################
+# ../../../omlish/logs/standard.py
+"""
+TODO:
+- structured
+- prefixed
+- debug
+- optional noisy? noisy will never be lite - some kinda configure_standard callback mechanism?
+"""
 
 
-
-    pass
+##
 
 
-
-
+STANDARD_LOG_FORMAT_PARTS = [
+    ('asctime', '%(asctime)-15s'),
+    ('process', 'pid=%(process)-6s'),
+    ('thread', 'tid=%(thread)x'),
+    ('levelname', '%(levelname)s'),
+    ('name', '%(name)s'),
+    ('separator', '::'),
+    ('message', '%(message)s'),
+]
 
 
-class
-
+class StandardLogFormatter(logging.Formatter):
+    @staticmethod
+    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
+        return ' '.join(v for k, v in parts)
 
+    converter = datetime.datetime.fromtimestamp  # type: ignore
 
-
-
+    def formatTime(self, record, datefmt=None):
+        ct = self.converter(record.created)  # type: ignore
+        if datefmt:
+            return ct.strftime(datefmt)  # noqa
+        else:
+            t = ct.strftime('%Y-%m-%d %H:%M:%S')
+            return '%s.%03d' % (t, record.msecs)  # noqa
 
 
-
-    pass
+##
 
 
-class
-
+class StandardConfiguredLogHandler(ProxyLogHandler):
+    def __init_subclass__(cls, **kwargs):
+        raise TypeError('This class serves only as a marker and should not be subclassed.')
 
 
 ##
 
 
-
-
+@contextlib.contextmanager
+def _locking_logging_module_lock() -> ta.Iterator[None]:
+    if hasattr(logging, '_acquireLock'):
+        logging._acquireLock()  # noqa
+        try:
+            yield
+        finally:
+            logging._releaseLock()  # type: ignore  # noqa
 
-
-
-
+    elif hasattr(logging, '_lock'):
+        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
+        with logging._lock:  # noqa
+            yield
 
-
-
+    else:
+        raise Exception("Can't find lock in logging module")
 
-        elif osr.id == 'ubuntu':
-            return UbuntuPlatform()
 
+def configure_standard_logging(
+        level: ta.Union[int, str] = logging.INFO,
+        *,
+        json: bool = False,
+        target: ta.Optional[logging.Logger] = None,
+        force: bool = False,
+        handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
+) -> ta.Optional[StandardConfiguredLogHandler]:
+    with _locking_logging_module_lock():
+        if target is None:
+            target = logging.root
+
+        #
+
+        if not force:
+            if any(isinstance(h, StandardConfiguredLogHandler) for h in list(target.handlers)):
+                return None
+
+        #
+
+        if handler_factory is not None:
+            handler = handler_factory()
         else:
-
+            handler = logging.StreamHandler()
 
-
-            return DarwinPlatform()
+        #
 
-
-
+        formatter: logging.Formatter
+        if json:
+            formatter = JsonLogFormatter()
+        else:
+            formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
+        handler.setFormatter(formatter)
 
+        #
 
-
-
-
-
-
+        handler.addFilter(TidLogFilter())
+
+        #
+
+        target.addHandler(handler)
+
+        #
+
+        if level is not None:
+            target.setLevel(level)
+
+        #
+
+        return StandardConfiguredLogHandler(handler)
 
 
 ########################################
-# ../../../omlish/
+# ../../../omlish/subprocesses.py
 
 
 ##
@@ -6734,8 +7082,8 @@ def subprocess_close(
 ##
 
 
-class
-    DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] =
+class BaseSubprocesses(abc.ABC):  # noqa
+    DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = None
 
     def __init__(
             self,
@@ -6748,6 +7096,9 @@ class AbstractSubprocesses(abc.ABC): # noqa
         self._log = log if log is not None else self.DEFAULT_LOGGER
         self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS
 
+    def set_logger(self, log: ta.Optional[logging.Logger]) -> None:
+        self._log = log
+
     #
 
     def prepare_args(
@@ -6859,23 +7210,25 @@ class AbstractSubprocesses(abc.ABC): # noqa
 ##
 
 
-class
+class AbstractSubprocesses(BaseSubprocesses, abc.ABC):
+    @abc.abstractmethod
     def check_call(
             self,
             *cmd: str,
             stdout: ta.Any = sys.stderr,
             **kwargs: ta.Any,
     ) -> None:
-
-        subprocess.check_call(cmd, **kwargs)
+        raise NotImplementedError
 
+    @abc.abstractmethod
     def check_output(
             self,
             *cmd: str,
             **kwargs: ta.Any,
     ) -> bytes:
-
-
+        raise NotImplementedError
+
+    #
 
     def check_output_str(
             self,
@@ -6917,49 +7270,149 @@ class Subprocesses(AbstractSubprocesses):
         return ret.decode().strip()
 
 
-
-
-
-########################################
-# ../commands/local.py
+##
 
 
-class
-    def
+class Subprocesses(AbstractSubprocesses):
+    def check_call(
             self,
-
-
+            *cmd: str,
+            stdout: ta.Any = sys.stderr,
+            **kwargs: ta.Any,
     ) -> None:
-
-
-        self._command_executors = command_executors
+        with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs):  # noqa
+            subprocess.check_call(cmd, **kwargs)
 
-
-
-
+    def check_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bytes:
+        with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs):  # noqa
+            return subprocess.check_output(cmd, **kwargs)
 
 
-
-# ../remote/execution.py
-"""
-TODO:
- - sequence all messages
-"""
+subprocesses = Subprocesses()
 
 
 ##
 
 
-class
-
-
-
-
-
-
-
+class AbstractAsyncSubprocesses(BaseSubprocesses):
+    @abc.abstractmethod
+    async def check_call(
+            self,
+            *cmd: str,
+            stdout: ta.Any = sys.stderr,
+            **kwargs: ta.Any,
+    ) -> None:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    async def check_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bytes:
+        raise NotImplementedError
+
+    #
+
+    async def check_output_str(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> str:
+        return (await self.check_output(*cmd, **kwargs)).decode().strip()
+
+    #
+
+    async def try_call(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bool:
+        if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
+            return False
+        else:
+            return True
+
+    async def try_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[bytes]:
+        if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
+            return None
+        else:
+            return ret
+
+    async def try_output_str(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[str]:
+        if (ret := await self.try_output(*cmd, **kwargs)) is None:
+            return None
+        else:
+            return ret.decode().strip()
+
+
+########################################
+# ../bootstrap.py
+
+
+@dc.dataclass(frozen=True)
+class MainBootstrap:
+    main_config: MainConfig = MainConfig()
+
+    deploy_config: DeployConfig = DeployConfig()
+
+    remote_config: RemoteConfig = RemoteConfig()
+
+    system_config: SystemConfig = SystemConfig()
 
-
+
+########################################
+# ../commands/local.py
+
+
+class LocalCommandExecutor(CommandExecutor):
+    def __init__(
+            self,
+            *,
+            command_executors: CommandExecutorMap,
+    ) -> None:
+        super().__init__()
+
+        self._command_executors = command_executors
+
+    async def execute(self, cmd: Command) -> Command.Output:
+        ce: CommandExecutor = self._command_executors[type(cmd)]
+        return await ce.execute(cmd)
+
+
+########################################
+# ../remote/execution.py
+"""
+TODO:
+ - sequence all messages
+"""
+
+
+##
+
+
+class _RemoteProtocol:
+    class Message(abc.ABC):  # noqa
+        async def send(self, chan: RemoteChannel) -> None:
+            await chan.send_obj(self, _RemoteProtocol.Message)
+
+        @classmethod
+        async def recv(cls: ta.Type[T], chan: RemoteChannel) -> ta.Optional[T]:
+            return await chan.recv_obj(cls)
+
+    #
 
     class Request(Message, abc.ABC):  # noqa
         pass
@@ -7337,16 +7790,7 @@ class RemoteCommandExecutor(CommandExecutor):
 
 
 ########################################
-#
-
-
-@dc.dataclass(frozen=True)
-class SystemConfig:
-    platform: ta.Optional[Platform] = None
-
-
-########################################
-# ../../../omlish/lite/asyncio/subprocesses.py
+# ../../../omlish/asyncs/asyncio/subprocesses.py
 
 
 ##
@@ -7357,6 +7801,8 @@ class AsyncioProcessCommunicator:
             self,
             proc: asyncio.subprocess.Process,
             loop: ta.Optional[ta.Any] = None,
+            *,
+            log: ta.Optional[logging.Logger] = None,
     ) -> None:
         super().__init__()
 
@@ -7365,6 +7811,7 @@ class AsyncioProcessCommunicator:
 
         self._proc = proc
         self._loop = loop
+        self._log = log
 
         self._transport: asyncio.base_subprocess.BaseSubprocessTransport = check.isinstance(
             proc._transport,  # type: ignore  # noqa
@@ -7380,19 +7827,19 @@ class AsyncioProcessCommunicator:
         try:
             if input is not None:
                 stdin.write(input)
-                if self._debug:
-
+                if self._debug and self._log is not None:
+                    self._log.debug('%r communicate: feed stdin (%s bytes)', self, len(input))
 
             await stdin.drain()
 
         except (BrokenPipeError, ConnectionResetError) as exc:
             # communicate() ignores BrokenPipeError and ConnectionResetError. write() and drain() can raise these
             # exceptions.
-            if self._debug:
-
+            if self._debug and self._log is not None:
+                self._log.debug('%r communicate: stdin got %r', self, exc)
 
-        if self._debug:
-
+        if self._debug and self._log is not None:
+            self._log.debug('%r communicate: close stdin', self)
 
         stdin.close()
 
@@ -7408,15 +7855,15 @@ class AsyncioProcessCommunicator:
             check.equal(fd, 1)
             stream = check.not_none(self._proc.stdout)
 
-        if self._debug:
+        if self._debug and self._log is not None:
             name = 'stdout' if fd == 1 else 'stderr'
-
+            self._log.debug('%r communicate: read %s', self, name)
 
         output = await stream.read()
 
-        if self._debug:
+        if self._debug and self._log is not None:
             name = 'stdout' if fd == 1 else 'stderr'
-
+            self._log.debug('%r communicate: close %s', self, name)
 
         transport.close()
 
@@ -7465,7 +7912,7 @@ class AsyncioProcessCommunicator:
 ##
 
 
-class AsyncioSubprocesses(
+class AsyncioSubprocesses(AbstractAsyncSubprocesses):
     async def communicate(
             self,
             proc: asyncio.subprocess.Process,
@@ -7562,45 +8009,6 @@ class AsyncioSubprocesses(AbstractSubprocesses):
         with self.prepare_and_wrap(*cmd, stdout=subprocess.PIPE, check=True, **kwargs) as (cmd, kwargs):  # noqa
             return check.not_none((await self.run(*cmd, **kwargs)).stdout)
 
-    async def check_output_str(
-            self,
-            *cmd: str,
-            **kwargs: ta.Any,
-    ) -> str:
-        return (await self.check_output(*cmd, **kwargs)).decode().strip()
-
-    #
-
-    async def try_call(
-            self,
-            *cmd: str,
-            **kwargs: ta.Any,
-    ) -> bool:
-        if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
-            return False
-        else:
-            return True
-
-    async def try_output(
-            self,
-            *cmd: str,
-            **kwargs: ta.Any,
-    ) -> ta.Optional[bytes]:
-        if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
-            return None
-        else:
-            return ret
-
-    async def try_output_str(
-            self,
-            *cmd: str,
-            **kwargs: ta.Any,
-    ) -> ta.Optional[str]:
-        if (ret := await self.try_output(*cmd, **kwargs)) is None:
-            return None
-        else:
-            return ret.decode().strip()
-
 
 asyncio_subprocesses = AsyncioSubprocesses()
 
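Note: the check_*/try_* helper surface now lives on the shared base classes rather than on AsyncioSubprocesses itself, so the sync and async entry points read the same way. A minimal usage sketch (mine, not from the package), assuming the module-level `subprocesses` and `asyncio_subprocesses` instances defined in this file:

    import asyncio

    def sync_example() -> None:
        # check_output_str raises on failure and returns stripped stdout.
        print(subprocesses.check_output_str('uname', '-s'))

    async def async_example() -> None:
        # try_* variants swallow failures and return None/False instead of raising.
        out = await asyncio_subprocesses.try_output_str('git', 'rev-parse', 'HEAD')
        print(out if out is not None else 'not a git checkout')

    sync_example()
    asyncio.run(async_example())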
@@ -7700,21 +8108,6 @@ class InterpInspector:
 INTERP_INSPECTOR = InterpInspector()
 
 
-########################################
-# ../bootstrap.py
-
-
-@dc.dataclass(frozen=True)
-class MainBootstrap:
-    main_config: MainConfig = MainConfig()
-
-    deploy_config: DeployConfig = DeployConfig()
-
-    remote_config: RemoteConfig = RemoteConfig()
-
-    system_config: SystemConfig = SystemConfig()
-
-
 ########################################
 # ../commands/subprocess.py
 
@@ -7801,44 +8194,22 @@ github.com/wrmsr/omlish@rev
 ##
 
 
-
-class DeployGitRepo:
-    host: ta.Optional[str] = None
-    username: ta.Optional[str] = None
-    path: ta.Optional[str] = None
-
-    def __post_init__(self) -> None:
-        check.not_in('..', check.non_empty_str(self.host))
-        check.not_in('.', check.non_empty_str(self.path))
-
-
-@dc.dataclass(frozen=True)
-class DeployGitSpec:
-    repo: DeployGitRepo
-    rev: DeployRev
-
-
-##
-
-
-class DeployGitManager(DeployPathOwner):
+class DeployGitManager(SingleDirDeployPathOwner):
     def __init__(
             self,
             *,
-            deploy_home: DeployHome,
+            deploy_home: ta.Optional[DeployHome] = None,
+            atomics: DeployAtomicPathSwapping,
     ) -> None:
-        super().__init__(
+        super().__init__(
+            owned_dir='git',
+            deploy_home=deploy_home,
+        )
 
-        self.
-        self._dir = os.path.join(deploy_home, 'git')
+        self._atomics = atomics
 
         self._repo_dirs: ta.Dict[DeployGitRepo, DeployGitManager.RepoDir] = {}
 
-    def get_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
-        return {
-            DeployPath.parse('git'),
-        }
-
     class RepoDir:
         def __init__(
                 self,
@@ -7850,7 +8221,7 @@ class DeployGitManager(DeployPathOwner):
             self._git = git
             self._repo = repo
             self._dir = os.path.join(
-                self._git.
+                self._git._make_dir(),  # noqa
                 check.non_empty_str(repo.host),
                 check.non_empty_str(repo.path),
             )
@@ -7887,18 +8258,20 @@ class DeployGitManager(DeployPathOwner):
 
         async def checkout(self, rev: DeployRev, dst_dir: str) -> None:
             check.state(not os.path.exists(dst_dir))
+            with self._git._atomics.begin_atomic_path_swap(  # noqa
+                    'dir',
+                    dst_dir,
+                    auto_commit=True,
+                    make_dirs=True,
+            ) as dst_swap:
+                await self.fetch(rev)
 
-
-
-            # FIXME: temp dir swap
-            os.makedirs(dst_dir)
+                dst_call = functools.partial(asyncio_subprocesses.check_call, cwd=dst_swap.tmp_path)
+                await dst_call('git', 'init')
 
-
-
-
-            await dst_call('git', 'remote', 'add', 'local', self._dir)
-            await dst_call('git', 'fetch', '--depth=1', 'local', rev)
-            await dst_call('git', 'checkout', rev)
+                await dst_call('git', 'remote', 'add', 'local', self._dir)
+                await dst_call('git', 'fetch', '--depth=1', 'local', rev)
+                await dst_call('git', 'checkout', rev)
 
     def get_repo_dir(self, repo: DeployGitRepo) -> RepoDir:
         try:
@@ -7907,8 +8280,8 @@ class DeployGitManager(DeployPathOwner):
             repo_dir = self._repo_dirs[repo] = DeployGitManager.RepoDir(self, repo)
             return repo_dir
 
-    async def checkout(self,
-        await self.get_repo_dir(
+    async def checkout(self, repo: DeployGitRepo, rev: DeployRev, dst_dir: str) -> None:
+        await self.get_repo_dir(repo).checkout(rev, dst_dir)
 
 
 ########################################
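Note: the checkout above now stages its work through an atomic path swap — the git commands run against `dst_swap.tmp_path`, and the swap moves the finished tree into `dst_dir` only on a clean exit (auto_commit=True). A rough sketch of the same pattern under those assumptions (the `stage_dir` helper is hypothetical; `DeployAtomicPathSwapping` is bound to `DeployTmpManager` in the inject hunk further down):

    async def stage_dir(atomics: DeployAtomicPathSwapping, dst_dir: str) -> None:
        # Work lands in a temporary path; on clean exit it is swapped into dst_dir.
        with atomics.begin_atomic_path_swap(
                'dir',
                dst_dir,
                auto_commit=True,
                make_dirs=True,
        ) as swap:
            await asyncio_subprocesses.check_call('touch', 'ready', cwd=swap.tmp_path)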
@@ -7924,14 +8297,19 @@ class DeployVenvManager(DeployPathOwner):
     def __init__(
             self,
             *,
-            deploy_home: DeployHome,
+            deploy_home: ta.Optional[DeployHome] = None,
+            atomics: DeployAtomicPathSwapping,
     ) -> None:
         super().__init__()
 
         self._deploy_home = deploy_home
-        self.
+        self._atomics = atomics
+
+    @cached_nullary
+    def _dir(self) -> str:
+        return os.path.join(check.non_empty_str(self._deploy_home), 'venvs')
 
-    def
+    def get_owned_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
         return {
             DeployPath.parse('venvs/@app/@tag/'),
         }
@@ -7945,6 +8323,8 @@ class DeployVenvManager(DeployPathOwner):
     ) -> None:
         sys_exe = 'python3'
 
+        # !! NOTE: (most) venvs cannot be relocated, so an atomic swap can't be used. it's up to the path manager to
+        # garbage collect orphaned dirs.
         await asyncio_subprocesses.check_call(sys_exe, '-m', 'venv', venv_dir)
 
         #
@@ -7966,58 +8346,200 @@ class DeployVenvManager(DeployPathOwner):
 
     async def setup_app_venv(self, app_tag: DeployAppTag) -> None:
         await self.setup_venv(
-            os.path.join(self._deploy_home, 'apps', app_tag.app, app_tag.tag),
-            os.path.join(self.
+            os.path.join(check.non_empty_str(self._deploy_home), 'apps', app_tag.app, app_tag.tag),
+            os.path.join(self._dir(), app_tag.app, app_tag.tag),
         )
 
 
 ########################################
-# ../remote/
+# ../remote/_main.py
 
 
 ##
 
 
-class
-
-
-
-        shell_quote: bool = False
+class _RemoteExecutionLogHandler(logging.Handler):
+    def __init__(self, fn: ta.Callable[[str], None]) -> None:
+        super().__init__()
+        self._fn = fn
 
-
-
+    def emit(self, record):
+        msg = self.format(record)
+        self._fn(msg)
 
-        stderr: ta.Optional[str] = None  # SubprocessChannelOption
 
-
-    class Spawned:
-        stdin: asyncio.StreamWriter
-        stdout: asyncio.StreamReader
-        stderr: ta.Optional[asyncio.StreamReader]
+##
 
-
-
+
+class _RemoteExecutionMain:
+    def __init__(
             self,
-
-
-
-            timeout: ta.Optional[float] = None,
-            debug: bool = False,
-    ) -> ta.AsyncContextManager[Spawned]:
-        raise NotImplementedError
+            chan: RemoteChannel,
+    ) -> None:
+        super().__init__()
 
+        self._chan = chan
 
-
+        self.__bootstrap: ta.Optional[MainBootstrap] = None
+        self.__injector: ta.Optional[Injector] = None
 
+    @property
+    def _bootstrap(self) -> MainBootstrap:
+        return check.not_none(self.__bootstrap)
 
-
-
-
-        shell: bool
+    @property
+    def _injector(self) -> Injector:
+        return check.not_none(self.__injector)
 
-
+    #
+
+    def _timebomb_main(
             self,
-
+            delay_s: float,
+            *,
+            sig: int = signal.SIGINT,
+            code: int = 1,
+    ) -> None:
+        time.sleep(delay_s)
+
+        if (pgid := os.getpgid(0)) == os.getpid():
+            os.killpg(pgid, sig)
+
+        os._exit(code)  # noqa
+
+    @cached_nullary
+    def _timebomb_thread(self) -> ta.Optional[threading.Thread]:
+        if (tbd := self._bootstrap.remote_config.timebomb_delay_s) is None:
+            return None
+
+        thr = threading.Thread(
+            target=functools.partial(self._timebomb_main, tbd),
+            name=f'{self.__class__.__name__}.timebomb',
+            daemon=True,
+        )
+
+        thr.start()
+
+        log.debug('Started timebomb thread: %r', thr)
+
+        return thr
+
+    #
+
+    @cached_nullary
+    def _log_handler(self) -> _RemoteLogHandler:
+        return _RemoteLogHandler(self._chan)
+
+    #
+
+    async def _setup(self) -> None:
+        check.none(self.__bootstrap)
+        check.none(self.__injector)
+
+        # Bootstrap
+
+        self.__bootstrap = check.not_none(await self._chan.recv_obj(MainBootstrap))
+
+        if (prd := self._bootstrap.remote_config.pycharm_remote_debug) is not None:
+            pycharm_debug_connect(prd)
+
+        self.__injector = main_bootstrap(self._bootstrap)
+
+        self._chan.set_marshaler(self._injector[ObjMarshalerManager])
+
+        # Post-bootstrap
+
+        if self._bootstrap.remote_config.set_pgid:
+            if os.getpgid(0) != os.getpid():
+                log.debug('Setting pgid')
+                os.setpgid(0, 0)
+
+        if (ds := self._bootstrap.remote_config.deathsig) is not None:
+            log.debug('Setting deathsig: %s', ds)
+            set_process_deathsig(int(signal.Signals[f'SIG{ds.upper()}']))
+
+        self._timebomb_thread()
+
+        if self._bootstrap.remote_config.forward_logging:
+            log.debug('Installing log forwarder')
+            logging.root.addHandler(self._log_handler())
+
+    #
+
+    async def run(self) -> None:
+        await self._setup()
+
+        executor = self._injector[LocalCommandExecutor]
+
+        handler = _RemoteCommandHandler(self._chan, executor)
+
+        await handler.run()
+
+
+def _remote_execution_main() -> None:
+    rt = pyremote_bootstrap_finalize()  # noqa
+
+    async def inner() -> None:
+        input = await asyncio_open_stream_reader(rt.input)  # noqa
+        output = await asyncio_open_stream_writer(rt.output)
+
+        chan = RemoteChannelImpl(
+            input,
+            output,
+        )
+
+        await _RemoteExecutionMain(chan).run()
+
+    asyncio.run(inner())
+
+
+########################################
+# ../remote/spawning.py
+
+
+##
+
+
+class RemoteSpawning(abc.ABC):
+    @dc.dataclass(frozen=True)
+    class Target:
+        shell: ta.Optional[str] = None
+        shell_quote: bool = False
+
+        DEFAULT_PYTHON: ta.ClassVar[str] = 'python3'
+        python: str = DEFAULT_PYTHON
+
+        stderr: ta.Optional[str] = None  # SubprocessChannelOption
+
+    @dc.dataclass(frozen=True)
+    class Spawned:
+        stdin: asyncio.StreamWriter
+        stdout: asyncio.StreamReader
+        stderr: ta.Optional[asyncio.StreamReader]
+
+    @abc.abstractmethod
+    def spawn(
+            self,
+            tgt: Target,
+            src: str,
+            *,
+            timeout: ta.Optional[float] = None,
+            debug: bool = False,
+    ) -> ta.AsyncContextManager[Spawned]:
+        raise NotImplementedError
+
+
+##
+
+
+class SubprocessRemoteSpawning(RemoteSpawning):
+    class _PreparedCmd(ta.NamedTuple):  # noqa
+        cmd: ta.Sequence[str]
+        shell: bool
+
+    def _prepare_cmd(
+            self,
+            tgt: RemoteSpawning.Target,
             src: str,
     ) -> _PreparedCmd:
         if tgt.shell is not None:
@@ -8360,20 +8882,22 @@ def bind_commands(
 
 def make_deploy_tag(
         rev: DeployRev,
-
+        key: DeployKey,
+        *,
+        utcnow: ta.Optional[datetime.datetime] = None,
 ) -> DeployTag:
-    if
-
-    now_fmt = '%Y%m%dT%H%M%
-    now_str =
-    return DeployTag('-'.join([rev,
+    if utcnow is None:
+        utcnow = datetime.datetime.now(tz=datetime.timezone.utc)  # noqa
+    now_fmt = '%Y%m%dT%H%M%SZ'
+    now_str = utcnow.strftime(now_fmt)
+    return DeployTag('-'.join([now_str, rev, key]))
 
 
 class DeployAppManager(DeployPathOwner):
     def __init__(
             self,
             *,
-            deploy_home: DeployHome,
+            deploy_home: ta.Optional[DeployHome] = None,
             git: DeployGitManager,
             venvs: DeployVenvManager,
     ) -> None:
@@ -8383,29 +8907,27 @@ class DeployAppManager(DeployPathOwner):
         self._git = git
         self._venvs = venvs
 
-
+    @cached_nullary
+    def _dir(self) -> str:
+        return os.path.join(check.non_empty_str(self._deploy_home), 'apps')
 
-    def
+    def get_owned_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
         return {
             DeployPath.parse('apps/@app/@tag'),
         }
 
     async def prepare_app(
             self,
-
-            rev: DeployRev,
-            repo: DeployGitRepo,
+            spec: DeploySpec,
     ):
-        app_tag = DeployAppTag(app, make_deploy_tag(rev))
-        app_dir = os.path.join(self._dir, app, app_tag.tag)
+        app_tag = DeployAppTag(spec.app, make_deploy_tag(spec.rev, spec.key()))
+        app_dir = os.path.join(self._dir(), spec.app, app_tag.tag)
 
         #
 
         await self._git.checkout(
-
-
-            rev=rev,
-        ),
+            spec.repo,
+            spec.rev,
             app_dir,
         )
 
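Note: make_deploy_tag now places a UTC timestamp ahead of the rev and spec key, so tags sort chronologically. A small illustration of the resulting format (mine, not from the package; it assumes DeployRev and DeployKey are plain string NewTypes as the other Deploy* aliases are):

    import datetime

    ts = datetime.datetime(2024, 12, 1, 3, 4, 5, tzinfo=datetime.timezone.utc)
    tag = make_deploy_tag(DeployRev('abc123'), DeployKey('XQ4Y'), utcnow=ts)
    assert tag == '20241201T030405Z-abc123-XQ4Y'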
@@ -8415,145 +8937,122 @@ class DeployAppManager(DeployPathOwner):
 
 
 ########################################
-# ../remote/
-
-
-##
-
-
-class _RemoteExecutionLogHandler(logging.Handler):
-    def __init__(self, fn: ta.Callable[[str], None]) -> None:
-        super().__init__()
-        self._fn = fn
-
-    def emit(self, record):
-        msg = self.format(record)
-        self._fn(msg)
+# ../remote/connection.py
 
 
 ##
 
 
-class
+class PyremoteRemoteExecutionConnector:
     def __init__(
             self,
-
+            *,
+            spawning: RemoteSpawning,
+            msh: ObjMarshalerManager,
+            payload_file: ta.Optional[RemoteExecutionPayloadFile] = None,
     ) -> None:
         super().__init__()
 
-        self.
+        self._spawning = spawning
+        self._msh = msh
+        self._payload_file = payload_file
 
-
-        self.__injector: ta.Optional[Injector] = None
+    #
 
-    @
-    def
-        return
+    @cached_nullary
+    def _payload_src(self) -> str:
+        return get_remote_payload_src(file=self._payload_file)
 
-    @
-    def
-        return
+    @cached_nullary
+    def _remote_src(self) -> ta.Sequence[str]:
+        return [
+            self._payload_src(),
+            '_remote_execution_main()',
+        ]
+
+    @cached_nullary
+    def _spawn_src(self) -> str:
+        return pyremote_build_bootstrap_cmd(__package__ or 'manage')
 
     #
 
-
+    @contextlib.asynccontextmanager
+    async def connect(
             self,
-
-
-
-
-
-        time.sleep(delay_s)
-
-        if (pgid := os.getpgid(0)) == os.getpid():
-            os.killpg(pgid, sig)
+            tgt: RemoteSpawning.Target,
+            bs: MainBootstrap,
+    ) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
+        spawn_src = self._spawn_src()
+        remote_src = self._remote_src()
 
-
+        async with self._spawning.spawn(
+                tgt,
+                spawn_src,
+                debug=bs.main_config.debug,
+        ) as proc:
+            res = await PyremoteBootstrapDriver(  # noqa
+                remote_src,
+                PyremoteBootstrapOptions(
+                    debug=bs.main_config.debug,
+                ),
+            ).async_run(
+                proc.stdout,
+                proc.stdin,
+            )
 
-
-
-
-
+            chan = RemoteChannelImpl(
+                proc.stdout,
+                proc.stdin,
+                msh=self._msh,
+            )
 
-
-            target=functools.partial(self._timebomb_main, tbd),
-            name=f'{self.__class__.__name__}.timebomb',
-            daemon=True,
-        )
+            await chan.send_obj(bs)
 
-
+            rce: RemoteCommandExecutor
+            async with aclosing(RemoteCommandExecutor(chan)) as rce:
+                await rce.start()
 
-
+                yield rce
 
-        return thr
 
-
+##
 
-    @cached_nullary
-    def _log_handler(self) -> _RemoteLogHandler:
-        return _RemoteLogHandler(self._chan)
 
-
-
-
-
-
-
-
-
-        self.__bootstrap = check.not_none(await self._chan.recv_obj(MainBootstrap))
-
-        if (prd := self._bootstrap.remote_config.pycharm_remote_debug) is not None:
-            pycharm_debug_connect(prd)
-
-        self.__injector = main_bootstrap(self._bootstrap)
-
-        self._chan.set_marshaler(self._injector[ObjMarshalerManager])
-
-        # Post-bootstrap
-
-        if self._bootstrap.remote_config.set_pgid:
-            if os.getpgid(0) != os.getpid():
-                log.debug('Setting pgid')
-                os.setpgid(0, 0)
-
-        if (ds := self._bootstrap.remote_config.deathsig) is not None:
-            log.debug('Setting deathsig: %s', ds)
-            set_process_deathsig(int(signal.Signals[f'SIG{ds.upper()}']))
-
-        self._timebomb_thread()
-
-        if self._bootstrap.remote_config.forward_logging:
-            log.debug('Installing log forwarder')
-            logging.root.addHandler(self._log_handler())
-
-        #
-
-    async def run(self) -> None:
-        await self._setup()
-
-        executor = self._injector[LocalCommandExecutor]
-
-        handler = _RemoteCommandHandler(self._chan, executor)
-
-        await handler.run()
+class InProcessRemoteExecutionConnector:
+    def __init__(
+            self,
+            *,
+            msh: ObjMarshalerManager,
+            local_executor: LocalCommandExecutor,
+    ) -> None:
+        super().__init__()
 
+        self._msh = msh
+        self._local_executor = local_executor
 
-
-
+    @contextlib.asynccontextmanager
+    async def connect(self) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
+        r0, w0 = asyncio_create_bytes_channel()
+        r1, w1 = asyncio_create_bytes_channel()
 
-
-
-        output = await asyncio_open_stream_writer(rt.output)
+        remote_chan = RemoteChannelImpl(r0, w1, msh=self._msh)
+        local_chan = RemoteChannelImpl(r1, w0, msh=self._msh)
 
-
-
-
+        rch = _RemoteCommandHandler(
+            remote_chan,
+            self._local_executor,
         )
+        rch_task = asyncio.create_task(rch.run())  # noqa
+        try:
+            rce: RemoteCommandExecutor
+            async with aclosing(RemoteCommandExecutor(local_chan)) as rce:
+                await rce.start()
 
-
+                yield rce
 
-
+        finally:
+            rch.stop()
+            await rch_task
 
 
 ########################################
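Note: both connectors yield a started RemoteCommandExecutor; the in-process one wires the remote protocol over in-memory byte channels, which exercises the command path without spawning anything. A rough usage sketch (not from the package; the `run_one` helper and the injector lookup are illustrative only):

    async def run_one(injector: Injector, cmd: Command) -> Command.Output:
        connector = injector[InProcessRemoteExecutionConnector]
        async with connector.connect() as rce:
            # RemoteCommandExecutor is itself a CommandExecutor, so the command
            # round-trips through the marshaled channel pair and back.
            return await rce.execute(cmd)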
@@ -9152,122 +9651,31 @@ class SystemInterpProvider(InterpProvider):
 
 
 ########################################
-# ../remote/
-
+# ../remote/inject.py
 
-##
 
+def bind_remote(
+        *,
+        remote_config: RemoteConfig,
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = [
+        inj.bind(remote_config),
 
-
-
-            self,
-            *,
-            spawning: RemoteSpawning,
-            msh: ObjMarshalerManager,
-            payload_file: ta.Optional[RemoteExecutionPayloadFile] = None,
-    ) -> None:
-        super().__init__()
+        inj.bind(SubprocessRemoteSpawning, singleton=True),
+        inj.bind(RemoteSpawning, to_key=SubprocessRemoteSpawning),
 
-
-
-
+        inj.bind(PyremoteRemoteExecutionConnector, singleton=True),
+        inj.bind(InProcessRemoteExecutionConnector, singleton=True),
+    ]
 
     #
 
-
-
-        return get_remote_payload_src(file=self._payload_file)
-
-    @cached_nullary
-    def _remote_src(self) -> ta.Sequence[str]:
-        return [
-            self._payload_src(),
-            '_remote_execution_main()',
-        ]
-
-    @cached_nullary
-    def _spawn_src(self) -> str:
-        return pyremote_build_bootstrap_cmd(__package__ or 'manage')
+    if (pf := remote_config.payload_file) is not None:
+        lst.append(inj.bind(pf, key=RemoteExecutionPayloadFile))
 
     #
 
-
-    async def connect(
-            self,
-            tgt: RemoteSpawning.Target,
-            bs: MainBootstrap,
-    ) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
-        spawn_src = self._spawn_src()
-        remote_src = self._remote_src()
-
-        async with self._spawning.spawn(
-                tgt,
-                spawn_src,
-                debug=bs.main_config.debug,
-        ) as proc:
-            res = await PyremoteBootstrapDriver(  # noqa
-                remote_src,
-                PyremoteBootstrapOptions(
-                    debug=bs.main_config.debug,
-                ),
-            ).async_run(
-                proc.stdout,
-                proc.stdin,
-            )
-
-            chan = RemoteChannelImpl(
-                proc.stdout,
-                proc.stdin,
-                msh=self._msh,
-            )
-
-            await chan.send_obj(bs)
-
-            rce: RemoteCommandExecutor
-            async with aclosing(RemoteCommandExecutor(chan)) as rce:
-                await rce.start()
-
-                yield rce
-
-
-##
-
-
-class InProcessRemoteExecutionConnector:
-    def __init__(
-            self,
-            *,
-            msh: ObjMarshalerManager,
-            local_executor: LocalCommandExecutor,
-    ) -> None:
-        super().__init__()
-
-        self._msh = msh
-        self._local_executor = local_executor
-
-    @contextlib.asynccontextmanager
-    async def connect(self) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
-        r0, w0 = asyncio_create_bytes_channel()
-        r1, w1 = asyncio_create_bytes_channel()
-
-        remote_chan = RemoteChannelImpl(r0, w1, msh=self._msh)
-        local_chan = RemoteChannelImpl(r1, w0, msh=self._msh)
-
-        rch = _RemoteCommandHandler(
-            remote_chan,
-            self._local_executor,
-        )
-        rch_task = asyncio.create_task(rch.run())  # noqa
-        try:
-            rce: RemoteCommandExecutor
-            async with aclosing(RemoteCommandExecutor(local_chan)) as rce:
-                await rce.start()
-
-                yield rce
-
-        finally:
-            rch.stop()
-            await rch_task
+    return inj.as_bindings(*lst)
 
 
 ########################################
@@ -9318,132 +9726,6 @@ def bind_system(
     return inj.as_bindings(*lst)
 
 
-########################################
-# ../../../omdev/interp/resolvers.py
-
-
-INTERP_PROVIDER_TYPES_BY_NAME: ta.Mapping[str, ta.Type[InterpProvider]] = {
-    cls.name: cls for cls in deep_subclasses(InterpProvider) if abc.ABC not in cls.__bases__  # type: ignore
-}
-
-
-class InterpResolver:
-    def __init__(
-            self,
-            providers: ta.Sequence[ta.Tuple[str, InterpProvider]],
-    ) -> None:
-        super().__init__()
-
-        self._providers: ta.Mapping[str, InterpProvider] = collections.OrderedDict(providers)
-
-    async def _resolve_installed(self, spec: InterpSpecifier) -> ta.Optional[ta.Tuple[InterpProvider, InterpVersion]]:
-        lst = [
-            (i, si)
-            for i, p in enumerate(self._providers.values())
-            for si in await p.get_installed_versions(spec)
-            if spec.contains(si)
-        ]
-
-        slst = sorted(lst, key=lambda t: (-t[0], t[1].version))
-        if not slst:
-            return None
-
-        bi, bv = slst[-1]
-        bp = list(self._providers.values())[bi]
-        return (bp, bv)
-
-    async def resolve(
-            self,
-            spec: InterpSpecifier,
-            *,
-            install: bool = False,
-    ) -> ta.Optional[Interp]:
-        tup = await self._resolve_installed(spec)
-        if tup is not None:
-            bp, bv = tup
-            return await bp.get_installed_version(bv)
-
-        if not install:
-            return None
-
-        tp = list(self._providers.values())[0]  # noqa
-
-        sv = sorted(
-            [s for s in await tp.get_installable_versions(spec) if s in spec],
-            key=lambda s: s.version,
-        )
-        if not sv:
-            return None
-
-        bv = sv[-1]
-        return await tp.install_version(bv)
-
-    async def list(self, spec: InterpSpecifier) -> None:
-        print('installed:')
-        for n, p in self._providers.items():
-            lst = [
-                si
-                for si in await p.get_installed_versions(spec)
-                if spec.contains(si)
-            ]
-            if lst:
-                print(f'  {n}')
-                for si in lst:
-                    print(f'    {si}')
-
-        print()
-
-        print('installable:')
-        for n, p in self._providers.items():
-            lst = [
-                si
-                for si in await p.get_installable_versions(spec)
-                if spec.contains(si)
-            ]
-            if lst:
-                print(f'  {n}')
-                for si in lst:
-                    print(f'    {si}')
-
-
-DEFAULT_INTERP_RESOLVER = InterpResolver([(p.name, p) for p in [
-    # pyenv is preferred to system interpreters as it tends to have more support for things like tkinter
-    PyenvInterpProvider(try_update=True),
-
-    RunningInterpProvider(),
-
-    SystemInterpProvider(),
-]])
-
-
-########################################
-# ../remote/inject.py
-
-
-def bind_remote(
-        *,
-        remote_config: RemoteConfig,
-) -> InjectorBindings:
-    lst: ta.List[InjectorBindingOrBindings] = [
-        inj.bind(remote_config),
-
-        inj.bind(SubprocessRemoteSpawning, singleton=True),
-        inj.bind(RemoteSpawning, to_key=SubprocessRemoteSpawning),
-
-        inj.bind(PyremoteRemoteExecutionConnector, singleton=True),
-        inj.bind(InProcessRemoteExecutionConnector, singleton=True),
-    ]
-
-    #
-
-    if (pf := remote_config.payload_file) is not None:
-        lst.append(inj.bind(pf, key=RemoteExecutionPayloadFile))
-
-    #
-
-    return inj.as_bindings(*lst)
-
-
 ########################################
 # ../targets/connection.py
 
@@ -9579,33 +9861,101 @@ class SshManageTargetConnector(ManageTargetConnector):
 
 
 ########################################
-#
+# ../../../omdev/interp/resolvers.py
 
 
-
+INTERP_PROVIDER_TYPES_BY_NAME: ta.Mapping[str, ta.Type[InterpProvider]] = {
+    cls.name: cls for cls in deep_subclasses(InterpProvider) if abc.ABC not in cls.__bases__  # type: ignore
+}
 
 
-
-
-
-
+class InterpResolver:
+    def __init__(
+            self,
+            providers: ta.Sequence[ta.Tuple[str, InterpProvider]],
+    ) -> None:
+        super().__init__()
 
-
-    class Output(Command.Output):
-        exe: str
-        version: str
-        opts: InterpOpts
+        self._providers: ta.Mapping[str, InterpProvider] = collections.OrderedDict(providers)
 
+    async def _resolve_installed(self, spec: InterpSpecifier) -> ta.Optional[ta.Tuple[InterpProvider, InterpVersion]]:
+        lst = [
+            (i, si)
+            for i, p in enumerate(self._providers.values())
+            for si in await p.get_installed_versions(spec)
+            if spec.contains(si)
+        ]
 
-
-
-
-
-
-
-
-
+        slst = sorted(lst, key=lambda t: (-t[0], t[1].version))
+        if not slst:
+            return None
+
+        bi, bv = slst[-1]
+        bp = list(self._providers.values())[bi]
+        return (bp, bv)
+
+    async def resolve(
+            self,
+            spec: InterpSpecifier,
+            *,
+            install: bool = False,
+    ) -> ta.Optional[Interp]:
+        tup = await self._resolve_installed(spec)
+        if tup is not None:
+            bp, bv = tup
+            return await bp.get_installed_version(bv)
+
+        if not install:
+            return None
+
+        tp = list(self._providers.values())[0]  # noqa
+
+        sv = sorted(
+            [s for s in await tp.get_installable_versions(spec) if s in spec],
+            key=lambda s: s.version,
         )
+        if not sv:
+            return None
+
+        bv = sv[-1]
+        return await tp.install_version(bv)
+
+    async def list(self, spec: InterpSpecifier) -> None:
+        print('installed:')
+        for n, p in self._providers.items():
+            lst = [
+                si
+                for si in await p.get_installed_versions(spec)
+                if spec.contains(si)
+            ]
+            if lst:
+                print(f'  {n}')
+                for si in lst:
+                    print(f'    {si}')
+
+        print()
+
+        print('installable:')
+        for n, p in self._providers.items():
+            lst = [
+                si
+                for si in await p.get_installable_versions(spec)
+                if spec.contains(si)
+            ]
+            if lst:
+                print(f'  {n}')
+                for si in lst:
+                    print(f'    {si}')
+
+
+DEFAULT_INTERP_RESOLVER = InterpResolver([(p.name, p) for p in [
+    # pyenv is preferred to system interpreters as it tends to have more support for things like tkinter
+    PyenvInterpProvider(try_update=True),
+
+    RunningInterpProvider(),
+
+    SystemInterpProvider(),
+]])
 
 
 ########################################
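Note: DEFAULT_INTERP_RESOLVER is what the InterpCommand executor in the next hunk delegates to. A minimal standalone sketch of the same call path (mine, not from the package), assuming a version spec string such as '3.12':

    async def find_python(spec_str: str) -> ta.Optional[str]:
        spec = InterpSpecifier.parse(spec_str)
        interp = await DEFAULT_INTERP_RESOLVER.resolve(spec, install=False)
        # Returns the resolved interpreter executable path, or None if nothing matches.
        return interp.exe if interp is not None else None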
@@ -9637,6 +9987,36 @@ def bind_targets() -> InjectorBindings:
     return inj.as_bindings(*lst)
 
 
+########################################
+# ../deploy/interp.py
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class InterpCommand(Command['InterpCommand.Output']):
+    spec: str
+    install: bool = False
+
+    @dc.dataclass(frozen=True)
+    class Output(Command.Output):
+        exe: str
+        version: str
+        opts: InterpOpts
+
+
+class InterpCommandExecutor(CommandExecutor[InterpCommand, InterpCommand.Output]):
+    async def execute(self, cmd: InterpCommand) -> InterpCommand.Output:
+        i = InterpSpecifier.parse(check.not_none(cmd.spec))
+        o = check.not_none(await DEFAULT_INTERP_RESOLVER.resolve(i, install=cmd.install))
+        return InterpCommand.Output(
+            exe=o.exe,
+            version=str(o.version.version),
+            opts=o.version.opts,
+        )
+
+
 ########################################
 # ../deploy/inject.py
 
@@ -9648,10 +10028,19 @@ def bind_deploy(
     lst: ta.List[InjectorBindingOrBindings] = [
         inj.bind(deploy_config),
 
+        #
+
         inj.bind(DeployAppManager, singleton=True),
+
         inj.bind(DeployGitManager, singleton=True),
+
+        inj.bind(DeployTmpManager, singleton=True),
+        inj.bind(DeployAtomicPathSwapping, to_key=DeployTmpManager),
+
         inj.bind(DeployVenvManager, singleton=True),
 
+        #
+
         bind_command(DeployCommand, DeployCommandExecutor),
         bind_command(InterpCommand, InterpCommandExecutor),
     ]