ominfra 0.0.0.dev156__py3-none-any.whl → 0.0.0.dev158__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ominfra/clouds/aws/journald2aws/main.py +1 -1
- ominfra/journald/tailer.py +2 -2
- ominfra/manage/bootstrap_.py +1 -1
- ominfra/manage/commands/subprocess.py +4 -4
- ominfra/manage/deploy/apps.py +14 -15
- ominfra/manage/deploy/config.py +3 -0
- ominfra/manage/deploy/git.py +11 -27
- ominfra/manage/deploy/paths.py +48 -48
- ominfra/manage/deploy/specs.py +32 -0
- ominfra/manage/deploy/venvs.py +10 -5
- ominfra/manage/main.py +33 -4
- ominfra/manage/remote/spawning.py +4 -9
- ominfra/manage/system/packages.py +1 -1
- ominfra/pyremote.py +26 -26
- ominfra/scripts/journald2aws.py +469 -357
- ominfra/scripts/manage.py +2488 -1463
- ominfra/scripts/supervisor.py +385 -351
- ominfra/supervisor/configs.py +2 -0
- ominfra/supervisor/http.py +1 -1
- ominfra/supervisor/main.py +2 -2
- ominfra/supervisor/supervisor.py +2 -33
- ominfra/supervisor/utils/os.py +41 -0
- {ominfra-0.0.0.dev156.dist-info → ominfra-0.0.0.dev158.dist-info}/METADATA +3 -3
- {ominfra-0.0.0.dev156.dist-info → ominfra-0.0.0.dev158.dist-info}/RECORD +28 -27
- {ominfra-0.0.0.dev156.dist-info → ominfra-0.0.0.dev158.dist-info}/LICENSE +0 -0
- {ominfra-0.0.0.dev156.dist-info → ominfra-0.0.0.dev158.dist-info}/WHEEL +0 -0
- {ominfra-0.0.0.dev156.dist-info → ominfra-0.0.0.dev158.dist-info}/entry_points.txt +0 -0
- {ominfra-0.0.0.dev156.dist-info → ominfra-0.0.0.dev158.dist-info}/top_level.txt +0 -0
ominfra/scripts/manage.py
CHANGED
@@ -37,6 +37,7 @@ import shlex
 import shutil
 import signal
 import site
+import string
 import struct
 import subprocess
 import sys
@@ -68,6 +69,11 @@ VersionCmpLocalType = ta.Union['NegativeInfinityVersionType', _VersionCmpLocalTy
 VersionCmpKey = ta.Tuple[int, ta.Tuple[int, ...], VersionCmpPrePostDevType, VersionCmpPrePostDevType, VersionCmpPrePostDevType, VersionCmpLocalType]  # noqa
 VersionComparisonMethod = ta.Callable[[VersionCmpKey, VersionCmpKey], bool]
 
+# ../../omdev/toml/parser.py
+TomlParseFloat = ta.Callable[[str], ta.Any]
+TomlKey = ta.Tuple[str, ...]
+TomlPos = int  # ta.TypeAlias
+
 # ../../omlish/asyncs/asyncio/timeouts.py
 AwaitableT = ta.TypeVar('AwaitableT', bound=ta.Awaitable)
 
@@ -94,7 +100,7 @@ CommandOutputT = ta.TypeVar('CommandOutputT', bound='Command.Output')
 
 # deploy/paths.py
 DeployPathKind = ta.Literal['dir', 'file']  # ta.TypeAlias
-
+DeployPathPlaceholder = ta.Literal['app', 'tag']  # ta.TypeAlias
 
 # ../../omlish/argparse/cli.py
 ArgparseCommandFn = ta.Callable[[], ta.Optional[int]]  # ta.TypeAlias
@@ -109,7 +115,10 @@ InjectorProviderFn = ta.Callable[['Injector'], ta.Any]
 InjectorProviderFnMap = ta.Mapping['InjectorKey', 'InjectorProviderFn']
 InjectorBindingOrBindings = ta.Union['InjectorBinding', 'InjectorBindings']
 
-#
+# ../configs.py
+ConfigMapping = ta.Mapping[str, ta.Any]
+
+# ../../omlish/subprocesses.py
 SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias
 
 # system/packages.py
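The large hunk below vendors CPython's tomllib-derived TOML parser into manage.py. A minimal usage sketch of the two entry points it adds, with an illustrative config string that is not taken from the package:

cfg = toml_loads('[deploy]\napp = "my-app"\n')
assert cfg == {'deploy': {'app': 'my-app'}}

with open('deploy.toml', 'rb') as f:   # toml_load requires a binary-mode file object
    cfg = toml_load(f)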
@@ -523,6 +532,824 @@ def canonicalize_version(
|
|
523
532
|
return ''.join(parts)
|
524
533
|
|
525
534
|
|
535
|
+
########################################
|
536
|
+
# ../../../omdev/toml/parser.py
|
537
|
+
# SPDX-License-Identifier: MIT
|
538
|
+
# SPDX-FileCopyrightText: 2021 Taneli Hukkinen
|
539
|
+
# Licensed to PSF under a Contributor Agreement.
|
540
|
+
#
|
541
|
+
# PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
542
|
+
# --------------------------------------------
|
543
|
+
#
|
544
|
+
# 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization
|
545
|
+
# ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated
|
546
|
+
# documentation.
|
547
|
+
#
|
548
|
+
# 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive,
|
549
|
+
# royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative
|
550
|
+
# works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License
|
551
|
+
# Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
|
552
|
+
# 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; All
|
553
|
+
# Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee.
|
554
|
+
#
|
555
|
+
# 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and
|
556
|
+
# wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in
|
557
|
+
# any such work a brief summary of the changes made to Python.
|
558
|
+
#
|
559
|
+
# 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES,
|
560
|
+
# EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY
|
561
|
+
# OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY
|
562
|
+
# RIGHTS.
|
563
|
+
#
|
564
|
+
# 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL
|
565
|
+
# DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF
|
566
|
+
# ADVISED OF THE POSSIBILITY THEREOF.
|
567
|
+
#
|
568
|
+
# 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
|
569
|
+
#
|
570
|
+
# 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint
|
571
|
+
# venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade
|
572
|
+
# name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
|
573
|
+
#
|
574
|
+
# 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this
|
575
|
+
# License Agreement.
|
576
|
+
#
|
577
|
+
# https://github.com/python/cpython/blob/9ce90206b7a4649600218cf0bd4826db79c9a312/Lib/tomllib/_parser.py
|
578
|
+
|
579
|
+
|
580
|
+
##
|
581
|
+
|
582
|
+
|
583
|
+
_TOML_TIME_RE_STR = r'([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?'
|
584
|
+
|
585
|
+
TOML_RE_NUMBER = re.compile(
|
586
|
+
r"""
|
587
|
+
0
|
588
|
+
(?:
|
589
|
+
x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex
|
590
|
+
|
|
591
|
+
b[01](?:_?[01])* # bin
|
592
|
+
|
|
593
|
+
o[0-7](?:_?[0-7])* # oct
|
594
|
+
)
|
595
|
+
|
|
596
|
+
[+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part
|
597
|
+
(?P<floatpart>
|
598
|
+
(?:\.[0-9](?:_?[0-9])*)? # optional fractional part
|
599
|
+
(?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part
|
600
|
+
)
|
601
|
+
""",
|
602
|
+
flags=re.VERBOSE,
|
603
|
+
)
|
604
|
+
TOML_RE_LOCALTIME = re.compile(_TOML_TIME_RE_STR)
|
605
|
+
TOML_RE_DATETIME = re.compile(
|
606
|
+
rf"""
|
607
|
+
([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27
|
608
|
+
(?:
|
609
|
+
[Tt ]
|
610
|
+
{_TOML_TIME_RE_STR}
|
611
|
+
(?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset
|
612
|
+
)?
|
613
|
+
""",
|
614
|
+
flags=re.VERBOSE,
|
615
|
+
)
|
616
|
+
|
617
|
+
|
618
|
+
def toml_match_to_datetime(match: re.Match) -> ta.Union[datetime.datetime, datetime.date]:
|
619
|
+
"""Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.
|
620
|
+
|
621
|
+
Raises ValueError if the match does not correspond to a valid date or datetime.
|
622
|
+
"""
|
623
|
+
(
|
624
|
+
year_str,
|
625
|
+
month_str,
|
626
|
+
day_str,
|
627
|
+
hour_str,
|
628
|
+
minute_str,
|
629
|
+
sec_str,
|
630
|
+
micros_str,
|
631
|
+
zulu_time,
|
632
|
+
offset_sign_str,
|
633
|
+
offset_hour_str,
|
634
|
+
offset_minute_str,
|
635
|
+
) = match.groups()
|
636
|
+
year, month, day = int(year_str), int(month_str), int(day_str)
|
637
|
+
if hour_str is None:
|
638
|
+
return datetime.date(year, month, day)
|
639
|
+
hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
|
640
|
+
micros = int(micros_str.ljust(6, '0')) if micros_str else 0
|
641
|
+
if offset_sign_str:
|
642
|
+
tz: ta.Optional[datetime.tzinfo] = toml_cached_tz(
|
643
|
+
offset_hour_str, offset_minute_str, offset_sign_str,
|
644
|
+
)
|
645
|
+
elif zulu_time:
|
646
|
+
tz = datetime.UTC
|
647
|
+
else: # local date-time
|
648
|
+
tz = None
|
649
|
+
return datetime.datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
|
650
|
+
|
651
|
+
|
652
|
+
@functools.lru_cache() # noqa
|
653
|
+
def toml_cached_tz(hour_str: str, minute_str: str, sign_str: str) -> datetime.timezone:
|
654
|
+
sign = 1 if sign_str == '+' else -1
|
655
|
+
return datetime.timezone(
|
656
|
+
datetime.timedelta(
|
657
|
+
hours=sign * int(hour_str),
|
658
|
+
minutes=sign * int(minute_str),
|
659
|
+
),
|
660
|
+
)
|
661
|
+
|
662
|
+
|
663
|
+
def toml_match_to_localtime(match: re.Match) -> datetime.time:
|
664
|
+
hour_str, minute_str, sec_str, micros_str = match.groups()
|
665
|
+
micros = int(micros_str.ljust(6, '0')) if micros_str else 0
|
666
|
+
return datetime.time(int(hour_str), int(minute_str), int(sec_str), micros)
|
667
|
+
|
668
|
+
|
669
|
+
def toml_match_to_number(match: re.Match, parse_float: TomlParseFloat) -> ta.Any:
|
670
|
+
if match.group('floatpart'):
|
671
|
+
return parse_float(match.group())
|
672
|
+
return int(match.group(), 0)
|
673
|
+
|
674
|
+
|
675
|
+
TOML_ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))
|
676
|
+
|
677
|
+
# Neither of these sets include quotation mark or backslash. They are currently handled as separate cases in the parser
|
678
|
+
# functions.
|
679
|
+
TOML_ILLEGAL_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t')
|
680
|
+
TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t\n')
|
681
|
+
|
682
|
+
TOML_ILLEGAL_LITERAL_STR_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS
|
683
|
+
TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS
|
684
|
+
|
685
|
+
TOML_ILLEGAL_COMMENT_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS
|
686
|
+
|
687
|
+
TOML_WS = frozenset(' \t')
|
688
|
+
TOML_WS_AND_NEWLINE = TOML_WS | frozenset('\n')
|
689
|
+
TOML_BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + '-_')
|
690
|
+
TOML_KEY_INITIAL_CHARS = TOML_BARE_KEY_CHARS | frozenset("\"'")
|
691
|
+
TOML_HEXDIGIT_CHARS = frozenset(string.hexdigits)
|
692
|
+
|
693
|
+
TOML_BASIC_STR_ESCAPE_REPLACEMENTS = types.MappingProxyType(
|
694
|
+
{
|
695
|
+
'\\b': '\u0008', # backspace
|
696
|
+
'\\t': '\u0009', # tab
|
697
|
+
'\\n': '\u000A', # linefeed
|
698
|
+
'\\f': '\u000C', # form feed
|
699
|
+
'\\r': '\u000D', # carriage return
|
700
|
+
'\\"': '\u0022', # quote
|
701
|
+
'\\\\': '\u005C', # backslash
|
702
|
+
},
|
703
|
+
)
|
704
|
+
|
705
|
+
|
706
|
+
class TomlDecodeError(ValueError):
|
707
|
+
"""An error raised if a document is not valid TOML."""
|
708
|
+
|
709
|
+
|
710
|
+
def toml_load(fp: ta.BinaryIO, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]:
|
711
|
+
"""Parse TOML from a binary file object."""
|
712
|
+
b = fp.read()
|
713
|
+
try:
|
714
|
+
s = b.decode()
|
715
|
+
except AttributeError:
|
716
|
+
raise TypeError("File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`") from None
|
717
|
+
return toml_loads(s, parse_float=parse_float)
|
718
|
+
|
719
|
+
|
720
|
+
def toml_loads(s: str, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]: # noqa: C901
|
721
|
+
"""Parse TOML from a string."""
|
722
|
+
|
723
|
+
# The spec allows converting "\r\n" to "\n", even in string literals. Let's do so to simplify parsing.
|
724
|
+
try:
|
725
|
+
src = s.replace('\r\n', '\n')
|
726
|
+
except (AttributeError, TypeError):
|
727
|
+
raise TypeError(f"Expected str object, not '{type(s).__qualname__}'") from None
|
728
|
+
pos = 0
|
729
|
+
out = TomlOutput(TomlNestedDict(), TomlFlags())
|
730
|
+
header: TomlKey = ()
|
731
|
+
parse_float = toml_make_safe_parse_float(parse_float)
|
732
|
+
|
733
|
+
# Parse one statement at a time (typically means one line in TOML source)
|
734
|
+
while True:
|
735
|
+
# 1. Skip line leading whitespace
|
736
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
737
|
+
|
738
|
+
# 2. Parse rules. Expect one of the following:
|
739
|
+
# - end of file
|
740
|
+
# - end of line
|
741
|
+
# - comment
|
742
|
+
# - key/value pair
|
743
|
+
# - append dict to list (and move to its namespace)
|
744
|
+
# - create dict (and move to its namespace)
|
745
|
+
# Skip trailing whitespace when applicable.
|
746
|
+
try:
|
747
|
+
char = src[pos]
|
748
|
+
except IndexError:
|
749
|
+
break
|
750
|
+
if char == '\n':
|
751
|
+
pos += 1
|
752
|
+
continue
|
753
|
+
if char in TOML_KEY_INITIAL_CHARS:
|
754
|
+
pos = toml_key_value_rule(src, pos, out, header, parse_float)
|
755
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
756
|
+
elif char == '[':
|
757
|
+
try:
|
758
|
+
second_char: ta.Optional[str] = src[pos + 1]
|
759
|
+
except IndexError:
|
760
|
+
second_char = None
|
761
|
+
out.flags.finalize_pending()
|
762
|
+
if second_char == '[':
|
763
|
+
pos, header = toml_create_list_rule(src, pos, out)
|
764
|
+
else:
|
765
|
+
pos, header = toml_create_dict_rule(src, pos, out)
|
766
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
767
|
+
elif char != '#':
|
768
|
+
raise toml_suffixed_err(src, pos, 'Invalid statement')
|
769
|
+
|
770
|
+
# 3. Skip comment
|
771
|
+
pos = toml_skip_comment(src, pos)
|
772
|
+
|
773
|
+
# 4. Expect end of line or end of file
|
774
|
+
try:
|
775
|
+
char = src[pos]
|
776
|
+
except IndexError:
|
777
|
+
break
|
778
|
+
if char != '\n':
|
779
|
+
raise toml_suffixed_err(
|
780
|
+
src, pos, 'Expected newline or end of document after a statement',
|
781
|
+
)
|
782
|
+
pos += 1
|
783
|
+
|
784
|
+
return out.data.dict
|
785
|
+
|
786
|
+
|
787
|
+
class TomlFlags:
|
788
|
+
"""Flags that map to parsed keys/namespaces."""
|
789
|
+
|
790
|
+
# Marks an immutable namespace (inline array or inline table).
|
791
|
+
FROZEN = 0
|
792
|
+
# Marks a nest that has been explicitly created and can no longer be opened using the "[table]" syntax.
|
793
|
+
EXPLICIT_NEST = 1
|
794
|
+
|
795
|
+
def __init__(self) -> None:
|
796
|
+
self._flags: ta.Dict[str, dict] = {}
|
797
|
+
self._pending_flags: ta.Set[ta.Tuple[TomlKey, int]] = set()
|
798
|
+
|
799
|
+
def add_pending(self, key: TomlKey, flag: int) -> None:
|
800
|
+
self._pending_flags.add((key, flag))
|
801
|
+
|
802
|
+
def finalize_pending(self) -> None:
|
803
|
+
for key, flag in self._pending_flags:
|
804
|
+
self.set(key, flag, recursive=False)
|
805
|
+
self._pending_flags.clear()
|
806
|
+
|
807
|
+
def unset_all(self, key: TomlKey) -> None:
|
808
|
+
cont = self._flags
|
809
|
+
for k in key[:-1]:
|
810
|
+
if k not in cont:
|
811
|
+
return
|
812
|
+
cont = cont[k]['nested']
|
813
|
+
cont.pop(key[-1], None)
|
814
|
+
|
815
|
+
def set(self, key: TomlKey, flag: int, *, recursive: bool) -> None: # noqa: A003
|
816
|
+
cont = self._flags
|
817
|
+
key_parent, key_stem = key[:-1], key[-1]
|
818
|
+
for k in key_parent:
|
819
|
+
if k not in cont:
|
820
|
+
cont[k] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
|
821
|
+
cont = cont[k]['nested']
|
822
|
+
if key_stem not in cont:
|
823
|
+
cont[key_stem] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
|
824
|
+
cont[key_stem]['recursive_flags' if recursive else 'flags'].add(flag)
|
825
|
+
|
826
|
+
def is_(self, key: TomlKey, flag: int) -> bool:
|
827
|
+
if not key:
|
828
|
+
return False # document root has no flags
|
829
|
+
cont = self._flags
|
830
|
+
for k in key[:-1]:
|
831
|
+
if k not in cont:
|
832
|
+
return False
|
833
|
+
inner_cont = cont[k]
|
834
|
+
if flag in inner_cont['recursive_flags']:
|
835
|
+
return True
|
836
|
+
cont = inner_cont['nested']
|
837
|
+
key_stem = key[-1]
|
838
|
+
if key_stem in cont:
|
839
|
+
cont = cont[key_stem]
|
840
|
+
return flag in cont['flags'] or flag in cont['recursive_flags']
|
841
|
+
return False
|
842
|
+
|
843
|
+
|
844
|
+
class TomlNestedDict:
|
845
|
+
def __init__(self) -> None:
|
846
|
+
# The parsed content of the TOML document
|
847
|
+
self.dict: ta.Dict[str, ta.Any] = {}
|
848
|
+
|
849
|
+
def get_or_create_nest(
|
850
|
+
self,
|
851
|
+
key: TomlKey,
|
852
|
+
*,
|
853
|
+
access_lists: bool = True,
|
854
|
+
) -> dict:
|
855
|
+
cont: ta.Any = self.dict
|
856
|
+
for k in key:
|
857
|
+
if k not in cont:
|
858
|
+
cont[k] = {}
|
859
|
+
cont = cont[k]
|
860
|
+
if access_lists and isinstance(cont, list):
|
861
|
+
cont = cont[-1]
|
862
|
+
if not isinstance(cont, dict):
|
863
|
+
raise KeyError('There is no nest behind this key')
|
864
|
+
return cont
|
865
|
+
|
866
|
+
def append_nest_to_list(self, key: TomlKey) -> None:
|
867
|
+
cont = self.get_or_create_nest(key[:-1])
|
868
|
+
last_key = key[-1]
|
869
|
+
if last_key in cont:
|
870
|
+
list_ = cont[last_key]
|
871
|
+
if not isinstance(list_, list):
|
872
|
+
raise KeyError('An object other than list found behind this key')
|
873
|
+
list_.append({})
|
874
|
+
else:
|
875
|
+
cont[last_key] = [{}]
|
876
|
+
|
877
|
+
|
878
|
+
class TomlOutput(ta.NamedTuple):
|
879
|
+
data: TomlNestedDict
|
880
|
+
flags: TomlFlags
|
881
|
+
|
882
|
+
|
883
|
+
def toml_skip_chars(src: str, pos: TomlPos, chars: ta.Iterable[str]) -> TomlPos:
|
884
|
+
try:
|
885
|
+
while src[pos] in chars:
|
886
|
+
pos += 1
|
887
|
+
except IndexError:
|
888
|
+
pass
|
889
|
+
return pos
|
890
|
+
|
891
|
+
|
892
|
+
def toml_skip_until(
|
893
|
+
src: str,
|
894
|
+
pos: TomlPos,
|
895
|
+
expect: str,
|
896
|
+
*,
|
897
|
+
error_on: ta.FrozenSet[str],
|
898
|
+
error_on_eof: bool,
|
899
|
+
) -> TomlPos:
|
900
|
+
try:
|
901
|
+
new_pos = src.index(expect, pos)
|
902
|
+
except ValueError:
|
903
|
+
new_pos = len(src)
|
904
|
+
if error_on_eof:
|
905
|
+
raise toml_suffixed_err(src, new_pos, f'Expected {expect!r}') from None
|
906
|
+
|
907
|
+
if not error_on.isdisjoint(src[pos:new_pos]):
|
908
|
+
while src[pos] not in error_on:
|
909
|
+
pos += 1
|
910
|
+
raise toml_suffixed_err(src, pos, f'Found invalid character {src[pos]!r}')
|
911
|
+
return new_pos
|
912
|
+
|
913
|
+
|
914
|
+
def toml_skip_comment(src: str, pos: TomlPos) -> TomlPos:
|
915
|
+
try:
|
916
|
+
char: ta.Optional[str] = src[pos]
|
917
|
+
except IndexError:
|
918
|
+
char = None
|
919
|
+
if char == '#':
|
920
|
+
return toml_skip_until(
|
921
|
+
src, pos + 1, '\n', error_on=TOML_ILLEGAL_COMMENT_CHARS, error_on_eof=False,
|
922
|
+
)
|
923
|
+
return pos
|
924
|
+
|
925
|
+
|
926
|
+
def toml_skip_comments_and_array_ws(src: str, pos: TomlPos) -> TomlPos:
|
927
|
+
while True:
|
928
|
+
pos_before_skip = pos
|
929
|
+
pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
|
930
|
+
pos = toml_skip_comment(src, pos)
|
931
|
+
if pos == pos_before_skip:
|
932
|
+
return pos
|
933
|
+
|
934
|
+
|
935
|
+
def toml_create_dict_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
|
936
|
+
pos += 1 # Skip "["
|
937
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
938
|
+
pos, key = toml_parse_key(src, pos)
|
939
|
+
|
940
|
+
if out.flags.is_(key, TomlFlags.EXPLICIT_NEST) or out.flags.is_(key, TomlFlags.FROZEN):
|
941
|
+
raise toml_suffixed_err(src, pos, f'Cannot declare {key} twice')
|
942
|
+
out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
|
943
|
+
try:
|
944
|
+
out.data.get_or_create_nest(key)
|
945
|
+
except KeyError:
|
946
|
+
raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
|
947
|
+
|
948
|
+
if not src.startswith(']', pos):
|
949
|
+
raise toml_suffixed_err(src, pos, "Expected ']' at the end of a table declaration")
|
950
|
+
return pos + 1, key
|
951
|
+
|
952
|
+
|
953
|
+
def toml_create_list_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
|
954
|
+
pos += 2 # Skip "[["
|
955
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
956
|
+
pos, key = toml_parse_key(src, pos)
|
957
|
+
|
958
|
+
if out.flags.is_(key, TomlFlags.FROZEN):
|
959
|
+
raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
|
960
|
+
# Free the namespace now that it points to another empty list item...
|
961
|
+
out.flags.unset_all(key)
|
962
|
+
# ...but this key precisely is still prohibited from table declaration
|
963
|
+
out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
|
964
|
+
try:
|
965
|
+
out.data.append_nest_to_list(key)
|
966
|
+
except KeyError:
|
967
|
+
raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
|
968
|
+
|
969
|
+
if not src.startswith(']]', pos):
|
970
|
+
raise toml_suffixed_err(src, pos, "Expected ']]' at the end of an array declaration")
|
971
|
+
return pos + 2, key
|
972
|
+
|
973
|
+
|
974
|
+
def toml_key_value_rule(
|
975
|
+
src: str,
|
976
|
+
pos: TomlPos,
|
977
|
+
out: TomlOutput,
|
978
|
+
header: TomlKey,
|
979
|
+
parse_float: TomlParseFloat,
|
980
|
+
) -> TomlPos:
|
981
|
+
pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
|
982
|
+
key_parent, key_stem = key[:-1], key[-1]
|
983
|
+
abs_key_parent = header + key_parent
|
984
|
+
|
985
|
+
relative_path_cont_keys = (header + key[:i] for i in range(1, len(key)))
|
986
|
+
for cont_key in relative_path_cont_keys:
|
987
|
+
# Check that dotted key syntax does not redefine an existing table
|
988
|
+
if out.flags.is_(cont_key, TomlFlags.EXPLICIT_NEST):
|
989
|
+
raise toml_suffixed_err(src, pos, f'Cannot redefine namespace {cont_key}')
|
990
|
+
# Containers in the relative path can't be opened with the table syntax or dotted key/value syntax in following
|
991
|
+
# table sections.
|
992
|
+
out.flags.add_pending(cont_key, TomlFlags.EXPLICIT_NEST)
|
993
|
+
|
994
|
+
if out.flags.is_(abs_key_parent, TomlFlags.FROZEN):
|
995
|
+
raise toml_suffixed_err(
|
996
|
+
src,
|
997
|
+
pos,
|
998
|
+
f'Cannot mutate immutable namespace {abs_key_parent}',
|
999
|
+
)
|
1000
|
+
|
1001
|
+
try:
|
1002
|
+
nest = out.data.get_or_create_nest(abs_key_parent)
|
1003
|
+
except KeyError:
|
1004
|
+
raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
|
1005
|
+
if key_stem in nest:
|
1006
|
+
raise toml_suffixed_err(src, pos, 'Cannot overwrite a value')
|
1007
|
+
# Mark inline table and array namespaces recursively immutable
|
1008
|
+
if isinstance(value, (dict, list)):
|
1009
|
+
out.flags.set(header + key, TomlFlags.FROZEN, recursive=True)
|
1010
|
+
nest[key_stem] = value
|
1011
|
+
return pos
|
1012
|
+
|
1013
|
+
|
1014
|
+
def toml_parse_key_value_pair(
|
1015
|
+
src: str,
|
1016
|
+
pos: TomlPos,
|
1017
|
+
parse_float: TomlParseFloat,
|
1018
|
+
) -> ta.Tuple[TomlPos, TomlKey, ta.Any]:
|
1019
|
+
pos, key = toml_parse_key(src, pos)
|
1020
|
+
try:
|
1021
|
+
char: ta.Optional[str] = src[pos]
|
1022
|
+
except IndexError:
|
1023
|
+
char = None
|
1024
|
+
if char != '=':
|
1025
|
+
raise toml_suffixed_err(src, pos, "Expected '=' after a key in a key/value pair")
|
1026
|
+
pos += 1
|
1027
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
1028
|
+
pos, value = toml_parse_value(src, pos, parse_float)
|
1029
|
+
return pos, key, value
|
1030
|
+
|
1031
|
+
|
1032
|
+
def toml_parse_key(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, TomlKey]:
|
1033
|
+
pos, key_part = toml_parse_key_part(src, pos)
|
1034
|
+
key: TomlKey = (key_part,)
|
1035
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
1036
|
+
while True:
|
1037
|
+
try:
|
1038
|
+
char: ta.Optional[str] = src[pos]
|
1039
|
+
except IndexError:
|
1040
|
+
char = None
|
1041
|
+
if char != '.':
|
1042
|
+
return pos, key
|
1043
|
+
pos += 1
|
1044
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
1045
|
+
pos, key_part = toml_parse_key_part(src, pos)
|
1046
|
+
key += (key_part,)
|
1047
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
1048
|
+
|
1049
|
+
|
1050
|
+
def toml_parse_key_part(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
|
1051
|
+
try:
|
1052
|
+
char: ta.Optional[str] = src[pos]
|
1053
|
+
except IndexError:
|
1054
|
+
char = None
|
1055
|
+
if char in TOML_BARE_KEY_CHARS:
|
1056
|
+
start_pos = pos
|
1057
|
+
pos = toml_skip_chars(src, pos, TOML_BARE_KEY_CHARS)
|
1058
|
+
return pos, src[start_pos:pos]
|
1059
|
+
if char == "'":
|
1060
|
+
return toml_parse_literal_str(src, pos)
|
1061
|
+
if char == '"':
|
1062
|
+
return toml_parse_one_line_basic_str(src, pos)
|
1063
|
+
raise toml_suffixed_err(src, pos, 'Invalid initial character for a key part')
|
1064
|
+
|
1065
|
+
|
1066
|
+
def toml_parse_one_line_basic_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
|
1067
|
+
pos += 1
|
1068
|
+
return toml_parse_basic_str(src, pos, multiline=False)
|
1069
|
+
|
1070
|
+
|
1071
|
+
def toml_parse_array(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, list]:
|
1072
|
+
pos += 1
|
1073
|
+
array: list = []
|
1074
|
+
|
1075
|
+
pos = toml_skip_comments_and_array_ws(src, pos)
|
1076
|
+
if src.startswith(']', pos):
|
1077
|
+
return pos + 1, array
|
1078
|
+
while True:
|
1079
|
+
pos, val = toml_parse_value(src, pos, parse_float)
|
1080
|
+
array.append(val)
|
1081
|
+
pos = toml_skip_comments_and_array_ws(src, pos)
|
1082
|
+
|
1083
|
+
c = src[pos:pos + 1]
|
1084
|
+
if c == ']':
|
1085
|
+
return pos + 1, array
|
1086
|
+
if c != ',':
|
1087
|
+
raise toml_suffixed_err(src, pos, 'Unclosed array')
|
1088
|
+
pos += 1
|
1089
|
+
|
1090
|
+
pos = toml_skip_comments_and_array_ws(src, pos)
|
1091
|
+
if src.startswith(']', pos):
|
1092
|
+
return pos + 1, array
|
1093
|
+
|
1094
|
+
|
1095
|
+
def toml_parse_inline_table(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, dict]:
|
1096
|
+
pos += 1
|
1097
|
+
nested_dict = TomlNestedDict()
|
1098
|
+
flags = TomlFlags()
|
1099
|
+
|
1100
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
1101
|
+
if src.startswith('}', pos):
|
1102
|
+
return pos + 1, nested_dict.dict
|
1103
|
+
while True:
|
1104
|
+
pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
|
1105
|
+
key_parent, key_stem = key[:-1], key[-1]
|
1106
|
+
if flags.is_(key, TomlFlags.FROZEN):
|
1107
|
+
raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
|
1108
|
+
try:
|
1109
|
+
nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
|
1110
|
+
except KeyError:
|
1111
|
+
raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
|
1112
|
+
if key_stem in nest:
|
1113
|
+
raise toml_suffixed_err(src, pos, f'Duplicate inline table key {key_stem!r}')
|
1114
|
+
nest[key_stem] = value
|
1115
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
1116
|
+
c = src[pos:pos + 1]
|
1117
|
+
if c == '}':
|
1118
|
+
return pos + 1, nested_dict.dict
|
1119
|
+
if c != ',':
|
1120
|
+
raise toml_suffixed_err(src, pos, 'Unclosed inline table')
|
1121
|
+
if isinstance(value, (dict, list)):
|
1122
|
+
flags.set(key, TomlFlags.FROZEN, recursive=True)
|
1123
|
+
pos += 1
|
1124
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
1125
|
+
|
1126
|
+
|
1127
|
+
def toml_parse_basic_str_escape(
|
1128
|
+
src: str,
|
1129
|
+
pos: TomlPos,
|
1130
|
+
*,
|
1131
|
+
multiline: bool = False,
|
1132
|
+
) -> ta.Tuple[TomlPos, str]:
|
1133
|
+
escape_id = src[pos:pos + 2]
|
1134
|
+
pos += 2
|
1135
|
+
if multiline and escape_id in {'\\ ', '\\\t', '\\\n'}:
|
1136
|
+
# Skip whitespace until next non-whitespace character or end of the doc. Error if non-whitespace is found before
|
1137
|
+
# newline.
|
1138
|
+
if escape_id != '\\\n':
|
1139
|
+
pos = toml_skip_chars(src, pos, TOML_WS)
|
1140
|
+
try:
|
1141
|
+
char = src[pos]
|
1142
|
+
except IndexError:
|
1143
|
+
return pos, ''
|
1144
|
+
if char != '\n':
|
1145
|
+
raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string")
|
1146
|
+
pos += 1
|
1147
|
+
pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
|
1148
|
+
return pos, ''
|
1149
|
+
if escape_id == '\\u':
|
1150
|
+
return toml_parse_hex_char(src, pos, 4)
|
1151
|
+
if escape_id == '\\U':
|
1152
|
+
return toml_parse_hex_char(src, pos, 8)
|
1153
|
+
try:
|
1154
|
+
return pos, TOML_BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
|
1155
|
+
except KeyError:
|
1156
|
+
raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string") from None
|
1157
|
+
|
1158
|
+
|
1159
|
+
def toml_parse_basic_str_escape_multiline(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
|
1160
|
+
return toml_parse_basic_str_escape(src, pos, multiline=True)
|
1161
|
+
|
1162
|
+
|
1163
|
+
def toml_parse_hex_char(src: str, pos: TomlPos, hex_len: int) -> ta.Tuple[TomlPos, str]:
|
1164
|
+
hex_str = src[pos:pos + hex_len]
|
1165
|
+
if len(hex_str) != hex_len or not TOML_HEXDIGIT_CHARS.issuperset(hex_str):
|
1166
|
+
raise toml_suffixed_err(src, pos, 'Invalid hex value')
|
1167
|
+
pos += hex_len
|
1168
|
+
hex_int = int(hex_str, 16)
|
1169
|
+
if not toml_is_unicode_scalar_value(hex_int):
|
1170
|
+
raise toml_suffixed_err(src, pos, 'Escaped character is not a Unicode scalar value')
|
1171
|
+
return pos, chr(hex_int)
|
1172
|
+
|
1173
|
+
|
1174
|
+
def toml_parse_literal_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
|
1175
|
+
pos += 1 # Skip starting apostrophe
|
1176
|
+
start_pos = pos
|
1177
|
+
pos = toml_skip_until(
|
1178
|
+
src, pos, "'", error_on=TOML_ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True,
|
1179
|
+
)
|
1180
|
+
return pos + 1, src[start_pos:pos] # Skip ending apostrophe
|
1181
|
+
|
1182
|
+
|
1183
|
+
def toml_parse_multiline_str(src: str, pos: TomlPos, *, literal: bool) -> ta.Tuple[TomlPos, str]:
|
1184
|
+
pos += 3
|
1185
|
+
if src.startswith('\n', pos):
|
1186
|
+
pos += 1
|
1187
|
+
|
1188
|
+
if literal:
|
1189
|
+
delim = "'"
|
1190
|
+
end_pos = toml_skip_until(
|
1191
|
+
src,
|
1192
|
+
pos,
|
1193
|
+
"'''",
|
1194
|
+
error_on=TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
|
1195
|
+
error_on_eof=True,
|
1196
|
+
)
|
1197
|
+
result = src[pos:end_pos]
|
1198
|
+
pos = end_pos + 3
|
1199
|
+
else:
|
1200
|
+
delim = '"'
|
1201
|
+
pos, result = toml_parse_basic_str(src, pos, multiline=True)
|
1202
|
+
|
1203
|
+
# Add at maximum two extra apostrophes/quotes if the end sequence is 4 or 5 chars long instead of just 3.
|
1204
|
+
if not src.startswith(delim, pos):
|
1205
|
+
return pos, result
|
1206
|
+
pos += 1
|
1207
|
+
if not src.startswith(delim, pos):
|
1208
|
+
return pos, result + delim
|
1209
|
+
pos += 1
|
1210
|
+
return pos, result + (delim * 2)
|
1211
|
+
|
1212
|
+
|
1213
|
+
def toml_parse_basic_str(src: str, pos: TomlPos, *, multiline: bool) -> ta.Tuple[TomlPos, str]:
|
1214
|
+
if multiline:
|
1215
|
+
error_on = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS
|
1216
|
+
parse_escapes = toml_parse_basic_str_escape_multiline
|
1217
|
+
else:
|
1218
|
+
error_on = TOML_ILLEGAL_BASIC_STR_CHARS
|
1219
|
+
parse_escapes = toml_parse_basic_str_escape
|
1220
|
+
result = ''
|
1221
|
+
start_pos = pos
|
1222
|
+
while True:
|
1223
|
+
try:
|
1224
|
+
char = src[pos]
|
1225
|
+
except IndexError:
|
1226
|
+
raise toml_suffixed_err(src, pos, 'Unterminated string') from None
|
1227
|
+
if char == '"':
|
1228
|
+
if not multiline:
|
1229
|
+
return pos + 1, result + src[start_pos:pos]
|
1230
|
+
if src.startswith('"""', pos):
|
1231
|
+
return pos + 3, result + src[start_pos:pos]
|
1232
|
+
pos += 1
|
1233
|
+
continue
|
1234
|
+
if char == '\\':
|
1235
|
+
result += src[start_pos:pos]
|
1236
|
+
pos, parsed_escape = parse_escapes(src, pos)
|
1237
|
+
result += parsed_escape
|
1238
|
+
start_pos = pos
|
1239
|
+
continue
|
1240
|
+
if char in error_on:
|
1241
|
+
raise toml_suffixed_err(src, pos, f'Illegal character {char!r}')
|
1242
|
+
pos += 1
|
1243
|
+
|
1244
|
+
|
1245
|
+
def toml_parse_value( # noqa: C901
|
1246
|
+
src: str,
|
1247
|
+
pos: TomlPos,
|
1248
|
+
parse_float: TomlParseFloat,
|
1249
|
+
) -> ta.Tuple[TomlPos, ta.Any]:
|
1250
|
+
try:
|
1251
|
+
char: ta.Optional[str] = src[pos]
|
1252
|
+
except IndexError:
|
1253
|
+
char = None
|
1254
|
+
|
1255
|
+
# IMPORTANT: order conditions based on speed of checking and likelihood
|
1256
|
+
|
1257
|
+
# Basic strings
|
1258
|
+
if char == '"':
|
1259
|
+
if src.startswith('"""', pos):
|
1260
|
+
return toml_parse_multiline_str(src, pos, literal=False)
|
1261
|
+
return toml_parse_one_line_basic_str(src, pos)
|
1262
|
+
|
1263
|
+
# Literal strings
|
1264
|
+
if char == "'":
|
1265
|
+
if src.startswith("'''", pos):
|
1266
|
+
return toml_parse_multiline_str(src, pos, literal=True)
|
1267
|
+
return toml_parse_literal_str(src, pos)
|
1268
|
+
|
1269
|
+
# Booleans
|
1270
|
+
if char == 't':
|
1271
|
+
if src.startswith('true', pos):
|
1272
|
+
return pos + 4, True
|
1273
|
+
if char == 'f':
|
1274
|
+
if src.startswith('false', pos):
|
1275
|
+
return pos + 5, False
|
1276
|
+
|
1277
|
+
# Arrays
|
1278
|
+
if char == '[':
|
1279
|
+
return toml_parse_array(src, pos, parse_float)
|
1280
|
+
|
1281
|
+
# Inline tables
|
1282
|
+
if char == '{':
|
1283
|
+
return toml_parse_inline_table(src, pos, parse_float)
|
1284
|
+
|
1285
|
+
# Dates and times
|
1286
|
+
datetime_match = TOML_RE_DATETIME.match(src, pos)
|
1287
|
+
if datetime_match:
|
1288
|
+
try:
|
1289
|
+
datetime_obj = toml_match_to_datetime(datetime_match)
|
1290
|
+
except ValueError as e:
|
1291
|
+
raise toml_suffixed_err(src, pos, 'Invalid date or datetime') from e
|
1292
|
+
return datetime_match.end(), datetime_obj
|
1293
|
+
localtime_match = TOML_RE_LOCALTIME.match(src, pos)
|
1294
|
+
if localtime_match:
|
1295
|
+
return localtime_match.end(), toml_match_to_localtime(localtime_match)
|
1296
|
+
|
1297
|
+
# Integers and "normal" floats. The regex will greedily match any type starting with a decimal char, so needs to be
|
1298
|
+
# located after handling of dates and times.
|
1299
|
+
number_match = TOML_RE_NUMBER.match(src, pos)
|
1300
|
+
if number_match:
|
1301
|
+
return number_match.end(), toml_match_to_number(number_match, parse_float)
|
1302
|
+
|
1303
|
+
# Special floats
|
1304
|
+
first_three = src[pos:pos + 3]
|
1305
|
+
if first_three in {'inf', 'nan'}:
|
1306
|
+
return pos + 3, parse_float(first_three)
|
1307
|
+
first_four = src[pos:pos + 4]
|
1308
|
+
if first_four in {'-inf', '+inf', '-nan', '+nan'}:
|
1309
|
+
return pos + 4, parse_float(first_four)
|
1310
|
+
|
1311
|
+
raise toml_suffixed_err(src, pos, 'Invalid value')
|
1312
|
+
|
1313
|
+
|
1314
|
+
def toml_suffixed_err(src: str, pos: TomlPos, msg: str) -> TomlDecodeError:
|
1315
|
+
"""Return a `TomlDecodeError` where error message is suffixed with coordinates in source."""
|
1316
|
+
|
1317
|
+
def coord_repr(src: str, pos: TomlPos) -> str:
|
1318
|
+
if pos >= len(src):
|
1319
|
+
return 'end of document'
|
1320
|
+
line = src.count('\n', 0, pos) + 1
|
1321
|
+
if line == 1:
|
1322
|
+
column = pos + 1
|
1323
|
+
else:
|
1324
|
+
column = pos - src.rindex('\n', 0, pos)
|
1325
|
+
return f'line {line}, column {column}'
|
1326
|
+
|
1327
|
+
return TomlDecodeError(f'{msg} (at {coord_repr(src, pos)})')
|
1328
|
+
|
1329
|
+
|
1330
|
+
def toml_is_unicode_scalar_value(codepoint: int) -> bool:
|
1331
|
+
return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111)
|
1332
|
+
|
1333
|
+
|
1334
|
+
def toml_make_safe_parse_float(parse_float: TomlParseFloat) -> TomlParseFloat:
|
1335
|
+
"""A decorator to make `parse_float` safe.
|
1336
|
+
|
1337
|
+
`parse_float` must not return dicts or lists, because these types would be mixed with parsed TOML tables and arrays,
|
1338
|
+
thus confusing the parser. The returned decorated callable raises `ValueError` instead of returning illegal types.
|
1339
|
+
"""
|
1340
|
+
# The default `float` callable never returns illegal types. Optimize it.
|
1341
|
+
if parse_float is float:
|
1342
|
+
return float
|
1343
|
+
|
1344
|
+
def safe_parse_float(float_str: str) -> ta.Any:
|
1345
|
+
float_value = parse_float(float_str)
|
1346
|
+
if isinstance(float_value, (dict, list)):
|
1347
|
+
raise ValueError('parse_float must not return dicts or lists') # noqa
|
1348
|
+
return float_value
|
1349
|
+
|
1350
|
+
return safe_parse_float
|
1351
|
+
|
1352
|
+
|
526
1353
|
########################################
|
527
1354
|
# ../config.py
|
528
1355
|
|
@@ -538,6 +1365,9 @@ class MainConfig:
 # ../deploy/config.py
 
 
+##
+
+
 @dc.dataclass(frozen=True)
 class DeployConfig:
     deploy_home: ta.Optional[str] = None
@@ -712,7 +1542,7 @@ def _pyremote_bootstrap_main(context_name: str) -> None:
     # Get pid
     pid = os.getpid()
 
-    # Two copies of
+    # Two copies of payload src to be sent to parent
     r0, w0 = os.pipe()
     r1, w1 = os.pipe()
 
@@ -751,17 +1581,17 @@ def _pyremote_bootstrap_main(context_name: str) -> None:
         # Write pid
         os.write(1, struct.pack('<Q', pid))
 
-        # Read
-
-        if len(
+        # Read payload src from stdin
+        payload_z_len = struct.unpack('<I', os.read(0, 4))[0]
+        if len(payload_z := os.fdopen(0, 'rb').read(payload_z_len)) != payload_z_len:
             raise EOFError
-
+        payload_src = zlib.decompress(payload_z)
 
-        # Write both copies of
-        # and block and need to be drained by pyremote_bootstrap_finalize running in parent.
+        # Write both copies of payload src. Must write to w0 (parent stdin) before w1 (copy pipe) as pipe will likely
+        # fill and block and need to be drained by pyremote_bootstrap_finalize running in parent.
         for w in [w0, w1]:
             fp = os.fdopen(w, 'wb', 0)
-            fp.write(
+            fp.write(payload_src)
             fp.close()
 
         # Write second ack
@@ -825,7 +1655,7 @@ class PyremotePayloadRuntime:
     input: ta.BinaryIO
     output: ta.BinaryIO
     context_name: str
-
+    payload_src: str
     options: PyremoteBootstrapOptions
     env_info: PyremoteEnvInfo
 
@@ -833,9 +1663,9 @@ class PyremotePayloadRuntime:
 def pyremote_bootstrap_finalize() -> PyremotePayloadRuntime:
     # If src file var is not present we need to do initial finalization
     if _PYREMOTE_BOOTSTRAP_SRC_FILE_VAR not in os.environ:
-        # Read second copy of
+        # Read second copy of payload src
         r1 = os.fdopen(_PYREMOTE_BOOTSTRAP_SRC_FD, 'rb', 0)
-
+        payload_src = r1.read().decode('utf-8')
         r1.close()
 
         # Reap boostrap child. Must be done after reading second copy of source because source may be too big to fit in
@@ -853,7 +1683,7 @@ def pyremote_bootstrap_finalize() -> PyremotePayloadRuntime:
         # Write temp source file
         import tempfile
         tfd, tfn = tempfile.mkstemp('-pyremote.py')
-        os.write(tfd,
+        os.write(tfd, payload_src.encode('utf-8'))
         os.close(tfd)
 
         # Set vars
@@ -872,7 +1702,7 @@ def pyremote_bootstrap_finalize() -> PyremotePayloadRuntime:
 
     # Read temp source file
     with open(os.environ.pop(_PYREMOTE_BOOTSTRAP_SRC_FILE_VAR)) as sf:
-
+        payload_src = sf.read()
 
     # Restore vars
     sys.executable = os.environ.pop(_PYREMOTE_BOOTSTRAP_ARGV0_VAR)
@@ -905,7 +1735,7 @@ def pyremote_bootstrap_finalize() -> PyremotePayloadRuntime:
         input=input,
         output=output,
         context_name=context_name,
-
+        payload_src=payload_src,
         options=options,
         env_info=env_info,
     )
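The renames in the hunks above spell out the bootstrap's wire framing: a 4-byte little-endian length prefix followed by the zlib-compressed payload source, which the bootstrapped child reads back and decompresses. A self-contained sketch of that framing (variable names here are illustrative):

import struct
import zlib

payload_src = "print('hello from the remote side')"
payload_z = zlib.compress(payload_src.encode('utf-8'))

# Sender side, as in PyremoteBootstrapDriver below: length prefix, then compressed body.
frame = struct.pack('<I', len(payload_z)) + payload_z

# Receiver side, as in _pyremote_bootstrap_main above: read the length, then the body.
(payload_z_len,) = struct.unpack('<I', frame[:4])
assert zlib.decompress(frame[4:4 + payload_z_len]).decode('utf-8') == payload_src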
@@ -917,31 +1747,31 @@ def pyremote_bootstrap_finalize() -> PyremotePayloadRuntime:
 class PyremoteBootstrapDriver:
     def __init__(
             self,
-
+            payload_src: ta.Union[str, ta.Sequence[str]],
             options: PyremoteBootstrapOptions = PyremoteBootstrapOptions(),
     ) -> None:
         super().__init__()
 
-        self.
+        self._payload_src = payload_src
         self._options = options
 
-        self.
-        self.
+        self._prepared_payload_src = self._prepare_payload_src(payload_src, options)
+        self._payload_z = zlib.compress(self._prepared_payload_src.encode('utf-8'))
 
         self._options_json = json.dumps(dc.asdict(options), indent=None, separators=(',', ':')).encode('utf-8')  # noqa
     #
 
     @classmethod
-    def
+    def _prepare_payload_src(
             cls,
-
+            payload_src: ta.Union[str, ta.Sequence[str]],
             options: PyremoteBootstrapOptions,
     ) -> str:
         parts: ta.List[str]
-        if isinstance(
-            parts = [
+        if isinstance(payload_src, str):
+            parts = [payload_src]
         else:
-            parts = list(
+            parts = list(payload_src)
 
         if (mn := options.main_name_override) is not None:
             parts.insert(0, f'__name__ = {mn!r}')
@@ -977,9 +1807,9 @@ class PyremoteBootstrapDriver:
         d = yield from self._read(8)
         pid = struct.unpack('<Q', d)[0]
 
-        # Write
-        yield from self._write(struct.pack('<I', len(self.
-        yield from self._write(self.
+        # Write payload src
+        yield from self._write(struct.pack('<I', len(self._payload_z)))
+        yield from self._write(self._payload_z)
 
         # Read second ack (after writing src copies)
         yield from self._expect(_PYREMOTE_BOOTSTRAP_ACK1)
@@ -1208,8 +2038,8 @@ class _CachedNullary(_AbstractCachedNullary):
         return self._value
 
 
-def cached_nullary(fn
-    return _CachedNullary(fn)
+def cached_nullary(fn: CallableT) -> CallableT:
+    return _CachedNullary(fn)  # type: ignore
 
 
 def static_init(fn: CallableT) -> CallableT:
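For context on the newly annotated decorator: cached_nullary wraps a zero-argument callable in _CachedNullary, which computes the value once and hands back the memoized result on later calls. A small illustrative example:

@cached_nullary
def load_settings() -> dict:
    print('loading')        # runs only on the first call
    return {'debug': True}

load_settings()   # prints 'loading' and returns {'debug': True}
load_settings()   # returns the cached dict without re-running the body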
@@ -1713,6 +2543,13 @@ json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON
 json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
 
 
+########################################
+# ../../../omlish/lite/logs.py
+
+
+log = logging.getLogger(__name__)
+
+
 ########################################
 # ../../../omlish/lite/maybes.py
 
@@ -1928,83 +2765,193 @@ def format_num_bytes(num_bytes: int) -> str:
|
|
1928
2765
|
|
1929
2766
|
|
1930
2767
|
########################################
|
1931
|
-
# ../../../omlish/
|
2768
|
+
# ../../../omlish/logs/filters.py
|
1932
2769
|
|
1933
2770
|
|
1934
|
-
|
1935
|
-
|
1936
|
-
|
2771
|
+
class TidLogFilter(logging.Filter):
|
2772
|
+
def filter(self, record):
|
2773
|
+
record.tid = threading.get_native_id()
|
2774
|
+
return True
|
1937
2775
|
|
1938
|
-
def set_process_deathsig(sig: int) -> bool:
|
1939
|
-
if sys.platform == 'linux':
|
1940
|
-
libc = ct.CDLL('libc.so.6')
|
1941
2776
|
|
1942
|
-
|
1943
|
-
|
1944
|
-
libc.prctl.argtypes = [ct.c_int, ct.c_ulong, ct.c_ulong, ct.c_ulong, ct.c_ulong]
|
2777
|
+
########################################
|
2778
|
+
# ../../../omlish/logs/proxy.py
|
1945
2779
|
|
1946
|
-
libc.prctl(LINUX_PR_SET_PDEATHSIG, sig, 0, 0, 0, 0)
|
1947
2780
|
|
1948
|
-
|
2781
|
+
class ProxyLogFilterer(logging.Filterer):
|
2782
|
+
def __init__(self, underlying: logging.Filterer) -> None: # noqa
|
2783
|
+
self._underlying = underlying
|
1949
2784
|
|
1950
|
-
|
1951
|
-
|
2785
|
+
@property
|
2786
|
+
def underlying(self) -> logging.Filterer:
|
2787
|
+
return self._underlying
|
1952
2788
|
|
2789
|
+
@property
|
2790
|
+
def filters(self):
|
2791
|
+
return self._underlying.filters
|
1953
2792
|
|
1954
|
-
|
1955
|
-
|
1956
|
-
|
1957
|
-
➜ ~ cat /etc/os-release
|
1958
|
-
NAME="Amazon Linux"
|
1959
|
-
VERSION="2"
|
1960
|
-
ID="amzn"
|
1961
|
-
ID_LIKE="centos rhel fedora"
|
1962
|
-
VERSION_ID="2"
|
1963
|
-
PRETTY_NAME="Amazon Linux 2"
|
2793
|
+
@filters.setter
|
2794
|
+
def filters(self, filters):
|
2795
|
+
self._underlying.filters = filters
|
1964
2796
|
|
1965
|
-
|
1966
|
-
|
1967
|
-
NAME="Ubuntu"
|
1968
|
-
VERSION_ID="22.04"
|
1969
|
-
VERSION="22.04.5 LTS (Jammy Jellyfish)"
|
1970
|
-
VERSION_CODENAME=jammy
|
1971
|
-
ID=ubuntu
|
1972
|
-
ID_LIKE=debian
|
1973
|
-
UBUNTU_CODENAME=jammy
|
2797
|
+
def addFilter(self, filter): # noqa
|
2798
|
+
self._underlying.addFilter(filter)
|
1974
2799
|
|
1975
|
-
|
1976
|
-
|
1977
|
-
NAME="Debian GNU/Linux"
|
1978
|
-
VERSION_ID="12"
|
1979
|
-
VERSION="12 (bookworm)"
|
1980
|
-
VERSION_CODENAME=bookworm
|
1981
|
-
ID=debian
|
1982
|
-
"""
|
2800
|
+
def removeFilter(self, filter): # noqa
|
2801
|
+
self._underlying.removeFilter(filter)
|
1983
2802
|
|
2803
|
+
def filter(self, record):
|
2804
|
+
return self._underlying.filter(record)
|
1984
2805
|
|
1985
|
-
@dc.dataclass(frozen=True)
|
1986
|
-
class LinuxOsRelease:
|
1987
|
-
"""
|
1988
|
-
https://man7.org/linux/man-pages/man5/os-release.5.html
|
1989
|
-
"""
|
1990
2806
|
|
1991
|
-
|
2807
|
+
class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
|
2808
|
+
def __init__(self, underlying: logging.Handler) -> None: # noqa
|
2809
|
+
ProxyLogFilterer.__init__(self, underlying)
|
1992
2810
|
|
1993
|
-
|
2811
|
+
_underlying: logging.Handler
|
1994
2812
|
|
1995
2813
|
@property
|
1996
|
-
def
|
1997
|
-
|
1998
|
-
A string identifying the operating system, without a version component, and suitable for presentation to the
|
1999
|
-
user. If not set, a default of "NAME=Linux" may be used.
|
2814
|
+
def underlying(self) -> logging.Handler:
|
2815
|
+
return self._underlying
|
2000
2816
|
|
2001
|
-
|
2002
|
-
|
2817
|
+
def get_name(self):
|
2818
|
+
return self._underlying.get_name()
|
2003
2819
|
|
2004
|
-
|
2820
|
+
def set_name(self, name):
|
2821
|
+
self._underlying.set_name(name)
|
2005
2822
|
|
2006
2823
|
@property
|
2007
|
-
def
|
2824
|
+
def name(self):
|
2825
|
+
return self._underlying.name
|
2826
|
+
|
2827
|
+
@property
|
2828
|
+
def level(self):
|
2829
|
+
return self._underlying.level
|
2830
|
+
|
2831
|
+
@level.setter
|
2832
|
+
def level(self, level):
|
2833
|
+
self._underlying.level = level
|
2834
|
+
|
2835
|
+
@property
|
2836
|
+
def formatter(self):
|
2837
|
+
return self._underlying.formatter
|
2838
|
+
|
2839
|
+
@formatter.setter
|
2840
|
+
def formatter(self, formatter):
|
2841
|
+
self._underlying.formatter = formatter
|
2842
|
+
|
2843
|
+
def createLock(self):
|
2844
|
+
self._underlying.createLock()
|
2845
|
+
|
2846
|
+
def acquire(self):
|
2847
|
+
self._underlying.acquire()
|
2848
|
+
|
2849
|
+
def release(self):
|
2850
|
+
self._underlying.release()
|
2851
|
+
|
2852
|
+
def setLevel(self, level):
|
2853
|
+
self._underlying.setLevel(level)
|
2854
|
+
|
2855
|
+
def format(self, record):
|
2856
|
+
return self._underlying.format(record)
|
2857
|
+
|
2858
|
+
def emit(self, record):
|
2859
|
+
self._underlying.emit(record)
|
2860
|
+
|
2861
|
+
def handle(self, record):
|
2862
|
+
return self._underlying.handle(record)
|
2863
|
+
|
2864
|
+
def setFormatter(self, fmt):
|
2865
|
+
self._underlying.setFormatter(fmt)
|
2866
|
+
|
2867
|
+
def flush(self):
|
2868
|
+
self._underlying.flush()
|
2869
|
+
|
2870
|
+
def close(self):
|
2871
|
+
self._underlying.close()
|
2872
|
+
|
2873
|
+
def handleError(self, record):
|
2874
|
+
self._underlying.handleError(record)
|
2875
|
+
|
2876
|
+
|
2877
|
+
########################################
|
2878
|
+
# ../../../omlish/os/deathsig.py
|
2879
|
+
|
2880
|
+
|
2881
|
+
LINUX_PR_SET_PDEATHSIG = 1 # Second arg is a signal
|
2882
|
+
LINUX_PR_GET_PDEATHSIG = 2 # Second arg is a ptr to return the signal
|
2883
|
+
|
2884
|
+
|
2885
|
+
def set_process_deathsig(sig: int) -> bool:
|
2886
|
+
if sys.platform == 'linux':
|
2887
|
+
libc = ct.CDLL('libc.so.6')
|
2888
|
+
|
2889
|
+
# int prctl(int option, unsigned long arg2, unsigned long arg3, unsigned long arg4, unsigned long arg5);
|
2890
|
+
libc.prctl.restype = ct.c_int
|
2891
|
+
libc.prctl.argtypes = [ct.c_int, ct.c_ulong, ct.c_ulong, ct.c_ulong, ct.c_ulong]
|
2892
|
+
|
2893
|
+
libc.prctl(LINUX_PR_SET_PDEATHSIG, sig, 0, 0, 0, 0)
|
2894
|
+
|
2895
|
+
return True
|
2896
|
+
|
2897
|
+
else:
|
2898
|
+
return False
|
2899
|
+
|
2900
|
+
|
2901
|
+
########################################
|
2902
|
+
# ../../../omlish/os/linux.py
|
2903
|
+
"""
|
2904
|
+
➜ ~ cat /etc/os-release
|
2905
|
+
NAME="Amazon Linux"
|
2906
|
+
VERSION="2"
|
2907
|
+
ID="amzn"
|
2908
|
+
ID_LIKE="centos rhel fedora"
|
2909
|
+
VERSION_ID="2"
|
2910
|
+
PRETTY_NAME="Amazon Linux 2"
|
2911
|
+
|
2912
|
+
➜ ~ cat /etc/os-release
|
2913
|
+
PRETTY_NAME="Ubuntu 22.04.5 LTS"
|
2914
|
+
NAME="Ubuntu"
|
2915
|
+
VERSION_ID="22.04"
|
2916
|
+
VERSION="22.04.5 LTS (Jammy Jellyfish)"
|
2917
|
+
VERSION_CODENAME=jammy
|
2918
|
+
ID=ubuntu
|
2919
|
+
ID_LIKE=debian
|
2920
|
+
UBUNTU_CODENAME=jammy
|
2921
|
+
|
2922
|
+
➜ omlish git:(master) docker run -i python:3.12 cat /etc/os-release
|
2923
|
+
PRETTY_NAME="Debian GNU/Linux 12 (bookworm)"
|
2924
|
+
NAME="Debian GNU/Linux"
|
2925
|
+
VERSION_ID="12"
|
2926
|
+
VERSION="12 (bookworm)"
|
2927
|
+
VERSION_CODENAME=bookworm
|
2928
|
+
ID=debian
|
2929
|
+
"""
|
2930
|
+
|
2931
|
+
|
2932
|
+
@dc.dataclass(frozen=True)
|
2933
|
+
class LinuxOsRelease:
|
2934
|
+
"""
|
2935
|
+
https://man7.org/linux/man-pages/man5/os-release.5.html
|
2936
|
+
"""
|
2937
|
+
|
2938
|
+
raw: ta.Mapping[str, str]
|
2939
|
+
|
2940
|
+
# General information identifying the operating system
|
2941
|
+
|
2942
|
+
@property
|
2943
|
+
def name(self) -> str:
|
2944
|
+
"""
|
2945
|
+
A string identifying the operating system, without a version component, and suitable for presentation to the
|
2946
|
+
user. If not set, a default of "NAME=Linux" may be used.
|
2947
|
+
|
2948
|
+
Examples: "NAME=Fedora", "NAME="Debian GNU/Linux"".
|
2949
|
+
"""
|
2950
|
+
|
2951
|
+
return self.raw['NAME']
|
2952
|
+
|
2953
|
+
@property
|
2954
|
+
def id(self) -> str:
|
2008
2955
|
"""
|
2009
2956
|
A lower-case string (no spaces or other characters outside of 0-9, a-z, ".", "_" and "-") identifying the
|
2010
2957
|
operating system, excluding any version information and suitable for processing by scripts or usage in generated
|
@@ -3112,22 +4059,22 @@ def build_command_name_map(crs: CommandRegistrations) -> CommandNameMap:
 ~deploy
   deploy.pid (flock)
   /app
-    /<
+    /<appplaceholder> - shallow clone
   /conf
     /env
-      <
+      <appplaceholder>.env
     /nginx
-      <
+      <appplaceholder>.conf
     /supervisor
-      <
+      <appplaceholder>.conf
   /venv
-    /<
+    /<appplaceholder>
 
 ?
   /logs
-    /wrmsr--omlish--<
+    /wrmsr--omlish--<placeholder>
 
-
+placeholder = <name>--<rev>--<when>
 
 ==
 
@@ -3148,10 +4095,10 @@ for dn in [
 ##
 
 
-
-
+DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER = '@'
+DEPLOY_PATH_PLACEHOLDER_SEPARATORS = '-.'
 
-
+DEPLOY_PATH_PLACEHOLDERS: ta.FrozenSet[str] = frozenset([
     'app',
     'tag',  # <rev>-<dt>
 ])
@@ -3169,7 +4116,7 @@ class DeployPathPart(abc.ABC):  # noqa
         raise NotImplementedError
 
     @abc.abstractmethod
-    def render(self,
+    def render(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
         raise NotImplementedError
 
 
@@ -3183,9 +4130,9 @@ class DirDeployPathPart(DeployPathPart, abc.ABC):
 
     @classmethod
     def parse(cls, s: str) -> 'DirDeployPathPart':
-        if
-            check.equal(s[0],
-            return
+        if DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER in s:
+            check.equal(s[0], DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER)
+            return PlaceholderDirDeployPathPart(s[1:])
         else:
             return ConstDirDeployPathPart(s)
 
@@ -3197,13 +4144,13 @@ class FileDeployPathPart(DeployPathPart, abc.ABC):
 
     @classmethod
     def parse(cls, s: str) -> 'FileDeployPathPart':
-        if
-            check.equal(s[0],
-            if not any(c in s for c in
-                return
+        if DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER in s:
+            check.equal(s[0], DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER)
+            if not any(c in s for c in DEPLOY_PATH_PLACEHOLDER_SEPARATORS):
+                return PlaceholderFileDeployPathPart(s[1:], '')
             else:
-                p = min(f for c in
-                return
+                p = min(f for c in DEPLOY_PATH_PLACEHOLDER_SEPARATORS if (f := s.find(c)) > 0)
+                return PlaceholderFileDeployPathPart(s[1:p], s[p:])
         else:
             return ConstFileDeployPathPart(s)
 
@@ -3218,9 +4165,9 @@ class ConstDeployPathPart(DeployPathPart, abc.ABC):
     def __post_init__(self) -> None:
         check.non_empty_str(self.name)
         check.not_in('/', self.name)
-        check.not_in(
+        check.not_in(DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER, self.name)
 
-    def render(self,
+    def render(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
         return self.name
 
 
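With the placeholder constants above, the parse() classmethods dispatch on a leading '@'. A short sketch of the expected results; the Placeholder* part classes named here are defined just below:

DirDeployPathPart.parse('@app')          # PlaceholderDirDeployPathPart('app')
DirDeployPathPart.parse('conf')          # ConstDirDeployPathPart('conf')
FileDeployPathPart.parse('@app.env')     # PlaceholderFileDeployPathPart('app', '.env')
FileDeployPathPart.parse('deploy.pid')   # ConstFileDeployPathPart('deploy.pid')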
@@ -3236,40 +4183,40 @@ class ConstFileDeployPathPart(ConstDeployPathPart, FileDeployPathPart):
 
 
 @dc.dataclass(frozen=True)
-class
-
+class PlaceholderDeployPathPart(DeployPathPart, abc.ABC):
+    placeholder: str  # DeployPathPlaceholder
 
     def __post_init__(self) -> None:
-        check.non_empty_str(self.
-        for c in [*
-            check.not_in(c, self.
-        check.in_(self.
-
-    def
-        if
-            return
+        check.non_empty_str(self.placeholder)
+        for c in [*DEPLOY_PATH_PLACEHOLDER_SEPARATORS, DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER, '/']:
+            check.not_in(c, self.placeholder)
+        check.in_(self.placeholder, DEPLOY_PATH_PLACEHOLDERS)
+
+    def _render_placeholder(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
+        if placeholders is not None:
+            return placeholders[self.placeholder]  # type: ignore
         else:
-            return
+            return DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER + self.placeholder
 
 
 @dc.dataclass(frozen=True)
-class
-    def render(self,
-        return self.
+class PlaceholderDirDeployPathPart(PlaceholderDeployPathPart, DirDeployPathPart):
+    def render(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
+        return self._render_placeholder(placeholders)
 
 
 @dc.dataclass(frozen=True)
-class
+class PlaceholderFileDeployPathPart(PlaceholderDeployPathPart, FileDeployPathPart):
     suffix: str
 
     def __post_init__(self) -> None:
         super().__post_init__()
         if self.suffix:
-            for c in [
+            for c in [DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER, '/']:
                 check.not_in(c, self.suffix)
 
-    def render(self,
-        return self.
+    def render(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
+        return self._render_placeholder(placeholders) + self.suffix
 
 
 ##
@@ -3286,22 +4233,22 @@ class DeployPath:

        pd = {}
        for i, p in enumerate(self.parts):
+            if isinstance(p, PlaceholderDeployPathPart):
+                if p.placeholder in pd:
+                    raise DeployPathError('Duplicate placeholders in path', self)
+                pd[p.placeholder] = i

        if 'tag' in pd:
            if 'app' not in pd or pd['app'] >= pd['tag']:
+                raise DeployPathError('Tag placeholder in path without preceding app', self)

    @property
    def kind(self) -> ta.Literal['file', 'dir']:
        return self.parts[-1].kind

+    def render(self, placeholders: ta.Optional[ta.Mapping[DeployPathPlaceholder, str]] = None) -> str:
        return os.path.join(  # noqa
+            *[p.render(placeholders) for p in self.parts],
            *([''] if self.kind == 'dir' else []),
        )
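A minimal usage sketch of the placeholder-based rendering above (illustrative only: the part constructors and the DeployPath parts field are assumed from the dataclasses in this hunk, and the literal marker character is whatever DEPLOY_PATH_PLACEHOLDER_PLACEHOLDER is defined as elsewhere in the file):

    # Illustrative sketch -- constructor shapes assumed from the diff above.
    path = DeployPath([
        ConstDirDeployPathPart('apps'),
        PlaceholderDirDeployPathPart('app'),            # filled from placeholders['app']
        PlaceholderFileDeployPathPart('tag', '.log'),   # placeholder plus a constant suffix
    ])

    path.render({'app': 'myservice', 'tag': '20240101T000000'})
    # -> 'apps/myservice/20240101T000000.log'

    path.render()
    # With no mapping given, each placeholder renders as the marker character plus its name.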
@@ -3329,6 +4276,34 @@ class DeployPathOwner(abc.ABC):
        raise NotImplementedError


+########################################
+# ../deploy/specs.py
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class DeployGitRepo:
+    host: ta.Optional[str] = None
+    username: ta.Optional[str] = None
+    path: ta.Optional[str] = None
+
+    def __post_init__(self) -> None:
+        check.not_in('..', check.non_empty_str(self.host))
+        check.not_in('.', check.non_empty_str(self.path))
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class DeploySpec:
+    app: DeployApp
+    repo: DeployGitRepo
+    rev: DeployRev
+
+
########################################
# ../remote/config.py
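DeploySpec ties an app to a git repo and revision. A construction sketch (DeployApp and DeployRev are defined elsewhere in the file and are assumed here to be string-like NewTypes; the repo values are made up):

    spec = DeploySpec(
        app=DeployApp('myservice'),             # assumed string-like NewType
        repo=DeployGitRepo(
            host='github.com',
            username='git',
            path='example-org/myservice',       # note: __post_init__ rejects '.' in path
        ),
        rev=DeployRev('main'),                  # assumed string-like NewType
    )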
@@ -3389,6 +4364,75 @@ def get_remote_payload_src(
    return importlib.resources.files(__package__.split('.')[0] + '.scripts').joinpath('manage.py').read_text()


+########################################
+# ../system/platforms.py
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class Platform(abc.ABC):  # noqa
+    pass
+
+
+class LinuxPlatform(Platform, abc.ABC):
+    pass
+
+
+class UbuntuPlatform(LinuxPlatform):
+    pass
+
+
+class AmazonLinuxPlatform(LinuxPlatform):
+    pass
+
+
+class GenericLinuxPlatform(LinuxPlatform):
+    pass
+
+
+class DarwinPlatform(Platform):
+    pass
+
+
+class UnknownPlatform(Platform):
+    pass
+
+
+##
+
+
+def _detect_system_platform() -> Platform:
+    plat = sys.platform
+
+    if plat == 'linux':
+        if (osr := LinuxOsRelease.read()) is None:
+            return GenericLinuxPlatform()
+
+        if osr.id == 'amzn':
+            return AmazonLinuxPlatform()
+
+        elif osr.id == 'ubuntu':
+            return UbuntuPlatform()
+
+        else:
+            return GenericLinuxPlatform()
+
+    elif plat == 'darwin':
+        return DarwinPlatform()
+
+    else:
+        return UnknownPlatform()
+
+
+@cached_nullary
+def detect_system_platform() -> Platform:
+    platform = _detect_system_platform()
+    log.info('Detected platform: %r', platform)
+    return platform
+
+
########################################
# ../targets/targets.py
"""
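detect_system_platform gives callers a single cached Platform instance to branch on. A sketch of how a caller might use it (the package-manager mapping is purely illustrative, not taken from the package):

    plat = detect_system_platform()   # cached after the first call; logs the detected platform

    if isinstance(plat, AmazonLinuxPlatform):
        pkg_mgr = 'yum'               # hypothetical mapping, for illustration only
    elif isinstance(plat, LinuxPlatform):
        pkg_mgr = 'apt'
    else:
        pkg_mgr = None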
@@ -3582,6 +4626,8 @@ def _get_argparse_arg_ann_kwargs(ann: ta.Any) -> ta.Mapping[str, ta.Any]:
        return {'action': 'store_true'}
    elif ann is list:
        return {'action': 'append'}
+    elif is_optional_alias(ann):
+        return _get_argparse_arg_ann_kwargs(get_optional_alias_arg(ann))
    else:
        raise TypeError(ann)
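With the new branch, an Optional annotation on an argparse-backed command field resolves to the kwargs of its inner type. An illustrative expectation (not taken from the package's tests):

    # ta.Optional[list] now resolves exactly like a plain list annotation:
    assert _get_argparse_arg_ann_kwargs(ta.Optional[list]) == {'action': 'append'}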
@@ -4711,936 +5757,805 @@ inj = Injection


########################################
+# ../../../omlish/lite/marshal.py
"""
TODO:
+ - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
+ - namedtuple
+ - literals
+ - newtypes?
"""


##


+@dc.dataclass(frozen=True)
+class ObjMarshalOptions:
+    raw_bytes: bool = False
+    nonstrict_dataclasses: bool = False


+class ObjMarshaler(abc.ABC):
+    @abc.abstractmethod
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        raise NotImplementedError

+    @abc.abstractmethod
+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        raise NotImplementedError


+class NopObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o


+@dc.dataclass()
+class ProxyObjMarshaler(ObjMarshaler):
+    m: ta.Optional[ObjMarshaler] = None

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return check.not_none(self.m).marshal(o, ctx)

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return check.not_none(self.m).unmarshal(o, ctx)


+@dc.dataclass(frozen=True)
+class CastObjMarshaler(ObjMarshaler):
+    ty: type

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return self.ty(o)


+class DynamicObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return ctx.manager.marshal_obj(o, opts=ctx.options)

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o


+@dc.dataclass(frozen=True)
+class Base64ObjMarshaler(ObjMarshaler):
+    ty: type

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return base64.b64encode(o).decode('ascii')

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return self.ty(base64.b64decode(o))


+@dc.dataclass(frozen=True)
+class BytesSwitchedObjMarshaler(ObjMarshaler):
+    m: ObjMarshaler

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        if ctx.options.raw_bytes:
+            return o
+        return self.m.marshal(o, ctx)

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        if ctx.options.raw_bytes:
+            return o
+        return self.m.unmarshal(o, ctx)


+@dc.dataclass(frozen=True)
+class EnumObjMarshaler(ObjMarshaler):
+    ty: type

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o.name

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return self.ty.__members__[o]  # type: ignore


+@dc.dataclass(frozen=True)
+class OptionalObjMarshaler(ObjMarshaler):
+    item: ObjMarshaler

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        if o is None:
+            return None
+        return self.item.marshal(o, ctx)

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        if o is None:
+            return None
+        return self.item.unmarshal(o, ctx)


+@dc.dataclass(frozen=True)
+class MappingObjMarshaler(ObjMarshaler):
+    ty: type
+    km: ObjMarshaler
+    vm: ObjMarshaler

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return {self.km.marshal(k, ctx): self.vm.marshal(v, ctx) for k, v in o.items()}

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return self.ty((self.km.unmarshal(k, ctx), self.vm.unmarshal(v, ctx)) for k, v in o.items())


+@dc.dataclass(frozen=True)
+class IterableObjMarshaler(ObjMarshaler):
+    ty: type
+    item: ObjMarshaler

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return [self.item.marshal(e, ctx) for e in o]

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return self.ty(self.item.unmarshal(e, ctx) for e in o)


+@dc.dataclass(frozen=True)
+class DataclassObjMarshaler(ObjMarshaler):
+    ty: type
+    fs: ta.Mapping[str, ObjMarshaler]
+    nonstrict: bool = False

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return {
+            k: m.marshal(getattr(o, k), ctx)
+            for k, m in self.fs.items()
+        }

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return self.ty(**{
+            k: self.fs[k].unmarshal(v, ctx)
+            for k, v in o.items()
+            if not (self.nonstrict or ctx.options.nonstrict_dataclasses) or k in self.fs
+        })


+@dc.dataclass(frozen=True)
+class PolymorphicObjMarshaler(ObjMarshaler):
+    class Impl(ta.NamedTuple):
+        ty: type
+        tag: str
+        m: ObjMarshaler

+    impls_by_ty: ta.Mapping[type, Impl]
+    impls_by_tag: ta.Mapping[str, Impl]

+    @classmethod
+    def of(cls, impls: ta.Iterable[Impl]) -> 'PolymorphicObjMarshaler':
+        return cls(
+            {i.ty: i for i in impls},
+            {i.tag: i for i in impls},
+        )

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        impl = self.impls_by_ty[type(o)]
+        return {impl.tag: impl.m.marshal(o, ctx)}

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        [(t, v)] = o.items()
+        impl = self.impls_by_tag[t]
+        return impl.m.unmarshal(v, ctx)


+@dc.dataclass(frozen=True)
+class DatetimeObjMarshaler(ObjMarshaler):
+    ty: type

+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o.isoformat()

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return self.ty.fromisoformat(o)  # type: ignore


+class DecimalObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return str(check.isinstance(o, decimal.Decimal))

+    def unmarshal(self, v: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return decimal.Decimal(check.isinstance(v, str))


+class FractionObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        fr = check.isinstance(o, fractions.Fraction)
+        return [fr.numerator, fr.denominator]

+    def unmarshal(self, v: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        num, denom = check.isinstance(v, list)
+        return fractions.Fraction(num, denom)


+class UuidObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return str(o)

+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return uuid.UUID(o)


+##


+_DEFAULT_OBJ_MARSHALERS: ta.Dict[ta.Any, ObjMarshaler] = {
+    **{t: NopObjMarshaler() for t in (type(None),)},
+    **{t: CastObjMarshaler(t) for t in (int, float, str, bool)},
+    **{t: BytesSwitchedObjMarshaler(Base64ObjMarshaler(t)) for t in (bytes, bytearray)},
+    **{t: IterableObjMarshaler(t, DynamicObjMarshaler()) for t in (list, tuple, set, frozenset)},
+    **{t: MappingObjMarshaler(t, DynamicObjMarshaler(), DynamicObjMarshaler()) for t in (dict,)},

+    ta.Any: DynamicObjMarshaler(),

+    **{t: DatetimeObjMarshaler(t) for t in (datetime.date, datetime.time, datetime.datetime)},
+    decimal.Decimal: DecimalObjMarshaler(),
+    fractions.Fraction: FractionObjMarshaler(),
+    uuid.UUID: UuidObjMarshaler(),
+}

+_OBJ_MARSHALER_GENERIC_MAPPING_TYPES: ta.Dict[ta.Any, type] = {
+    **{t: t for t in (dict,)},
+    **{t: dict for t in (collections.abc.Mapping, collections.abc.MutableMapping)},
+}

+_OBJ_MARSHALER_GENERIC_ITERABLE_TYPES: ta.Dict[ta.Any, type] = {
+    **{t: t for t in (list, tuple, set, frozenset)},
+    collections.abc.Set: frozenset,
+    collections.abc.MutableSet: set,
+    collections.abc.Sequence: tuple,
+    collections.abc.MutableSequence: list,
+}


##


+class ObjMarshalerManager:
+    def __init__(
+        self,
+        *,
+        default_options: ObjMarshalOptions = ObjMarshalOptions(),
+        default_obj_marshalers: ta.Dict[ta.Any, ObjMarshaler] = _DEFAULT_OBJ_MARSHALERS,  # noqa
+        generic_mapping_types: ta.Dict[ta.Any, type] = _OBJ_MARSHALER_GENERIC_MAPPING_TYPES,  # noqa
+        generic_iterable_types: ta.Dict[ta.Any, type] = _OBJ_MARSHALER_GENERIC_ITERABLE_TYPES,  # noqa
+    ) -> None:
+        super().__init__()

+        self._default_options = default_options

+        self._obj_marshalers = dict(default_obj_marshalers)
+        self._generic_mapping_types = generic_mapping_types
+        self._generic_iterable_types = generic_iterable_types

+        self._lock = threading.RLock()
+        self._marshalers: ta.Dict[ta.Any, ObjMarshaler] = dict(_DEFAULT_OBJ_MARSHALERS)
+        self._proxies: ta.Dict[ta.Any, ProxyObjMarshaler] = {}

+    #

+    def make_obj_marshaler(
+        self,
+        ty: ta.Any,
+        rec: ta.Callable[[ta.Any], ObjMarshaler],
+        *,
+        nonstrict_dataclasses: bool = False,
+    ) -> ObjMarshaler:
+        if isinstance(ty, type):
+            if abc.ABC in ty.__bases__:
+                impls = [ity for ity in deep_subclasses(ty) if abc.ABC not in ity.__bases__]  # type: ignore
+                if all(ity.__qualname__.endswith(ty.__name__) for ity in impls):
+                    ins = {ity: snake_case(ity.__qualname__[:-len(ty.__name__)]) for ity in impls}
+                else:
+                    ins = {ity: ity.__qualname__ for ity in impls}
+                return PolymorphicObjMarshaler.of([
+                    PolymorphicObjMarshaler.Impl(
+                        ity,
+                        itn,
+                        rec(ity),
+                    )
+                    for ity, itn in ins.items()
+                ])

+            if issubclass(ty, enum.Enum):
+                return EnumObjMarshaler(ty)

+            if dc.is_dataclass(ty):
+                return DataclassObjMarshaler(
+                    ty,
+                    {f.name: rec(f.type) for f in dc.fields(ty)},
+                    nonstrict=nonstrict_dataclasses,
+                )

+        if is_generic_alias(ty):
+            try:
+                mt = self._generic_mapping_types[ta.get_origin(ty)]
+            except KeyError:
+                pass
+            else:
+                k, v = ta.get_args(ty)
+                return MappingObjMarshaler(mt, rec(k), rec(v))

+            try:
+                st = self._generic_iterable_types[ta.get_origin(ty)]
+            except KeyError:
+                pass
+            else:
+                [e] = ta.get_args(ty)
+                return IterableObjMarshaler(st, rec(e))

+        if is_union_alias(ty):
+            return OptionalObjMarshaler(rec(get_optional_alias_arg(ty)))

+        raise TypeError(ty)

+    #

+    def register_opj_marshaler(self, ty: ta.Any, m: ObjMarshaler) -> None:
+        with self._lock:
+            if ty in self._obj_marshalers:
+                raise KeyError(ty)
+            self._obj_marshalers[ty] = m

+    def get_obj_marshaler(
+        self,
+        ty: ta.Any,
+        *,
+        no_cache: bool = False,
+        **kwargs: ta.Any,
+    ) -> ObjMarshaler:
+        with self._lock:
+            if not no_cache:
+                try:
+                    return self._obj_marshalers[ty]
+                except KeyError:
+                    pass

+            try:
+                return self._proxies[ty]
+            except KeyError:
+                pass

+            rec = functools.partial(
+                self.get_obj_marshaler,
+                no_cache=no_cache,
+                **kwargs,
+            )

+            p = ProxyObjMarshaler()
+            self._proxies[ty] = p
+            try:
+                m = self.make_obj_marshaler(ty, rec, **kwargs)
+            finally:
+                del self._proxies[ty]
+            p.m = m

+            if not no_cache:
+                self._obj_marshalers[ty] = m
+            return m

+    #

+    def _make_context(self, opts: ta.Optional[ObjMarshalOptions]) -> 'ObjMarshalContext':
+        return ObjMarshalContext(
+            options=opts or self._default_options,
+            manager=self,
+        )

+    def marshal_obj(
+        self,
+        o: ta.Any,
+        ty: ta.Any = None,
+        opts: ta.Optional[ObjMarshalOptions] = None,
+    ) -> ta.Any:
+        m = self.get_obj_marshaler(ty if ty is not None else type(o))
+        return m.marshal(o, self._make_context(opts))

+    def unmarshal_obj(
+        self,
+        o: ta.Any,
+        ty: ta.Union[ta.Type[T], ta.Any],
+        opts: ta.Optional[ObjMarshalOptions] = None,
+    ) -> T:
+        m = self.get_obj_marshaler(ty)
+        return m.unmarshal(o, self._make_context(opts))

+    def roundtrip_obj(
+        self,
+        o: ta.Any,
+        ty: ta.Any = None,
+        opts: ta.Optional[ObjMarshalOptions] = None,
+    ) -> ta.Any:
+        if ty is None:
+            ty = type(o)
+        m: ta.Any = self.marshal_obj(o, ty, opts)
+        u: ta.Any = self.unmarshal_obj(m, ty, opts)
+        return u


@dc.dataclass(frozen=True)
+class ObjMarshalContext:
+    options: ObjMarshalOptions
+    manager: ObjMarshalerManager


+##


+OBJ_MARSHALER_MANAGER = ObjMarshalerManager()

+register_opj_marshaler = OBJ_MARSHALER_MANAGER.register_opj_marshaler
+get_obj_marshaler = OBJ_MARSHALER_MANAGER.get_obj_marshaler

+marshal_obj = OBJ_MARSHALER_MANAGER.marshal_obj
+unmarshal_obj = OBJ_MARSHALER_MANAGER.unmarshal_obj


+########################################
+# ../../../omlish/lite/runtime.py


+@cached_nullary
+def is_debugger_attached() -> bool:
+    return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())


+REQUIRED_PYTHON_VERSION = (3, 8)


+def check_runtime_version() -> None:
+    if sys.version_info < REQUIRED_PYTHON_VERSION:
+        raise OSError(f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa


+########################################
+# ../../../omlish/logs/json.py
+"""
+TODO:
+ - translate json keys
+"""


+class JsonLogFormatter(logging.Formatter):
+    KEYS: ta.Mapping[str, bool] = {
+        'name': False,
+        'msg': False,
+        'args': False,
+        'levelname': False,
+        'levelno': False,
+        'pathname': False,
+        'filename': False,
+        'module': False,
+        'exc_info': True,
+        'exc_text': True,
+        'stack_info': True,
+        'lineno': False,
+        'funcName': False,
+        'created': False,
+        'msecs': False,
+        'relativeCreated': False,
+        'thread': False,
+        'threadName': False,
+        'processName': False,
+        'process': False,
    }

+    def __init__(
+        self,
+        *args: ta.Any,
+        json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
+        **kwargs: ta.Any,
+    ) -> None:
+        super().__init__(*args, **kwargs)

+        if json_dumps is None:
+            json_dumps = json_dumps_compact
+        self._json_dumps = json_dumps

+    def format(self, record: logging.LogRecord) -> str:
+        dct = {
+            k: v
+            for k, o in self.KEYS.items()
+            for v in [getattr(record, k)]
+            if not (o and v is None)
+        }
+        return self._json_dumps(dct)


+########################################
+# ../../../omdev/interp/types.py


+# See https://peps.python.org/pep-3149/
+INTERP_OPT_GLYPHS_BY_ATTR: ta.Mapping[str, str] = collections.OrderedDict([
+    ('debug', 'd'),
+    ('threaded', 't'),
+])

+INTERP_OPT_ATTRS_BY_GLYPH: ta.Mapping[str, str] = collections.OrderedDict(
+    (g, a) for a, g in INTERP_OPT_GLYPHS_BY_ATTR.items()
+)


@dc.dataclass(frozen=True)
+class InterpOpts:
+    threaded: bool = False
+    debug: bool = False

+    def __str__(self) -> str:
+        return ''.join(g for a, g in INTERP_OPT_GLYPHS_BY_ATTR.items() if getattr(self, a))

    @classmethod
+    def parse(cls, s: str) -> 'InterpOpts':
+        return cls(**{INTERP_OPT_ATTRS_BY_GLYPH[g]: True for g in s})

+    @classmethod
+    def parse_suffix(cls, s: str) -> ta.Tuple[str, 'InterpOpts']:
+        kw = {}
+        while s and (a := INTERP_OPT_ATTRS_BY_GLYPH.get(s[-1])):
+            s, kw[a] = s[:-1], True
+        return s, cls(**kw)


+@dc.dataclass(frozen=True)
+class InterpVersion:
+    version: Version
+    opts: InterpOpts

+    def __str__(self) -> str:
+        return str(self.version) + str(self.opts)

+    @classmethod
+    def parse(cls, s: str) -> 'InterpVersion':
+        s, o = InterpOpts.parse_suffix(s)
+        v = Version(s)
        return cls(
+            version=v,
+            opts=o,
        )

+    @classmethod
+    def try_parse(cls, s: str) -> ta.Optional['InterpVersion']:
+        try:
+            return cls.parse(s)
+        except (KeyError, InvalidVersion):
+            return None


@dc.dataclass(frozen=True)
+class InterpSpecifier:
+    specifier: Specifier
+    opts: InterpOpts

+    def __str__(self) -> str:
+        return str(self.specifier) + str(self.opts)

+    @classmethod
+    def parse(cls, s: str) -> 'InterpSpecifier':
+        s, o = InterpOpts.parse_suffix(s)
+        if not any(s.startswith(o) for o in Specifier.OPERATORS):
+            s = '~=' + s
+        if s.count('.') < 2:
+            s += '.0'
+        return cls(
+            specifier=Specifier(s),
+            opts=o,
+        )

+    def contains(self, iv: InterpVersion) -> bool:
+        return self.specifier.contains(iv.version) and self.opts == iv.opts

+    def __contains__(self, iv: InterpVersion) -> bool:
+        return self.contains(iv)


+@dc.dataclass(frozen=True)
+class Interp:
+    exe: str
+    version: InterpVersion


+########################################
+# ../../configs.py


+def parse_config_file(
+    name: str,
+    f: ta.TextIO,
+) -> ConfigMapping:
+    if name.endswith('.toml'):
+        return toml_loads(f.read())

+    elif any(name.endswith(e) for e in ('.yml', '.yaml')):
+        yaml = __import__('yaml')
+        return yaml.safe_load(f)

+    elif name.endswith('.ini'):
+        import configparser
+        cp = configparser.ConfigParser()
+        cp.read_file(f)
+        config_dct: ta.Dict[str, ta.Any] = {}
+        for sec in cp.sections():
+            cd = config_dct
+            for k in sec.split('.'):
+                cd = cd.setdefault(k, {})
+            cd.update(cp.items(sec))
+        return config_dct

+    else:
+        return json.loads(f.read())


+def read_config_file(
+    path: str,
+    cls: ta.Type[T],
+    *,
+    prepare: ta.Optional[ta.Callable[[ConfigMapping], ConfigMapping]] = None,
+) -> T:
+    with open(path) as cf:
+        config_dct = parse_config_file(os.path.basename(path), cf)

+    if prepare is not None:
+        config_dct = prepare(config_dct)

+    return unmarshal_obj(config_dct, cls)


+def build_config_named_children(
+    o: ta.Union[
+        ta.Sequence[ConfigMapping],
+        ta.Mapping[str, ConfigMapping],
+        None,
+    ],
+    *,
+    name_key: str = 'name',
+) -> ta.Optional[ta.Sequence[ConfigMapping]]:
+    if o is None:
+        return None

+    lst: ta.List[ConfigMapping] = []
+    if isinstance(o, ta.Mapping):
+        for k, v in o.items():
+            check.isinstance(v, ta.Mapping)
+            if name_key in v:
+                n = v[name_key]
+                if k != n:
+                    raise KeyError(f'Given names do not match: {n} != {k}')
+                lst.append(v)
+            else:
+                lst.append({name_key: k, **v})

+    else:
+        check.not_isinstance(o, str)
+        lst.extend(o)

+    seen = set()
+    for d in lst:
+        n = d['name']
+        if n in d:
+            raise KeyError(f'Duplicate name: {n}')
+        seen.add(n)

+    return lst


+########################################
+# ../commands/marshal.py


+def install_command_marshaling(
+    cmds: CommandNameMap,
+    msh: ObjMarshalerManager,
+) -> None:
+    for fn in [
+        lambda c: c,
+        lambda c: c.Output,
+    ]:
+        msh.register_opj_marshaler(
+            fn(Command),
+            PolymorphicObjMarshaler.of([
+                PolymorphicObjMarshaler.Impl(
+                    fn(cmd),
+                    name,
+                    msh.get_obj_marshaler(fn(cmd)),
+                )
+                for name, cmd in cmds.items()
+            ]),
+        )


+########################################
+# ../commands/ping.py


+##


+@dc.dataclass(frozen=True)
+class PingCommand(Command['PingCommand.Output']):
+    time: float = dc.field(default_factory=time.time)

+    @dc.dataclass(frozen=True)
+    class Output(Command.Output):
+        time: float


+class PingCommandExecutor(CommandExecutor[PingCommand, PingCommand.Output]):
+    async def execute(self, cmd: PingCommand) -> PingCommand.Output:
+        return PingCommand.Output(cmd.time)


+########################################
+# ../commands/types.py


+CommandExecutorMap = ta.NewType('CommandExecutorMap', ta.Mapping[ta.Type[Command], CommandExecutor])


+########################################
+# ../deploy/commands.py


+##


+@dc.dataclass(frozen=True)
+class DeployCommand(Command['DeployCommand.Output']):
+    @dc.dataclass(frozen=True)
+    class Output(Command.Output):
+        pass


+class DeployCommandExecutor(CommandExecutor[DeployCommand, DeployCommand.Output]):
+    async def execute(self, cmd: DeployCommand) -> DeployCommand.Output:
+        log.info('Deploying!')

+        return DeployCommand.Output()


+########################################
+# ../marshal.py


@dc.dataclass(frozen=True)
+class ObjMarshalerInstaller:
+    fn: ta.Callable[[ObjMarshalerManager], None]


+ObjMarshalerInstallers = ta.NewType('ObjMarshalerInstallers', ta.Sequence[ObjMarshalerInstaller])


########################################
+# ../remote/channel.py


+##


+class RemoteChannel(abc.ABC):
+    @abc.abstractmethod
+    def send_obj(self, o: ta.Any, ty: ta.Any = None) -> ta.Awaitable[None]:
+        raise NotImplementedError

+    @abc.abstractmethod
+    def recv_obj(self, ty: ta.Type[T]) -> ta.Awaitable[ta.Optional[T]]:
+        raise NotImplementedError

+    def set_marshaler(self, msh: ObjMarshalerManager) -> None:  # noqa
+        pass


+##


class RemoteChannelImpl(RemoteChannel):
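The marshaling and config helpers above combine so that typed dataclasses can be round-tripped through plain JSON-able structures and loaded from TOML/YAML/INI/JSON files. A small sketch (ExampleConfig and the file path are hypothetical; only marshal_obj, unmarshal_obj and read_config_file from the code above are assumed):

    @dc.dataclass(frozen=True)
    class ExampleConfig:                 # hypothetical config shape, for illustration
        name: str
        replicas: int = 1

    payload = marshal_obj(ExampleConfig(name='web', replicas=3))
    # -> {'name': 'web', 'replicas': 3}

    cfg = unmarshal_obj(payload, ExampleConfig)

    # Or straight from a file on disk (path is hypothetical):
    # cfg = read_config_file('/etc/example/config.toml', ExampleConfig)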
@@ -5700,76 +6615,136 @@ class RemoteChannelImpl(RemoteChannel):


########################################
+# ../system/config.py


+@dc.dataclass(frozen=True)
+class SystemConfig:
+    platform: ta.Optional[Platform] = None


+########################################
+# ../../../omlish/logs/standard.py
+"""
+TODO:
+ - structured
+ - prefixed
+ - debug
+"""


+##


+STANDARD_LOG_FORMAT_PARTS = [
+    ('asctime', '%(asctime)-15s'),
+    ('process', 'pid=%(process)-6s'),
+    ('thread', 'tid=%(thread)x'),
+    ('levelname', '%(levelname)s'),
+    ('name', '%(name)s'),
+    ('separator', '::'),
+    ('message', '%(message)s'),
+]


+class StandardLogFormatter(logging.Formatter):
+    @staticmethod
+    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
+        return ' '.join(v for k, v in parts)

+    converter = datetime.datetime.fromtimestamp  # type: ignore

+    def formatTime(self, record, datefmt=None):
+        ct = self.converter(record.created)  # type: ignore
+        if datefmt:
+            return ct.strftime(datefmt)  # noqa
+        else:
+            t = ct.strftime('%Y-%m-%d %H:%M:%S')
+            return '%s.%03d' % (t, record.msecs)  # noqa


+##


+class StandardLogHandler(ProxyLogHandler):
    pass


##


+@contextlib.contextmanager
+def _locking_logging_module_lock() -> ta.Iterator[None]:
+    if hasattr(logging, '_acquireLock'):
+        logging._acquireLock()  # noqa
+        try:
+            yield
+        finally:
+            logging._releaseLock()  # type: ignore  # noqa

+    elif hasattr(logging, '_lock'):
+        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
+        with logging._lock:  # noqa
+            yield

+    else:
+        raise Exception("Can't find lock in logging module")


+def configure_standard_logging(
+    level: ta.Union[int, str] = logging.INFO,
+    *,
+    json: bool = False,
+    target: ta.Optional[logging.Logger] = None,
+    force: bool = False,
+    handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
+) -> ta.Optional[StandardLogHandler]:
+    with _locking_logging_module_lock():
+        if target is None:
+            target = logging.root

+        #

+        if not force:
+            if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
+                return None

+        #

+        if handler_factory is not None:
+            handler = handler_factory()
+        else:
+            handler = logging.StreamHandler()

+        #

+        formatter: logging.Formatter
+        if json:
+            formatter = JsonLogFormatter()
        else:
+            formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
+        handler.setFormatter(formatter)

+        #

+        handler.addFilter(TidLogFilter())

+        #

+        target.addHandler(handler)

+        #

+        if level is not None:
+            target.setLevel(level)

+        #

+        return StandardLogHandler(handler)


########################################
+# ../../../omlish/subprocesses.py


##
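configure_standard_logging is the single entrypoint the scripts use to install either the plain or the JSON formatter on a logger. A usage sketch (values are illustrative):

    handler = configure_standard_logging(
        'INFO',        # accepts a level name or number
        json=True,     # route records through JsonLogFormatter instead of the standard format
    )
    logging.getLogger('example').info('hello')   # 'example' is an arbitrary logger name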
@@ -5820,8 +6795,8 @@ def subprocess_close(
##


+class BaseSubprocesses(abc.ABC):  # noqa
+    DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = None

    def __init__(
        self,
@@ -5834,6 +6809,9 @@ class AbstractSubprocesses(abc.ABC):  # noqa
        self._log = log if log is not None else self.DEFAULT_LOGGER
        self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS

+    def set_logger(self, log: ta.Optional[logging.Logger]) -> None:
+        self._log = log
+
    #

    def prepare_args(
@@ -5945,23 +6923,25 @@ class AbstractSubprocesses(abc.ABC): # noqa
|
|
5945
6923
|
##
|
5946
6924
|
|
5947
6925
|
|
5948
|
-
class
|
6926
|
+
class AbstractSubprocesses(BaseSubprocesses, abc.ABC):
|
6927
|
+
@abc.abstractmethod
|
5949
6928
|
def check_call(
|
5950
6929
|
self,
|
5951
6930
|
*cmd: str,
|
5952
6931
|
stdout: ta.Any = sys.stderr,
|
5953
6932
|
**kwargs: ta.Any,
|
5954
6933
|
) -> None:
|
5955
|
-
|
5956
|
-
subprocess.check_call(cmd, **kwargs)
|
6934
|
+
raise NotImplementedError
|
5957
6935
|
|
6936
|
+
@abc.abstractmethod
|
5958
6937
|
def check_output(
|
5959
6938
|
self,
|
5960
6939
|
*cmd: str,
|
5961
6940
|
**kwargs: ta.Any,
|
5962
6941
|
) -> bytes:
|
5963
|
-
|
5964
|
-
|
6942
|
+
raise NotImplementedError
|
6943
|
+
|
6944
|
+
#
|
5965
6945
|
|
5966
6946
|
def check_output_str(
|
5967
6947
|
self,
|
@@ -6003,9 +6983,109 @@ class Subprocesses(AbstractSubprocesses):
|
|
6003
6983
|
return ret.decode().strip()
|
6004
6984
|
|
6005
6985
|
|
6986
|
+
##
|
6987
|
+
|
6988
|
+
|
6989
|
+
class Subprocesses(AbstractSubprocesses):
|
6990
|
+
def check_call(
|
6991
|
+
self,
|
6992
|
+
*cmd: str,
|
6993
|
+
stdout: ta.Any = sys.stderr,
|
6994
|
+
**kwargs: ta.Any,
|
6995
|
+
) -> None:
|
6996
|
+
with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs): # noqa
|
6997
|
+
subprocess.check_call(cmd, **kwargs)
|
6998
|
+
|
6999
|
+
def check_output(
|
7000
|
+
self,
|
7001
|
+
*cmd: str,
|
7002
|
+
**kwargs: ta.Any,
|
7003
|
+
) -> bytes:
|
7004
|
+
with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs): # noqa
|
7005
|
+
return subprocess.check_output(cmd, **kwargs)
|
7006
|
+
|
7007
|
+
|
6006
7008
|
subprocesses = Subprocesses()
|
6007
7009
|
|
6008
7010
|
|
7011
|
+
##
|
7012
|
+
|
7013
|
+
|
7014
|
+
class AbstractAsyncSubprocesses(BaseSubprocesses):
|
7015
|
+
@abc.abstractmethod
|
7016
|
+
async def check_call(
|
7017
|
+
self,
|
7018
|
+
*cmd: str,
|
7019
|
+
stdout: ta.Any = sys.stderr,
|
7020
|
+
**kwargs: ta.Any,
|
7021
|
+
) -> None:
|
7022
|
+
raise NotImplementedError
|
7023
|
+
|
7024
|
+
@abc.abstractmethod
|
7025
|
+
async def check_output(
|
7026
|
+
self,
|
7027
|
+
*cmd: str,
|
7028
|
+
**kwargs: ta.Any,
|
7029
|
+
) -> bytes:
|
7030
|
+
raise NotImplementedError
|
7031
|
+
|
7032
|
+
#
|
7033
|
+
|
7034
|
+
async def check_output_str(
|
7035
|
+
self,
|
7036
|
+
*cmd: str,
|
7037
|
+
**kwargs: ta.Any,
|
7038
|
+
) -> str:
|
7039
|
+
return (await self.check_output(*cmd, **kwargs)).decode().strip()
|
7040
|
+
|
7041
|
+
#
|
7042
|
+
|
7043
|
+
async def try_call(
|
7044
|
+
self,
|
7045
|
+
*cmd: str,
|
7046
|
+
**kwargs: ta.Any,
|
7047
|
+
) -> bool:
|
7048
|
+
if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
|
7049
|
+
return False
|
7050
|
+
else:
|
7051
|
+
return True
|
7052
|
+
|
7053
|
+
async def try_output(
|
7054
|
+
self,
|
7055
|
+
*cmd: str,
|
7056
|
+
**kwargs: ta.Any,
|
7057
|
+
) -> ta.Optional[bytes]:
|
7058
|
+
if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
|
7059
|
+
return None
|
7060
|
+
else:
|
7061
|
+
return ret
|
7062
|
+
|
7063
|
+
async def try_output_str(
|
7064
|
+
self,
|
7065
|
+
*cmd: str,
|
7066
|
+
**kwargs: ta.Any,
|
7067
|
+
) -> ta.Optional[str]:
|
7068
|
+
if (ret := await self.try_output(*cmd, **kwargs)) is None:
|
7069
|
+
return None
|
7070
|
+
else:
|
7071
|
+
return ret.decode().strip()
|
7072
|
+
|
7073
|
+
|
7074
|
+
########################################
|
7075
|
+
# ../bootstrap.py
|
7076
|
+
|
7077
|
+
|
7078
|
+
@dc.dataclass(frozen=True)
|
7079
|
+
class MainBootstrap:
|
7080
|
+
main_config: MainConfig = MainConfig()
|
7081
|
+
|
7082
|
+
deploy_config: DeployConfig = DeployConfig()
|
7083
|
+
|
7084
|
+
remote_config: RemoteConfig = RemoteConfig()
|
7085
|
+
|
7086
|
+
system_config: SystemConfig = SystemConfig()
|
7087
|
+
|
7088
|
+
|
6009
7089
|
########################################
|
6010
7090
|
# ../commands/local.py
|
6011
7091
|
|
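
Note: a small sketch of how the split sync/async subprocess helpers above might be used, assuming the module-level `subprocesses` singleton shown in this diff; the exact command names here are illustrative only:

    # Synchronous: raises on failure, or returns decoded, stripped stdout.
    rev = subprocesses.check_output_str('git', 'rev-parse', 'HEAD')

    # 'try_' variants swallow failures and return a bool / None instead of raising.
    ok = subprocesses.try_call('systemctl', 'is-active', 'nginx')
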
@@ -6423,16 +7503,7 @@ class RemoteCommandExecutor(CommandExecutor):


 ########################################
-#
-
-
-@dc.dataclass(frozen=True)
-class SystemConfig:
-    platform: ta.Optional[Platform] = None
-
-
-########################################
-# ../../../omlish/lite/asyncio/subprocesses.py
+# ../../../omlish/asyncs/asyncio/subprocesses.py


 ##
@@ -6443,6 +7514,8 @@ class AsyncioProcessCommunicator:
         self,
         proc: asyncio.subprocess.Process,
         loop: ta.Optional[ta.Any] = None,
+        *,
+        log: ta.Optional[logging.Logger] = None,
     ) -> None:
         super().__init__()

@@ -6451,6 +7524,7 @@ class AsyncioProcessCommunicator:

         self._proc = proc
         self._loop = loop
+        self._log = log

         self._transport: asyncio.base_subprocess.BaseSubprocessTransport = check.isinstance(
             proc._transport,  # type: ignore  # noqa
@@ -6466,19 +7540,19 @@ class AsyncioProcessCommunicator:
         try:
             if input is not None:
                 stdin.write(input)
-                if self._debug:
-
+                if self._debug and self._log is not None:
+                    self._log.debug('%r communicate: feed stdin (%s bytes)', self, len(input))

             await stdin.drain()

         except (BrokenPipeError, ConnectionResetError) as exc:
             # communicate() ignores BrokenPipeError and ConnectionResetError. write() and drain() can raise these
             # exceptions.
-            if self._debug:
-
+            if self._debug and self._log is not None:
+                self._log.debug('%r communicate: stdin got %r', self, exc)

-        if self._debug:
-
+        if self._debug and self._log is not None:
+            self._log.debug('%r communicate: close stdin', self)

         stdin.close()

@@ -6494,15 +7568,15 @@ class AsyncioProcessCommunicator:
             check.equal(fd, 1)
             stream = check.not_none(self._proc.stdout)

-        if self._debug:
+        if self._debug and self._log is not None:
             name = 'stdout' if fd == 1 else 'stderr'
-
+            self._log.debug('%r communicate: read %s', self, name)

         output = await stream.read()

-        if self._debug:
+        if self._debug and self._log is not None:
             name = 'stdout' if fd == 1 else 'stderr'
-
+            self._log.debug('%r communicate: close %s', self, name)

         transport.close()

@@ -6551,7 +7625,7 @@ class AsyncioProcessCommunicator:
 ##


-class AsyncioSubprocesses(
+class AsyncioSubprocesses(AbstractAsyncSubprocesses):
     async def communicate(
         self,
         proc: asyncio.subprocess.Process,
@@ -6648,45 +7722,6 @@ class AsyncioSubprocesses(AbstractSubprocesses):
         with self.prepare_and_wrap(*cmd, stdout=subprocess.PIPE, check=True, **kwargs) as (cmd, kwargs):  # noqa
             return check.not_none((await self.run(*cmd, **kwargs)).stdout)

-    async def check_output_str(
-        self,
-        *cmd: str,
-        **kwargs: ta.Any,
-    ) -> str:
-        return (await self.check_output(*cmd, **kwargs)).decode().strip()
-
-    #
-
-    async def try_call(
-        self,
-        *cmd: str,
-        **kwargs: ta.Any,
-    ) -> bool:
-        if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
-            return False
-        else:
-            return True
-
-    async def try_output(
-        self,
-        *cmd: str,
-        **kwargs: ta.Any,
-    ) -> ta.Optional[bytes]:
-        if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
-            return None
-        else:
-            return ret
-
-    async def try_output_str(
-        self,
-        *cmd: str,
-        **kwargs: ta.Any,
-    ) -> ta.Optional[str]:
-        if (ret := await self.try_output(*cmd, **kwargs)) is None:
-            return None
-        else:
-            return ret.decode().strip()
-

 asyncio_subprocesses = AsyncioSubprocesses()

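
Note: the async convenience methods removed here now live on `AbstractAsyncSubprocesses` (shown earlier in this diff), so `asyncio_subprocesses` keeps the same surface. A rough sketch, assuming it is imported from the amalgamated script:

    import asyncio

    async def main() -> None:
        out = await asyncio_subprocesses.check_output_str('uname', '-a')
        print(out)

    asyncio.run(main())
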
@@ -6786,21 +7821,6 @@ class InterpInspector:
 INTERP_INSPECTOR = InterpInspector()


-########################################
-# ../bootstrap.py
-
-
-@dc.dataclass(frozen=True)
-class MainBootstrap:
-    main_config: MainConfig = MainConfig()
-
-    deploy_config: DeployConfig = DeployConfig()
-
-    remote_config: RemoteConfig = RemoteConfig()
-
-    system_config: SystemConfig = SystemConfig()
-
-
 ########################################
 # ../commands/subprocess.py

@@ -6887,39 +7907,22 @@ github.com/wrmsr/omlish@rev
 ##


-
-
-
-
-
+class DeployGitManager(DeployPathOwner):
+    def __init__(
+        self,
+        *,
+        deploy_home: ta.Optional[DeployHome] = None,
+    ) -> None:
+        super().__init__()

-
-        check.not_in('..', check.non_empty_str(self.host))
-        check.not_in('.', check.non_empty_str(self.path))
-
-
-@dc.dataclass(frozen=True)
-class DeployGitSpec:
-    repo: DeployGitRepo
-    rev: DeployRev
-
-
-##
-
-
-class DeployGitManager(DeployPathOwner):
-    def __init__(
-        self,
-        *,
-        deploy_home: DeployHome,
-    ) -> None:
-        super().__init__()
-
-        self._deploy_home = deploy_home
-        self._dir = os.path.join(deploy_home, 'git')
+        self._deploy_home = deploy_home

         self._repo_dirs: ta.Dict[DeployGitRepo, DeployGitManager.RepoDir] = {}

+    @cached_nullary
+    def _dir(self) -> str:
+        return os.path.join(check.non_empty_str(self._deploy_home), 'git')
+
     def get_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
         return {
             DeployPath.parse('git'),
@@ -6936,7 +7939,7 @@ class DeployGitManager(DeployPathOwner):
         self._git = git
         self._repo = repo
         self._dir = os.path.join(
-            self._git._dir,  # noqa
+            self._git._dir(),  # noqa
             check.non_empty_str(repo.host),
             check.non_empty_str(repo.path),
         )
@@ -6993,8 +7996,8 @@ class DeployGitManager(DeployPathOwner):
             repo_dir = self._repo_dirs[repo] = DeployGitManager.RepoDir(self, repo)
             return repo_dir

-    async def checkout(self,
-        await self.get_repo_dir(
+    async def checkout(self, repo: DeployGitRepo, rev: DeployRev, dst_dir: str) -> None:
+        await self.get_repo_dir(repo).checkout(rev, dst_dir)


 ########################################
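
Note: the deploy managers in this diff move from computing their directory eagerly in `__init__` to a `@cached_nullary` `_dir()` method, so a missing `deploy_home` only fails when the directory is actually needed. A rough illustration of the pattern; the class and field names below are illustrative, not part of the package:

    class _Example:
        def __init__(self, home: ta.Optional[str] = None) -> None:
            self._home = home

        @cached_nullary
        def _dir(self) -> str:
            # Raises only on first use, then caches the result.
            return os.path.join(check.non_empty_str(self._home), 'git')
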
@@ -7010,12 +8013,15 @@ class DeployVenvManager(DeployPathOwner):
     def __init__(
         self,
         *,
-        deploy_home: DeployHome,
+        deploy_home: ta.Optional[DeployHome] = None,
     ) -> None:
         super().__init__()

         self._deploy_home = deploy_home
-
+
+    @cached_nullary
+    def _dir(self) -> str:
+        return os.path.join(check.non_empty_str(self._deploy_home), 'venvs')

     def get_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
         return {
@@ -7052,10 +8058,152 @@ class DeployVenvManager(DeployPathOwner):

     async def setup_app_venv(self, app_tag: DeployAppTag) -> None:
         await self.setup_venv(
-            os.path.join(self._deploy_home, 'apps', app_tag.app, app_tag.tag),
-            os.path.join(self.
+            os.path.join(check.non_empty_str(self._deploy_home), 'apps', app_tag.app, app_tag.tag),
+            os.path.join(self._dir(), app_tag.app, app_tag.tag),
+        )
+
+
+########################################
+# ../remote/_main.py
+
+
+##
+
+
+class _RemoteExecutionLogHandler(logging.Handler):
+    def __init__(self, fn: ta.Callable[[str], None]) -> None:
+        super().__init__()
+        self._fn = fn
+
+    def emit(self, record):
+        msg = self.format(record)
+        self._fn(msg)
+
+
+##
+
+
+class _RemoteExecutionMain:
+    def __init__(
+        self,
+        chan: RemoteChannel,
+    ) -> None:
+        super().__init__()
+
+        self._chan = chan
+
+        self.__bootstrap: ta.Optional[MainBootstrap] = None
+        self.__injector: ta.Optional[Injector] = None
+
+    @property
+    def _bootstrap(self) -> MainBootstrap:
+        return check.not_none(self.__bootstrap)
+
+    @property
+    def _injector(self) -> Injector:
+        return check.not_none(self.__injector)
+
+    #
+
+    def _timebomb_main(
+        self,
+        delay_s: float,
+        *,
+        sig: int = signal.SIGINT,
+        code: int = 1,
+    ) -> None:
+        time.sleep(delay_s)
+
+        if (pgid := os.getpgid(0)) == os.getpid():
+            os.killpg(pgid, sig)
+
+        os._exit(code)  # noqa
+
+    @cached_nullary
+    def _timebomb_thread(self) -> ta.Optional[threading.Thread]:
+        if (tbd := self._bootstrap.remote_config.timebomb_delay_s) is None:
+            return None
+
+        thr = threading.Thread(
+            target=functools.partial(self._timebomb_main, tbd),
+            name=f'{self.__class__.__name__}.timebomb',
+            daemon=True,
+        )
+
+        thr.start()
+
+        log.debug('Started timebomb thread: %r', thr)
+
+        return thr
+
+    #
+
+    @cached_nullary
+    def _log_handler(self) -> _RemoteLogHandler:
+        return _RemoteLogHandler(self._chan)
+
+    #
+
+    async def _setup(self) -> None:
+        check.none(self.__bootstrap)
+        check.none(self.__injector)
+
+        # Bootstrap
+
+        self.__bootstrap = check.not_none(await self._chan.recv_obj(MainBootstrap))
+
+        if (prd := self._bootstrap.remote_config.pycharm_remote_debug) is not None:
+            pycharm_debug_connect(prd)
+
+        self.__injector = main_bootstrap(self._bootstrap)
+
+        self._chan.set_marshaler(self._injector[ObjMarshalerManager])
+
+        # Post-bootstrap
+
+        if self._bootstrap.remote_config.set_pgid:
+            if os.getpgid(0) != os.getpid():
+                log.debug('Setting pgid')
+                os.setpgid(0, 0)
+
+        if (ds := self._bootstrap.remote_config.deathsig) is not None:
+            log.debug('Setting deathsig: %s', ds)
+            set_process_deathsig(int(signal.Signals[f'SIG{ds.upper()}']))
+
+        self._timebomb_thread()
+
+        if self._bootstrap.remote_config.forward_logging:
+            log.debug('Installing log forwarder')
+            logging.root.addHandler(self._log_handler())
+
+    #
+
+    async def run(self) -> None:
+        await self._setup()
+
+        executor = self._injector[LocalCommandExecutor]
+
+        handler = _RemoteCommandHandler(self._chan, executor)
+
+        await handler.run()
+
+
+def _remote_execution_main() -> None:
+    rt = pyremote_bootstrap_finalize()  # noqa
+
+    async def inner() -> None:
+        input = await asyncio_open_stream_reader(rt.input)  # noqa
+        output = await asyncio_open_stream_writer(rt.output)
+
+        chan = RemoteChannelImpl(
+            input,
+            output,
         )

+        await _RemoteExecutionMain(chan).run()
+
+    asyncio.run(inner())
+

 ########################################
 # ../remote/spawning.py
@@ -7129,12 +8277,8 @@ class SubprocessRemoteSpawning(RemoteSpawning):
     ) -> ta.AsyncGenerator[RemoteSpawning.Spawned, None]:
         pc = self._prepare_cmd(tgt, src)

-        cmd = pc.cmd
-        if not debug:
-            cmd = subprocess_maybe_shell_wrap_exec(*cmd)
-
         async with asyncio_subprocesses.popen(
-            *cmd,
+            *pc.cmd,
             shell=pc.shell,
             stdin=subprocess.PIPE,
             stdout=subprocess.PIPE,
@@ -7456,14 +8600,14 @@ def make_deploy_tag(
     now = datetime.datetime.utcnow()  # noqa
     now_fmt = '%Y%m%dT%H%M%S'
     now_str = now.strftime(now_fmt)
-    return DeployTag('-'.join([
+    return DeployTag('-'.join([now_str, rev]))


 class DeployAppManager(DeployPathOwner):
     def __init__(
         self,
         *,
-        deploy_home: DeployHome,
+        deploy_home: ta.Optional[DeployHome] = None,
         git: DeployGitManager,
         venvs: DeployVenvManager,
     ) -> None:
@@ -7473,7 +8617,9 @@ class DeployAppManager(DeployPathOwner):
         self._git = git
         self._venvs = venvs

-
+    @cached_nullary
+    def _dir(self) -> str:
+        return os.path.join(check.non_empty_str(self._deploy_home), 'apps')

     def get_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
         return {
@@ -7482,20 +8628,16 @@ class DeployAppManager(DeployPathOwner):

     async def prepare_app(
         self,
-
-        rev: DeployRev,
-        repo: DeployGitRepo,
+        spec: DeploySpec,
     ):
-        app_tag = DeployAppTag(app, make_deploy_tag(rev))
-        app_dir = os.path.join(self._dir, app, app_tag.tag)
+        app_tag = DeployAppTag(spec.app, make_deploy_tag(spec.rev))
+        app_dir = os.path.join(self._dir(), spec.app, app_tag.tag)

         #

         await self._git.checkout(
-
-
-            rev=rev,
-        ),
+            spec.repo,
+            spec.rev,
             app_dir,
         )

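
Note: `prepare_app()` now takes a single spec object instead of separate `rev`/`repo` arguments. A hedged sketch of a call site, assuming a `DeploySpec` with `app`, `repo`, and `rev` fields as used above; the constructor arguments shown here are illustrative:

    spec = DeploySpec(
        app='myapp',
        repo=DeployGitRepo(host='github.com', path='example/myapp'),
        rev='main',
    )
    await deploy_app_manager.prepare_app(spec)
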
@@ -7505,145 +8647,122 @@ class DeployAppManager(DeployPathOwner):


 ########################################
-# ../remote/
-
-
-##
-
-
-class _RemoteExecutionLogHandler(logging.Handler):
-    def __init__(self, fn: ta.Callable[[str], None]) -> None:
-        super().__init__()
-        self._fn = fn
-
-    def emit(self, record):
-        msg = self.format(record)
-        self._fn(msg)
+# ../remote/connection.py


 ##


-class
+class PyremoteRemoteExecutionConnector:
     def __init__(
         self,
-
+        *,
+        spawning: RemoteSpawning,
+        msh: ObjMarshalerManager,
+        payload_file: ta.Optional[RemoteExecutionPayloadFile] = None,
     ) -> None:
         super().__init__()

-        self.
-
-        self.
-        self.__injector: ta.Optional[Injector] = None
-
-    @property
-    def _bootstrap(self) -> MainBootstrap:
-        return check.not_none(self.__bootstrap)
-
-    @property
-    def _injector(self) -> Injector:
-        return check.not_none(self.__injector)
+        self._spawning = spawning
+        self._msh = msh
+        self._payload_file = payload_file

     #

-    def _timebomb_main(
-        self,
-        delay_s: float,
-        *,
-        sig: int = signal.SIGINT,
-        code: int = 1,
-    ) -> None:
-        time.sleep(delay_s)
-
-        if (pgid := os.getpgid(0)) == os.getpid():
-            os.killpg(pgid, sig)
-
-        os._exit(code)  # noqa
-
     @cached_nullary
-    def
-
-        return None
-
-        thr = threading.Thread(
-            target=functools.partial(self._timebomb_main, tbd),
-            name=f'{self.__class__.__name__}.timebomb',
-            daemon=True,
-        )
-
-        thr.start()
-
-        log.debug('Started timebomb thread: %r', thr)
-
-        return thr
+    def _payload_src(self) -> str:
+        return get_remote_payload_src(file=self._payload_file)

-
+    @cached_nullary
+    def _remote_src(self) -> ta.Sequence[str]:
+        return [
+            self._payload_src(),
+            '_remote_execution_main()',
+        ]

     @cached_nullary
-    def
-        return
+    def _spawn_src(self) -> str:
+        return pyremote_build_bootstrap_cmd(__package__ or 'manage')

     #

-
-
-
-
-
-
-
-
-        if (prd := self._bootstrap.remote_config.pycharm_remote_debug) is not None:
-            pycharm_debug_connect(prd)
-
-        self.__injector = main_bootstrap(self._bootstrap)
-
-        self._chan.set_marshaler(self._injector[ObjMarshalerManager])
-
-        # Post-bootstrap
+    @contextlib.asynccontextmanager
+    async def connect(
+        self,
+        tgt: RemoteSpawning.Target,
+        bs: MainBootstrap,
+    ) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
+        spawn_src = self._spawn_src()
+        remote_src = self._remote_src()

-
-
-
-
+        async with self._spawning.spawn(
+            tgt,
+            spawn_src,
+            debug=bs.main_config.debug,
+        ) as proc:
+            res = await PyremoteBootstrapDriver(  # noqa
+                remote_src,
+                PyremoteBootstrapOptions(
+                    debug=bs.main_config.debug,
+                ),
+            ).async_run(
+                proc.stdout,
+                proc.stdin,
+            )

-
-
-
+            chan = RemoteChannelImpl(
+                proc.stdout,
+                proc.stdin,
+                msh=self._msh,
+            )

-
+            await chan.send_obj(bs)

-
-
-
+            rce: RemoteCommandExecutor
+            async with aclosing(RemoteCommandExecutor(chan)) as rce:
+                await rce.start()

-
+                yield rce

-    async def run(self) -> None:
-        await self._setup()

-
+##

-        handler = _RemoteCommandHandler(self._chan, executor)

-
+class InProcessRemoteExecutionConnector:
+    def __init__(
+        self,
+        *,
+        msh: ObjMarshalerManager,
+        local_executor: LocalCommandExecutor,
+    ) -> None:
+        super().__init__()

+        self._msh = msh
+        self._local_executor = local_executor

-
-
+    @contextlib.asynccontextmanager
+    async def connect(self) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
+        r0, w0 = asyncio_create_bytes_channel()
+        r1, w1 = asyncio_create_bytes_channel()

-
-
-        output = await asyncio_open_stream_writer(rt.output)
+        remote_chan = RemoteChannelImpl(r0, w1, msh=self._msh)
+        local_chan = RemoteChannelImpl(r1, w0, msh=self._msh)

-
-
-
+        rch = _RemoteCommandHandler(
+            remote_chan,
+            self._local_executor,
         )
+        rch_task = asyncio.create_task(rch.run())  # noqa
+        try:
+            rce: RemoteCommandExecutor
+            async with aclosing(RemoteCommandExecutor(local_chan)) as rce:
+                await rce.start()

-
+                yield rce

-
+        finally:
+            rch.stop()
+            await rch_task


 ########################################
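
Note: a minimal sketch of driving the rewritten connectors above from an injector, assuming the bindings shown elsewhere in this diff; the in-process connector wires a `_RemoteCommandHandler` and the local executor over in-memory channels, which is mainly useful for local testing:

    async def run_one(injector, command) -> None:
        connector = injector[InProcessRemoteExecutionConnector]
        async with connector.connect() as rce:
            # Hypothetical call; the executor API is assumed from context.
            result = await rce.execute(command)
            print(result)
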
@@ -7977,6 +9096,7 @@ class PyenvVersionInstaller:
             self._version,
         ]

+        full_args: ta.List[str]
         if self._given_install_name is not None:
             full_args = [
                 os.path.join(check.not_none(await self._pyenv.root()), 'plugins', 'python-build', 'bin', 'python-build'),  # noqa
@@ -8191,318 +9311,53 @@ class SystemInterpProvider(InterpProvider):
                 continue
             out.append(name)

-        return out
-
-    @cached_nullary
-    def exes(self) -> ta.List[str]:
-        return self._re_which(
-            re.compile(r'python3(\.\d+)?'),
-            path=self.path,
-        )
-
-    #
-
-    async def get_exe_version(self, exe: str) -> ta.Optional[InterpVersion]:
-        if not self.inspect:
-            s = os.path.basename(exe)
-            if s.startswith('python'):
-                s = s[len('python'):]
-            if '.' in s:
-                try:
-                    return InterpVersion.parse(s)
-                except InvalidVersion:
-                    pass
-        ii = await self.inspector.inspect(exe)
-        return ii.iv if ii is not None else None
-
-    async def exe_versions(self) -> ta.Sequence[ta.Tuple[str, InterpVersion]]:
-        lst = []
-        for e in self.exes():
-            if (ev := await self.get_exe_version(e)) is None:
-                log.debug('Invalid system version: %s', e)
-                continue
-            lst.append((e, ev))
-        return lst
-
-    #
-
-    async def get_installed_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
-        return [ev for e, ev in await self.exe_versions()]
-
-    async def get_installed_version(self, version: InterpVersion) -> Interp:
-        for e, ev in await self.exe_versions():
-            if ev != version:
-                continue
-            return Interp(
-                exe=e,
-                version=ev,
-            )
-        raise KeyError(version)
-
-
-########################################
-# ../remote/connection.py
-
-
-##
-
-
-class PyremoteRemoteExecutionConnector:
-    def __init__(
-        self,
-        *,
-        spawning: RemoteSpawning,
-        msh: ObjMarshalerManager,
-        payload_file: ta.Optional[RemoteExecutionPayloadFile] = None,
-    ) -> None:
-        super().__init__()
-
-        self._spawning = spawning
-        self._msh = msh
-        self._payload_file = payload_file
-
-    #
-
-    @cached_nullary
-    def _payload_src(self) -> str:
-        return get_remote_payload_src(file=self._payload_file)
-
-    @cached_nullary
-    def _remote_src(self) -> ta.Sequence[str]:
-        return [
-            self._payload_src(),
-            '_remote_execution_main()',
-        ]
-
-    @cached_nullary
-    def _spawn_src(self) -> str:
-        return pyremote_build_bootstrap_cmd(__package__ or 'manage')
-
-    #
-
-    @contextlib.asynccontextmanager
-    async def connect(
-        self,
-        tgt: RemoteSpawning.Target,
-        bs: MainBootstrap,
-    ) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
-        spawn_src = self._spawn_src()
-        remote_src = self._remote_src()
-
-        async with self._spawning.spawn(
-            tgt,
-            spawn_src,
-            debug=bs.main_config.debug,
-        ) as proc:
-            res = await PyremoteBootstrapDriver(  # noqa
-                remote_src,
-                PyremoteBootstrapOptions(
-                    debug=bs.main_config.debug,
-                ),
-            ).async_run(
-                proc.stdout,
-                proc.stdin,
-            )
-
-            chan = RemoteChannelImpl(
-                proc.stdout,
-                proc.stdin,
-                msh=self._msh,
-            )
-
-            await chan.send_obj(bs)
-
-            rce: RemoteCommandExecutor
-            async with aclosing(RemoteCommandExecutor(chan)) as rce:
-                await rce.start()
-
-                yield rce
-
-
-##
-
-
-class InProcessRemoteExecutionConnector:
-    def __init__(
-        self,
-        *,
-        msh: ObjMarshalerManager,
-        local_executor: LocalCommandExecutor,
-    ) -> None:
-        super().__init__()
-
-        self._msh = msh
-        self._local_executor = local_executor
-
-    @contextlib.asynccontextmanager
-    async def connect(self) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
-        r0, w0 = asyncio_create_bytes_channel()
-        r1, w1 = asyncio_create_bytes_channel()
-
-        remote_chan = RemoteChannelImpl(r0, w1, msh=self._msh)
-        local_chan = RemoteChannelImpl(r1, w0, msh=self._msh)
-
-        rch = _RemoteCommandHandler(
-            remote_chan,
-            self._local_executor,
-        )
-        rch_task = asyncio.create_task(rch.run())  # noqa
-        try:
-            rce: RemoteCommandExecutor
-            async with aclosing(RemoteCommandExecutor(local_chan)) as rce:
-                await rce.start()
-
-                yield rce
-
-        finally:
-            rch.stop()
-            await rch_task
-
-
-########################################
-# ../system/inject.py
-
-
-def bind_system(
-    *,
-    system_config: SystemConfig,
-) -> InjectorBindings:
-    lst: ta.List[InjectorBindingOrBindings] = [
-        inj.bind(system_config),
-    ]
-
-    #
-
-    platform = system_config.platform or detect_system_platform()
-    lst.append(inj.bind(platform, key=Platform))
-
-    #
-
-    if isinstance(platform, AmazonLinuxPlatform):
-        lst.extend([
-            inj.bind(YumSystemPackageManager, singleton=True),
-            inj.bind(SystemPackageManager, to_key=YumSystemPackageManager),
-        ])
-
-    elif isinstance(platform, LinuxPlatform):
-        lst.extend([
-            inj.bind(AptSystemPackageManager, singleton=True),
-            inj.bind(SystemPackageManager, to_key=AptSystemPackageManager),
-        ])
-
-    elif isinstance(platform, DarwinPlatform):
-        lst.extend([
-            inj.bind(BrewSystemPackageManager, singleton=True),
-            inj.bind(SystemPackageManager, to_key=BrewSystemPackageManager),
-        ])
-
-    #
-
-    lst.extend([
-        bind_command(CheckSystemPackageCommand, CheckSystemPackageCommandExecutor),
-    ])
-
-    #
-
-    return inj.as_bindings(*lst)
-
-
-########################################
-# ../../../omdev/interp/resolvers.py
-
-
-INTERP_PROVIDER_TYPES_BY_NAME: ta.Mapping[str, ta.Type[InterpProvider]] = {
-    cls.name: cls for cls in deep_subclasses(InterpProvider) if abc.ABC not in cls.__bases__  # type: ignore
-}
-
-
-class InterpResolver:
-    def __init__(
-        self,
-        providers: ta.Sequence[ta.Tuple[str, InterpProvider]],
-    ) -> None:
-        super().__init__()
-
-        self._providers: ta.Mapping[str, InterpProvider] = collections.OrderedDict(providers)
-
-    async def _resolve_installed(self, spec: InterpSpecifier) -> ta.Optional[ta.Tuple[InterpProvider, InterpVersion]]:
-        lst = [
-            (i, si)
-            for i, p in enumerate(self._providers.values())
-            for si in await p.get_installed_versions(spec)
-            if spec.contains(si)
-        ]
-
-        slst = sorted(lst, key=lambda t: (-t[0], t[1].version))
-        if not slst:
-            return None
-
-        bi, bv = slst[-1]
-        bp = list(self._providers.values())[bi]
-        return (bp, bv)
-
-    async def resolve(
-        self,
-        spec: InterpSpecifier,
-        *,
-        install: bool = False,
-    ) -> ta.Optional[Interp]:
-        tup = await self._resolve_installed(spec)
-        if tup is not None:
-            bp, bv = tup
-            return await bp.get_installed_version(bv)
-
-        if not install:
-            return None
-
-        tp = list(self._providers.values())[0]  # noqa
-
-        sv = sorted(
-            [s for s in await tp.get_installable_versions(spec) if s in spec],
-            key=lambda s: s.version,
-        )
-        if not sv:
-            return None
-
-        bv = sv[-1]
-        return await tp.install_version(bv)
-
-    async def list(self, spec: InterpSpecifier) -> None:
-        print('installed:')
-        for n, p in self._providers.items():
-            lst = [
-                si
-                for si in await p.get_installed_versions(spec)
-                if spec.contains(si)
-            ]
-            if lst:
-                print(f'  {n}')
-                for si in lst:
-                    print(f'    {si}')
-
-        print()
+        return out

-
-
-
-
-
-
-            ]
-            if lst:
-                print(f'  {n}')
-                for si in lst:
-                    print(f'    {si}')
+    @cached_nullary
+    def exes(self) -> ta.List[str]:
+        return self._re_which(
+            re.compile(r'python3(\.\d+)?'),
+            path=self.path,
+        )

+    #

-
-
-
+    async def get_exe_version(self, exe: str) -> ta.Optional[InterpVersion]:
+        if not self.inspect:
+            s = os.path.basename(exe)
+            if s.startswith('python'):
+                s = s[len('python'):]
+            if '.' in s:
+                try:
+                    return InterpVersion.parse(s)
+                except InvalidVersion:
+                    pass
+        ii = await self.inspector.inspect(exe)
+        return ii.iv if ii is not None else None

-
+    async def exe_versions(self) -> ta.Sequence[ta.Tuple[str, InterpVersion]]:
+        lst = []
+        for e in self.exes():
+            if (ev := await self.get_exe_version(e)) is None:
+                log.debug('Invalid system version: %s', e)
+                continue
+            lst.append((e, ev))
+        return lst

-
-
+    #
+
+    async def get_installed_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
+        return [ev for e, ev in await self.exe_versions()]
+
+    async def get_installed_version(self, version: InterpVersion) -> Interp:
+        for e, ev in await self.exe_versions():
+            if ev != version:
+                continue
+            return Interp(
+                exe=e,
+                version=ev,
+            )
+        raise KeyError(version)


 ########################################
@@ -8533,6 +9388,54 @@ def bind_remote(
     return inj.as_bindings(*lst)


+########################################
+# ../system/inject.py
+
+
+def bind_system(
+    *,
+    system_config: SystemConfig,
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = [
+        inj.bind(system_config),
+    ]
+
+    #
+
+    platform = system_config.platform or detect_system_platform()
+    lst.append(inj.bind(platform, key=Platform))
+
+    #
+
+    if isinstance(platform, AmazonLinuxPlatform):
+        lst.extend([
+            inj.bind(YumSystemPackageManager, singleton=True),
+            inj.bind(SystemPackageManager, to_key=YumSystemPackageManager),
+        ])
+
+    elif isinstance(platform, LinuxPlatform):
+        lst.extend([
+            inj.bind(AptSystemPackageManager, singleton=True),
+            inj.bind(SystemPackageManager, to_key=AptSystemPackageManager),
+        ])
+
+    elif isinstance(platform, DarwinPlatform):
+        lst.extend([
+            inj.bind(BrewSystemPackageManager, singleton=True),
+            inj.bind(SystemPackageManager, to_key=BrewSystemPackageManager),
+        ])
+
+    #
+
+    lst.extend([
+        bind_command(CheckSystemPackageCommand, CheckSystemPackageCommandExecutor),
+    ])
+
+    #
+
+    return inj.as_bindings(*lst)
+
+
 ########################################
 # ../targets/connection.py

@@ -8668,33 +9571,101 @@ class SshManageTargetConnector(ManageTargetConnector):


 ########################################
-#
+# ../../../omdev/interp/resolvers.py


-
+INTERP_PROVIDER_TYPES_BY_NAME: ta.Mapping[str, ta.Type[InterpProvider]] = {
+    cls.name: cls for cls in deep_subclasses(InterpProvider) if abc.ABC not in cls.__bases__  # type: ignore
+}


-
-
-
-
+class InterpResolver:
+    def __init__(
+        self,
+        providers: ta.Sequence[ta.Tuple[str, InterpProvider]],
+    ) -> None:
+        super().__init__()

-
-    class Output(Command.Output):
-        exe: str
-        version: str
-        opts: InterpOpts
+        self._providers: ta.Mapping[str, InterpProvider] = collections.OrderedDict(providers)

+    async def _resolve_installed(self, spec: InterpSpecifier) -> ta.Optional[ta.Tuple[InterpProvider, InterpVersion]]:
+        lst = [
+            (i, si)
+            for i, p in enumerate(self._providers.values())
+            for si in await p.get_installed_versions(spec)
+            if spec.contains(si)
+        ]

-
-
-
-
-
-
-
-
+        slst = sorted(lst, key=lambda t: (-t[0], t[1].version))
+        if not slst:
+            return None
+
+        bi, bv = slst[-1]
+        bp = list(self._providers.values())[bi]
+        return (bp, bv)
+
+    async def resolve(
+        self,
+        spec: InterpSpecifier,
+        *,
+        install: bool = False,
+    ) -> ta.Optional[Interp]:
+        tup = await self._resolve_installed(spec)
+        if tup is not None:
+            bp, bv = tup
+            return await bp.get_installed_version(bv)
+
+        if not install:
+            return None
+
+        tp = list(self._providers.values())[0]  # noqa
+
+        sv = sorted(
+            [s for s in await tp.get_installable_versions(spec) if s in spec],
+            key=lambda s: s.version,
         )
+        if not sv:
+            return None
+
+        bv = sv[-1]
+        return await tp.install_version(bv)
+
+    async def list(self, spec: InterpSpecifier) -> None:
+        print('installed:')
+        for n, p in self._providers.items():
+            lst = [
+                si
+                for si in await p.get_installed_versions(spec)
+                if spec.contains(si)
+            ]
+            if lst:
+                print(f'  {n}')
+                for si in lst:
+                    print(f'    {si}')
+
+        print()
+
+        print('installable:')
+        for n, p in self._providers.items():
+            lst = [
+                si
+                for si in await p.get_installable_versions(spec)
+                if spec.contains(si)
+            ]
+            if lst:
+                print(f'  {n}')
+                for si in lst:
+                    print(f'    {si}')
+
+
+DEFAULT_INTERP_RESOLVER = InterpResolver([(p.name, p) for p in [
+    # pyenv is preferred to system interpreters as it tends to have more support for things like tkinter
+    PyenvInterpProvider(try_update=True),
+
+    RunningInterpProvider(),
+
+    SystemInterpProvider(),
+]])


 ########################################
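
Note: a short sketch of resolving an interpreter through the re-homed resolver code above, assuming the `DEFAULT_INTERP_RESOLVER` instance defined in this diff:

    async def find_python() -> None:
        interp = await DEFAULT_INTERP_RESOLVER.resolve(
            InterpSpecifier.parse('3.12'),
            install=False,  # set True to let the first provider (pyenv) install it
        )
        print(interp.exe if interp is not None else 'not found')
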
@@ -8726,6 +9697,36 @@ def bind_targets() -> InjectorBindings:
     return inj.as_bindings(*lst)


+########################################
+# ../deploy/interp.py
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class InterpCommand(Command['InterpCommand.Output']):
+    spec: str
+    install: bool = False
+
+    @dc.dataclass(frozen=True)
+    class Output(Command.Output):
+        exe: str
+        version: str
+        opts: InterpOpts
+
+
+class InterpCommandExecutor(CommandExecutor[InterpCommand, InterpCommand.Output]):
+    async def execute(self, cmd: InterpCommand) -> InterpCommand.Output:
+        i = InterpSpecifier.parse(check.not_none(cmd.spec))
+        o = check.not_none(await DEFAULT_INTERP_RESOLVER.resolve(i, install=cmd.install))
+        return InterpCommand.Output(
+            exe=o.exe,
+            version=str(o.version.version),
+            opts=o.version.opts,
+        )
+
+
 ########################################
 # ../deploy/inject.py

@@ -8842,7 +9843,28 @@ def main_bootstrap(bs: MainBootstrap) -> Injector:
 # main.py


+@dc.dataclass(frozen=True)
+class ManageConfig:
+    targets: ta.Optional[ta.Mapping[str, ManageTarget]] = None
+
+
 class MainCli(ArgparseCli):
+    config_file: ta.Optional[str] = argparse_arg('--config-file', help='Config file path')  # type: ignore
+
+    @cached_nullary
+    def config(self) -> ManageConfig:
+        if (cf := self.config_file) is None:
+            cf = os.path.expanduser('~/.omlish/manage.yml')
+            if not os.path.isfile(cf):
+                cf = None
+
+        if cf is None:
+            return ManageConfig()
+        else:
+            return read_config_file(cf, ManageConfig)
+
+    #
+
     @argparse_command(
         argparse_arg('--_payload-file'),

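
Note: the new `--config-file` handling falls back to `~/.omlish/manage.yml`. A hedged sketch of loading and inspecting such a config programmatically, assuming `read_config_file` accepts a path and a dataclass type as shown above:

    cfg = read_config_file(os.path.expanduser('~/.omlish/manage.yml'), ManageConfig)
    for name, tgt in (cfg.targets or {}).items():
        print(name, type(tgt).__name__)
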
@@ -8894,10 +9916,13 @@ class MainCli(ArgparseCli):

         msh = injector[ObjMarshalerManager]

-
-        if not ts.startswith('{'):
-
-
+        tgt: ManageTarget
+        if not (ts := self.args.target).startswith('{'):
+            tgt = check.not_none(self.config().targets)[ts]
+        else:
+            tgt = msh.unmarshal_obj(json.loads(ts), ManageTarget)
+
+        #

         cmds: ta.List[Command] = []
         cmd: Command