ominfra 0.0.0.dev155__py3-none-any.whl → 0.0.0.dev157__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
ominfra/scripts/manage.py CHANGED
@@ -37,6 +37,7 @@ import shlex
37
37
  import shutil
38
38
  import signal
39
39
  import site
40
+ import string
40
41
  import struct
41
42
  import subprocess
42
43
  import sys
@@ -68,6 +69,11 @@ VersionCmpLocalType = ta.Union['NegativeInfinityVersionType', _VersionCmpLocalTy
68
69
  VersionCmpKey = ta.Tuple[int, ta.Tuple[int, ...], VersionCmpPrePostDevType, VersionCmpPrePostDevType, VersionCmpPrePostDevType, VersionCmpLocalType] # noqa
69
70
  VersionComparisonMethod = ta.Callable[[VersionCmpKey, VersionCmpKey], bool]
70
71
 
72
+ # ../../omdev/toml/parser.py
73
+ TomlParseFloat = ta.Callable[[str], ta.Any]
74
+ TomlKey = ta.Tuple[str, ...]
75
+ TomlPos = int # ta.TypeAlias
76
+
71
77
  # ../../omlish/asyncs/asyncio/timeouts.py
72
78
  AwaitableT = ta.TypeVar('AwaitableT', bound=ta.Awaitable)
73
79
 
@@ -109,6 +115,9 @@ InjectorProviderFn = ta.Callable[['Injector'], ta.Any]
109
115
  InjectorProviderFnMap = ta.Mapping['InjectorKey', 'InjectorProviderFn']
110
116
  InjectorBindingOrBindings = ta.Union['InjectorBinding', 'InjectorBindings']
111
117
 
118
+ # ../configs.py
119
+ ConfigMapping = ta.Mapping[str, ta.Any]
120
+
112
121
  # ../../omlish/lite/subprocesses.py
113
122
  SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull'] # ta.TypeAlias
114
123
 
@@ -523,6 +532,824 @@ def canonicalize_version(
523
532
  return ''.join(parts)
524
533
 
525
534
 
535
+ ########################################
536
+ # ../../../omdev/toml/parser.py
537
+ # SPDX-License-Identifier: MIT
538
+ # SPDX-FileCopyrightText: 2021 Taneli Hukkinen
539
+ # Licensed to PSF under a Contributor Agreement.
540
+ #
541
+ # PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
542
+ # --------------------------------------------
543
+ #
544
+ # 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization
545
+ # ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated
546
+ # documentation.
547
+ #
548
+ # 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive,
549
+ # royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative
550
+ # works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License
551
+ # Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
552
+ # 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; All
553
+ # Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee.
554
+ #
555
+ # 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and
556
+ # wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in
557
+ # any such work a brief summary of the changes made to Python.
558
+ #
559
+ # 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES,
560
+ # EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY
561
+ # OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY
562
+ # RIGHTS.
563
+ #
564
+ # 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL
565
+ # DAMAGES OR LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF
566
+ # ADVISED OF THE POSSIBILITY THEREOF.
567
+ #
568
+ # 6. This License Agreement will automatically terminate upon a material breach of its terms and conditions.
569
+ #
570
+ # 7. Nothing in this License Agreement shall be deemed to create any relationship of agency, partnership, or joint
571
+ # venture between PSF and Licensee. This License Agreement does not grant permission to use PSF trademarks or trade
572
+ # name in a trademark sense to endorse or promote products or services of Licensee, or any third party.
573
+ #
574
+ # 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this
575
+ # License Agreement.
576
+ #
577
+ # https://github.com/python/cpython/blob/9ce90206b7a4649600218cf0bd4826db79c9a312/Lib/tomllib/_parser.py
578
+
579
+
580
+ ##
581
+
582
+
583
+ _TOML_TIME_RE_STR = r'([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?'
584
+
585
+ TOML_RE_NUMBER = re.compile(
586
+ r"""
587
+ 0
588
+ (?:
589
+ x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex
590
+ |
591
+ b[01](?:_?[01])* # bin
592
+ |
593
+ o[0-7](?:_?[0-7])* # oct
594
+ )
595
+ |
596
+ [+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part
597
+ (?P<floatpart>
598
+ (?:\.[0-9](?:_?[0-9])*)? # optional fractional part
599
+ (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part
600
+ )
601
+ """,
602
+ flags=re.VERBOSE,
603
+ )
604
+ TOML_RE_LOCALTIME = re.compile(_TOML_TIME_RE_STR)
605
+ TOML_RE_DATETIME = re.compile(
606
+ rf"""
607
+ ([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27
608
+ (?:
609
+ [Tt ]
610
+ {_TOML_TIME_RE_STR}
611
+ (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset
612
+ )?
613
+ """,
614
+ flags=re.VERBOSE,
615
+ )
616
+
617
+
618
+ def toml_match_to_datetime(match: re.Match) -> ta.Union[datetime.datetime, datetime.date]:
619
+ """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`.
620
+
621
+ Raises ValueError if the match does not correspond to a valid date or datetime.
622
+ """
623
+ (
624
+ year_str,
625
+ month_str,
626
+ day_str,
627
+ hour_str,
628
+ minute_str,
629
+ sec_str,
630
+ micros_str,
631
+ zulu_time,
632
+ offset_sign_str,
633
+ offset_hour_str,
634
+ offset_minute_str,
635
+ ) = match.groups()
636
+ year, month, day = int(year_str), int(month_str), int(day_str)
637
+ if hour_str is None:
638
+ return datetime.date(year, month, day)
639
+ hour, minute, sec = int(hour_str), int(minute_str), int(sec_str)
640
+ micros = int(micros_str.ljust(6, '0')) if micros_str else 0
641
+ if offset_sign_str:
642
+ tz: ta.Optional[datetime.tzinfo] = toml_cached_tz(
643
+ offset_hour_str, offset_minute_str, offset_sign_str,
644
+ )
645
+ elif zulu_time:
646
+ tz = datetime.UTC
647
+ else: # local date-time
648
+ tz = None
649
+ return datetime.datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz)
650
+
651
+
652
+ @functools.lru_cache() # noqa
653
+ def toml_cached_tz(hour_str: str, minute_str: str, sign_str: str) -> datetime.timezone:
654
+ sign = 1 if sign_str == '+' else -1
655
+ return datetime.timezone(
656
+ datetime.timedelta(
657
+ hours=sign * int(hour_str),
658
+ minutes=sign * int(minute_str),
659
+ ),
660
+ )
661
+
662
+
663
+ def toml_match_to_localtime(match: re.Match) -> datetime.time:
664
+ hour_str, minute_str, sec_str, micros_str = match.groups()
665
+ micros = int(micros_str.ljust(6, '0')) if micros_str else 0
666
+ return datetime.time(int(hour_str), int(minute_str), int(sec_str), micros)
667
+
668
+
669
+ def toml_match_to_number(match: re.Match, parse_float: TomlParseFloat) -> ta.Any:
670
+ if match.group('floatpart'):
671
+ return parse_float(match.group())
672
+ return int(match.group(), 0)
673
+
674
+
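
Taken together, the regexes and match-to-value helpers above turn the three lexical shapes handled here (date-times, local times, numbers) into Python objects. A small illustrative sketch, assuming Python 3.11+ for `datetime.UTC` (the literals are examples, not taken from the diff):

    m = TOML_RE_DATETIME.match('1988-10-27T12:30:00Z')
    toml_match_to_datetime(m)                                     # datetime.datetime(1988, 10, 27, 12, 30, tzinfo=datetime.UTC)
    toml_match_to_datetime(TOML_RE_DATETIME.match('1988-10-27'))  # datetime.date(1988, 10, 27)

    toml_match_to_number(TOML_RE_NUMBER.match('0xff'), float)     # 255 (int; base auto-detected via int(..., 0))
    toml_match_to_number(TOML_RE_NUMBER.match('1_000.5'), float)  # 1000.5 ('floatpart' group matched)
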
675
+ TOML_ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))
676
+
677
+ # Neither of these sets includes the quotation mark or backslash; they are currently handled as separate cases in the parser
678
+ # functions.
679
+ TOML_ILLEGAL_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t')
680
+ TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS = TOML_ASCII_CTRL - frozenset('\t\n')
681
+
682
+ TOML_ILLEGAL_LITERAL_STR_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS
683
+ TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS
684
+
685
+ TOML_ILLEGAL_COMMENT_CHARS = TOML_ILLEGAL_BASIC_STR_CHARS
686
+
687
+ TOML_WS = frozenset(' \t')
688
+ TOML_WS_AND_NEWLINE = TOML_WS | frozenset('\n')
689
+ TOML_BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + '-_')
690
+ TOML_KEY_INITIAL_CHARS = TOML_BARE_KEY_CHARS | frozenset("\"'")
691
+ TOML_HEXDIGIT_CHARS = frozenset(string.hexdigits)
692
+
693
+ TOML_BASIC_STR_ESCAPE_REPLACEMENTS = types.MappingProxyType(
694
+ {
695
+ '\\b': '\u0008', # backspace
696
+ '\\t': '\u0009', # tab
697
+ '\\n': '\u000A', # linefeed
698
+ '\\f': '\u000C', # form feed
699
+ '\\r': '\u000D', # carriage return
700
+ '\\"': '\u0022', # quote
701
+ '\\\\': '\u005C', # backslash
702
+ },
703
+ )
704
+
705
+
706
+ class TomlDecodeError(ValueError):
707
+ """An error raised if a document is not valid TOML."""
708
+
709
+
710
+ def toml_load(fp: ta.BinaryIO, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]:
711
+ """Parse TOML from a binary file object."""
712
+ b = fp.read()
713
+ try:
714
+ s = b.decode()
715
+ except AttributeError:
716
+ raise TypeError("File must be opened in binary mode, e.g. use `open('foo.toml', 'rb')`") from None
717
+ return toml_loads(s, parse_float=parse_float)
718
+
719
+
720
+ def toml_loads(s: str, /, *, parse_float: TomlParseFloat = float) -> ta.Dict[str, ta.Any]: # noqa: C901
721
+ """Parse TOML from a string."""
722
+
723
+ # The spec allows converting "\r\n" to "\n", even in string literals. Let's do so to simplify parsing.
724
+ try:
725
+ src = s.replace('\r\n', '\n')
726
+ except (AttributeError, TypeError):
727
+ raise TypeError(f"Expected str object, not '{type(s).__qualname__}'") from None
728
+ pos = 0
729
+ out = TomlOutput(TomlNestedDict(), TomlFlags())
730
+ header: TomlKey = ()
731
+ parse_float = toml_make_safe_parse_float(parse_float)
732
+
733
+ # Parse one statement at a time (typically means one line in TOML source)
734
+ while True:
735
+ # 1. Skip line leading whitespace
736
+ pos = toml_skip_chars(src, pos, TOML_WS)
737
+
738
+ # 2. Parse rules. Expect one of the following:
739
+ # - end of file
740
+ # - end of line
741
+ # - comment
742
+ # - key/value pair
743
+ # - append dict to list (and move to its namespace)
744
+ # - create dict (and move to its namespace)
745
+ # Skip trailing whitespace when applicable.
746
+ try:
747
+ char = src[pos]
748
+ except IndexError:
749
+ break
750
+ if char == '\n':
751
+ pos += 1
752
+ continue
753
+ if char in TOML_KEY_INITIAL_CHARS:
754
+ pos = toml_key_value_rule(src, pos, out, header, parse_float)
755
+ pos = toml_skip_chars(src, pos, TOML_WS)
756
+ elif char == '[':
757
+ try:
758
+ second_char: ta.Optional[str] = src[pos + 1]
759
+ except IndexError:
760
+ second_char = None
761
+ out.flags.finalize_pending()
762
+ if second_char == '[':
763
+ pos, header = toml_create_list_rule(src, pos, out)
764
+ else:
765
+ pos, header = toml_create_dict_rule(src, pos, out)
766
+ pos = toml_skip_chars(src, pos, TOML_WS)
767
+ elif char != '#':
768
+ raise toml_suffixed_err(src, pos, 'Invalid statement')
769
+
770
+ # 3. Skip comment
771
+ pos = toml_skip_comment(src, pos)
772
+
773
+ # 4. Expect end of line or end of file
774
+ try:
775
+ char = src[pos]
776
+ except IndexError:
777
+ break
778
+ if char != '\n':
779
+ raise toml_suffixed_err(
780
+ src, pos, 'Expected newline or end of document after a statement',
781
+ )
782
+ pos += 1
783
+
784
+ return out.data.dict
785
+
786
+
787
+ class TomlFlags:
788
+ """Flags that map to parsed keys/namespaces."""
789
+
790
+ # Marks an immutable namespace (inline array or inline table).
791
+ FROZEN = 0
792
+ # Marks a nest that has been explicitly created and can no longer be opened using the "[table]" syntax.
793
+ EXPLICIT_NEST = 1
794
+
795
+ def __init__(self) -> None:
796
+ self._flags: ta.Dict[str, dict] = {}
797
+ self._pending_flags: ta.Set[ta.Tuple[TomlKey, int]] = set()
798
+
799
+ def add_pending(self, key: TomlKey, flag: int) -> None:
800
+ self._pending_flags.add((key, flag))
801
+
802
+ def finalize_pending(self) -> None:
803
+ for key, flag in self._pending_flags:
804
+ self.set(key, flag, recursive=False)
805
+ self._pending_flags.clear()
806
+
807
+ def unset_all(self, key: TomlKey) -> None:
808
+ cont = self._flags
809
+ for k in key[:-1]:
810
+ if k not in cont:
811
+ return
812
+ cont = cont[k]['nested']
813
+ cont.pop(key[-1], None)
814
+
815
+ def set(self, key: TomlKey, flag: int, *, recursive: bool) -> None: # noqa: A003
816
+ cont = self._flags
817
+ key_parent, key_stem = key[:-1], key[-1]
818
+ for k in key_parent:
819
+ if k not in cont:
820
+ cont[k] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
821
+ cont = cont[k]['nested']
822
+ if key_stem not in cont:
823
+ cont[key_stem] = {'flags': set(), 'recursive_flags': set(), 'nested': {}}
824
+ cont[key_stem]['recursive_flags' if recursive else 'flags'].add(flag)
825
+
826
+ def is_(self, key: TomlKey, flag: int) -> bool:
827
+ if not key:
828
+ return False # document root has no flags
829
+ cont = self._flags
830
+ for k in key[:-1]:
831
+ if k not in cont:
832
+ return False
833
+ inner_cont = cont[k]
834
+ if flag in inner_cont['recursive_flags']:
835
+ return True
836
+ cont = inner_cont['nested']
837
+ key_stem = key[-1]
838
+ if key_stem in cont:
839
+ cont = cont[key_stem]
840
+ return flag in cont['flags'] or flag in cont['recursive_flags']
841
+ return False
842
+
843
+
844
+ class TomlNestedDict:
845
+ def __init__(self) -> None:
846
+ # The parsed content of the TOML document
847
+ self.dict: ta.Dict[str, ta.Any] = {}
848
+
849
+ def get_or_create_nest(
850
+ self,
851
+ key: TomlKey,
852
+ *,
853
+ access_lists: bool = True,
854
+ ) -> dict:
855
+ cont: ta.Any = self.dict
856
+ for k in key:
857
+ if k not in cont:
858
+ cont[k] = {}
859
+ cont = cont[k]
860
+ if access_lists and isinstance(cont, list):
861
+ cont = cont[-1]
862
+ if not isinstance(cont, dict):
863
+ raise KeyError('There is no nest behind this key')
864
+ return cont
865
+
866
+ def append_nest_to_list(self, key: TomlKey) -> None:
867
+ cont = self.get_or_create_nest(key[:-1])
868
+ last_key = key[-1]
869
+ if last_key in cont:
870
+ list_ = cont[last_key]
871
+ if not isinstance(list_, list):
872
+ raise KeyError('An object other than list found behind this key')
873
+ list_.append({})
874
+ else:
875
+ cont[last_key] = [{}]
876
+
877
+
878
+ class TomlOutput(ta.NamedTuple):
879
+ data: TomlNestedDict
880
+ flags: TomlFlags
881
+
882
+
883
+ def toml_skip_chars(src: str, pos: TomlPos, chars: ta.Iterable[str]) -> TomlPos:
884
+ try:
885
+ while src[pos] in chars:
886
+ pos += 1
887
+ except IndexError:
888
+ pass
889
+ return pos
890
+
891
+
892
+ def toml_skip_until(
893
+ src: str,
894
+ pos: TomlPos,
895
+ expect: str,
896
+ *,
897
+ error_on: ta.FrozenSet[str],
898
+ error_on_eof: bool,
899
+ ) -> TomlPos:
900
+ try:
901
+ new_pos = src.index(expect, pos)
902
+ except ValueError:
903
+ new_pos = len(src)
904
+ if error_on_eof:
905
+ raise toml_suffixed_err(src, new_pos, f'Expected {expect!r}') from None
906
+
907
+ if not error_on.isdisjoint(src[pos:new_pos]):
908
+ while src[pos] not in error_on:
909
+ pos += 1
910
+ raise toml_suffixed_err(src, pos, f'Found invalid character {src[pos]!r}')
911
+ return new_pos
912
+
913
+
914
+ def toml_skip_comment(src: str, pos: TomlPos) -> TomlPos:
915
+ try:
916
+ char: ta.Optional[str] = src[pos]
917
+ except IndexError:
918
+ char = None
919
+ if char == '#':
920
+ return toml_skip_until(
921
+ src, pos + 1, '\n', error_on=TOML_ILLEGAL_COMMENT_CHARS, error_on_eof=False,
922
+ )
923
+ return pos
924
+
925
+
926
+ def toml_skip_comments_and_array_ws(src: str, pos: TomlPos) -> TomlPos:
927
+ while True:
928
+ pos_before_skip = pos
929
+ pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
930
+ pos = toml_skip_comment(src, pos)
931
+ if pos == pos_before_skip:
932
+ return pos
933
+
934
+
935
+ def toml_create_dict_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
936
+ pos += 1 # Skip "["
937
+ pos = toml_skip_chars(src, pos, TOML_WS)
938
+ pos, key = toml_parse_key(src, pos)
939
+
940
+ if out.flags.is_(key, TomlFlags.EXPLICIT_NEST) or out.flags.is_(key, TomlFlags.FROZEN):
941
+ raise toml_suffixed_err(src, pos, f'Cannot declare {key} twice')
942
+ out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
943
+ try:
944
+ out.data.get_or_create_nest(key)
945
+ except KeyError:
946
+ raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
947
+
948
+ if not src.startswith(']', pos):
949
+ raise toml_suffixed_err(src, pos, "Expected ']' at the end of a table declaration")
950
+ return pos + 1, key
951
+
952
+
953
+ def toml_create_list_rule(src: str, pos: TomlPos, out: TomlOutput) -> ta.Tuple[TomlPos, TomlKey]:
954
+ pos += 2 # Skip "[["
955
+ pos = toml_skip_chars(src, pos, TOML_WS)
956
+ pos, key = toml_parse_key(src, pos)
957
+
958
+ if out.flags.is_(key, TomlFlags.FROZEN):
959
+ raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
960
+ # Free the namespace now that it points to another empty list item...
961
+ out.flags.unset_all(key)
962
+ # ...but this key precisely is still prohibited from table declaration
963
+ out.flags.set(key, TomlFlags.EXPLICIT_NEST, recursive=False)
964
+ try:
965
+ out.data.append_nest_to_list(key)
966
+ except KeyError:
967
+ raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
968
+
969
+ if not src.startswith(']]', pos):
970
+ raise toml_suffixed_err(src, pos, "Expected ']]' at the end of an array declaration")
971
+ return pos + 2, key
972
+
973
+
974
+ def toml_key_value_rule(
975
+ src: str,
976
+ pos: TomlPos,
977
+ out: TomlOutput,
978
+ header: TomlKey,
979
+ parse_float: TomlParseFloat,
980
+ ) -> TomlPos:
981
+ pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
982
+ key_parent, key_stem = key[:-1], key[-1]
983
+ abs_key_parent = header + key_parent
984
+
985
+ relative_path_cont_keys = (header + key[:i] for i in range(1, len(key)))
986
+ for cont_key in relative_path_cont_keys:
987
+ # Check that dotted key syntax does not redefine an existing table
988
+ if out.flags.is_(cont_key, TomlFlags.EXPLICIT_NEST):
989
+ raise toml_suffixed_err(src, pos, f'Cannot redefine namespace {cont_key}')
990
+ # Containers in the relative path can't be opened with the table syntax or dotted key/value syntax in following
991
+ # table sections.
992
+ out.flags.add_pending(cont_key, TomlFlags.EXPLICIT_NEST)
993
+
994
+ if out.flags.is_(abs_key_parent, TomlFlags.FROZEN):
995
+ raise toml_suffixed_err(
996
+ src,
997
+ pos,
998
+ f'Cannot mutate immutable namespace {abs_key_parent}',
999
+ )
1000
+
1001
+ try:
1002
+ nest = out.data.get_or_create_nest(abs_key_parent)
1003
+ except KeyError:
1004
+ raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
1005
+ if key_stem in nest:
1006
+ raise toml_suffixed_err(src, pos, 'Cannot overwrite a value')
1007
+ # Mark inline table and array namespaces recursively immutable
1008
+ if isinstance(value, (dict, list)):
1009
+ out.flags.set(header + key, TomlFlags.FROZEN, recursive=True)
1010
+ nest[key_stem] = value
1011
+ return pos
1012
+
1013
+
1014
+ def toml_parse_key_value_pair(
1015
+ src: str,
1016
+ pos: TomlPos,
1017
+ parse_float: TomlParseFloat,
1018
+ ) -> ta.Tuple[TomlPos, TomlKey, ta.Any]:
1019
+ pos, key = toml_parse_key(src, pos)
1020
+ try:
1021
+ char: ta.Optional[str] = src[pos]
1022
+ except IndexError:
1023
+ char = None
1024
+ if char != '=':
1025
+ raise toml_suffixed_err(src, pos, "Expected '=' after a key in a key/value pair")
1026
+ pos += 1
1027
+ pos = toml_skip_chars(src, pos, TOML_WS)
1028
+ pos, value = toml_parse_value(src, pos, parse_float)
1029
+ return pos, key, value
1030
+
1031
+
1032
+ def toml_parse_key(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, TomlKey]:
1033
+ pos, key_part = toml_parse_key_part(src, pos)
1034
+ key: TomlKey = (key_part,)
1035
+ pos = toml_skip_chars(src, pos, TOML_WS)
1036
+ while True:
1037
+ try:
1038
+ char: ta.Optional[str] = src[pos]
1039
+ except IndexError:
1040
+ char = None
1041
+ if char != '.':
1042
+ return pos, key
1043
+ pos += 1
1044
+ pos = toml_skip_chars(src, pos, TOML_WS)
1045
+ pos, key_part = toml_parse_key_part(src, pos)
1046
+ key += (key_part,)
1047
+ pos = toml_skip_chars(src, pos, TOML_WS)
1048
+
1049
+
1050
+ def toml_parse_key_part(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
1051
+ try:
1052
+ char: ta.Optional[str] = src[pos]
1053
+ except IndexError:
1054
+ char = None
1055
+ if char in TOML_BARE_KEY_CHARS:
1056
+ start_pos = pos
1057
+ pos = toml_skip_chars(src, pos, TOML_BARE_KEY_CHARS)
1058
+ return pos, src[start_pos:pos]
1059
+ if char == "'":
1060
+ return toml_parse_literal_str(src, pos)
1061
+ if char == '"':
1062
+ return toml_parse_one_line_basic_str(src, pos)
1063
+ raise toml_suffixed_err(src, pos, 'Invalid initial character for a key part')
1064
+
1065
+
1066
+ def toml_parse_one_line_basic_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
1067
+ pos += 1
1068
+ return toml_parse_basic_str(src, pos, multiline=False)
1069
+
1070
+
1071
+ def toml_parse_array(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, list]:
1072
+ pos += 1
1073
+ array: list = []
1074
+
1075
+ pos = toml_skip_comments_and_array_ws(src, pos)
1076
+ if src.startswith(']', pos):
1077
+ return pos + 1, array
1078
+ while True:
1079
+ pos, val = toml_parse_value(src, pos, parse_float)
1080
+ array.append(val)
1081
+ pos = toml_skip_comments_and_array_ws(src, pos)
1082
+
1083
+ c = src[pos:pos + 1]
1084
+ if c == ']':
1085
+ return pos + 1, array
1086
+ if c != ',':
1087
+ raise toml_suffixed_err(src, pos, 'Unclosed array')
1088
+ pos += 1
1089
+
1090
+ pos = toml_skip_comments_and_array_ws(src, pos)
1091
+ if src.startswith(']', pos):
1092
+ return pos + 1, array
1093
+
1094
+
1095
+ def toml_parse_inline_table(src: str, pos: TomlPos, parse_float: TomlParseFloat) -> ta.Tuple[TomlPos, dict]:
1096
+ pos += 1
1097
+ nested_dict = TomlNestedDict()
1098
+ flags = TomlFlags()
1099
+
1100
+ pos = toml_skip_chars(src, pos, TOML_WS)
1101
+ if src.startswith('}', pos):
1102
+ return pos + 1, nested_dict.dict
1103
+ while True:
1104
+ pos, key, value = toml_parse_key_value_pair(src, pos, parse_float)
1105
+ key_parent, key_stem = key[:-1], key[-1]
1106
+ if flags.is_(key, TomlFlags.FROZEN):
1107
+ raise toml_suffixed_err(src, pos, f'Cannot mutate immutable namespace {key}')
1108
+ try:
1109
+ nest = nested_dict.get_or_create_nest(key_parent, access_lists=False)
1110
+ except KeyError:
1111
+ raise toml_suffixed_err(src, pos, 'Cannot overwrite a value') from None
1112
+ if key_stem in nest:
1113
+ raise toml_suffixed_err(src, pos, f'Duplicate inline table key {key_stem!r}')
1114
+ nest[key_stem] = value
1115
+ pos = toml_skip_chars(src, pos, TOML_WS)
1116
+ c = src[pos:pos + 1]
1117
+ if c == '}':
1118
+ return pos + 1, nested_dict.dict
1119
+ if c != ',':
1120
+ raise toml_suffixed_err(src, pos, 'Unclosed inline table')
1121
+ if isinstance(value, (dict, list)):
1122
+ flags.set(key, TomlFlags.FROZEN, recursive=True)
1123
+ pos += 1
1124
+ pos = toml_skip_chars(src, pos, TOML_WS)
1125
+
1126
+
1127
+ def toml_parse_basic_str_escape(
1128
+ src: str,
1129
+ pos: TomlPos,
1130
+ *,
1131
+ multiline: bool = False,
1132
+ ) -> ta.Tuple[TomlPos, str]:
1133
+ escape_id = src[pos:pos + 2]
1134
+ pos += 2
1135
+ if multiline and escape_id in {'\\ ', '\\\t', '\\\n'}:
1136
+ # Skip whitespace until next non-whitespace character or end of the doc. Error if non-whitespace is found before
1137
+ # newline.
1138
+ if escape_id != '\\\n':
1139
+ pos = toml_skip_chars(src, pos, TOML_WS)
1140
+ try:
1141
+ char = src[pos]
1142
+ except IndexError:
1143
+ return pos, ''
1144
+ if char != '\n':
1145
+ raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string")
1146
+ pos += 1
1147
+ pos = toml_skip_chars(src, pos, TOML_WS_AND_NEWLINE)
1148
+ return pos, ''
1149
+ if escape_id == '\\u':
1150
+ return toml_parse_hex_char(src, pos, 4)
1151
+ if escape_id == '\\U':
1152
+ return toml_parse_hex_char(src, pos, 8)
1153
+ try:
1154
+ return pos, TOML_BASIC_STR_ESCAPE_REPLACEMENTS[escape_id]
1155
+ except KeyError:
1156
+ raise toml_suffixed_err(src, pos, "Unescaped '\\' in a string") from None
1157
+
1158
+
1159
+ def toml_parse_basic_str_escape_multiline(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
1160
+ return toml_parse_basic_str_escape(src, pos, multiline=True)
1161
+
1162
+
1163
+ def toml_parse_hex_char(src: str, pos: TomlPos, hex_len: int) -> ta.Tuple[TomlPos, str]:
1164
+ hex_str = src[pos:pos + hex_len]
1165
+ if len(hex_str) != hex_len or not TOML_HEXDIGIT_CHARS.issuperset(hex_str):
1166
+ raise toml_suffixed_err(src, pos, 'Invalid hex value')
1167
+ pos += hex_len
1168
+ hex_int = int(hex_str, 16)
1169
+ if not toml_is_unicode_scalar_value(hex_int):
1170
+ raise toml_suffixed_err(src, pos, 'Escaped character is not a Unicode scalar value')
1171
+ return pos, chr(hex_int)
1172
+
1173
+
1174
+ def toml_parse_literal_str(src: str, pos: TomlPos) -> ta.Tuple[TomlPos, str]:
1175
+ pos += 1 # Skip starting apostrophe
1176
+ start_pos = pos
1177
+ pos = toml_skip_until(
1178
+ src, pos, "'", error_on=TOML_ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True,
1179
+ )
1180
+ return pos + 1, src[start_pos:pos] # Skip ending apostrophe
1181
+
1182
+
1183
+ def toml_parse_multiline_str(src: str, pos: TomlPos, *, literal: bool) -> ta.Tuple[TomlPos, str]:
1184
+ pos += 3
1185
+ if src.startswith('\n', pos):
1186
+ pos += 1
1187
+
1188
+ if literal:
1189
+ delim = "'"
1190
+ end_pos = toml_skip_until(
1191
+ src,
1192
+ pos,
1193
+ "'''",
1194
+ error_on=TOML_ILLEGAL_MULTILINE_LITERAL_STR_CHARS,
1195
+ error_on_eof=True,
1196
+ )
1197
+ result = src[pos:end_pos]
1198
+ pos = end_pos + 3
1199
+ else:
1200
+ delim = '"'
1201
+ pos, result = toml_parse_basic_str(src, pos, multiline=True)
1202
+
1203
+ # Add at maximum two extra apostrophes/quotes if the end sequence is 4 or 5 chars long instead of just 3.
1204
+ if not src.startswith(delim, pos):
1205
+ return pos, result
1206
+ pos += 1
1207
+ if not src.startswith(delim, pos):
1208
+ return pos, result + delim
1209
+ pos += 1
1210
+ return pos, result + (delim * 2)
1211
+
1212
+
1213
+ def toml_parse_basic_str(src: str, pos: TomlPos, *, multiline: bool) -> ta.Tuple[TomlPos, str]:
1214
+ if multiline:
1215
+ error_on = TOML_ILLEGAL_MULTILINE_BASIC_STR_CHARS
1216
+ parse_escapes = toml_parse_basic_str_escape_multiline
1217
+ else:
1218
+ error_on = TOML_ILLEGAL_BASIC_STR_CHARS
1219
+ parse_escapes = toml_parse_basic_str_escape
1220
+ result = ''
1221
+ start_pos = pos
1222
+ while True:
1223
+ try:
1224
+ char = src[pos]
1225
+ except IndexError:
1226
+ raise toml_suffixed_err(src, pos, 'Unterminated string') from None
1227
+ if char == '"':
1228
+ if not multiline:
1229
+ return pos + 1, result + src[start_pos:pos]
1230
+ if src.startswith('"""', pos):
1231
+ return pos + 3, result + src[start_pos:pos]
1232
+ pos += 1
1233
+ continue
1234
+ if char == '\\':
1235
+ result += src[start_pos:pos]
1236
+ pos, parsed_escape = parse_escapes(src, pos)
1237
+ result += parsed_escape
1238
+ start_pos = pos
1239
+ continue
1240
+ if char in error_on:
1241
+ raise toml_suffixed_err(src, pos, f'Illegal character {char!r}')
1242
+ pos += 1
1243
+
1244
+
1245
+ def toml_parse_value( # noqa: C901
1246
+ src: str,
1247
+ pos: TomlPos,
1248
+ parse_float: TomlParseFloat,
1249
+ ) -> ta.Tuple[TomlPos, ta.Any]:
1250
+ try:
1251
+ char: ta.Optional[str] = src[pos]
1252
+ except IndexError:
1253
+ char = None
1254
+
1255
+ # IMPORTANT: order conditions based on speed of checking and likelihood
1256
+
1257
+ # Basic strings
1258
+ if char == '"':
1259
+ if src.startswith('"""', pos):
1260
+ return toml_parse_multiline_str(src, pos, literal=False)
1261
+ return toml_parse_one_line_basic_str(src, pos)
1262
+
1263
+ # Literal strings
1264
+ if char == "'":
1265
+ if src.startswith("'''", pos):
1266
+ return toml_parse_multiline_str(src, pos, literal=True)
1267
+ return toml_parse_literal_str(src, pos)
1268
+
1269
+ # Booleans
1270
+ if char == 't':
1271
+ if src.startswith('true', pos):
1272
+ return pos + 4, True
1273
+ if char == 'f':
1274
+ if src.startswith('false', pos):
1275
+ return pos + 5, False
1276
+
1277
+ # Arrays
1278
+ if char == '[':
1279
+ return toml_parse_array(src, pos, parse_float)
1280
+
1281
+ # Inline tables
1282
+ if char == '{':
1283
+ return toml_parse_inline_table(src, pos, parse_float)
1284
+
1285
+ # Dates and times
1286
+ datetime_match = TOML_RE_DATETIME.match(src, pos)
1287
+ if datetime_match:
1288
+ try:
1289
+ datetime_obj = toml_match_to_datetime(datetime_match)
1290
+ except ValueError as e:
1291
+ raise toml_suffixed_err(src, pos, 'Invalid date or datetime') from e
1292
+ return datetime_match.end(), datetime_obj
1293
+ localtime_match = TOML_RE_LOCALTIME.match(src, pos)
1294
+ if localtime_match:
1295
+ return localtime_match.end(), toml_match_to_localtime(localtime_match)
1296
+
1297
+ # Integers and "normal" floats. The regex will greedily match any type starting with a decimal char, so needs to be
1298
+ # located after handling of dates and times.
1299
+ number_match = TOML_RE_NUMBER.match(src, pos)
1300
+ if number_match:
1301
+ return number_match.end(), toml_match_to_number(number_match, parse_float)
1302
+
1303
+ # Special floats
1304
+ first_three = src[pos:pos + 3]
1305
+ if first_three in {'inf', 'nan'}:
1306
+ return pos + 3, parse_float(first_three)
1307
+ first_four = src[pos:pos + 4]
1308
+ if first_four in {'-inf', '+inf', '-nan', '+nan'}:
1309
+ return pos + 4, parse_float(first_four)
1310
+
1311
+ raise toml_suffixed_err(src, pos, 'Invalid value')
1312
+
1313
+
1314
+ def toml_suffixed_err(src: str, pos: TomlPos, msg: str) -> TomlDecodeError:
1315
+ """Return a `TomlDecodeError` where error message is suffixed with coordinates in source."""
1316
+
1317
+ def coord_repr(src: str, pos: TomlPos) -> str:
1318
+ if pos >= len(src):
1319
+ return 'end of document'
1320
+ line = src.count('\n', 0, pos) + 1
1321
+ if line == 1:
1322
+ column = pos + 1
1323
+ else:
1324
+ column = pos - src.rindex('\n', 0, pos)
1325
+ return f'line {line}, column {column}'
1326
+
1327
+ return TomlDecodeError(f'{msg} (at {coord_repr(src, pos)})')
1328
+
1329
+
1330
+ def toml_is_unicode_scalar_value(codepoint: int) -> bool:
1331
+ return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111)
1332
+
1333
+
1334
+ def toml_make_safe_parse_float(parse_float: TomlParseFloat) -> TomlParseFloat:
1335
+ """A decorator to make `parse_float` safe.
1336
+
1337
+ `parse_float` must not return dicts or lists, because these types would be mixed with parsed TOML tables and arrays,
1338
+ thus confusing the parser. The returned decorated callable raises `ValueError` instead of returning illegal types.
1339
+ """
1340
+ # The default `float` callable never returns illegal types. Optimize it.
1341
+ if parse_float is float:
1342
+ return float
1343
+
1344
+ def safe_parse_float(float_str: str) -> ta.Any:
1345
+ float_value = parse_float(float_str)
1346
+ if isinstance(float_value, (dict, list)):
1347
+ raise ValueError('parse_float must not return dicts or lists') # noqa
1348
+ return float_value
1349
+
1350
+ return safe_parse_float
1351
+
1352
+
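
The vendored module keeps the stdlib tomllib surface: `toml_loads` / `toml_load` plus an optional `parse_float` hook, which `toml_make_safe_parse_float` wraps so it can never return a dict or list. A minimal usage sketch (the document text and the `decimal.Decimal` choice are illustrative assumptions):

    import decimal

    doc = '''
    [server]
    host = "127.0.0.1"
    port = 8080
    ratio = 0.25
    '''

    # Default float handling.
    cfg = toml_loads(doc)
    assert cfg['server']['port'] == 8080

    # Custom float type; the wrapped parse_float rejects dict/list results.
    cfg = toml_loads(doc, parse_float=decimal.Decimal)
    assert isinstance(cfg['server']['ratio'], decimal.Decimal)
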
526
1353
  ########################################
527
1354
  # ../config.py
528
1355
 
@@ -1208,8 +2035,8 @@ class _CachedNullary(_AbstractCachedNullary):
1208
2035
  return self._value
1209
2036
 
1210
2037
 
1211
- def cached_nullary(fn): # ta.Callable[..., T]) -> ta.Callable[..., T]:
1212
- return _CachedNullary(fn)
2038
+ def cached_nullary(fn: CallableT) -> CallableT:
2039
+ return _CachedNullary(fn) # type: ignore
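
The change above only tightens the annotation to `CallableT -> CallableT`; behavior is unchanged: the wrapped zero-argument callable runs once and its result is memoized. An illustrative sketch (`load_settings` is a hypothetical name):

    @cached_nullary
    def load_settings() -> dict:
        print('loading')      # executes only on the first call
        return {'debug': True}

    load_settings()  # prints 'loading', returns the dict
    load_settings()  # returns the cached dict without re-running the body
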
1213
2040
 
1214
2041
 
1215
2042
  def static_init(fn: CallableT) -> CallableT:
@@ -3582,6 +4409,8 @@ def _get_argparse_arg_ann_kwargs(ann: ta.Any) -> ta.Mapping[str, ta.Any]:
3582
4409
  return {'action': 'store_true'}
3583
4410
  elif ann is list:
3584
4411
  return {'action': 'append'}
4412
+ elif is_optional_alias(ann):
4413
+ return _get_argparse_arg_ann_kwargs(get_optional_alias_arg(ann))
3585
4414
  else:
3586
4415
  raise TypeError(ann)
3587
4416
 
@@ -4987,6 +5816,7 @@ TODO:
4987
5816
  - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
4988
5817
  - namedtuple
4989
5818
  - literals
5819
+ - newtypes?
4990
5820
  """
4991
5821
 
4992
5822
 
@@ -5534,6 +6364,90 @@ class Interp:
5534
6364
  version: InterpVersion
5535
6365
 
5536
6366
 
6367
+ ########################################
6368
+ # ../../configs.py
6369
+
6370
+
6371
+ def parse_config_file(
6372
+ name: str,
6373
+ f: ta.TextIO,
6374
+ ) -> ConfigMapping:
6375
+ if name.endswith('.toml'):
6376
+ return toml_loads(f.read())
6377
+
6378
+ elif any(name.endswith(e) for e in ('.yml', '.yaml')):
6379
+ yaml = __import__('yaml')
6380
+ return yaml.safe_load(f)
6381
+
6382
+ elif name.endswith('.ini'):
6383
+ import configparser
6384
+ cp = configparser.ConfigParser()
6385
+ cp.read_file(f)
6386
+ config_dct: ta.Dict[str, ta.Any] = {}
6387
+ for sec in cp.sections():
6388
+ cd = config_dct
6389
+ for k in sec.split('.'):
6390
+ cd = cd.setdefault(k, {})
6391
+ cd.update(cp.items(sec))
6392
+ return config_dct
6393
+
6394
+ else:
6395
+ return json.loads(f.read())
6396
+
6397
+
6398
+ def read_config_file(
6399
+ path: str,
6400
+ cls: ta.Type[T],
6401
+ *,
6402
+ prepare: ta.Optional[ta.Callable[[ConfigMapping], ConfigMapping]] = None,
6403
+ ) -> T:
6404
+ with open(path) as cf:
6405
+ config_dct = parse_config_file(os.path.basename(path), cf)
6406
+
6407
+ if prepare is not None:
6408
+ config_dct = prepare(config_dct)
6409
+
6410
+ return unmarshal_obj(config_dct, cls)
6411
+
6412
+
6413
+ def build_config_named_children(
6414
+ o: ta.Union[
6415
+ ta.Sequence[ConfigMapping],
6416
+ ta.Mapping[str, ConfigMapping],
6417
+ None,
6418
+ ],
6419
+ *,
6420
+ name_key: str = 'name',
6421
+ ) -> ta.Optional[ta.Sequence[ConfigMapping]]:
6422
+ if o is None:
6423
+ return None
6424
+
6425
+ lst: ta.List[ConfigMapping] = []
6426
+ if isinstance(o, ta.Mapping):
6427
+ for k, v in o.items():
6428
+ check.isinstance(v, ta.Mapping)
6429
+ if name_key in v:
6430
+ n = v[name_key]
6431
+ if k != n:
6432
+ raise KeyError(f'Given names do not match: {n} != {k}')
6433
+ lst.append(v)
6434
+ else:
6435
+ lst.append({name_key: k, **v})
6436
+
6437
+ else:
6438
+ check.not_isinstance(o, str)
6439
+ lst.extend(o)
6440
+
6441
+ seen = set()
6442
+ for d in lst:
6443
+ n = d['name']
6444
+ if n in d:
6445
+ raise KeyError(f'Duplicate name: {n}')
6446
+ seen.add(n)
6447
+
6448
+ return lst
6449
+
6450
+
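
`parse_config_file` dispatches on the file extension (.toml via the vendored parser, .yml/.yaml via PyYAML, .ini via configparser, anything else as JSON), `read_config_file` then unmarshals the mapping into the given type, and `build_config_named_children` normalizes name-keyed mappings into a list of mappings. A hedged sketch, where `ServerConfig` and the path are hypothetical:

    @dc.dataclass(frozen=True)
    class ServerConfig:
        host: str = '127.0.0.1'
        port: int = 8080

    cfg = read_config_file('/etc/example/server.toml', ServerConfig)

    build_config_named_children({'web': {'port': 80}, 'api': {'port': 81}})
    # -> [{'name': 'web', 'port': 80}, {'name': 'api', 'port': 81}]
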
5537
6451
  ########################################
5538
6452
  # ../commands/marshal.py
5539
6453
 
@@ -5788,168 +6702,222 @@ SUBPROCESS_CHANNEL_OPTION_VALUES: ta.Mapping[SubprocessChannelOption, int] = {
5788
6702
  _SUBPROCESS_SHELL_WRAP_EXECS = False
5789
6703
 
5790
6704
 
5791
- def subprocess_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
5792
- return ('sh', '-c', ' '.join(map(shlex.quote, args)))
6705
+ def subprocess_shell_wrap_exec(*cmd: str) -> ta.Tuple[str, ...]:
6706
+ return ('sh', '-c', ' '.join(map(shlex.quote, cmd)))
5793
6707
 
5794
6708
 
5795
- def subprocess_maybe_shell_wrap_exec(*args: str) -> ta.Tuple[str, ...]:
6709
+ def subprocess_maybe_shell_wrap_exec(*cmd: str) -> ta.Tuple[str, ...]:
5796
6710
  if _SUBPROCESS_SHELL_WRAP_EXECS or is_debugger_attached():
5797
- return subprocess_shell_wrap_exec(*args)
6711
+ return subprocess_shell_wrap_exec(*cmd)
5798
6712
  else:
5799
- return args
5800
-
5801
-
5802
- def prepare_subprocess_invocation(
5803
- *args: str,
5804
- env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
5805
- extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
5806
- quiet: bool = False,
5807
- shell: bool = False,
5808
- **kwargs: ta.Any,
5809
- ) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
5810
- log.debug('prepare_subprocess_invocation: args=%r', args)
5811
- if extra_env:
5812
- log.debug('prepare_subprocess_invocation: extra_env=%r', extra_env)
5813
-
5814
- if extra_env:
5815
- env = {**(env if env is not None else os.environ), **extra_env}
5816
-
5817
- if quiet and 'stderr' not in kwargs:
5818
- if not log.isEnabledFor(logging.DEBUG):
5819
- kwargs['stderr'] = subprocess.DEVNULL
5820
-
5821
- if not shell:
5822
- args = subprocess_maybe_shell_wrap_exec(*args)
5823
-
5824
- return args, dict(
5825
- env=env,
5826
- shell=shell,
5827
- **kwargs,
5828
- )
6713
+ return cmd
5829
6714
 
5830
6715
 
5831
6716
  ##
5832
6717
 
5833
6718
 
5834
- @contextlib.contextmanager
5835
- def subprocess_common_context(*args: ta.Any, **kwargs: ta.Any) -> ta.Iterator[None]:
5836
- start_time = time.time()
5837
- try:
5838
- log.debug('subprocess_common_context.try: args=%r', args)
5839
- yield
5840
-
5841
- except Exception as exc: # noqa
5842
- log.debug('subprocess_common_context.except: exc=%r', exc)
5843
- raise
6719
+ def subprocess_close(
6720
+ proc: subprocess.Popen,
6721
+ timeout: ta.Optional[float] = None,
6722
+ ) -> None:
6723
+ # TODO: terminate, sleep, kill
6724
+ if proc.stdout:
6725
+ proc.stdout.close()
6726
+ if proc.stderr:
6727
+ proc.stderr.close()
6728
+ if proc.stdin:
6729
+ proc.stdin.close()
5844
6730
 
5845
- finally:
5846
- end_time = time.time()
5847
- elapsed_s = end_time - start_time
5848
- log.debug('subprocess_common_context.finally: elapsed_s=%f args=%r', elapsed_s, args)
6731
+ proc.wait(timeout)
5849
6732
 
5850
6733
 
5851
6734
  ##
5852
6735
 
5853
6736
 
5854
- def subprocess_check_call(
5855
- *args: str,
5856
- stdout: ta.Any = sys.stderr,
5857
- **kwargs: ta.Any,
5858
- ) -> None:
5859
- args, kwargs = prepare_subprocess_invocation(*args, stdout=stdout, **kwargs)
5860
- with subprocess_common_context(*args, **kwargs):
5861
- return subprocess.check_call(args, **kwargs) # type: ignore
6737
+ class AbstractSubprocesses(abc.ABC): # noqa
6738
+ DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = log
5862
6739
 
6740
+ def __init__(
6741
+ self,
6742
+ *,
6743
+ log: ta.Optional[logging.Logger] = None,
6744
+ try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
6745
+ ) -> None:
6746
+ super().__init__()
5863
6747
 
5864
- def subprocess_check_output(
5865
- *args: str,
5866
- **kwargs: ta.Any,
5867
- ) -> bytes:
5868
- args, kwargs = prepare_subprocess_invocation(*args, **kwargs)
5869
- with subprocess_common_context(*args, **kwargs):
5870
- return subprocess.check_output(args, **kwargs)
6748
+ self._log = log if log is not None else self.DEFAULT_LOGGER
6749
+ self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS
5871
6750
 
6751
+ #
5872
6752
 
5873
- def subprocess_check_output_str(*args: str, **kwargs: ta.Any) -> str:
5874
- return subprocess_check_output(*args, **kwargs).decode().strip()
6753
+ def prepare_args(
6754
+ self,
6755
+ *cmd: str,
6756
+ env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
6757
+ extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
6758
+ quiet: bool = False,
6759
+ shell: bool = False,
6760
+ **kwargs: ta.Any,
6761
+ ) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
6762
+ if self._log:
6763
+ self._log.debug('Subprocesses.prepare_args: cmd=%r', cmd)
6764
+ if extra_env:
6765
+ self._log.debug('Subprocesses.prepare_args: extra_env=%r', extra_env)
5875
6766
 
6767
+ if extra_env:
6768
+ env = {**(env if env is not None else os.environ), **extra_env}
5876
6769
 
5877
- ##
6770
+ if quiet and 'stderr' not in kwargs:
6771
+ if self._log and not self._log.isEnabledFor(logging.DEBUG):
6772
+ kwargs['stderr'] = subprocess.DEVNULL
5878
6773
 
6774
+ if not shell:
6775
+ cmd = subprocess_maybe_shell_wrap_exec(*cmd)
5879
6776
 
5880
- DEFAULT_SUBPROCESS_TRY_EXCEPTIONS: ta.Tuple[ta.Type[Exception], ...] = (
5881
- FileNotFoundError,
5882
- subprocess.CalledProcessError,
5883
- )
6777
+ return cmd, dict(
6778
+ env=env,
6779
+ shell=shell,
6780
+ **kwargs,
6781
+ )
5884
6782
 
6783
+ @contextlib.contextmanager
6784
+ def wrap_call(self, *cmd: ta.Any, **kwargs: ta.Any) -> ta.Iterator[None]:
6785
+ start_time = time.time()
6786
+ try:
6787
+ if self._log:
6788
+ self._log.debug('Subprocesses.wrap_call.try: cmd=%r', cmd)
6789
+ yield
5885
6790
 
5886
- def _subprocess_try_run(
5887
- fn: ta.Callable[..., T],
5888
- *args: ta.Any,
5889
- try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
5890
- **kwargs: ta.Any,
5891
- ) -> ta.Union[T, Exception]:
5892
- try:
5893
- return fn(*args, **kwargs)
5894
- except try_exceptions as e: # noqa
5895
- if log.isEnabledFor(logging.DEBUG):
5896
- log.exception('command failed')
5897
- return e
5898
-
5899
-
5900
- def subprocess_try_call(
5901
- *args: str,
5902
- try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
5903
- **kwargs: ta.Any,
5904
- ) -> bool:
5905
- if isinstance(_subprocess_try_run(
5906
- subprocess_check_call,
5907
- *args,
5908
- try_exceptions=try_exceptions,
5909
- **kwargs,
5910
- ), Exception):
5911
- return False
5912
- else:
5913
- return True
6791
+ except Exception as exc: # noqa
6792
+ if self._log:
6793
+ self._log.debug('Subprocesses.wrap_call.except: exc=%r', exc)
6794
+ raise
5914
6795
 
6796
+ finally:
6797
+ end_time = time.time()
6798
+ elapsed_s = end_time - start_time
6799
+ if self._log:
6800
+ self._log.debug('Subprocesses.wrap_call.finally: elapsed_s=%f cmd=%r', elapsed_s, cmd)
5915
6801
 
5916
- def subprocess_try_output(
5917
- *args: str,
5918
- try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
5919
- **kwargs: ta.Any,
5920
- ) -> ta.Optional[bytes]:
5921
- if isinstance(ret := _subprocess_try_run(
5922
- subprocess_check_output,
5923
- *args,
5924
- try_exceptions=try_exceptions,
5925
- **kwargs,
5926
- ), Exception):
5927
- return None
5928
- else:
5929
- return ret
6802
+ @contextlib.contextmanager
6803
+ def prepare_and_wrap(
6804
+ self,
6805
+ *cmd: ta.Any,
6806
+ **kwargs: ta.Any,
6807
+ ) -> ta.Iterator[ta.Tuple[
6808
+ ta.Tuple[ta.Any, ...],
6809
+ ta.Dict[str, ta.Any],
6810
+ ]]:
6811
+ cmd, kwargs = self.prepare_args(*cmd, **kwargs)
6812
+ with self.wrap_call(*cmd, **kwargs):
6813
+ yield cmd, kwargs
5930
6814
 
6815
+ #
5931
6816
 
5932
- def subprocess_try_output_str(*args: str, **kwargs: ta.Any) -> ta.Optional[str]:
5933
- out = subprocess_try_output(*args, **kwargs)
5934
- return out.decode().strip() if out is not None else None
6817
+ DEFAULT_TRY_EXCEPTIONS: ta.Tuple[ta.Type[Exception], ...] = (
6818
+ FileNotFoundError,
6819
+ subprocess.CalledProcessError,
6820
+ )
6821
+
6822
+ def try_fn(
6823
+ self,
6824
+ fn: ta.Callable[..., T],
6825
+ *cmd: str,
6826
+ try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
6827
+ **kwargs: ta.Any,
6828
+ ) -> ta.Union[T, Exception]:
6829
+ if try_exceptions is None:
6830
+ try_exceptions = self._try_exceptions
6831
+
6832
+ try:
6833
+ return fn(*cmd, **kwargs)
6834
+
6835
+ except try_exceptions as e: # noqa
6836
+ if self._log and self._log.isEnabledFor(logging.DEBUG):
6837
+ self._log.exception('command failed')
6838
+ return e
6839
+
6840
+ async def async_try_fn(
6841
+ self,
6842
+ fn: ta.Callable[..., ta.Awaitable[T]],
6843
+ *cmd: ta.Any,
6844
+ try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
6845
+ **kwargs: ta.Any,
6846
+ ) -> ta.Union[T, Exception]:
6847
+ if try_exceptions is None:
6848
+ try_exceptions = self._try_exceptions
6849
+
6850
+ try:
6851
+ return await fn(*cmd, **kwargs)
6852
+
6853
+ except try_exceptions as e: # noqa
6854
+ if self._log and self._log.isEnabledFor(logging.DEBUG):
6855
+ self._log.exception('command failed')
6856
+ return e
5935
6857
 
5936
6858
 
5937
6859
  ##
5938
6860
 
5939
6861
 
5940
- def subprocess_close(
5941
- proc: subprocess.Popen,
5942
- timeout: ta.Optional[float] = None,
5943
- ) -> None:
5944
- # TODO: terminate, sleep, kill
5945
- if proc.stdout:
5946
- proc.stdout.close()
5947
- if proc.stderr:
5948
- proc.stderr.close()
5949
- if proc.stdin:
5950
- proc.stdin.close()
6862
+ class Subprocesses(AbstractSubprocesses):
6863
+ def check_call(
6864
+ self,
6865
+ *cmd: str,
6866
+ stdout: ta.Any = sys.stderr,
6867
+ **kwargs: ta.Any,
6868
+ ) -> None:
6869
+ with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs): # noqa
6870
+ subprocess.check_call(cmd, **kwargs)
5951
6871
 
5952
- proc.wait(timeout)
6872
+ def check_output(
6873
+ self,
6874
+ *cmd: str,
6875
+ **kwargs: ta.Any,
6876
+ ) -> bytes:
6877
+ with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs): # noqa
6878
+ return subprocess.check_output(cmd, **kwargs)
6879
+
6880
+ def check_output_str(
6881
+ self,
6882
+ *cmd: str,
6883
+ **kwargs: ta.Any,
6884
+ ) -> str:
6885
+ return self.check_output(*cmd, **kwargs).decode().strip()
6886
+
6887
+ #
6888
+
6889
+ def try_call(
6890
+ self,
6891
+ *cmd: str,
6892
+ **kwargs: ta.Any,
6893
+ ) -> bool:
6894
+ if isinstance(self.try_fn(self.check_call, *cmd, **kwargs), Exception):
6895
+ return False
6896
+ else:
6897
+ return True
6898
+
6899
+ def try_output(
6900
+ self,
6901
+ *cmd: str,
6902
+ **kwargs: ta.Any,
6903
+ ) -> ta.Optional[bytes]:
6904
+ if isinstance(ret := self.try_fn(self.check_output, *cmd, **kwargs), Exception):
6905
+ return None
6906
+ else:
6907
+ return ret
6908
+
6909
+ def try_output_str(
6910
+ self,
6911
+ *cmd: str,
6912
+ **kwargs: ta.Any,
6913
+ ) -> ta.Optional[str]:
6914
+ if (ret := self.try_output(*cmd, **kwargs)) is None:
6915
+ return None
6916
+ else:
6917
+ return ret.decode().strip()
6918
+
6919
+
6920
+ subprocesses = Subprocesses()
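
The module-level helper functions are consolidated into `Subprocesses`, with `subprocesses` as the shared instance, so the old free-function call sites translate almost mechanically to methods. An illustrative sketch (the commands shown are examples, not taken from the diff):

    # Raises CalledProcessError on non-zero exit; stdout defaults to sys.stderr.
    subprocesses.check_call('true')

    # Decoded and stripped stdout.
    rev = subprocesses.check_output_str('git', 'rev-parse', 'HEAD')

    # try_* variants catch DEFAULT_TRY_EXCEPTIONS (FileNotFoundError,
    # CalledProcessError) and return False / None instead of raising.
    ok = subprocesses.try_call('some-optional-tool', '--version')
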
5953
6921
 
5954
6922
 
5955
6923
  ########################################
@@ -6384,43 +7352,6 @@ class SystemConfig:
6384
7352
  ##
6385
7353
 
6386
7354
 
6387
- @contextlib.asynccontextmanager
6388
- async def asyncio_subprocess_popen(
6389
- *cmd: str,
6390
- shell: bool = False,
6391
- timeout: ta.Optional[float] = None,
6392
- **kwargs: ta.Any,
6393
- ) -> ta.AsyncGenerator[asyncio.subprocess.Process, None]:
6394
- fac: ta.Any
6395
- if shell:
6396
- fac = functools.partial(
6397
- asyncio.create_subprocess_shell,
6398
- check.single(cmd),
6399
- )
6400
- else:
6401
- fac = functools.partial(
6402
- asyncio.create_subprocess_exec,
6403
- *cmd,
6404
- )
6405
-
6406
- with subprocess_common_context(
6407
- *cmd,
6408
- shell=shell,
6409
- timeout=timeout,
6410
- **kwargs,
6411
- ):
6412
- proc: asyncio.subprocess.Process
6413
- proc = await fac(**kwargs)
6414
- try:
6415
- yield proc
6416
-
6417
- finally:
6418
- await asyncio_maybe_timeout(proc.wait(), timeout)
6419
-
6420
-
6421
- ##
6422
-
6423
-
6424
7355
  class AsyncioProcessCommunicator:
6425
7356
  def __init__(
6426
7357
  self,
@@ -6531,148 +7462,147 @@ class AsyncioProcessCommunicator:
6531
7462
  return await asyncio_maybe_timeout(self._communicate(input), timeout)
6532
7463
 
6533
7464
 
6534
- async def asyncio_subprocess_communicate(
6535
- proc: asyncio.subprocess.Process,
6536
- input: ta.Any = None, # noqa
6537
- timeout: ta.Optional[float] = None,
6538
- ) -> ta.Tuple[ta.Optional[bytes], ta.Optional[bytes]]:
6539
- return await AsyncioProcessCommunicator(proc).communicate(input, timeout) # noqa
6540
-
7465
+ ##
6541
7466
 
6542
- @dc.dataclass(frozen=True)
6543
- class AsyncioSubprocessOutput:
6544
- proc: asyncio.subprocess.Process
6545
- stdout: ta.Optional[bytes]
6546
- stderr: ta.Optional[bytes]
6547
7467
 
7468
+ class AsyncioSubprocesses(AbstractSubprocesses):
7469
+ async def communicate(
7470
+ self,
7471
+ proc: asyncio.subprocess.Process,
7472
+ input: ta.Any = None, # noqa
7473
+ timeout: ta.Optional[float] = None,
7474
+ ) -> ta.Tuple[ta.Optional[bytes], ta.Optional[bytes]]:
7475
+ return await AsyncioProcessCommunicator(proc).communicate(input, timeout) # noqa
6548
7476
 
6549
- async def asyncio_subprocess_run(
6550
- *args: str,
6551
- input: ta.Any = None, # noqa
6552
- timeout: ta.Optional[float] = None,
6553
- check: bool = False, # noqa
6554
- capture_output: ta.Optional[bool] = None,
6555
- **kwargs: ta.Any,
6556
- ) -> AsyncioSubprocessOutput:
6557
- if capture_output:
6558
- kwargs.setdefault('stdout', subprocess.PIPE)
6559
- kwargs.setdefault('stderr', subprocess.PIPE)
6560
-
6561
- args, kwargs = prepare_subprocess_invocation(*args, **kwargs)
6562
-
6563
- proc: asyncio.subprocess.Process
6564
- async with asyncio_subprocess_popen(*args, **kwargs) as proc:
6565
- stdout, stderr = await asyncio_subprocess_communicate(proc, input, timeout)
6566
-
6567
- if check and proc.returncode:
6568
- raise subprocess.CalledProcessError(
6569
- proc.returncode,
6570
- args,
6571
- output=stdout,
6572
- stderr=stderr,
6573
- )
7477
+ #
6574
7478
 
6575
- return AsyncioSubprocessOutput(
6576
- proc,
6577
- stdout,
6578
- stderr,
6579
- )
7479
+ @contextlib.asynccontextmanager
7480
+ async def popen(
7481
+ self,
7482
+ *cmd: str,
7483
+ shell: bool = False,
7484
+ timeout: ta.Optional[float] = None,
7485
+ **kwargs: ta.Any,
7486
+ ) -> ta.AsyncGenerator[asyncio.subprocess.Process, None]:
7487
+ fac: ta.Any
7488
+ if shell:
7489
+ fac = functools.partial(
7490
+ asyncio.create_subprocess_shell,
7491
+ check.single(cmd),
7492
+ )
7493
+ else:
7494
+ fac = functools.partial(
7495
+ asyncio.create_subprocess_exec,
7496
+ *cmd,
7497
+ )
6580
7498
 
7499
+ with self.prepare_and_wrap(*cmd, shell=shell, **kwargs) as (cmd, kwargs): # noqa
7500
+ proc: asyncio.subprocess.Process = await fac(**kwargs)
7501
+ try:
7502
+ yield proc
6581
7503
 
6582
- ##
7504
+ finally:
7505
+ await asyncio_maybe_timeout(proc.wait(), timeout)
6583
7506
 
7507
+ #
6584
7508
 
6585
- async def asyncio_subprocess_check_call(
6586
- *args: str,
6587
- stdout: ta.Any = sys.stderr,
6588
- input: ta.Any = None, # noqa
6589
- timeout: ta.Optional[float] = None,
6590
- **kwargs: ta.Any,
6591
- ) -> None:
6592
- await asyncio_subprocess_run(
6593
- *args,
6594
- stdout=stdout,
6595
- input=input,
6596
- timeout=timeout,
6597
- check=True,
6598
- **kwargs,
6599
- )
7509
+ @dc.dataclass(frozen=True)
7510
+ class RunOutput:
7511
+ proc: asyncio.subprocess.Process
7512
+ stdout: ta.Optional[bytes]
7513
+ stderr: ta.Optional[bytes]
6600
7514
 
7515
+ async def run(
7516
+ self,
7517
+ *cmd: str,
7518
+ input: ta.Any = None, # noqa
7519
+ timeout: ta.Optional[float] = None,
7520
+ check: bool = False, # noqa
7521
+ capture_output: ta.Optional[bool] = None,
7522
+ **kwargs: ta.Any,
7523
+ ) -> RunOutput:
7524
+ if capture_output:
7525
+ kwargs.setdefault('stdout', subprocess.PIPE)
7526
+ kwargs.setdefault('stderr', subprocess.PIPE)
6601
7527
 
6602
- async def asyncio_subprocess_check_output(
6603
- *args: str,
6604
- input: ta.Any = None, # noqa
6605
- timeout: ta.Optional[float] = None,
6606
- **kwargs: ta.Any,
6607
- ) -> bytes:
6608
- out = await asyncio_subprocess_run(
6609
- *args,
6610
- stdout=asyncio.subprocess.PIPE,
6611
- input=input,
6612
- timeout=timeout,
6613
- check=True,
6614
- **kwargs,
6615
- )
7528
+ proc: asyncio.subprocess.Process
7529
+ async with self.popen(*cmd, **kwargs) as proc:
7530
+ stdout, stderr = await self.communicate(proc, input, timeout)
7531
+
7532
+ if check and proc.returncode:
7533
+ raise subprocess.CalledProcessError(
7534
+ proc.returncode,
7535
+ cmd,
7536
+ output=stdout,
7537
+ stderr=stderr,
7538
+ )
6616
7539
 
6617
- return check.not_none(out.stdout)
7540
+ return self.RunOutput(
7541
+ proc,
7542
+ stdout,
7543
+ stderr,
7544
+ )
6618
7545
 
7546
+ #
6619
7547
 
6620
- async def asyncio_subprocess_check_output_str(*args: str, **kwargs: ta.Any) -> str:
6621
- return (await asyncio_subprocess_check_output(*args, **kwargs)).decode().strip()
7548
+ async def check_call(
7549
+ self,
7550
+ *cmd: str,
7551
+ stdout: ta.Any = sys.stderr,
7552
+ **kwargs: ta.Any,
7553
+ ) -> None:
7554
+ with self.prepare_and_wrap(*cmd, stdout=stdout, check=True, **kwargs) as (cmd, kwargs): # noqa
7555
+ await self.run(*cmd, **kwargs)
6622
7556
 
7557
+ async def check_output(
7558
+ self,
7559
+ *cmd: str,
7560
+ **kwargs: ta.Any,
7561
+ ) -> bytes:
7562
+ with self.prepare_and_wrap(*cmd, stdout=subprocess.PIPE, check=True, **kwargs) as (cmd, kwargs): # noqa
7563
+ return check.not_none((await self.run(*cmd, **kwargs)).stdout)
6623
7564
 
6624
- ##
7565
+ async def check_output_str(
7566
+ self,
7567
+ *cmd: str,
7568
+ **kwargs: ta.Any,
7569
+ ) -> str:
7570
+ return (await self.check_output(*cmd, **kwargs)).decode().strip()
6625
7571
 
7572
+ #
6626
7573
 
6627
- async def _asyncio_subprocess_try_run(
6628
- fn: ta.Callable[..., ta.Awaitable[T]],
6629
- *args: ta.Any,
6630
- try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
6631
- **kwargs: ta.Any,
6632
- ) -> ta.Union[T, Exception]:
6633
- try:
6634
- return await fn(*args, **kwargs)
6635
- except try_exceptions as e: # noqa
6636
- if log.isEnabledFor(logging.DEBUG):
6637
- log.exception('command failed')
6638
- return e
6639
-
6640
-
6641
- async def asyncio_subprocess_try_call(
6642
- *args: str,
6643
- try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
6644
- **kwargs: ta.Any,
6645
- ) -> bool:
6646
- if isinstance(await _asyncio_subprocess_try_run(
6647
- asyncio_subprocess_check_call,
6648
- *args,
6649
- try_exceptions=try_exceptions,
6650
- **kwargs,
6651
- ), Exception):
6652
- return False
6653
- else:
6654
- return True
7574
+ async def try_call(
7575
+ self,
7576
+ *cmd: str,
7577
+ **kwargs: ta.Any,
7578
+ ) -> bool:
7579
+ if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
7580
+ return False
7581
+ else:
7582
+ return True
6655
7583
 
7584
+ async def try_output(
7585
+ self,
7586
+ *cmd: str,
7587
+ **kwargs: ta.Any,
7588
+ ) -> ta.Optional[bytes]:
7589
+ if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
7590
+ return None
7591
+ else:
7592
+ return ret
6656
7593
 
6657
- async def asyncio_subprocess_try_output(
6658
- *args: str,
6659
- try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
6660
- **kwargs: ta.Any,
6661
- ) -> ta.Optional[bytes]:
6662
- if isinstance(ret := await _asyncio_subprocess_try_run(
6663
- asyncio_subprocess_check_output,
6664
- *args,
6665
- try_exceptions=try_exceptions,
6666
- **kwargs,
6667
- ), Exception):
6668
- return None
6669
- else:
6670
- return ret
7594
+ async def try_output_str(
7595
+ self,
7596
+ *cmd: str,
7597
+ **kwargs: ta.Any,
7598
+ ) -> ta.Optional[str]:
7599
+ if (ret := await self.try_output(*cmd, **kwargs)) is None:
7600
+ return None
7601
+ else:
7602
+ return ret.decode().strip()
6671
7603
 
6672
7604
 
6673
- async def asyncio_subprocess_try_output_str(*args: str, **kwargs: ta.Any) -> ta.Optional[str]:
6674
- out = await asyncio_subprocess_try_output(*args, **kwargs)
6675
- return out.decode().strip() if out is not None else None
7605
+ asyncio_subprocesses = AsyncioSubprocesses()
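
The asyncio side gets the same treatment: the former asyncio_subprocess_* functions become methods on `AsyncioSubprocesses`, reached through the `asyncio_subprocesses` instance. A short illustrative sketch:

    async def _example() -> None:
        # check_output runs the command with stdout=PIPE and check=True.
        banner = await asyncio_subprocesses.check_output_str('uname', '-a')

        # run() mirrors subprocess.run(): capture_output and check are opt-in.
        res = await asyncio_subprocesses.run('ls', '/tmp', capture_output=True)
        print(banner, res.proc.returncode)
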
6676
7606
 
6677
7607
 
6678
7608
  ########################################
@@ -6749,7 +7679,7 @@ class InterpInspector:
6749
7679
  return cls._build_inspection(sys.executable, eval(cls._INSPECTION_CODE)) # noqa
6750
7680
 
6751
7681
  async def _inspect(self, exe: str) -> InterpInspection:
6752
- output = await asyncio_subprocess_check_output(exe, '-c', f'print({self._INSPECTION_CODE})', quiet=True)
7682
+ output = await asyncio_subprocesses.check_output(exe, '-c', f'print({self._INSPECTION_CODE})', quiet=True)
6753
7683
  return self._build_inspection(exe, output.decode())
6754
7684
 
6755
7685
  async def inspect(self, exe: str) -> ta.Optional[InterpInspection]:
@@ -6823,7 +7753,7 @@ class SubprocessCommand(Command['SubprocessCommand.Output']):
6823
7753
  class SubprocessCommandExecutor(CommandExecutor[SubprocessCommand, SubprocessCommand.Output]):
6824
7754
  async def execute(self, cmd: SubprocessCommand) -> SubprocessCommand.Output:
6825
7755
  proc: asyncio.subprocess.Process
6826
- async with asyncio_subprocess_popen(
7756
+ async with asyncio_subprocesses.popen(
6827
7757
  *subprocess_maybe_shell_wrap_exec(*cmd.cmd),
6828
7758
 
6829
7759
  shell=cmd.shell,
@@ -6837,7 +7767,7 @@ class SubprocessCommandExecutor(CommandExecutor[SubprocessCommand, SubprocessCom
6837
7767
  timeout=cmd.timeout,
6838
7768
  ) as proc:
6839
7769
  start_time = time.time()
6840
- stdout, stderr = await asyncio_subprocess_communicate(
7770
+ stdout, stderr = await asyncio_subprocesses.communicate(
6841
7771
  proc,
6842
7772
  input=cmd.input,
6843
7773
  timeout=cmd.timeout,
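
The two hunks above switch `SubprocessCommandExecutor` from the free functions to `asyncio_subprocesses.popen(...)` and `.communicate(...)`. As a rough illustration of the underlying asyncio pattern (plain `asyncio.create_subprocess_exec`, not the package's wrapper, and without its timeout handling):

    import asyncio
    import time
    import typing as ta


    async def run_and_capture(
        *cmd: str,
        input_data: ta.Optional[bytes] = None,
    ) -> ta.Tuple[int, bytes, bytes]:
        # Spawn the process with pipes, feed stdin, and collect stdout/stderr,
        # the same shape as the executor's popen/communicate sequence above.
        proc = await asyncio.create_subprocess_exec(
            *cmd,
            stdin=asyncio.subprocess.PIPE,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        start_time = time.time()
        stdout, stderr = await proc.communicate(input=input_data)
        elapsed = time.time() - start_time
        print(f'{cmd[0]} exited {proc.returncode} in {elapsed:.3f}s')
        return proc.returncode or 0, stdout, stderr


    if __name__ == '__main__':
        asyncio.run(run_and_capture('cat', input_data=b'hello\n'))
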
@@ -6937,7 +7867,7 @@ class DeployGitManager(DeployPathOwner):
6937
7867
  return f'https://{self._repo.host}/{self._repo.path}'
6938
7868
 
6939
7869
  async def _call(self, *cmd: str) -> None:
6940
- await asyncio_subprocess_check_call(
7870
+ await asyncio_subprocesses.check_call(
6941
7871
  *cmd,
6942
7872
  cwd=self._dir,
6943
7873
  )
@@ -6963,7 +7893,7 @@ class DeployGitManager(DeployPathOwner):
6963
7893
  # FIXME: temp dir swap
6964
7894
  os.makedirs(dst_dir)
6965
7895
 
6966
- dst_call = functools.partial(asyncio_subprocess_check_call, cwd=dst_dir)
7896
+ dst_call = functools.partial(asyncio_subprocesses.check_call, cwd=dst_dir)
6967
7897
  await dst_call('git', 'init')
6968
7898
 
6969
7899
  await dst_call('git', 'remote', 'add', 'local', self._dir)
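
This hunk keeps the existing `functools.partial` trick, now binding `cwd=dst_dir` onto `asyncio_subprocesses.check_call`. A stdlib-only sketch of the same pattern (the `_check_call` helper is a stand-in, and the example assumes `git` is on PATH):

    import asyncio
    import functools
    import subprocess
    import tempfile


    async def _check_call(*cmd: str, cwd: str) -> None:
        # Minimal check_call stand-in that honors a working directory.
        proc = await asyncio.create_subprocess_exec(*cmd, cwd=cwd)
        if await proc.wait() != 0:
            raise subprocess.CalledProcessError(proc.returncode or 1, cmd)


    async def main() -> None:
        with tempfile.TemporaryDirectory() as dst_dir:
            # Bind the destination directory once, then issue several git-style calls.
            dst_call = functools.partial(_check_call, cwd=dst_dir)
            await dst_call('git', 'init')
            await dst_call('git', 'status')


    if __name__ == '__main__':
        asyncio.run(main())
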
@@ -7015,7 +7945,7 @@ class DeployVenvManager(DeployPathOwner):
7015
7945
  ) -> None:
7016
7946
  sys_exe = 'python3'
7017
7947
 
7018
- await asyncio_subprocess_check_call(sys_exe, '-m', 'venv', venv_dir)
7948
+ await asyncio_subprocesses.check_call(sys_exe, '-m', 'venv', venv_dir)
7019
7949
 
7020
7950
  #
7021
7951
 
@@ -7027,12 +7957,12 @@ class DeployVenvManager(DeployPathOwner):
7027
7957
 
7028
7958
  if os.path.isfile(reqs_txt):
7029
7959
  if use_uv:
7030
- await asyncio_subprocess_check_call(venv_exe, '-m', 'pip', 'install', 'uv')
7960
+ await asyncio_subprocesses.check_call(venv_exe, '-m', 'pip', 'install', 'uv')
7031
7961
  pip_cmd = ['-m', 'uv', 'pip']
7032
7962
  else:
7033
7963
  pip_cmd = ['-m', 'pip']
7034
7964
 
7035
- await asyncio_subprocess_check_call(venv_exe, *pip_cmd,'install', '-r', reqs_txt)
7965
+ await asyncio_subprocesses.check_call(venv_exe, *pip_cmd,'install', '-r', reqs_txt)
7036
7966
 
7037
7967
  async def setup_app_venv(self, app_tag: DeployAppTag) -> None:
7038
7968
  await self.setup_venv(
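
The venv hunk above optionally bootstraps `uv` into the environment and then routes `pip install -r` through `python -m uv pip`. A hedged, self-contained sketch of that branch (the `install_requirements` and `_check_call` names are illustrative; the real code uses `asyncio_subprocesses.check_call`):

    import asyncio
    import os.path
    import typing as ta


    async def _check_call(*cmd: str) -> None:
        # Tiny check_call stand-in for the sketch.
        proc = await asyncio.create_subprocess_exec(*cmd)
        if await proc.wait() != 0:
            raise RuntimeError(f'command failed: {cmd}')


    async def install_requirements(venv_exe: str, reqs_txt: str, use_uv: bool = False) -> None:
        # Mirror the diff's logic: optionally install uv into the venv, then use
        # either `python -m uv pip` or plain `python -m pip` for the requirements.
        if not os.path.isfile(reqs_txt):
            return

        pip_cmd: ta.List[str]
        if use_uv:
            await _check_call(venv_exe, '-m', 'pip', 'install', 'uv')
            pip_cmd = ['-m', 'uv', 'pip']
        else:
            pip_cmd = ['-m', 'pip']

        await _check_call(venv_exe, *pip_cmd, 'install', '-r', reqs_txt)
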
@@ -7113,12 +8043,8 @@ class SubprocessRemoteSpawning(RemoteSpawning):
7113
8043
  ) -> ta.AsyncGenerator[RemoteSpawning.Spawned, None]:
7114
8044
  pc = self._prepare_cmd(tgt, src)
7115
8045
 
7116
- cmd = pc.cmd
7117
- if not debug:
7118
- cmd = subprocess_maybe_shell_wrap_exec(*cmd)
7119
-
7120
- async with asyncio_subprocess_popen(
7121
- *cmd,
8046
+ async with asyncio_subprocesses.popen(
8047
+ *pc.cmd,
7122
8048
  shell=pc.shell,
7123
8049
  stdin=subprocess.PIPE,
7124
8050
  stdout=subprocess.PIPE,
@@ -7179,10 +8105,10 @@ class SystemPackageManager(abc.ABC):
7179
8105
 
7180
8106
  class BrewSystemPackageManager(SystemPackageManager):
7181
8107
  async def update(self) -> None:
7182
- await asyncio_subprocess_check_call('brew', 'update')
8108
+ await asyncio_subprocesses.check_call('brew', 'update')
7183
8109
 
7184
8110
  async def upgrade(self) -> None:
7185
- await asyncio_subprocess_check_call('brew', 'upgrade')
8111
+ await asyncio_subprocesses.check_call('brew', 'upgrade')
7186
8112
 
7187
8113
  async def install(self, *packages: SystemPackageOrStr) -> None:
7188
8114
  es: ta.List[str] = []
@@ -7191,11 +8117,11 @@ class BrewSystemPackageManager(SystemPackageManager):
7191
8117
  es.append(p.name + (f'@{p.version}' if p.version is not None else ''))
7192
8118
  else:
7193
8119
  es.append(p)
7194
- await asyncio_subprocess_check_call('brew', 'install', *es)
8120
+ await asyncio_subprocesses.check_call('brew', 'install', *es)
7195
8121
 
7196
8122
  async def query(self, *packages: SystemPackageOrStr) -> ta.Mapping[str, SystemPackage]:
7197
8123
  pns = [p.name if isinstance(p, SystemPackage) else p for p in packages]
7198
- o = await asyncio_subprocess_check_output('brew', 'info', '--json', *pns)
8124
+ o = await asyncio_subprocesses.check_output('brew', 'info', '--json', *pns)
7199
8125
  j = json.loads(o.decode())
7200
8126
  d: ta.Dict[str, SystemPackage] = {}
7201
8127
  for e in j:
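
The brew query above shells out to `brew info --json` and decodes the JSON before the (truncated) loop builds its `SystemPackage` mapping. A rough sketch of parsing that payload into a name-to-version dict; the `name` and `versions`/`stable` field names are assumptions about brew's JSON schema, not taken from this diff:

    import json
    import typing as ta


    def parse_brew_info(payload: bytes) -> ta.Dict[str, str]:
        # Map formula name -> stable version; the field names are assumed, see the
        # note above.
        out: ta.Dict[str, str] = {}
        for entry in json.loads(payload.decode()):
            name = entry.get('name')
            version = (entry.get('versions') or {}).get('stable')
            if name and version:
                out[name] = version
        return out


    if __name__ == '__main__':
        sample = b'[{"name": "tcl-tk", "versions": {"stable": "8.6.14"}}]'
        print(parse_brew_info(sample))  # {'tcl-tk': '8.6.14'}
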
@@ -7214,18 +8140,18 @@ class AptSystemPackageManager(SystemPackageManager):
7214
8140
  }
7215
8141
 
7216
8142
  async def update(self) -> None:
7217
- await asyncio_subprocess_check_call('sudo', 'apt', 'update', env={**os.environ, **self._APT_ENV})
8143
+ await asyncio_subprocesses.check_call('sudo', 'apt', 'update', env={**os.environ, **self._APT_ENV})
7218
8144
 
7219
8145
  async def upgrade(self) -> None:
7220
- await asyncio_subprocess_check_call('sudo', 'apt', 'upgrade', '-y', env={**os.environ, **self._APT_ENV})
8146
+ await asyncio_subprocesses.check_call('sudo', 'apt', 'upgrade', '-y', env={**os.environ, **self._APT_ENV})
7221
8147
 
7222
8148
  async def install(self, *packages: SystemPackageOrStr) -> None:
7223
8149
  pns = [p.name if isinstance(p, SystemPackage) else p for p in packages] # FIXME: versions
7224
- await asyncio_subprocess_check_call('sudo', 'apt', 'install', '-y', *pns, env={**os.environ, **self._APT_ENV})
8150
+ await asyncio_subprocesses.check_call('sudo', 'apt', 'install', '-y', *pns, env={**os.environ, **self._APT_ENV})
7225
8151
 
7226
8152
  async def query(self, *packages: SystemPackageOrStr) -> ta.Mapping[str, SystemPackage]:
7227
8153
  pns = [p.name if isinstance(p, SystemPackage) else p for p in packages]
7228
- out = await asyncio_subprocess_run(
8154
+ out = await asyncio_subprocesses.run(
7229
8155
  'dpkg-query', '-W', '-f=${Package}=${Version}\n', *pns,
7230
8156
  capture_output=True,
7231
8157
  check=False,
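
The apt query above runs `dpkg-query -W -f=${Package}=${Version}\n` with `check=False` so that missing packages do not abort the call. The diff truncates before the package's own parsing loop, but a small sketch of handling that output format could look like:

    import typing as ta


    def parse_dpkg_query(output: bytes) -> ta.Dict[str, str]:
        # Each line has the form '<package>=<version>' per the -f format string above.
        d: ta.Dict[str, str] = {}
        for line in output.decode().splitlines():
            if not line.strip():
                continue
            name, _, version = line.partition('=')
            if version:
                d[name] = version
        return d


    if __name__ == '__main__':
        sample = b'curl=7.81.0-1ubuntu1.16\ngit=1:2.34.1-1ubuntu1.11\n'
        print(parse_dpkg_query(sample))
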
@@ -7242,20 +8168,20 @@ class AptSystemPackageManager(SystemPackageManager):
7242
8168
 
7243
8169
  class YumSystemPackageManager(SystemPackageManager):
7244
8170
  async def update(self) -> None:
7245
- await asyncio_subprocess_check_call('sudo', 'yum', 'check-update')
8171
+ await asyncio_subprocesses.check_call('sudo', 'yum', 'check-update')
7246
8172
 
7247
8173
  async def upgrade(self) -> None:
7248
- await asyncio_subprocess_check_call('sudo', 'yum', 'update')
8174
+ await asyncio_subprocesses.check_call('sudo', 'yum', 'update')
7249
8175
 
7250
8176
  async def install(self, *packages: SystemPackageOrStr) -> None:
7251
8177
  pns = [p.name if isinstance(p, SystemPackage) else p for p in packages] # FIXME: versions
7252
- await asyncio_subprocess_check_call('sudo', 'yum', 'install', *pns)
8178
+ await asyncio_subprocesses.check_call('sudo', 'yum', 'install', *pns)
7253
8179
 
7254
8180
  async def query(self, *packages: SystemPackageOrStr) -> ta.Mapping[str, SystemPackage]:
7255
8181
  pns = [p.name if isinstance(p, SystemPackage) else p for p in packages]
7256
8182
  d: ta.Dict[str, SystemPackage] = {}
7257
8183
  for pn in pns:
7258
- out = await asyncio_subprocess_run(
8184
+ out = await asyncio_subprocesses.run(
7259
8185
  'rpm', '-q', pn,
7260
8186
  capture_output=True,
7261
8187
  )
@@ -7704,7 +8630,7 @@ class Pyenv:
7704
8630
  return self._root_kw
7705
8631
 
7706
8632
  if shutil.which('pyenv'):
7707
- return await asyncio_subprocess_check_output_str('pyenv', 'root')
8633
+ return await asyncio_subprocesses.check_output_str('pyenv', 'root')
7708
8634
 
7709
8635
  d = os.path.expanduser('~/.pyenv')
7710
8636
  if os.path.isdir(d) and os.path.isfile(os.path.join(d, 'bin', 'pyenv')):
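
The `Pyenv.root()` hunk asks `pyenv root` when the binary is on PATH and otherwise falls back to `~/.pyenv` if it contains a pyenv executable. A synchronous, stdlib-only approximation of that resolution order (not the package's async implementation):

    import os.path
    import shutil
    import subprocess
    import typing as ta


    def find_pyenv_root() -> ta.Optional[str]:
        # Prefer the live `pyenv root` answer when pyenv is installed...
        if shutil.which('pyenv'):
            return subprocess.check_output(['pyenv', 'root']).decode().strip()

        # ...otherwise accept ~/.pyenv only if it actually contains the pyenv binary.
        d = os.path.expanduser('~/.pyenv')
        if os.path.isdir(d) and os.path.isfile(os.path.join(d, 'bin', 'pyenv')):
            return d

        return None


    if __name__ == '__main__':
        print(find_pyenv_root())
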
@@ -7733,7 +8659,7 @@ class Pyenv:
7733
8659
  if await self.root() is None:
7734
8660
  return []
7735
8661
  ret = []
7736
- s = await asyncio_subprocess_check_output_str(await self.exe(), 'install', '--list')
8662
+ s = await asyncio_subprocesses.check_output_str(await self.exe(), 'install', '--list')
7737
8663
  for l in s.splitlines():
7738
8664
  if not l.startswith(' '):
7739
8665
  continue
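
This hunk feeds `pyenv install --list` output through the indentation filter shown above. A tiny standalone version of that parse (the sample text is illustrative of pyenv's output shape, not captured from it):

    import typing as ta


    def parse_pyenv_install_list(s: str) -> ta.List[str]:
        # Keep only indented lines (the header line is flush left) and strip the
        # leading whitespace to get bare version strings.
        ret: ta.List[str] = []
        for l in s.splitlines():
            if not l.startswith(' '):
                continue
            ret.append(l.strip())
        return ret


    if __name__ == '__main__':
        sample = 'Available versions:\n  3.12.4\n  3.13.0\n'
        print(parse_pyenv_install_list(sample))  # ['3.12.4', '3.13.0']
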
@@ -7748,7 +8674,7 @@ class Pyenv:
7748
8674
  return False
7749
8675
  if not os.path.isdir(os.path.join(root, '.git')):
7750
8676
  return False
7751
- await asyncio_subprocess_check_call('git', 'pull', cwd=root)
8677
+ await asyncio_subprocesses.check_call('git', 'pull', cwd=root)
7752
8678
  return True
7753
8679
 
7754
8680
 
@@ -7839,7 +8765,7 @@ class DarwinPyenvInstallOpts(PyenvInstallOptsProvider):
7839
8765
  cflags = []
7840
8766
  ldflags = []
7841
8767
  for dep in self.BREW_DEPS:
7842
- dep_prefix = await asyncio_subprocess_check_output_str('brew', '--prefix', dep)
8768
+ dep_prefix = await asyncio_subprocesses.check_output_str('brew', '--prefix', dep)
7843
8769
  cflags.append(f'-I{dep_prefix}/include')
7844
8770
  ldflags.append(f'-L{dep_prefix}/lib')
7845
8771
  return PyenvInstallOpts(
@@ -7849,11 +8775,11 @@ class DarwinPyenvInstallOpts(PyenvInstallOptsProvider):
7849
8775
 
7850
8776
  @async_cached_nullary
7851
8777
  async def brew_tcl_opts(self) -> PyenvInstallOpts:
7852
- if await asyncio_subprocess_try_output('brew', '--prefix', 'tcl-tk') is None:
8778
+ if await asyncio_subprocesses.try_output('brew', '--prefix', 'tcl-tk') is None:
7853
8779
  return PyenvInstallOpts()
7854
8780
 
7855
- tcl_tk_prefix = await asyncio_subprocess_check_output_str('brew', '--prefix', 'tcl-tk')
7856
- tcl_tk_ver_str = await asyncio_subprocess_check_output_str('brew', 'ls', '--versions', 'tcl-tk')
8781
+ tcl_tk_prefix = await asyncio_subprocesses.check_output_str('brew', '--prefix', 'tcl-tk')
8782
+ tcl_tk_ver_str = await asyncio_subprocesses.check_output_str('brew', 'ls', '--versions', 'tcl-tk')
7857
8783
  tcl_tk_ver = '.'.join(tcl_tk_ver_str.split()[1].split('.')[:2])
7858
8784
 
7859
8785
  return PyenvInstallOpts(conf_opts=[
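
The tcl-tk hunk derives a major.minor version from `brew ls --versions tcl-tk`, whose output has the shape `tcl-tk 8.6.14`. A tiny worked example of that split (the sample string is illustrative):

    def tcl_tk_major_minor(ver_str: str) -> str:
        # 'tcl-tk 8.6.14' -> take the second whitespace token, keep two dot parts.
        return '.'.join(ver_str.split()[1].split('.')[:2])


    if __name__ == '__main__':
        print(tcl_tk_major_minor('tcl-tk 8.6.14'))  # '8.6'
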
@@ -7961,6 +8887,7 @@ class PyenvVersionInstaller:
7961
8887
  self._version,
7962
8888
  ]
7963
8889
 
8890
+ full_args: ta.List[str]
7964
8891
  if self._given_install_name is not None:
7965
8892
  full_args = [
7966
8893
  os.path.join(check.not_none(await self._pyenv.root()), 'plugins', 'python-build', 'bin', 'python-build'), # noqa
@@ -7974,7 +8901,7 @@ class PyenvVersionInstaller:
7974
8901
  *conf_args,
7975
8902
  ]
7976
8903
 
7977
- await asyncio_subprocess_check_call(
8904
+ await asyncio_subprocesses.check_call(
7978
8905
  *full_args,
7979
8906
  env=env,
7980
8907
  )
@@ -8826,7 +9753,28 @@ def main_bootstrap(bs: MainBootstrap) -> Injector:
8826
9753
  # main.py
8827
9754
 
8828
9755
 
9756
+ @dc.dataclass(frozen=True)
9757
+ class ManageConfig:
9758
+ targets: ta.Optional[ta.Mapping[str, ManageTarget]] = None
9759
+
9760
+
8829
9761
  class MainCli(ArgparseCli):
9762
+ config_file: ta.Optional[str] = argparse_arg('--config-file', help='Config file path') # type: ignore
9763
+
9764
+ @cached_nullary
9765
+ def config(self) -> ManageConfig:
9766
+ if (cf := self.config_file) is None:
9767
+ cf = os.path.expanduser('~/.omlish/manage.yml')
9768
+ if not os.path.isfile(cf):
9769
+ cf = None
9770
+
9771
+ if cf is None:
9772
+ return ManageConfig()
9773
+ else:
9774
+ return read_config_file(cf, ManageConfig)
9775
+
9776
+ #
9777
+
8830
9778
  @argparse_command(
8831
9779
  argparse_arg('--_payload-file'),
8832
9780
 
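
The new `MainCli.config()` above prefers an explicit `--config-file`, falls back to `~/.omlish/manage.yml`, and returns an empty `ManageConfig` when neither exists. A standalone sketch of that resolution order (the dataclass and loader here are simplified stand-ins for `ManageConfig` and `read_config_file`):

    import dataclasses as dc
    import os.path
    import typing as ta


    @dc.dataclass(frozen=True)
    class ManageConfigSketch:
        # Simplified stand-in for the diff's ManageConfig.
        targets: ta.Optional[ta.Mapping[str, ta.Any]] = None


    def resolve_config(config_file: ta.Optional[str]) -> ManageConfigSketch:
        cf = config_file
        if cf is None:
            # Same default location as the diff; drop it if the file isn't there.
            cf = os.path.expanduser('~/.omlish/manage.yml')
            if not os.path.isfile(cf):
                cf = None

        if cf is None:
            return ManageConfigSketch()

        # The real code calls read_config_file(cf, ManageConfig); this sketch only
        # demonstrates the resolution order, so it stops at the chosen path.
        print(f'would load config from {cf}')
        return ManageConfigSketch()


    if __name__ == '__main__':
        print(resolve_config(None))
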
@@ -8878,10 +9826,13 @@ class MainCli(ArgparseCli):
8878
9826
 
8879
9827
  msh = injector[ObjMarshalerManager]
8880
9828
 
8881
- ts = self.args.target
8882
- if not ts.startswith('{'):
8883
- ts = json.dumps({ts: {}})
8884
- tgt: ManageTarget = msh.unmarshal_obj(json.loads(ts), ManageTarget)
9829
+ tgt: ManageTarget
9830
+ if not (ts := self.args.target).startswith('{'):
9831
+ tgt = check.not_none(self.config().targets)[ts]
9832
+ else:
9833
+ tgt = msh.unmarshal_obj(json.loads(ts), ManageTarget)
9834
+
9835
+ #
8885
9836
 
8886
9837
  cmds: ta.List[Command] = []
8887
9838
  cmd: Command
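
The final hunk changes target resolution: a bare name is now looked up in the config's `targets` mapping, while a string starting with `{` is still unmarshalled as inline JSON. A rough sketch of that dispatch (plain `json.loads` stands in for the package's `ObjMarshalerManager`):

    import json
    import typing as ta


    def resolve_target(
        target_arg: str,
        named_targets: ta.Optional[ta.Mapping[str, ta.Any]],
    ) -> ta.Any:
        # Inline JSON wins when the argument looks like an object literal; otherwise
        # the argument is treated as a key into the configured named targets.
        if target_arg.startswith('{'):
            # The real code hands this dict to unmarshal_obj(..., ManageTarget);
            # json.loads is only a stand-in here.
            return json.loads(target_arg)
        if not named_targets:
            raise KeyError(f'no configured target named {target_arg!r}')
        return named_targets[target_arg]


    if __name__ == '__main__':
        print(resolve_target('{"ssh": {"host": "example.com"}}', None))
        print(resolve_target('prod', {'prod': {'ssh': {'host': 'example.com'}}}))
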