omdev 0.0.0.dev210__py3-none-any.whl → 0.0.0.dev211__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omdev/ci/cache.py +137 -10
- omdev/ci/ci.py +110 -75
- omdev/ci/cli.py +41 -7
- omdev/ci/compose.py +31 -15
- omdev/ci/{dockertars.py → docker.py} +43 -30
- omdev/ci/github/__init__.py +0 -0
- omdev/ci/github/bootstrap.py +11 -0
- omdev/ci/github/cache.py +355 -0
- omdev/ci/github/cacheapi.py +207 -0
- omdev/ci/github/cli.py +39 -0
- omdev/ci/requirements.py +1 -0
- omdev/ci/shell.py +42 -0
- omdev/ci/utils.py +49 -0
- omdev/scripts/ci.py +1737 -485
- omdev/scripts/interp.py +22 -22
- omdev/scripts/pyproject.py +22 -22
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev211.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev211.dist-info}/RECORD +22 -16
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev211.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev211.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev211.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev211.dist-info}/top_level.txt +0 -0
omdev/scripts/ci.py
CHANGED
@@ -3,7 +3,7 @@
 # @omlish-lite
 # @omlish-script
 # @omlish-amalg-output ../ci/cli.py
-# ruff: noqa: UP006 UP007 UP036
+# ruff: noqa: N802 UP006 UP007 UP036
 """
 Inputs:
  - requirements.txt
@@ -20,6 +20,7 @@ import asyncio
 import collections
 import contextlib
 import dataclasses as dc
+import datetime
 import functools
 import hashlib
 import inspect
@@ -50,8 +51,10 @@ if sys.version_info < (3, 8):
 ########################################


-#
+# shell.py
 T = ta.TypeVar('T')
+
+# ../../omlish/lite/cached.py
 CallableT = ta.TypeVar('CallableT', bound=ta.Callable)

 # ../../omlish/lite/check.py
@@ -73,72 +76,41 @@ SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlia


 ########################################
-# ../
-
-
-#
-
-
-@abc.abstractmethod
-class FileCache(abc.ABC):
-    @abc.abstractmethod
-    def get_file(self, name: str) -> ta.Optional[str]:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def put_file(self, name: str) -> ta.Optional[str]:
-        raise NotImplementedError
-
-
-#
-
-
-class DirectoryFileCache(FileCache):
-    def __init__(self, dir: str) -> None:  # noqa
-        super().__init__()
-
-        self._dir = dir
-
-    def get_file(self, name: str) -> ta.Optional[str]:
-        file_path = os.path.join(self._dir, name)
-        if not os.path.exists(file_path):
-            return None
-        return file_path
-
-    def put_file(self, file_path: str) -> None:
-        os.makedirs(self._dir, exist_ok=True)
-        cache_file_path = os.path.join(self._dir, os.path.basename(file_path))
-        shutil.copyfile(file_path, cache_file_path)
-
-
-########################################
-# ../utils.py
-
-
-##
-
-
-def make_temp_file() -> str:
-    file_fd, file = tempfile.mkstemp()
-    os.close(file_fd)
-    return file
+# ../shell.py


 ##


-
-
-
-    with open(yaml_file) as f:
-        return yaml.safe_load(f)
-
+@dc.dataclass(frozen=True)
+class ShellCmd:
+    s: str

-
+    env: ta.Optional[ta.Mapping[str, str]] = None

+    def build_run_kwargs(
+            self,
+            *,
+            env: ta.Optional[ta.Mapping[str, str]] = None,
+            **kwargs: ta.Any,
+    ) -> ta.Dict[str, ta.Any]:
+        if env is None:
+            env = os.environ
+        if self.env:
+            if (ek := set(env) & set(self.env)):
+                raise KeyError(*ek)
+            env = {**env, **self.env}
+
+        return dict(
+            env=env,
+            **kwargs,
+        )

-    def
-
+    def run(self, fn: ta.Callable[..., T], **kwargs) -> T:
+        return fn(
+            'sh', '-c', self.s,
+            **self.build_run_kwargs(**kwargs),
+        )


 ########################################
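As a quick illustration (an aside, not part of the package diff): the new `ShellCmd` merges its own `env` on top of the ambient environment, refusing collisions, and hands `'sh', '-c', s` to whatever runner it is given. A minimal sketch of how it might be driven, using the `subprocesses` wrapper already present in this amalgamated script (the variable names here are hypothetical):

    cmd = ShellCmd('echo "$GREETING"', env={'GREETING': 'hello'})
    cmd.run(subprocesses.check_call)  # runs: sh -c 'echo "$GREETING"'
    kw = cmd.build_run_kwargs()       # merged env dict; raises KeyError if GREETING already set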
@@ -680,6 +652,13 @@ json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON
 json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)


+########################################
+# ../../../omlish/lite/logs.py
+
+
+log = logging.getLogger(__name__)
+
+
 ########################################
 # ../../../omlish/lite/reflect.py

@@ -769,248 +748,895 @@ def deep_subclasses(cls: ta.Type[T]) -> ta.Iterator[ta.Type[T]]:


 ########################################
-# ../../../omlish/
-"""
-TODO:
- - default command
- - auto match all underscores to hyphens
- - pre-run, post-run hooks
- - exitstack?
-"""
+# ../../../omlish/lite/strings.py


 ##


-
-
-
-
-
+def camel_case(name: str, *, lower: bool = False) -> str:
+    if not name:
+        return ''
+    s = ''.join(map(str.capitalize, name.split('_')))  # noqa
+    if lower:
+        s = s[0].lower() + s[1:]
+    return s

-    def __get__(self, instance, owner=None):
-        if instance is None:
-            return self
-        return getattr(instance.args, self.dest)  # type: ignore

+def snake_case(name: str) -> str:
+    uppers: list[int | None] = [i for i, c in enumerate(name) if c.isupper()]
+    return '_'.join([name[l:r].lower() for l, r in zip([None, *uppers], [*uppers, None])]).strip('_')

-def argparse_arg(*args, **kwargs) -> ArgparseArg:
-    return ArgparseArg(args, kwargs)

+##

-#

+def is_dunder(name: str) -> bool:
+    return (
+        name[:2] == name[-2:] == '__' and
+        name[2:3] != '_' and
+        name[-3:-2] != '_' and
+        len(name) > 4
+    )

-@dc.dataclass(eq=False)
-class ArgparseCmd:
-    name: str
-    fn: ArgparseCmdFn
-    args: ta.Sequence[ArgparseArg] = ()  # noqa

-
+def is_sunder(name: str) -> bool:
+    return (
+        name[0] == name[-1] == '_' and
+        name[1:2] != '_' and
+        name[-2:-1] != '_' and
+        len(name) > 2
+    )

-    aliases: ta.Optional[ta.Sequence[str]] = None
-    parent: ta.Optional['ArgparseCmd'] = None
-    accepts_unknown: bool = False

-
-        def check_name(s: str) -> None:
-            check.isinstance(s, str)
-            check.not_in('_', s)
-            check.not_empty(s)
-        check_name(self.name)
-        check.not_isinstance(self.aliases, str)
-        for a in self.aliases or []:
-            check_name(a)
+##

-        check.arg(callable(self.fn))
-        check.arg(all(isinstance(a, ArgparseArg) for a in self.args))
-        check.isinstance(self.parent, (ArgparseCmd, type(None)))
-        check.isinstance(self.accepts_unknown, bool)

-
+def strip_with_newline(s: str) -> str:
+    if not s:
+        return ''
+    return s.strip() + '\n'

-    def __get__(self, instance, owner=None):
-        if instance is None:
-            return self
-        return dc.replace(self, fn=self.fn.__get__(instance, owner))  # noqa

-
-
+@ta.overload
+def split_keep_delimiter(s: str, d: str) -> str:
+    ...


-
-
-
-    aliases: ta.Optional[ta.Iterable[str]] = None,
-    parent: ta.Optional[ArgparseCmd] = None,
-    accepts_unknown: bool = False,
-) -> ta.Any:  # ta.Callable[[ArgparseCmdFn], ArgparseCmd]:  # FIXME
-    for arg in args:
-        check.isinstance(arg, ArgparseArg)
-    check.isinstance(name, (str, type(None)))
-    check.isinstance(parent, (ArgparseCmd, type(None)))
-    check.not_isinstance(aliases, str)
+@ta.overload
+def split_keep_delimiter(s: bytes, d: bytes) -> bytes:
+    ...

-    def inner(fn):
-        return ArgparseCmd(
-            (name if name is not None else fn.__name__).replace('_', '-'),
-            fn,
-            args,
-            aliases=tuple(aliases) if aliases is not None else None,
-            parent=parent,
-            accepts_unknown=accepts_unknown,
-        )

-
+def split_keep_delimiter(s, d):
+    ps = []
+    i = 0
+    while i < len(s):
+        if (n := s.find(d, i)) < i:
+            ps.append(s[i:])
+            break
+        ps.append(s[i:n + 1])
+        i = n + 1
+    return ps
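An aside on the new string helpers (my reading of the code above, not output from the package): `camel_case`/`snake_case` convert between the two naming styles, and `split_keep_delimiter` splits while keeping the delimiter attached to each piece except the last:

    assert camel_case('cache_key', lower=True) == 'cacheKey'
    assert snake_case('cacheKey') == 'cache_key'
    assert split_keep_delimiter('a\nb\nc', '\n') == ['a\n', 'b\n', 'c']
    assert split_keep_delimiter('a\nb\n', '\n') == ['a\n', 'b\n']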


 ##


-    def
-
-        return {}
-    elif ann is int:
-        return {'type': int}
-    elif ann is bool:
-        return {'action': 'store_true'}
-    elif ann is list:
-        return {'action': 'append'}
-    elif is_optional_alias(ann):
-        return _get_argparse_arg_ann_kwargs(get_optional_alias_arg(ann))
-    else:
-        raise TypeError(ann)
+def attr_repr(obj: ta.Any, *attrs: str) -> str:
+    return f'{type(obj).__name__}({", ".join(f"{attr}={getattr(obj, attr)!r}" for attr in attrs)})'


-
-    def __init__(self, annotations: ta.Mapping[str, ta.Any]) -> None:
-        super().__init__()
-        self.__annotations__ = annotations  # type: ignore
+##


-
-    def __init__(self, argv: ta.Optional[ta.Sequence[str]] = None) -> None:
-        super().__init__()
+FORMAT_NUM_BYTES_SUFFIXES: ta.Sequence[str] = ['B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB']

-        self._argv = argv if argv is not None else sys.argv[1:]

-
+def format_num_bytes(num_bytes: int) -> str:
+    for i, suffix in enumerate(FORMAT_NUM_BYTES_SUFFIXES):
+        value = num_bytes / 1024 ** i
+        if num_bytes < 1024 ** (i + 1):
+            if value.is_integer():
+                return f'{int(value)}{suffix}'
+            else:
+                return f'{value:.2f}{suffix}'

-
+    return f'{num_bytes / 1024 ** (len(FORMAT_NUM_BYTES_SUFFIXES) - 1):.2f}{FORMAT_NUM_BYTES_SUFFIXES[-1]}'
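Spot-checking `format_num_bytes` by hand (an aside; integral values drop the decimals, fractional ones keep two):

    assert format_num_bytes(1023) == '1023B'
    assert format_num_bytes(1024) == '1kB'
    assert format_num_bytes(1536) == '1.50kB'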

-    def __init_subclass__(cls, **kwargs: ta.Any) -> None:
-        super().__init_subclass__(**kwargs)

-
-
-        mro = cls.__mro__[::-1]
-        for bns in [bcls.__dict__ for bcls in reversed(mro)] + [ns]:
-            bseen = set()  # type: ignore
-            for k, v in bns.items():
-                if isinstance(v, (ArgparseCmd, ArgparseArg)):
-                    check.not_in(v, bseen)
-                    bseen.add(v)
-                    objs[k] = v
-                elif k in objs:
-                    del [k]
+########################################
+# ../../../omlish/logs/filters.py

-#

-
-
-
-
+class TidLogFilter(logging.Filter):
+    def filter(self, record):
+        record.tid = threading.get_native_id()
+        return True

-#

-
-
-        else:
-            parser = argparse.ArgumentParser()
-            setattr(cls, '_parser', parser)
+########################################
+# ../../../omlish/logs/proxy.py

-#

-
+class ProxyLogFilterer(logging.Filterer):
+    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
+        self._underlying = underlying

-
-
-
-        raise NotImplementedError
+    @property
+    def underlying(self) -> logging.Filterer:
+        return self._underlying

-
-
+    @property
+    def filters(self):
+        return self._underlying.filters

-
-
-
-                isinstance(arg.args[0], str) and
-                not (n := check.isinstance(arg.args[0], str)).startswith('-') and
-                'metavar' not in arg.kwargs
-            ):
-                subparser.add_argument(
-                    n.replace('-', '_'),
-                    **arg.kwargs,
-                    metavar=n,
-                )
-            else:
-                subparser.add_argument(*arg.args, **arg.kwargs)
+    @filters.setter
+    def filters(self, filters):
+        self._underlying.filters = filters

-
+    def addFilter(self, filter):  # noqa
+        self._underlying.addFilter(filter)

-
-
-            ann_kwargs = _get_argparse_arg_ann_kwargs(anns[att])
-            obj.kwargs = {**ann_kwargs, **obj.kwargs}
+    def removeFilter(self, filter):  # noqa
+        self._underlying.removeFilter(filter)

-
-
-                obj.dest = obj.kwargs['dest']
-            else:
-                obj.dest = obj.kwargs['dest'] = att  # type: ignore
+    def filter(self, record):
+        return self._underlying.filter(record)

-            parser.add_argument(*obj.args, **obj.kwargs)

-
-
+class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
+    def __init__(self, underlying: logging.Handler) -> None:  # noqa
+        ProxyLogFilterer.__init__(self, underlying)

-
+    _underlying: logging.Handler

-
+    @property
+    def underlying(self) -> logging.Handler:
+        return self._underlying

-
-
-
+    def get_name(self):
+        return self._underlying.get_name()
+
+    def set_name(self, name):
+        self._underlying.set_name(name)

     @property
-    def
-        return self.
+    def name(self):
+        return self._underlying.name

     @property
-    def
-        return self.
+    def level(self):
+        return self._underlying.level
+
+    @level.setter
+    def level(self, level):
+        self._underlying.level = level

     @property
-    def
-        return self.
+    def formatter(self):
+        return self._underlying.formatter

-
+    @formatter.setter
+    def formatter(self, formatter):
+        self._underlying.formatter = formatter

-    def
-
+    def createLock(self):
+        self._underlying.createLock()

-    def
-
+    def acquire(self):
+        self._underlying.acquire()

-
-
-            if (parser := self.get_parser()).exit_on_error:  # type: ignore
-                parser.error(msg)
-            else:
-                raise argparse.ArgumentError(None, msg)
+    def release(self):
+        self._underlying.release()

-
+    def setLevel(self, level):
+        self._underlying.setLevel(level)
+
+    def format(self, record):
+        return self._underlying.format(record)
+
+    def emit(self, record):
+        self._underlying.emit(record)
+
+    def handle(self, record):
+        return self._underlying.handle(record)
+
+    def setFormatter(self, fmt):
+        self._underlying.setFormatter(fmt)
+
+    def flush(self):
+        self._underlying.flush()
+
+    def close(self):
+        self._underlying.close()
+
+    def handleError(self, record):
+        self._underlying.handleError(record)
+
+
+########################################
+# ../cache.py
+
+
+##
+
+
+@abc.abstractmethod
+class FileCache(abc.ABC):
+    @abc.abstractmethod
+    def get_file(self, key: str) -> ta.Optional[str]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def put_file(self, key: str, file_path: str) -> ta.Optional[str]:
+        raise NotImplementedError
+
+
+#
+
+
+class DirectoryFileCache(FileCache):
+    def __init__(self, dir: str) -> None:  # noqa
+        super().__init__()
+
+        self._dir = dir
+
+    #
+
+    def get_cache_file_path(
+            self,
+            key: str,
+            *,
+            make_dirs: bool = False,
+    ) -> str:
+        if make_dirs:
+            os.makedirs(self._dir, exist_ok=True)
+        return os.path.join(self._dir, key)
+
+    def format_incomplete_file(self, f: str) -> str:
+        return os.path.join(os.path.dirname(f), f'_{os.path.basename(f)}.incomplete')
+
+    #
+
+    def get_file(self, key: str) -> ta.Optional[str]:
+        cache_file_path = self.get_cache_file_path(key)
+        if not os.path.exists(cache_file_path):
+            return None
+        return cache_file_path
+
+    def put_file(self, key: str, file_path: str) -> None:
+        cache_file_path = self.get_cache_file_path(key, make_dirs=True)
+        shutil.copyfile(file_path, cache_file_path)
+
+
+##
+
+
+class ShellCache(abc.ABC):
+    @abc.abstractmethod
+    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+        raise NotImplementedError
+
+    class PutFileCmdContext(abc.ABC):
+        def __init__(self) -> None:
+            super().__init__()
+
+            self._state: ta.Literal['open', 'committed', 'aborted'] = 'open'
+
+        @property
+        def state(self) -> ta.Literal['open', 'committed', 'aborted']:
+            return self._state
+
+        #
+
+        @property
+        @abc.abstractmethod
+        def cmd(self) -> ShellCmd:
+            raise NotImplementedError
+
+        #
+
+        def __enter__(self):
+            return self
+
+        def __exit__(self, exc_type, exc_val, exc_tb):
+            if exc_val is None:
+                self.commit()
+            else:
+                self.abort()
+
+        #
+
+        @abc.abstractmethod
+        def _commit(self) -> None:
+            raise NotImplementedError
+
+        def commit(self) -> None:
+            if self._state == 'committed':
+                return
+            elif self._state == 'open':
+                self._commit()
+                self._state = 'committed'
+            else:
+                raise RuntimeError(self._state)
+
+        #
+
+        @abc.abstractmethod
+        def _abort(self) -> None:
+            raise NotImplementedError
+
+        def abort(self) -> None:
+            if self._state == 'aborted':
+                return
+            elif self._state == 'open':
+                self._abort()
+                self._state = 'committed'
+            else:
+                raise RuntimeError(self._state)
+
+    @abc.abstractmethod
+    def put_file_cmd(self, key: str) -> PutFileCmdContext:
+        raise NotImplementedError
+
+
+#
+
+
+class DirectoryShellCache(ShellCache):
+    def __init__(self, dfc: DirectoryFileCache) -> None:
+        super().__init__()
+
+        self._dfc = dfc
+
+    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+        f = self._dfc.get_file(key)
+        if f is None:
+            return None
+        return ShellCmd(f'cat {shlex.quote(f)}')
+
+    class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
+        def __init__(self, tf: str, f: str) -> None:
+            super().__init__()
+
+            self._tf = tf
+            self._f = f
+
+        @property
+        def cmd(self) -> ShellCmd:
+            return ShellCmd(f'cat > {shlex.quote(self._tf)}')
+
+        def _commit(self) -> None:
+            os.replace(self._tf, self._f)
+
+        def _abort(self) -> None:
+            os.unlink(self._tf)
+
+    def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
+        f = self._dfc.get_cache_file_path(key, make_dirs=True)
+        return self._PutFileCmdContext(self._dfc.format_incomplete_file(f), f)
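An aside on the new cache protocol: writes go to an `_<key>.incomplete` sibling and are renamed into place only on a clean exit, so interrupted uploads never leave a half-written cache entry. A hedged sketch (directory path and key are hypothetical):

    sc = DirectoryShellCache(DirectoryFileCache('/tmp/ci-cache'))
    if (get := sc.get_file_cmd('my-key')) is not None:
        data = get.run(subprocesses.check_output)  # sh -c 'cat /tmp/ci-cache/my-key'
    with sc.put_file_cmd('my-key') as put:
        put.cmd.run(subprocesses.check_call)  # command reads stdin into _my-key.incomplete
    # a clean __exit__ commits via os.replace(); an exception aborts and unlinks the temp file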
+
+
+########################################
+# ../github/cacheapi.py
+"""
+export FILE_SIZE=$(stat --format="%s" $FILE)
+
+export CACHE_ID=$(curl -s \
+  -X POST \
+  "${ACTIONS_CACHE_URL}_apis/artifactcache/caches" \
+  -H 'Content-Type: application/json' \
+  -H 'Accept: application/json;api-version=6.0-preview.1' \
+  -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+  -d '{"key": "'"$CACHE_KEY"'", "cacheSize": '"$FILE_SIZE"'}' \
+  | jq .cacheId)
+
+curl -s \
+  -X PATCH \
+  "${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
+  -H 'Content-Type: application/octet-stream' \
+  -H 'Accept: application/json;api-version=6.0-preview.1' \
+  -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+  -H "Content-Range: bytes 0-$((FILE_SIZE - 1))/*" \
+  --data-binary @"$FILE"
+
+curl -s \
+  -X POST \
+  "${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
+  -H 'Content-Type: application/json' \
+  -H 'Accept: application/json;api-version=6.0-preview.1' \
+  -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+  -d '{"size": '"$(stat --format="%s" $FILE)"'}'
+
+curl -s \
+  -X GET \
+  "${ACTIONS_CACHE_URL}_apis/artifactcache/cache?keys=$CACHE_KEY" \
+  -H 'Content-Type: application/json' \
+  -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+  | jq .
+"""
+
+
+##
+
+
+class GithubCacheServiceV1:
+    API_VERSION = '6.0-preview.1'
+
+    @classmethod
+    def get_service_url(cls, base_url: str) -> str:
+        return f'{base_url.rstrip("/")}/_apis/artifactcache'
+
+    #
+
+    @classmethod
+    def dataclass_to_json(cls, obj: ta.Any) -> ta.Any:
+        return {
+            camel_case(k, lower=True): v
+            for k, v in dc.asdict(obj).items()
+            if v is not None
+        }
+
+    @classmethod
+    def dataclass_from_json(cls, dcls: ta.Type[T], obj: ta.Any) -> T:
+        return dcls(**{
+            snake_case(k): v
+            for k, v in obj.items()
+        })
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class ArtifactCacheEntry:
+        cache_key: ta.Optional[str]
+        scope: ta.Optional[str]
+        cache_version: ta.Optional[str]
+        creation_time: ta.Optional[str]
+        archive_location: ta.Optional[str]
+
+    @dc.dataclass(frozen=True)
+    class ArtifactCacheList:
+        total_count: int
+        artifact_caches: ta.Optional[ta.Sequence['GithubCacheServiceV1.ArtifactCacheEntry']]
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class ReserveCacheRequest:
+        key: str
+        cache_size: ta.Optional[int]
+        version: ta.Optional[str] = None
+
+    @dc.dataclass(frozen=True)
+    class ReserveCacheResponse:
+        cache_id: int
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class CommitCacheRequest:
+        size: int
+
+    #
+
+    class CompressionMethod:
+        GZIP = 'gzip'
+        ZSTD_WITHOUT_LONG = 'zstd-without-long'
+        ZSTD = 'zstd'
+
+    @dc.dataclass(frozen=True)
+    class InternalCacheOptions:
+        compression_method: ta.Optional[str]  # CompressionMethod
+        enable_cross_os_archive: ta.Optional[bool]
+        cache_size: ta.Optional[int]
+
+
+class GithubCacheServiceV2:
+    SERVICE_NAME = 'github.actions.results.api.v1.CacheService'
+
+    @dc.dataclass(frozen=True)
+    class Method:
+        name: str
+        request: type
+        response: type
+
+    #
+
+    class CacheScopePermission:
+        READ = 1
+        WRITE = 2
+        ALL = READ | WRITE
+
+    @dc.dataclass(frozen=True)
+    class CacheScope:
+        scope: str
+        permission: int  # CacheScopePermission
+
+    @dc.dataclass(frozen=True)
+    class CacheMetadata:
+        repository_id: int
+        scope: ta.Sequence['GithubCacheServiceV2.CacheScope']
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class CreateCacheEntryRequest:
+        key: str
+        version: str
+        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+    @dc.dataclass(frozen=True)
+    class CreateCacheEntryResponse:
+        ok: bool
+        signed_upload_url: str
+
+    CREATE_CACHE_ENTRY_METHOD = Method(
+        'CreateCacheEntry',
+        CreateCacheEntryRequest,
+        CreateCacheEntryResponse,
+    )
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class FinalizeCacheEntryUploadRequest:
+        key: str
+        size_bytes: int
+        version: str
+        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+    @dc.dataclass(frozen=True)
+    class FinalizeCacheEntryUploadResponse:
+        ok: bool
+        entry_id: str
+
+    FINALIZE_CACHE_ENTRY_METHOD = Method(
+        'FinalizeCacheEntryUpload',
+        FinalizeCacheEntryUploadRequest,
+        FinalizeCacheEntryUploadResponse,
+    )
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class GetCacheEntryDownloadUrlRequest:
+        key: str
+        restore_keys: ta.Sequence[str]
+        version: str
+        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+    @dc.dataclass(frozen=True)
+    class GetCacheEntryDownloadUrlResponse:
+        ok: bool
+        signed_download_url: str
+        matched_key: str
+
+    GET_CACHE_ENTRY_DOWNLOAD_URL_METHOD = Method(
+        'GetCacheEntryDownloadURL',
+        GetCacheEntryDownloadUrlRequest,
+        GetCacheEntryDownloadUrlResponse,
+    )
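An aside tying this back to the string helpers added earlier: the v1 cache API speaks lowerCamelCase JSON while these dataclasses use snake_case, and `dataclass_to_json`/`dataclass_from_json` bridge the two (None-valued fields are dropped on the way out):

    req = GithubCacheServiceV1.ReserveCacheRequest(key='k', cache_size=123)
    GithubCacheServiceV1.dataclass_to_json(req)
    # -> {'key': 'k', 'cacheSize': 123}   (version=None is omitted)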
+
+
+########################################
+# ../utils.py
+
+
+##
+
+
+def make_temp_file() -> str:
+    file_fd, file = tempfile.mkstemp()
+    os.close(file_fd)
+    return file
+
+
+##
+
+
+def read_yaml_file(yaml_file: str) -> ta.Any:
+    yaml = __import__('yaml')
+
+    with open(yaml_file) as f:
+        return yaml.safe_load(f)
+
+
+##
+
+
+def sha256_str(s: str) -> str:
+    return hashlib.sha256(s.encode('utf-8')).hexdigest()
+
+
+##
+
+
+class LogTimingContext:
+    DEFAULT_LOG: ta.ClassVar[logging.Logger] = log
+
+    def __init__(
+            self,
+            description: str,
+            *,
+            log: ta.Optional[logging.Logger] = None,  # noqa
+            level: int = logging.DEBUG,
+    ) -> None:
+        super().__init__()
+
+        self._description = description
+        self._log = log if log is not None else self.DEFAULT_LOG
+        self._level = level
+
+    def set_description(self, description: str) -> 'LogTimingContext':
+        self._description = description
+        return self
+
+    _begin_time: float
+    _end_time: float
+
+    def __enter__(self) -> 'LogTimingContext':
+        self._begin_time = time.time()
+
+        self._log.log(self._level, f'Begin {self._description}')  # noqa
+
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self._end_time = time.time()
+
+        self._log.log(
+            self._level,
+            f'End {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
+        )
+
+
+log_timing_context = LogTimingContext
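A hedged usage sketch for the timing helper (the wrapped call is hypothetical):

    with log_timing_context('docker pull', level=logging.INFO):
        pull_docker_image('alpine:3.19')  # logs 'Begin docker pull' / 'End docker pull - N.NN s elapsed'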
+
+
+########################################
+# ../../../omlish/argparse/cli.py
+"""
+TODO:
+ - default command
+ - auto match all underscores to hyphens
+ - pre-run, post-run hooks
+ - exitstack?
+"""
+
+
+##
+
+
+@dc.dataclass(eq=False)
+class ArgparseArg:
+    args: ta.Sequence[ta.Any]
+    kwargs: ta.Mapping[str, ta.Any]
+    dest: ta.Optional[str] = None
+
+    def __get__(self, instance, owner=None):
+        if instance is None:
+            return self
+        return getattr(instance.args, self.dest)  # type: ignore
+
+
+def argparse_arg(*args, **kwargs) -> ArgparseArg:
+    return ArgparseArg(args, kwargs)
+
+
+#
+
+
+@dc.dataclass(eq=False)
+class ArgparseCmd:
+    name: str
+    fn: ArgparseCmdFn
+    args: ta.Sequence[ArgparseArg] = ()  # noqa
+
+    # _: dc.KW_ONLY
+
+    aliases: ta.Optional[ta.Sequence[str]] = None
+    parent: ta.Optional['ArgparseCmd'] = None
+    accepts_unknown: bool = False
+
+    def __post_init__(self) -> None:
+        def check_name(s: str) -> None:
+            check.isinstance(s, str)
+            check.not_in('_', s)
+            check.not_empty(s)
+        check_name(self.name)
+        check.not_isinstance(self.aliases, str)
+        for a in self.aliases or []:
+            check_name(a)
+
+        check.arg(callable(self.fn))
+        check.arg(all(isinstance(a, ArgparseArg) for a in self.args))
+        check.isinstance(self.parent, (ArgparseCmd, type(None)))
+        check.isinstance(self.accepts_unknown, bool)
+
+        functools.update_wrapper(self, self.fn)
+
+    def __get__(self, instance, owner=None):
+        if instance is None:
+            return self
+        return dc.replace(self, fn=self.fn.__get__(instance, owner))  # noqa
+
+    def __call__(self, *args, **kwargs) -> ta.Optional[int]:
+        return self.fn(*args, **kwargs)
+
+
+def argparse_cmd(
+        *args: ArgparseArg,
+        name: ta.Optional[str] = None,
+        aliases: ta.Optional[ta.Iterable[str]] = None,
+        parent: ta.Optional[ArgparseCmd] = None,
+        accepts_unknown: bool = False,
+) -> ta.Any:  # ta.Callable[[ArgparseCmdFn], ArgparseCmd]:  # FIXME
+    for arg in args:
+        check.isinstance(arg, ArgparseArg)
+    check.isinstance(name, (str, type(None)))
+    check.isinstance(parent, (ArgparseCmd, type(None)))
+    check.not_isinstance(aliases, str)
+
+    def inner(fn):
+        return ArgparseCmd(
+            (name if name is not None else fn.__name__).replace('_', '-'),
+            fn,
+            args,
+            aliases=tuple(aliases) if aliases is not None else None,
+            parent=parent,
+            accepts_unknown=accepts_unknown,
+        )
+
+    return inner
+
+
+##
+
+
+def _get_argparse_arg_ann_kwargs(ann: ta.Any) -> ta.Mapping[str, ta.Any]:
+    if ann is str:
+        return {}
+    elif ann is int:
+        return {'type': int}
+    elif ann is bool:
+        return {'action': 'store_true'}
+    elif ann is list:
+        return {'action': 'append'}
+    elif is_optional_alias(ann):
+        return _get_argparse_arg_ann_kwargs(get_optional_alias_arg(ann))
+    else:
+        raise TypeError(ann)
+
+
+class _ArgparseCliAnnotationBox:
+    def __init__(self, annotations: ta.Mapping[str, ta.Any]) -> None:
+        super().__init__()
+        self.__annotations__ = annotations  # type: ignore
+
+
+class ArgparseCli:
+    def __init__(self, argv: ta.Optional[ta.Sequence[str]] = None) -> None:
+        super().__init__()
+
+        self._argv = argv if argv is not None else sys.argv[1:]
+
+        self._args, self._unknown_args = self.get_parser().parse_known_args(self._argv)
+
+    #
+
+    def __init_subclass__(cls, **kwargs: ta.Any) -> None:
+        super().__init_subclass__(**kwargs)
+
+        ns = cls.__dict__
+        objs = {}
+        mro = cls.__mro__[::-1]
+        for bns in [bcls.__dict__ for bcls in reversed(mro)] + [ns]:
+            bseen = set()  # type: ignore
+            for k, v in bns.items():
+                if isinstance(v, (ArgparseCmd, ArgparseArg)):
+                    check.not_in(v, bseen)
+                    bseen.add(v)
+                    objs[k] = v
+                elif k in objs:
+                    del [k]
+
+        #
+
+        anns = ta.get_type_hints(_ArgparseCliAnnotationBox({
+            **{k: v for bcls in reversed(mro) for k, v in getattr(bcls, '__annotations__', {}).items()},
+            **ns.get('__annotations__', {}),
+        }), globalns=ns.get('__globals__', {}))
+
+        #
+
+        if '_parser' in ns:
+            parser = check.isinstance(ns['_parser'], argparse.ArgumentParser)
+        else:
+            parser = argparse.ArgumentParser()
+            setattr(cls, '_parser', parser)
+
+        #
+
+        subparsers = parser.add_subparsers()
+
+        for att, obj in objs.items():
+            if isinstance(obj, ArgparseCmd):
+                if obj.parent is not None:
+                    raise NotImplementedError
+
+                for cn in [obj.name, *(obj.aliases or [])]:
+                    subparser = subparsers.add_parser(cn)
+
+                    for arg in (obj.args or []):
+                        if (
+                            len(arg.args) == 1 and
+                            isinstance(arg.args[0], str) and
+                            not (n := check.isinstance(arg.args[0], str)).startswith('-') and
+                            'metavar' not in arg.kwargs
+                        ):
+                            subparser.add_argument(
+                                n.replace('-', '_'),
+                                **arg.kwargs,
+                                metavar=n,
+                            )
+                        else:
+                            subparser.add_argument(*arg.args, **arg.kwargs)
+
+                    subparser.set_defaults(_cmd=obj)
+
+            elif isinstance(obj, ArgparseArg):
+                if att in anns:
+                    ann_kwargs = _get_argparse_arg_ann_kwargs(anns[att])
+                    obj.kwargs = {**ann_kwargs, **obj.kwargs}
+
+                if not obj.dest:
+                    if 'dest' in obj.kwargs:
+                        obj.dest = obj.kwargs['dest']
+                    else:
+                        obj.dest = obj.kwargs['dest'] = att  # type: ignore
+
+                parser.add_argument(*obj.args, **obj.kwargs)
+
+            else:
+                raise TypeError(obj)
+
+        #
+
+    _parser: ta.ClassVar[argparse.ArgumentParser]
+
+    @classmethod
+    def get_parser(cls) -> argparse.ArgumentParser:
+        return cls._parser
+
+    @property
+    def argv(self) -> ta.Sequence[str]:
+        return self._argv
+
+    @property
+    def args(self) -> argparse.Namespace:
+        return self._args
+
+    @property
+    def unknown_args(self) -> ta.Sequence[str]:
+        return self._unknown_args
+
+    #
+
+    def _bind_cli_cmd(self, cmd: ArgparseCmd) -> ta.Callable:
+        return cmd.__get__(self, type(self))
+
+    def prepare_cli_run(self) -> ta.Optional[ta.Callable]:
+        cmd = getattr(self.args, '_cmd', None)
+
+        if self._unknown_args and not (cmd is not None and cmd.accepts_unknown):
+            msg = f'unrecognized arguments: {" ".join(self._unknown_args)}'
+            if (parser := self.get_parser()).exit_on_error:  # type: ignore
+                parser.error(msg)
+            else:
+                raise argparse.ArgumentError(None, msg)
+
+        if cmd is None:
             self.get_parser().print_help()
             return None

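An aside on the declarative CLI machinery: `__init_subclass__` walks the class dict, turns every `ArgparseCmd`/`ArgparseArg` it finds into subparsers and arguments, and `prepare_cli_run` later resolves the selected command. A minimal sketch of a subclass, under my reading of the code above (names hypothetical, and the final bound-call path is only partly shown in this hunk):

    class DemoCli(ArgparseCli):
        @argparse_cmd(
            argparse_arg('name'),
            argparse_arg('--shout', action='store_true'),
        )
        def greet(self) -> None:
            s = f'hello, {self.args.name}'
            print(s.upper() if self.args.shout else s)

    # DemoCli(['greet', 'world', '--shout']).prepare_cli_run()() would print 'HELLO, WORLD'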
@@ -1079,71 +1705,247 @@ class ExitStacked:
         self._exit_contexts()
         return es.__exit__(exc_type, exc_val, exc_tb)

-    def _exit_contexts(self) -> None:
-        pass
+    def _exit_contexts(self) -> None:
+        pass
+
+    def _enter_context(self, cm: ta.ContextManager[T]) -> T:
+        es = check.not_none(self._exit_stack)
+        return es.enter_context(cm)
+
+
+##
+
+
+@contextlib.contextmanager
+def defer(fn: ta.Callable) -> ta.Generator[ta.Callable, None, None]:
+    try:
+        yield fn
+    finally:
+        fn()
+
+
+@contextlib.contextmanager
+def attr_setting(obj, attr, val, *, default=None):  # noqa
+    not_set = object()
+    orig = getattr(obj, attr, not_set)
+    try:
+        setattr(obj, attr, val)
+        if orig is not not_set:
+            yield orig
+        else:
+            yield default
+    finally:
+        if orig is not_set:
+            delattr(obj, attr)
+        else:
+            setattr(obj, attr, orig)
+
+
+##
+
+
+class aclosing(contextlib.AbstractAsyncContextManager):  # noqa
+    def __init__(self, thing):
+        self.thing = thing
+
+    async def __aenter__(self):
+        return self.thing
+
+    async def __aexit__(self, *exc_info):
+        await self.thing.aclose()
+
+
+########################################
+# ../../../omlish/lite/runtime.py
+
+
+@cached_nullary
+def is_debugger_attached() -> bool:
+    return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
+
+
+LITE_REQUIRED_PYTHON_VERSION = (3, 8)
+
+
+def check_lite_runtime_version() -> None:
+    if sys.version_info < LITE_REQUIRED_PYTHON_VERSION:
+        raise OSError(f'Requires python {LITE_REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
+
+
+########################################
+# ../../../omlish/logs/json.py
+"""
+TODO:
+ - translate json keys
+"""
+
+
+class JsonLogFormatter(logging.Formatter):
+    KEYS: ta.Mapping[str, bool] = {
+        'name': False,
+        'msg': False,
+        'args': False,
+        'levelname': False,
+        'levelno': False,
+        'pathname': False,
+        'filename': False,
+        'module': False,
+        'exc_info': True,
+        'exc_text': True,
+        'stack_info': True,
+        'lineno': False,
+        'funcName': False,
+        'created': False,
+        'msecs': False,
+        'relativeCreated': False,
+        'thread': False,
+        'threadName': False,
+        'processName': False,
+        'process': False,
+    }
+
+    def __init__(
+            self,
+            *args: ta.Any,
+            json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
+            **kwargs: ta.Any,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+
+        if json_dumps is None:
+            json_dumps = json_dumps_compact
+        self._json_dumps = json_dumps
+
+    def format(self, record: logging.LogRecord) -> str:
+        dct = {
+            k: v
+            for k, o in self.KEYS.items()
+            for v in [getattr(record, k)]
+            if not (o and v is None)
+        }
+        return self._json_dumps(dct)
+
+
+########################################
+# ../../../omlish/logs/standard.py
+"""
+TODO:
+ - structured
+ - prefixed
+ - debug
+ - optional noisy? noisy will never be lite - some kinda configure_standard callback mechanism?
+"""
+
+
+##
+
+
+STANDARD_LOG_FORMAT_PARTS = [
+    ('asctime', '%(asctime)-15s'),
+    ('process', 'pid=%(process)-6s'),
+    ('thread', 'tid=%(thread)x'),
+    ('levelname', '%(levelname)s'),
+    ('name', '%(name)s'),
+    ('separator', '::'),
+    ('message', '%(message)s'),
+]
+
+
+class StandardLogFormatter(logging.Formatter):
+    @staticmethod
+    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
+        return ' '.join(v for k, v in parts)
+
+    converter = datetime.datetime.fromtimestamp  # type: ignore
+
+    def formatTime(self, record, datefmt=None):
+        ct = self.converter(record.created)  # type: ignore
+        if datefmt:
+            return ct.strftime(datefmt)  # noqa
+        else:
+            t = ct.strftime('%Y-%m-%d %H:%M:%S')
+            return '%s.%03d' % (t, record.msecs)  # noqa
+
+
+##
+

-
-
-
+class StandardConfiguredLogHandler(ProxyLogHandler):
+    def __init_subclass__(cls, **kwargs):
+        raise TypeError('This class serves only as a marker and should not be subclassed.')


 ##


 @contextlib.contextmanager
-def
-
-
-
-
+def _locking_logging_module_lock() -> ta.Iterator[None]:
+    if hasattr(logging, '_acquireLock'):
+        logging._acquireLock()  # noqa
+        try:
+            yield
+        finally:
+            logging._releaseLock()  # type: ignore  # noqa

+    elif hasattr(logging, '_lock'):
+        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
+        with logging._lock:  # noqa
+            yield

-
-
-    not_set = object()
-    orig = getattr(obj, attr, not_set)
-    try:
-        setattr(obj, attr, val)
-        if orig is not not_set:
-            yield orig
-        else:
-            yield default
-    finally:
-        if orig is not_set:
-            delattr(obj, attr)
-        else:
-            setattr(obj, attr, orig)
+    else:
+        raise Exception("Can't find lock in logging module")


-
+def configure_standard_logging(
+        level: ta.Union[int, str] = logging.INFO,
+        *,
+        json: bool = False,
+        target: ta.Optional[logging.Logger] = None,
+        force: bool = False,
+        handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
+) -> ta.Optional[StandardConfiguredLogHandler]:
+    with _locking_logging_module_lock():
+        if target is None:
+            target = logging.root

+        #

-
-
-
+        if not force:
+            if any(isinstance(h, StandardConfiguredLogHandler) for h in list(target.handlers)):
+                return None

-
-        return self.thing
+        #

-
-
+        if handler_factory is not None:
+            handler = handler_factory()
+        else:
+            handler = logging.StreamHandler()

+        #

-
-
+        formatter: logging.Formatter
+        if json:
+            formatter = JsonLogFormatter()
+        else:
+            formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
+        handler.setFormatter(formatter)

+        #

-
-def is_debugger_attached() -> bool:
-    return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
+        handler.addFilter(TidLogFilter())

+        #

-
+        target.addHandler(handler)

+        #

-
-
-
+        if level is not None:
+            target.setLevel(level)
+
+        #
+
+        return StandardConfiguredLogHandler(handler)


 ########################################
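A hedged sketch of how the new logging setup appears to be meant to be called (idempotent unless `force=True`, since the marker handler class is detected on re-entry):

    configure_standard_logging('INFO')                  # plain-text StandardLogFormatter
    configure_standard_logging(json=True, force=True)   # JSON lines via JsonLogFormatter
    log.info('cache hit')  # 'log' is the module-level logger added earlier in this script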
@@ -1513,7 +2315,7 @@ class DockerComposeRun(ExitStacked):

         image: str

-
+        cmd: ShellCmd

         #

@@ -1523,9 +2325,11 @@ class DockerComposeRun(ExitStacked):

         #

-
-        check.not_isinstance(self.run_cmd, str)
+        no_dependency_cleanup: bool = False

+        #
+
+        def __post_init__(self) -> None:
             check.not_isinstance(self.run_options, str)

     def __init__(self, cfg: Config) -> None:
@@ -1605,188 +2409,549 @@ class DockerComposeRun(ExitStacked):
             if dep_service not in depends_on:
                 continue

-            out_dep_service: dict = dict(in_dep_service_dct)
-            out_services[dep_service] = out_dep_service
+            out_dep_service: dict = dict(in_dep_service_dct)
+            out_services[dep_service] = out_dep_service
+
+            out_dep_service['ports'] = []
+
+        #
+
+        return out
+
+    @cached_nullary
+    def rewrite_compose_file(self) -> str:
+        in_dct = read_yaml_file(self._cfg.compose_file)
+
+        out_dct = self._rewrite_compose_dct(in_dct)
+
+        #
+
+        out_compose_file = make_temp_file()
+        self._enter_context(defer(lambda: os.unlink(out_compose_file)))  # noqa
+
+        compose_json = json_dumps_pretty(out_dct)
+
+        with open(out_compose_file, 'w') as f:
+            f.write(compose_json)
+
+        return out_compose_file
+
+    #
+
+    def _cleanup_dependencies(self) -> None:
+        subprocesses.check_call(
+            'docker',
+            'compose',
+            '-f', self.rewrite_compose_file(),
+            'down',
+        )
+
+    def run(self) -> None:
+        self.tag_image()
+
+        compose_file = self.rewrite_compose_file()
+
+        with contextlib.ExitStack() as es:
+            if not self._cfg.no_dependency_cleanup:
+                es.enter_context(defer(self._cleanup_dependencies))  # noqa
+
+            sh_cmd = ' '.join([
+                'docker',
+                'compose',
+                '-f', compose_file,
+                'run',
+                '--rm',
+                *itertools.chain.from_iterable(['-e', k] for k in (self._cfg.cmd.env or [])),
+                *(self._cfg.run_options or []),
+                self._cfg.service,
+                'sh', '-c', shlex.quote(self._cfg.cmd.s),
+            ])
+
+            run_cmd = dc.replace(self._cfg.cmd, s=sh_cmd)
+
+            run_cmd.run(
+                subprocesses.check_call,
+                **self._subprocess_kwargs,
+            )
+
+
+########################################
+# ../docker.py
+"""
+TODO:
+ - some less stupid Dockerfile hash
+  - doesn't change too much though
+"""
+
+
+##
+
+
+def build_docker_file_hash(docker_file: str) -> str:
+    with open(docker_file) as f:
+        contents = f.read()
+
+    return sha256_str(contents)
+
+
+##
+
+
+def read_docker_tar_image_tag(tar_file: str) -> str:
+    with tarfile.open(tar_file) as tf:
+        with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
+            m = mf.read()
+
+    manifests = json.loads(m.decode('utf-8'))
+    manifest = check.single(manifests)
+    tag = check.non_empty_str(check.single(manifest['RepoTags']))
+    return tag
+
+
+def read_docker_tar_image_id(tar_file: str) -> str:
+    with tarfile.open(tar_file) as tf:
+        with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
+            i = mf.read()
+
+    index = json.loads(i.decode('utf-8'))
+    manifest = check.single(index['manifests'])
+    image_id = check.non_empty_str(manifest['digest'])
+    return image_id
+
+
+##
+
+
+def is_docker_image_present(image: str) -> bool:
+    out = subprocesses.check_output(
+        'docker',
+        'images',
+        '--format', 'json',
+        image,
+    )
+
+    out_s = out.decode('utf-8').strip()
+    if not out_s:
+        return False
+
+    json.loads(out_s)  # noqa
+    return True
+
+
+def pull_docker_image(
+        image: str,
+) -> None:
+    subprocesses.check_call(
+        'docker',
+        'pull',
+        image,
+    )
+
+
+def build_docker_image(
+        docker_file: str,
+        *,
+        cwd: ta.Optional[str] = None,
+) -> str:
+    id_file = make_temp_file()
+    with defer(lambda: os.unlink(id_file)):
+        subprocesses.check_call(
+            'docker',
+            'build',
+            '-f', os.path.abspath(docker_file),
+            '--iidfile', id_file,
+            '--squash',
+            '.',
+            **(dict(cwd=cwd) if cwd is not None else {}),
+        )
+
+        with open(id_file) as f:
+            image_id = check.single(f.read().strip().splitlines()).strip()
+
+    return image_id
+
+
+##
+
+
+def save_docker_tar_cmd(
+        image: str,
+        output_cmd: ShellCmd,
+) -> None:
+    cmd = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
+    cmd.run(subprocesses.check_call)
+
+
+def save_docker_tar(
+        image: str,
+        tar_file: str,
+) -> None:
+    return save_docker_tar_cmd(
+        image,
+        ShellCmd(f'cat > {shlex.quote(tar_file)}'),
+    )
+
+
+#
+
+
+def load_docker_tar_cmd(
+        input_cmd: ShellCmd,
+) -> str:
+    cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')
+
+    out = cmd.run(subprocesses.check_output).decode()
+
+    line = check.single(out.strip().splitlines())
+    loaded = line.partition(':')[2].strip()
+    return loaded
+
+
+def load_docker_tar(
+        tar_file: str,
+) -> str:
+    return load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
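An aside on the renamed docker module: save and load are both expressed as shell pipelines around a `ShellCmd`, so the same code paths can stream to a local file or to a cache command. A hypothetical round trip through a local tar file:

    save_docker_tar('my-image:latest', '/tmp/img.tar')  # docker save ... | cat > /tmp/img.tar
    image_id = load_docker_tar('/tmp/img.tar')          # cat /tmp/img.tar | docker load
    tag = read_docker_tar_image_tag('/tmp/img.tar')     # from manifest.json inside the tar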
2614
|
+
|
2615
|
+
|
2616
|
+
########################################
|
2617
|
+
# ../github/cache.py
|
2618
|
+
|
2619
|
+
|
2620
|
+
##
|
2621
|
+
|
2622
|
+
|
2623
|
+
class GithubV1CacheShellClient:
|
2624
|
+
BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
|
2625
|
+
AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN' # noqa
|
2626
|
+
|
2627
|
+
def __init__(
|
2628
|
+
self,
|
2629
|
+
*,
|
2630
|
+
base_url: ta.Optional[str] = None,
|
2631
|
+
auth_token: ta.Optional[str] = None,
|
2632
|
+
) -> None:
|
2633
|
+
super().__init__()
|
2634
|
+
|
2635
|
+
if base_url is None:
|
2636
|
+
base_url = os.environ[self.BASE_URL_ENV_KEY]
|
2637
|
+
self._base_url = check.non_empty_str(base_url)
|
2638
|
+
|
2639
|
+
if auth_token is None:
|
2640
|
+
auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
|
2641
|
+
self._auth_token = auth_token
|
2642
|
+
|
2643
|
+
self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)
|
2644
|
+
|
2645
|
+
#
|
2646
|
+
|
2647
|
+
_MISSING = object()
|
2648
|
+
|
2649
|
+
def build_headers(
|
2650
|
+
self,
|
2651
|
+
*,
|
2652
|
+
auth_token: ta.Any = _MISSING,
|
2653
|
+
content_type: ta.Optional[str] = None,
|
2654
|
+
) -> ta.Dict[str, str]:
|
2655
|
+
dct = {
|
2656
|
+
'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
|
2657
|
+
}
|
2658
|
+
|
2659
|
+
if auth_token is self._MISSING:
|
2660
|
+
auth_token = self._auth_token
|
2661
|
+
if auth_token:
|
2662
|
+
dct['Authorization'] = f'Bearer {auth_token}'
|
2663
|
+
|
2664
|
+
if content_type is not None:
|
2665
|
+
dct['Content-Type'] = content_type
|
2666
|
+
|
2667
|
+
return dct
|

-
+    #

-
+    HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN'  # noqa

-
+    def build_curl_cmd(
+            self,
+            method: str,
+            url: str,
+            *,
+            json_content: bool = False,
+            content_type: ta.Optional[str] = None,
+    ) -> ShellCmd:
+        if content_type is None and json_content:
+            content_type = 'application/json'
+
+        env = {}
+
+        header_auth_token: ta.Optional[str]
+        if self._auth_token:
+            env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
+            header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
+        else:
+            header_auth_token = None

-
-
-
+        hdrs = self.build_headers(
+            auth_token=header_auth_token,
+            content_type=content_type,
+        )

-
+        url = f'{self._service_url}/{url}'

-
+        cmd = ' '.join([
+            'curl',
+            '-s',
+            '-X', method,
+            url,
+            *[f'-H "{k}: {v}"' for k, v in hdrs.items()],
+        ])

-
-
+        return ShellCmd(
+            cmd,
+            env=env,
+        )

-
+    def build_post_json_curl_cmd(
+            self,
+            url: str,
+            obj: ta.Any,
+            **kwargs: ta.Any,
+    ) -> ShellCmd:
+        curl_cmd = self.build_curl_cmd(
+            'POST',
+            url,
+            json_content=True,
+            **kwargs,
+        )

-
-        f.write(compose_json)
+        obj_json = json_dumps_compact(obj)

-        return
+        return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')

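Note: build_curl_cmd deliberately never interpolates the real token into the command text. The Authorization header carries the literal string $_GITHUB_CACHE_AUTH_TOKEN, and the actual secret rides in the ShellCmd's env mapping, so the shell expands it at execution time and the token stays out of logged command lines. A sketch of what comes back (service URL hypothetical):

    cmd = client.build_curl_cmd('GET', 'cache?keys=k')
    # cmd.s   ~= 'curl -s -X GET https://.../_apis/artifactcache/cache?keys=k'
    #            ' -H "Accept: application/json;api-version=..."'
    #            ' -H "Authorization: Bearer $_GITHUB_CACHE_AUTH_TOKEN"'
    # cmd.env == {'_GITHUB_CACHE_AUTH_TOKEN': '<real token>'}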
     #

-
-
+    @dc.dataclass()
+    class CurlError(RuntimeError):
+        status_code: int
+        body: ta.Optional[bytes]

-
+        def __str__(self) -> str:
+            return repr(self)

-
-
-
-
-
-
-
-
-        self.
-            *self._cfg.run_cmd,
-            **self._subprocess_kwargs,
+    @dc.dataclass(frozen=True)
+    class CurlResult:
+        status_code: int
+        body: ta.Optional[bytes]
+
+        def as_error(self) -> 'GithubV1CacheShellClient.CurlError':
+            return GithubV1CacheShellClient.CurlError(
+                status_code=self.status_code,
+                body=self.body,
             )

-
-
-
-
-
-
+    def run_curl_cmd(
+            self,
+            cmd: ShellCmd,
+            *,
+            raise_: bool = False,
+    ) -> CurlResult:
+        out_file = make_temp_file()
+        with defer(lambda: os.unlink(out_file)):
+            run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
+
+            out_json_bytes = run_cmd.run(subprocesses.check_output)
+
+            out_json = json.loads(out_json_bytes.decode())
+            status_code = check.isinstance(out_json['response_code'], int)
+
+            with open(out_file, 'rb') as f:
+                body = f.read()
+
+            result = self.CurlResult(
+                status_code=status_code,
+                body=body,
             )

+            if raise_ and (500 <= status_code <= 600):
+                raise result.as_error()

-
-# ../dockertars.py
-"""
-TODO:
- - some less stupid Dockerfile hash
-  - doesn't change too much though
-"""
+            return result

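Note: run_curl_cmd splits the response in two: -o routes the body to a temp file, while curl's write-out flag -w '%{json}' (curl >= 7.70.0) prints a JSON object of transfer metadata to stdout, from which response_code is read. A standalone sketch of the same trick, assumptions in the comments:

    import json
    import subprocess
    import tempfile

    # Hypothetical URL; requires a curl new enough to support '%{json}'.
    out_file = tempfile.mktemp()
    meta_bytes = subprocess.check_output(
        f"curl -s https://example.com -o {out_file} -w '%{{json}}'",
        shell=True,
    )
    print(json.loads(meta_bytes.decode())['response_code'])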
+    def run_json_curl_cmd(
+            self,
+            cmd: ShellCmd,
+            *,
+            success_status_codes: ta.Optional[ta.Container[int]] = None,
+    ) -> ta.Optional[ta.Any]:
+        result = self.run_curl_cmd(cmd, raise_=True)

-
+        if success_status_codes is not None:
+            is_success = result.status_code in success_status_codes
+        else:
+            is_success = 200 <= result.status_code < 300

+        if is_success:
+            if not (body := result.body):
+                return None
+            return json.loads(body.decode('utf-8-sig'))

-
-
-        contents = f.read()
+        elif result.status_code == 404:
+            return None

-
+        else:
+            raise result.as_error()

+    #

-
+    def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
+        return self.build_curl_cmd(
+            'GET',
+            f'cache?keys={key}',
+        )

+    def run_get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1.ArtifactCacheEntry]:
+        curl_cmd = self.build_get_entry_curl_cmd(key)

-
-
-
-
+        obj = self.run_json_curl_cmd(
+            curl_cmd,
+            success_status_codes=[200, 204],
+        )
+        if obj is None:
+            return None

-
-
-
-
+        return GithubCacheServiceV1.dataclass_from_json(
+            GithubCacheServiceV1.ArtifactCacheEntry,
+            obj,
+        )

+    #

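Note: a cache lookup is a GET against cache?keys=<key>; 204 responses and empty bodies both map to None, so a miss is an ordinary result rather than an error, and 404 is likewise swallowed by run_json_curl_cmd. A hedged usage sketch (key value hypothetical; download_get_entry is defined just below):

    client = GithubV1CacheShellClient()
    entry = client.run_get_entry('ci-0123456789abcdef')
    if entry is not None:
        client.download_get_entry(entry, '/tmp/ci-image.tar.zst')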
-def
-
-
-
+    def build_download_get_entry_cmd(
+            self,
+            entry: GithubCacheServiceV1.ArtifactCacheEntry,
+            out_file: str,
+    ) -> ShellCmd:
+        return ShellCmd(' '.join([
+            'aria2c',
+            '-x', '4',
+            '-o', out_file,
+            check.non_empty_str(entry.archive_location),
+        ]))
+
+    def download_get_entry(
+            self,
+            entry: GithubCacheServiceV1.ArtifactCacheEntry,
+            out_file: str,
+    ) -> None:
+        dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
+        dl_cmd.run(subprocesses.check_call)

-
-
-
-
+    #
+
+    def upload_cache_entry(
+            self,
+            key: str,
+            in_file: str,
+    ) -> None:
+        check.state(os.path.isfile(in_file))
+
+        file_size = os.stat(in_file).st_size
+
+        reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
+            key=key,
+            cache_size=file_size,
+        )
+        reserve_cmd = self.build_post_json_curl_cmd(
+            'caches',
+            GithubCacheServiceV1.dataclass_to_json(reserve_req),
+        )
+        reserve_resp_obj: ta.Any = check.not_none(self.run_json_curl_cmd(
+            reserve_cmd,
+            success_status_codes=[201],
+        ))
+        reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
+            GithubCacheServiceV1.ReserveCacheResponse,
+            reserve_resp_obj,
+        )
+
+        raise NotImplementedError

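Note: upload_cache_entry intentionally stops after the reservation step. In the v1 cache protocol as implemented by actions/toolkit, the reserve POST returns a cache id, the archive is then uploaded in Content-Range'd PATCH requests to caches/<id>, and a final POST to caches/<id> with the total size commits the entry. A hypothetical sketch of the missing remainder, assuming ReserveCacheResponse exposes cache_id:

    # Not part of the diff -- an assumed continuation:
    cache_id = check.not_none(reserve_resp.cache_id)
    upload_cmd = self.build_curl_cmd(
        'PATCH',
        f'caches/{cache_id}',
        content_type='application/octet-stream',
    )
    # ...chunked upload via Content-Range headers, then a commit POST...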
 ##


-
-
-
-
-
-
-        )
+class GithubShellCache(ShellCache):
+    def __init__(
+            self,
+            dir: str,  # noqa
+            *,
+            client: ta.Optional[GithubV1CacheShellClient] = None,
+    ) -> None:
+        super().__init__()

-
-        if not out_s:
-            return False
+        self._dir = check.not_none(dir)

-
-
+        if client is None:
+            client = GithubV1CacheShellClient()
+        self._client = client

+        self._local = DirectoryFileCache(self._dir)

-
+    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+        local_file = self._local.get_cache_file_path(key)
+        if os.path.exists(local_file):
+            return ShellCmd(f'cat {shlex.quote(local_file)}')

+        if (entry := self._client.run_get_entry(key)) is None:
+            return None

-
-
-
-    ) -> None:
-        subprocesses.check_call(
-            'docker',
-            'pull',
-            image,
-        )
+        tmp_file = self._local.format_incomplete_file(local_file)
+        try:
+            self._client.download_get_entry(entry, tmp_file)

-
-            'docker',
-            'save',
-            image,
-            '-o', tar_file,
-        )
+            os.replace(tmp_file, local_file)

+        except BaseException:  # noqa
+            os.unlink(tmp_file)

-
-        docker_file: str,
-        tar_file: str,
-        *,
-        cwd: ta.Optional[str] = None,
-) -> str:
-    id_file = make_temp_file()
-    with defer(lambda: os.unlink(id_file)):
-        subprocesses.check_call(
-            'docker',
-            'build',
-            '-f', os.path.abspath(docker_file),
-            '--iidfile', id_file,
-            '--squash',
-            '.',
-            **(dict(cwd=cwd) if cwd is not None else {}),
-        )
+            raise

-
-        image_id = check.single(f.read().strip().splitlines()).strip()
+        return ShellCmd(f'cat {shlex.quote(local_file)}')

-
-
-
-
-
-
+    class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
+        def __init__(
+                self,
+                owner: 'GithubShellCache',
+                key: str,
+                tmp_file: str,
+                local_file: str,
+        ) -> None:
+            super().__init__()

-
+            self._owner = owner
+            self._key = key
+            self._tmp_file = tmp_file
+            self._local_file = local_file

+        @property
+        def cmd(self) -> ShellCmd:
+            return ShellCmd(f'cat > {shlex.quote(self._tmp_file)}')

-
+        def _commit(self) -> None:
+            os.replace(self._tmp_file, self._local_file)

+            self._owner._client.upload_cache_entry(self._key, self._local_file)  # noqa

-def
-
-
-
-
-
-
+        def _abort(self) -> None:
+            os.unlink(self._tmp_file)
+
+    def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
+        local_file = self._local.get_cache_file_path(key, make_dirs=True)
+        return self._PutFileCmdContext(
+            self,
+            key,
+            self._local.format_incomplete_file(local_file),
+            local_file,
+        )


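Note: GithubShellCache composes commands instead of copying files. get_file_cmd hands back a ShellCmd that cats a local mirror of the entry (first downloading it from the GitHub cache if needed, via an atomic incomplete-file-then-rename dance), and put_file_cmd returns a context manager whose cmd writes into a temp file that _commit renames into place and then uploads. A hedged usage sketch:

    cache = GithubShellCache('/tmp/ci-cache')
    if (get_cmd := cache.get_file_cmd('some-key')) is not None:
        data = get_cmd.run(subprocesses.check_output)
    with cache.put_file_cmd('some-key') as put:
        # The producer pipes into the returned command, e.g. 'echo hi | cat > <tmp>':
        dc.replace(put.cmd, s=f'echo hi | {put.cmd.s}').run(subprocesses.check_call)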
 ########################################
@@ -1845,6 +3010,7 @@ def download_requirements(
     subprocesses.check_call(
         'docker',
         'run',
+        '--rm',
         '-i',
         '-v', f'{os.path.abspath(requirements_dir)}:/requirements',
         '-v', f'{requirements_txt_dir}:/requirements_txt',
@@ -1863,9 +3029,6 @@ def download_requirements(
 # ../ci.py


-##
-
-
 class Ci(ExitStacked):
     FILE_NAME_HASH_LEN = 16

@@ -1878,8 +3041,12 @@ class Ci(ExitStacked):
         compose_file: str
         service: str

+        cmd: ShellCmd
+
         requirements_txts: ta.Optional[ta.Sequence[str]] = None

+        always_pull: bool = False
+
         def __post_init__(self) -> None:
             check.not_isinstance(self.requirements_txts, str)

@@ -1887,40 +3054,61 @@ class Ci(ExitStacked):
             self,
             cfg: Config,
             *,
+            shell_cache: ta.Optional[ShellCache] = None,
             file_cache: ta.Optional[FileCache] = None,
     ) -> None:
         super().__init__()

         self._cfg = cfg
+        self._shell_cache = shell_cache
         self._file_cache = file_cache

     #

-    def
-    if
+    def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
+        if self._shell_cache is None:
+            return None
+
+        get_cache_cmd = self._shell_cache.get_file_cmd(key)
+        if get_cache_cmd is None:
+            return None
+
+        get_cache_cmd = dc.replace(get_cache_cmd, s=f'{get_cache_cmd.s} | zstd -cd --long')  # noqa
+
+        return load_docker_tar_cmd(get_cache_cmd)
+
+    def _save_cache_docker_image(self, key: str, image: str) -> None:
+        if self._shell_cache is None:
+            return
+
+        with self._shell_cache.put_file_cmd(key) as put_cache:
+            put_cache_cmd = put_cache.cmd
+
+            put_cache_cmd = dc.replace(put_cache_cmd, s=f'zstd | {put_cache_cmd.s}')
+
+            save_docker_tar_cmd(image, put_cache_cmd)
+
+    #
+
+    def _load_docker_image(self, image: str) -> None:
+        if not self._cfg.always_pull and is_docker_image_present(image):
             return

         dep_suffix = image
         for c in '/:.-_':
             dep_suffix = dep_suffix.replace(c, '-')

-
-
-        if self._file_cache is not None and (cache_tar_file := self._file_cache.get_file(tar_file_name)):
-            load_docker_tar(cache_tar_file)
+        cache_key = f'docker-{dep_suffix}'
+        if self._load_cache_docker_image(cache_key) is not None:
             return

-
-        with defer(lambda: shutil.rmtree(temp_dir)):
-            temp_tar_file = os.path.join(temp_dir, tar_file_name)
+        pull_docker_image(image)

-
-            image,
-            temp_tar_file,
-        )
+        self._save_cache_docker_image(cache_key, image)

-
-
+    def load_docker_image(self, image: str) -> None:
+        with log_timing_context(f'Load docker image: {image}'):
+            self._load_docker_image(image)

     @cached_nullary
     def load_compose_service_dependencies(self) -> None:
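Note: with a shell cache attached, docker images stream through zstd rather than being staged as uncompressed tarballs on disk. The composed pipelines, spelled out as the shell would see them (image name and cache path illustrative):

    save_pipeline = 'docker save my-image | zstd | cat > /tmp/ci-cache/docker-my-image.incomplete'
    load_pipeline = 'cat /tmp/ci-cache/docker-my-image | zstd -cd --long | docker load'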
@@ -1934,46 +3122,46 @@ class Ci(ExitStacked):

     #

-
-    def build_ci_image(self) -> str:
+    def _resolve_ci_image(self) -> str:
         docker_file_hash = build_docker_file_hash(self._cfg.docker_file)[:self.FILE_NAME_HASH_LEN]

-
+        cache_key = f'ci-{docker_file_hash}'
+        if (cache_image_id := self._load_cache_docker_image(cache_key)) is not None:
+            return cache_image_id

-
-
-
-
-
-        temp_dir = tempfile.mkdtemp()
-        with defer(lambda: shutil.rmtree(temp_dir)):
-            temp_tar_file = os.path.join(temp_dir, tar_file_name)
+        image_id = build_docker_image(
+            self._cfg.docker_file,
+            cwd=self._cfg.project_dir,
+        )

-
-            self._cfg.docker_file,
-            temp_tar_file,
-            cwd=self._cfg.project_dir,
-        )
+        self._save_cache_docker_image(cache_key, image_id)

-
-        self._file_cache.put_file(temp_tar_file)
+        return image_id

+    @cached_nullary
+    def resolve_ci_image(self) -> str:
+        with log_timing_context('Resolve ci image') as ltc:
+            image_id = self._resolve_ci_image()
+            ltc.set_description(f'Resolve ci image: {image_id}')
             return image_id

     #

-
-
-
+    def _resolve_requirements_dir(self) -> str:
+        requirements_txts = [
+            os.path.join(self._cfg.project_dir, rf)
+            for rf in check.not_none(self._cfg.requirements_txts)
+        ]

         requirements_hash = build_requirements_hash(requirements_txts)[:self.FILE_NAME_HASH_LEN]

-
+        tar_file_key = f'requirements-{requirements_hash}'
+        tar_file_name = f'{tar_file_key}.tar'

         temp_dir = tempfile.mkdtemp()
         self._enter_context(defer(lambda: shutil.rmtree(temp_dir)))  # noqa

-        if self._file_cache is not None and (cache_tar_file := self._file_cache.get_file(
+        if self._file_cache is not None and (cache_tar_file := self._file_cache.get_file(tar_file_key)):
             with tarfile.open(cache_tar_file) as tar:
                 tar.extractall(path=temp_dir)  # noqa

@@ -1983,7 +3171,7 @@ class Ci(ExitStacked):
         os.makedirs(temp_requirements_dir)

         download_requirements(
-            self.
+            self.resolve_ci_image(),
             temp_requirements_dir,
             requirements_txts,
         )
|
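Note: cache keys are content hashes, not names: the ci image is keyed 'ci-' plus the first FILE_NAME_HASH_LEN (16) hex chars of the Dockerfile hash, and the requirements dir 'requirements-' plus a hash over every requirements txt, so changing either input invalidates its entry automatically. A minimal sketch of the keying, assuming build_docker_file_hash (defined outside this hunk) is effectively a sha256 over the file bytes:

    import hashlib

    def file_hash(path: str) -> str:
        with open(path, 'rb') as f:
            return hashlib.sha256(f.read()).hexdigest()

    cache_key = f'ci-{file_hash("Dockerfile")[:16]}'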
@@ -1998,21 +3186,20 @@ class Ci(ExitStacked):
                     arcname=requirement_file,
                 )

-        self._file_cache.put_file(temp_tar_file)
+        self._file_cache.put_file(os.path.basename(tar_file_key), temp_tar_file)

         return temp_requirements_dir

-
-
-
-
-
-
-
-        requirements_dir = self.build_requirements_dir()
+    @cached_nullary
+    def resolve_requirements_dir(self) -> str:
+        with log_timing_context('Resolve requirements dir') as ltc:
+            requirements_dir = self._resolve_requirements_dir()
+            ltc.set_description(f'Resolve requirements dir: {requirements_dir}')
+            return requirements_dir

-
+    #

+    def _run_compose_(self) -> None:
         setup_cmds = [
             'pip install --root-user-action ignore --find-links /requirements --no-index uv',
             (
@@ -2023,40 +3210,74 @@ class Ci(ExitStacked):

         #

-
-
-
+        ci_cmd = dc.replace(self._cfg.cmd, s=' && '.join([
+            *setup_cmds,
+            f'({self._cfg.cmd.s})',
+        ]))

         #

-        bash_src = ' && '.join([
-            *setup_cmds,
-            *test_cmds,
-        ])
-
         with DockerComposeRun(DockerComposeRun.Config(
-
-
+            compose_file=self._cfg.compose_file,
+            service=self._cfg.service,

-
+            image=self.resolve_ci_image(),

-
+            cmd=ci_cmd,

-
-
-
-
+            run_options=[
+                '-v', f'{os.path.abspath(self._cfg.project_dir)}:/project',
+                '-v', f'{os.path.abspath(self.resolve_requirements_dir())}:/requirements',
+            ],

-
+            cwd=self._cfg.project_dir,
         )) as ci_compose_run:
             ci_compose_run.run()

+    def _run_compose(self) -> None:
+        with log_timing_context('Run compose'):
+            self._run_compose_()
+
+    #
+
+    def run(self) -> None:
+        self.load_compose_service_dependencies()
+
+        self.resolve_ci_image()
+
+        self.resolve_requirements_dir()
+
+        self._run_compose()
+

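Note: _run_compose_ splices everything into one shell string: the setup commands and the configured command are joined with ' && ', and the user command is parenthesized so its own internal chaining cannot escape the setup prefix. A tiny sketch of the composition (values hypothetical):

    setup_cmds = ['pip install ... uv', 'uv pip install ...']
    cmd_s = 'cd /project && pytest'
    composed = ' && '.join([*setup_cmds, f'({cmd_s})'])
    # 'pip install ... uv && uv pip install ... && (cd /project && pytest)'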
 ########################################
-# cli.py
+# ../github/cli.py
+"""
+See:
+ - https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
+"""


-
+class GithubCli(ArgparseCli):
+    @argparse_cmd(
+        argparse_arg('key'),
+    )
+    def get_cache_entry(self) -> None:
+        shell_client = GithubV1CacheShellClient()
+        entry = shell_client.run_get_entry(self.args.key)
+        if entry is None:
+            return
+        print(json_dumps_pretty(dc.asdict(entry)))  # noqa
+
+    @argparse_cmd(
+        argparse_arg('repository-id'),
+    )
+    def list_cache_entries(self) -> None:
+        raise NotImplementedError
+
+
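Note: GithubCli is a small operator-facing probe over the same shell client, reachable through the new 'github' passthrough added to CiCli below. Assuming omlish's ArgparseCli maps method names to dash-separated subcommands (that mapping lives outside this diff), an invocation would look roughly like:

    python3 omdev/scripts/ci.py github get-cache-entry ci-0123456789abcdef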
+########################################
+# cli.py


 class CiCli(ArgparseCli):
@@ -2087,23 +3308,32 @@ class CiCli(ArgparseCli):

     #

+    @argparse_cmd(
+        accepts_unknown=True,
+    )
+    def github(self) -> ta.Optional[int]:
+        return GithubCli(self.unknown_args).cli_run()
+
+    #
+
     @argparse_cmd(
         argparse_arg('project-dir'),
         argparse_arg('service'),
         argparse_arg('--docker-file'),
         argparse_arg('--compose-file'),
         argparse_arg('-r', '--requirements-txt', action='append'),
+        argparse_arg('--github-cache', action='store_true'),
         argparse_arg('--cache-dir'),
+        argparse_arg('--always-pull', action='store_true'),
     )
     async def run(self) -> None:
-        await asyncio.sleep(1)
-
         project_dir = self.args.project_dir
         docker_file = self.args.docker_file
         compose_file = self.args.compose_file
         service = self.args.service
         requirements_txts = self.args.requirements_txt
         cache_dir = self.args.cache_dir
+        always_pull = self.args.always_pull

         #

@@ -2113,7 +3343,7 @@ class CiCli(ArgparseCli):

         def find_alt_file(*alts: str) -> ta.Optional[str]:
             for alt in alts:
-                alt_file = os.path.join(project_dir, alt)
+                alt_file = os.path.abspath(os.path.join(project_dir, alt))
                 if os.path.isfile(alt_file):
                     return alt_file
             return None
@@ -2149,24 +3379,44 @@ class CiCli(ArgparseCli):

         #

+        shell_cache: ta.Optional[ShellCache] = None
         file_cache: ta.Optional[FileCache] = None
         if cache_dir is not None:
             if not os.path.exists(cache_dir):
                 os.makedirs(cache_dir)
             check.state(os.path.isdir(cache_dir))
-
+
+            directory_file_cache = DirectoryFileCache(cache_dir)
+
+            file_cache = directory_file_cache
+
+            if self.args.github_cache:
+                shell_cache = GithubShellCache(cache_dir)
+            else:
+                shell_cache = DirectoryShellCache(directory_file_cache)

         #

         with Ci(
             Ci.Config(
                 project_dir=project_dir,
+
                 docker_file=docker_file,
+
                 compose_file=compose_file,
                 service=service,
+
                 requirements_txts=requirements_txts,
+
+                cmd=ShellCmd(' && '.join([
+                    'cd /project',
+                    'python3 -m pytest -svv test.py',
+                ])),
+
+                always_pull=always_pull,
             ),
             file_cache=file_cache,
+            shell_cache=shell_cache,
         ) as ci:
             ci.run()

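Note: the run subcommand now wires the pieces together: --cache-dir enables a DirectoryFileCache plus a directory-backed shell cache, --github-cache swaps in the GitHub-backed shell cache (still using the cache dir as its local mirror), and --always-pull forces image pulls even when present locally; the container command itself is currently hardcoded to 'cd /project && python3 -m pytest -svv test.py'. An illustrative invocation (paths hypothetical):

    python3 omdev/scripts/ci.py run ~/src/myproject myservice \
        -r requirements.txt \
        --cache-dir ~/.cache/myci \
        --github-cache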
@@ -2176,6 +3426,8 @@ async def _async_main() -> ta.Optional[int]:


 def _main() -> None:
+    configure_standard_logging('DEBUG')
+
     sys.exit(rc if isinstance(rc := asyncio.run(_async_main()), int) else 0)

