omdev 0.0.0.dev210__py3-none-any.whl → 0.0.0.dev212__py3-none-any.whl

omdev/scripts/ci.py CHANGED
@@ -3,7 +3,7 @@
  # @omlish-lite
  # @omlish-script
  # @omlish-amalg-output ../ci/cli.py
- # ruff: noqa: UP006 UP007 UP036
+ # ruff: noqa: N802 UP006 UP007 UP036
  """
  Inputs:
  - requirements.txt
@@ -20,6 +20,7 @@ import asyncio
  import collections
  import contextlib
  import dataclasses as dc
+ import datetime
  import functools
  import hashlib
  import inspect
@@ -50,8 +51,10 @@ if sys.version_info < (3, 8):
  ########################################


- # ../../omlish/lite/cached.py
+ # shell.py
  T = ta.TypeVar('T')
+
+ # ../../omlish/lite/cached.py
  CallableT = ta.TypeVar('CallableT', bound=ta.Callable)

  # ../../omlish/lite/check.py
@@ -73,72 +76,41 @@ SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull'] # ta.TypeAlia


  ########################################
- # ../cache.py
-
-
- #
-
-
- @abc.abstractmethod
- class FileCache(abc.ABC):
-     @abc.abstractmethod
-     def get_file(self, name: str) -> ta.Optional[str]:
-         raise NotImplementedError
-
-     @abc.abstractmethod
-     def put_file(self, name: str) -> ta.Optional[str]:
-         raise NotImplementedError
-
-
- #
-
-
- class DirectoryFileCache(FileCache):
-     def __init__(self, dir: str) -> None:  # noqa
-         super().__init__()
-
-         self._dir = dir
-
-     def get_file(self, name: str) -> ta.Optional[str]:
-         file_path = os.path.join(self._dir, name)
-         if not os.path.exists(file_path):
-             return None
-         return file_path
-
-     def put_file(self, file_path: str) -> None:
-         os.makedirs(self._dir, exist_ok=True)
-         cache_file_path = os.path.join(self._dir, os.path.basename(file_path))
-         shutil.copyfile(file_path, cache_file_path)
-
-
- ########################################
- # ../utils.py
-
-
- ##
-
-
- def make_temp_file() -> str:
-     file_fd, file = tempfile.mkstemp()
-     os.close(file_fd)
-     return file
+ # ../shell.py


  ##


- def read_yaml_file(yaml_file: str) -> ta.Any:
-     yaml = __import__('yaml')
-
-     with open(yaml_file) as f:
-         return yaml.safe_load(f)
-
+ @dc.dataclass(frozen=True)
+ class ShellCmd:
+     s: str

- ##
+     env: ta.Optional[ta.Mapping[str, str]] = None

+     def build_run_kwargs(
+         self,
+         *,
+         env: ta.Optional[ta.Mapping[str, str]] = None,
+         **kwargs: ta.Any,
+     ) -> ta.Dict[str, ta.Any]:
+         if env is None:
+             env = os.environ
+         if self.env:
+             if (ek := set(env) & set(self.env)):
+                 raise KeyError(*ek)
+             env = {**env, **self.env}
+
+         return dict(
+             env=env,
+             **kwargs,
+         )

- def sha256_str(s: str) -> str:
-     return hashlib.sha256(s.encode('utf-8')).hexdigest()
+     def run(self, fn: ta.Callable[..., T], **kwargs) -> T:
+         return fn(
+             'sh', '-c', self.s,
+             **self.build_run_kwargs(**kwargs),
+         )


  ########################################
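Note: the new ShellCmd dataclass is the pivot of this release - every docker/curl invocation below is built by splicing strings into one of these and handing it to a check_call/check_output-shaped callable. A minimal sketch of the contract, using the script's own `subprocesses` wrapper (which takes argv as *args); the `GREETING` variable is purely illustrative:

    cmd = ShellCmd('echo "$GREETING, world"', env={'GREETING': 'hello'})

    # run() invokes fn('sh', '-c', cmd.s, env=..., **kwargs), so any callable
    # with that shape works:
    out = cmd.run(subprocesses.check_output)  # b'hello, world\n'

    # build_run_kwargs() merges cmd.env over os.environ and raises KeyError on
    # any overlapping key, so a command cannot silently clobber inherited vars.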
@@ -680,6 +652,13 @@ json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON
  json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)


+ ########################################
+ # ../../../omlish/lite/logs.py
+
+
+ log = logging.getLogger(__name__)
+
+
  ########################################
  # ../../../omlish/lite/reflect.py

@@ -769,248 +748,895 @@ def deep_subclasses(cls: ta.Type[T]) -> ta.Iterator[ta.Type[T]]:


  ########################################
- # ../../../omlish/argparse/cli.py
- """
- TODO:
-  - default command
-  - auto match all underscores to hyphens
-  - pre-run, post-run hooks
-  - exitstack?
- """
+ # ../../../omlish/lite/strings.py


  ##


- @dc.dataclass(eq=False)
- class ArgparseArg:
-     args: ta.Sequence[ta.Any]
-     kwargs: ta.Mapping[str, ta.Any]
-     dest: ta.Optional[str] = None
+ def camel_case(name: str, *, lower: bool = False) -> str:
+     if not name:
+         return ''
+     s = ''.join(map(str.capitalize, name.split('_')))  # noqa
+     if lower:
+         s = s[0].lower() + s[1:]
+     return s

-     def __get__(self, instance, owner=None):
-         if instance is None:
-             return self
-         return getattr(instance.args, self.dest)  # type: ignore

+ def snake_case(name: str) -> str:
+     uppers: list[int | None] = [i for i, c in enumerate(name) if c.isupper()]
+     return '_'.join([name[l:r].lower() for l, r in zip([None, *uppers], [*uppers, None])]).strip('_')

- def argparse_arg(*args, **kwargs) -> ArgparseArg:
-     return ArgparseArg(args, kwargs)

+ ##

- #

+ def is_dunder(name: str) -> bool:
+     return (
+         name[:2] == name[-2:] == '__' and
+         name[2:3] != '_' and
+         name[-3:-2] != '_' and
+         len(name) > 4
+     )

- @dc.dataclass(eq=False)
- class ArgparseCmd:
-     name: str
-     fn: ArgparseCmdFn
-     args: ta.Sequence[ArgparseArg] = ()  # noqa

-     # _: dc.KW_ONLY
+ def is_sunder(name: str) -> bool:
+     return (
+         name[0] == name[-1] == '_' and
+         name[1:2] != '_' and
+         name[-2:-1] != '_' and
+         len(name) > 2
+     )

-     aliases: ta.Optional[ta.Sequence[str]] = None
-     parent: ta.Optional['ArgparseCmd'] = None
-     accepts_unknown: bool = False

-     def __post_init__(self) -> None:
-         def check_name(s: str) -> None:
-             check.isinstance(s, str)
-             check.not_in('_', s)
-             check.not_empty(s)
-         check_name(self.name)
-         check.not_isinstance(self.aliases, str)
-         for a in self.aliases or []:
-             check_name(a)
+ ##

-         check.arg(callable(self.fn))
-         check.arg(all(isinstance(a, ArgparseArg) for a in self.args))
-         check.isinstance(self.parent, (ArgparseCmd, type(None)))
-         check.isinstance(self.accepts_unknown, bool)

-         functools.update_wrapper(self, self.fn)
+ def strip_with_newline(s: str) -> str:
+     if not s:
+         return ''
+     return s.strip() + '\n'

-     def __get__(self, instance, owner=None):
-         if instance is None:
-             return self
-         return dc.replace(self, fn=self.fn.__get__(instance, owner))  # noqa

-     def __call__(self, *args, **kwargs) -> ta.Optional[int]:
-         return self.fn(*args, **kwargs)
+ @ta.overload
+ def split_keep_delimiter(s: str, d: str) -> str:
+     ...


- def argparse_cmd(
-     *args: ArgparseArg,
-     name: ta.Optional[str] = None,
-     aliases: ta.Optional[ta.Iterable[str]] = None,
-     parent: ta.Optional[ArgparseCmd] = None,
-     accepts_unknown: bool = False,
- ) -> ta.Any:  # ta.Callable[[ArgparseCmdFn], ArgparseCmd]:  # FIXME
-     for arg in args:
-         check.isinstance(arg, ArgparseArg)
-     check.isinstance(name, (str, type(None)))
-     check.isinstance(parent, (ArgparseCmd, type(None)))
-     check.not_isinstance(aliases, str)
+ @ta.overload
+ def split_keep_delimiter(s: bytes, d: bytes) -> bytes:
+     ...

-     def inner(fn):
-         return ArgparseCmd(
-             (name if name is not None else fn.__name__).replace('_', '-'),
-             fn,
-             args,
-             aliases=tuple(aliases) if aliases is not None else None,
-             parent=parent,
-             accepts_unknown=accepts_unknown,
-         )

-     return inner
+ def split_keep_delimiter(s, d):
+     ps = []
+     i = 0
+     while i < len(s):
+         if (n := s.find(d, i)) < i:
+             ps.append(s[i:])
+             break
+         ps.append(s[i:n + 1])
+         i = n + 1
+     return ps


  ##


- def _get_argparse_arg_ann_kwargs(ann: ta.Any) -> ta.Mapping[str, ta.Any]:
-     if ann is str:
-         return {}
-     elif ann is int:
-         return {'type': int}
-     elif ann is bool:
-         return {'action': 'store_true'}
-     elif ann is list:
-         return {'action': 'append'}
-     elif is_optional_alias(ann):
-         return _get_argparse_arg_ann_kwargs(get_optional_alias_arg(ann))
-     else:
-         raise TypeError(ann)
+ def attr_repr(obj: ta.Any, *attrs: str) -> str:
+     return f'{type(obj).__name__}({", ".join(f"{attr}={getattr(obj, attr)!r}" for attr in attrs)})'


- class _ArgparseCliAnnotationBox:
-     def __init__(self, annotations: ta.Mapping[str, ta.Any]) -> None:
-         super().__init__()
-         self.__annotations__ = annotations  # type: ignore
+ ##


- class ArgparseCli:
-     def __init__(self, argv: ta.Optional[ta.Sequence[str]] = None) -> None:
-         super().__init__()
+ FORMAT_NUM_BYTES_SUFFIXES: ta.Sequence[str] = ['B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB']

-         self._argv = argv if argv is not None else sys.argv[1:]

-         self._args, self._unknown_args = self.get_parser().parse_known_args(self._argv)
+ def format_num_bytes(num_bytes: int) -> str:
+     for i, suffix in enumerate(FORMAT_NUM_BYTES_SUFFIXES):
+         value = num_bytes / 1024 ** i
+         if num_bytes < 1024 ** (i + 1):
+             if value.is_integer():
+                 return f'{int(value)}{suffix}'
+             else:
+                 return f'{value:.2f}{suffix}'

-     #
+     return f'{num_bytes / 1024 ** (len(FORMAT_NUM_BYTES_SUFFIXES) - 1):.2f}{FORMAT_NUM_BYTES_SUFFIXES[-1]}'

-     def __init_subclass__(cls, **kwargs: ta.Any) -> None:
-         super().__init_subclass__(**kwargs)

-         ns = cls.__dict__
-         objs = {}
-         mro = cls.__mro__[::-1]
-         for bns in [bcls.__dict__ for bcls in reversed(mro)] + [ns]:
-             bseen = set()  # type: ignore
-             for k, v in bns.items():
-                 if isinstance(v, (ArgparseCmd, ArgparseArg)):
-                     check.not_in(v, bseen)
-                     bseen.add(v)
-                     objs[k] = v
-                 elif k in objs:
-                     del [k]
+ ########################################
+ # ../../../omlish/logs/filters.py

-         #

-         anns = ta.get_type_hints(_ArgparseCliAnnotationBox({
-             **{k: v for bcls in reversed(mro) for k, v in getattr(bcls, '__annotations__', {}).items()},
-             **ns.get('__annotations__', {}),
-         }), globalns=ns.get('__globals__', {}))
+ class TidLogFilter(logging.Filter):
+     def filter(self, record):
+         record.tid = threading.get_native_id()
+         return True

-         #

-         if '_parser' in ns:
-             parser = check.isinstance(ns['_parser'], argparse.ArgumentParser)
-         else:
-             parser = argparse.ArgumentParser()
-             setattr(cls, '_parser', parser)
+ ########################################
+ # ../../../omlish/logs/proxy.py

-         #

-         subparsers = parser.add_subparsers()
+ class ProxyLogFilterer(logging.Filterer):
+     def __init__(self, underlying: logging.Filterer) -> None:  # noqa
+         self._underlying = underlying

-         for att, obj in objs.items():
-             if isinstance(obj, ArgparseCmd):
-                 if obj.parent is not None:
-                     raise NotImplementedError
+     @property
+     def underlying(self) -> logging.Filterer:
+         return self._underlying

-                 for cn in [obj.name, *(obj.aliases or [])]:
-                     subparser = subparsers.add_parser(cn)
+     @property
+     def filters(self):
+         return self._underlying.filters

-                     for arg in (obj.args or []):
-                         if (
-                                 len(arg.args) == 1 and
-                                 isinstance(arg.args[0], str) and
-                                 not (n := check.isinstance(arg.args[0], str)).startswith('-') and
-                                 'metavar' not in arg.kwargs
-                         ):
-                             subparser.add_argument(
-                                 n.replace('-', '_'),
-                                 **arg.kwargs,
-                                 metavar=n,
-                             )
-                         else:
-                             subparser.add_argument(*arg.args, **arg.kwargs)
+     @filters.setter
+     def filters(self, filters):
+         self._underlying.filters = filters

-                     subparser.set_defaults(_cmd=obj)
+     def addFilter(self, filter):  # noqa
+         self._underlying.addFilter(filter)

-             elif isinstance(obj, ArgparseArg):
-                 if att in anns:
-                     ann_kwargs = _get_argparse_arg_ann_kwargs(anns[att])
-                     obj.kwargs = {**ann_kwargs, **obj.kwargs}
+     def removeFilter(self, filter):  # noqa
+         self._underlying.removeFilter(filter)

-                 if not obj.dest:
-                     if 'dest' in obj.kwargs:
-                         obj.dest = obj.kwargs['dest']
-                     else:
-                         obj.dest = obj.kwargs['dest'] = att  # type: ignore
+     def filter(self, record):
+         return self._underlying.filter(record)

-                 parser.add_argument(*obj.args, **obj.kwargs)

-             else:
-                 raise TypeError(obj)
+ class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
+     def __init__(self, underlying: logging.Handler) -> None:  # noqa
+         ProxyLogFilterer.__init__(self, underlying)

-         #
+     _underlying: logging.Handler

-     _parser: ta.ClassVar[argparse.ArgumentParser]
+     @property
+     def underlying(self) -> logging.Handler:
+         return self._underlying

-     @classmethod
-     def get_parser(cls) -> argparse.ArgumentParser:
-         return cls._parser
+     def get_name(self):
+         return self._underlying.get_name()
+
+     def set_name(self, name):
+         self._underlying.set_name(name)

      @property
-     def argv(self) -> ta.Sequence[str]:
-         return self._argv
+     def name(self):
+         return self._underlying.name

      @property
-     def args(self) -> argparse.Namespace:
-         return self._args
+     def level(self):
+         return self._underlying.level
+
+     @level.setter
+     def level(self, level):
+         self._underlying.level = level

      @property
-     def unknown_args(self) -> ta.Sequence[str]:
-         return self._unknown_args
+     def formatter(self):
+         return self._underlying.formatter

-     #
+     @formatter.setter
+     def formatter(self, formatter):
+         self._underlying.formatter = formatter

-     def _bind_cli_cmd(self, cmd: ArgparseCmd) -> ta.Callable:
-         return cmd.__get__(self, type(self))
+     def createLock(self):
+         self._underlying.createLock()

-     def prepare_cli_run(self) -> ta.Optional[ta.Callable]:
-         cmd = getattr(self.args, '_cmd', None)
+     def acquire(self):
+         self._underlying.acquire()

-         if self._unknown_args and not (cmd is not None and cmd.accepts_unknown):
-             msg = f'unrecognized arguments: {" ".join(self._unknown_args)}'
-             if (parser := self.get_parser()).exit_on_error:  # type: ignore
-                 parser.error(msg)
-             else:
-                 raise argparse.ArgumentError(None, msg)
+     def release(self):
+         self._underlying.release()

-         if cmd is None:
+     def setLevel(self, level):
+         self._underlying.setLevel(level)
+
+     def format(self, record):
+         return self._underlying.format(record)
+
+     def emit(self, record):
+         self._underlying.emit(record)
+
+     def handle(self, record):
+         return self._underlying.handle(record)
+
+     def setFormatter(self, fmt):
+         self._underlying.setFormatter(fmt)
+
+     def flush(self):
+         self._underlying.flush()
+
+     def close(self):
+         self._underlying.close()
+
+     def handleError(self, record):
+         self._underlying.handleError(record)
+
+
+ ########################################
+ # ../cache.py
+
+
+ ##
+
+
+ @abc.abstractmethod
+ class FileCache(abc.ABC):
+     @abc.abstractmethod
+     def get_file(self, key: str) -> ta.Optional[str]:
+         raise NotImplementedError
+
+     @abc.abstractmethod
+     def put_file(self, key: str, file_path: str) -> ta.Optional[str]:
+         raise NotImplementedError
+
+
+ #
+
+
+ class DirectoryFileCache(FileCache):
+     def __init__(self, dir: str) -> None:  # noqa
+         super().__init__()
+
+         self._dir = dir
+
+     #
+
+     def get_cache_file_path(
+         self,
+         key: str,
+         *,
+         make_dirs: bool = False,
+     ) -> str:
+         if make_dirs:
+             os.makedirs(self._dir, exist_ok=True)
+         return os.path.join(self._dir, key)
+
+     def format_incomplete_file(self, f: str) -> str:
+         return os.path.join(os.path.dirname(f), f'_{os.path.basename(f)}.incomplete')
+
+     #
+
+     def get_file(self, key: str) -> ta.Optional[str]:
+         cache_file_path = self.get_cache_file_path(key)
+         if not os.path.exists(cache_file_path):
+             return None
+         return cache_file_path
+
+     def put_file(self, key: str, file_path: str) -> None:
+         cache_file_path = self.get_cache_file_path(key, make_dirs=True)
+         shutil.copyfile(file_path, cache_file_path)
+
+
+ ##
+
+
+ class ShellCache(abc.ABC):
+     @abc.abstractmethod
+     def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+         raise NotImplementedError
+
+     class PutFileCmdContext(abc.ABC):
+         def __init__(self) -> None:
+             super().__init__()
+
+             self._state: ta.Literal['open', 'committed', 'aborted'] = 'open'
+
+         @property
+         def state(self) -> ta.Literal['open', 'committed', 'aborted']:
+             return self._state
+
+         #
+
+         @property
+         @abc.abstractmethod
+         def cmd(self) -> ShellCmd:
+             raise NotImplementedError
+
+         #
+
+         def __enter__(self):
+             return self
+
+         def __exit__(self, exc_type, exc_val, exc_tb):
+             if exc_val is None:
+                 self.commit()
+             else:
+                 self.abort()
+
+         #
+
+         @abc.abstractmethod
+         def _commit(self) -> None:
+             raise NotImplementedError
+
+         def commit(self) -> None:
+             if self._state == 'committed':
+                 return
+             elif self._state == 'open':
+                 self._commit()
+                 self._state = 'committed'
+             else:
+                 raise RuntimeError(self._state)
+
+         #
+
+         @abc.abstractmethod
+         def _abort(self) -> None:
+             raise NotImplementedError
+
+         def abort(self) -> None:
+             if self._state == 'aborted':
+                 return
+             elif self._state == 'open':
+                 self._abort()
+                 self._state = 'committed'
+             else:
+                 raise RuntimeError(self._state)
+
+     @abc.abstractmethod
+     def put_file_cmd(self, key: str) -> PutFileCmdContext:
+         raise NotImplementedError
+
+
+ #
+
+
+ class DirectoryShellCache(ShellCache):
+     def __init__(self, dfc: DirectoryFileCache) -> None:
+         super().__init__()
+
+         self._dfc = dfc
+
+     def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+         f = self._dfc.get_file(key)
+         if f is None:
+             return None
+         return ShellCmd(f'cat {shlex.quote(f)}')
+
+     class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
+         def __init__(self, tf: str, f: str) -> None:
+             super().__init__()
+
+             self._tf = tf
+             self._f = f
+
+         @property
+         def cmd(self) -> ShellCmd:
+             return ShellCmd(f'cat > {shlex.quote(self._tf)}')
+
+         def _commit(self) -> None:
+             os.replace(self._tf, self._f)
+
+         def _abort(self) -> None:
+             os.unlink(self._tf)
+
+     def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
+         f = self._dfc.get_cache_file_path(key, make_dirs=True)
+         return self._PutFileCmdContext(self._dfc.format_incomplete_file(f), f)
+
+
+ ########################################
+ # ../github/cacheapi.py
+ """
+ export FILE_SIZE=$(stat --format="%s" $FILE)
+
+ export CACHE_ID=$(curl -s \
+   -X POST \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/caches" \
+   -H 'Content-Type: application/json' \
+   -H 'Accept: application/json;api-version=6.0-preview.1' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   -d '{"key": "'"$CACHE_KEY"'", "cacheSize": '"$FILE_SIZE"'}' \
+   | jq .cacheId)
+
+ curl -s \
+   -X PATCH \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
+   -H 'Content-Type: application/octet-stream' \
+   -H 'Accept: application/json;api-version=6.0-preview.1' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   -H "Content-Range: bytes 0-$((FILE_SIZE - 1))/*" \
+   --data-binary @"$FILE"
+
+ curl -s \
+   -X POST \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
+   -H 'Content-Type: application/json' \
+   -H 'Accept: application/json;api-version=6.0-preview.1' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   -d '{"size": '"$(stat --format="%s" $FILE)"'}'
+
+ curl -s \
+   -X GET \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/cache?keys=$CACHE_KEY" \
+   -H 'Content-Type: application/json' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   | jq .
+ """
+
+
+ ##
+
+
+ class GithubCacheServiceV1:
+     API_VERSION = '6.0-preview.1'
+
+     @classmethod
+     def get_service_url(cls, base_url: str) -> str:
+         return f'{base_url.rstrip("/")}/_apis/artifactcache'
+
+     #
+
+     @classmethod
+     def dataclass_to_json(cls, obj: ta.Any) -> ta.Any:
+         return {
+             camel_case(k, lower=True): v
+             for k, v in dc.asdict(obj).items()
+             if v is not None
+         }
+
+     @classmethod
+     def dataclass_from_json(cls, dcls: ta.Type[T], obj: ta.Any) -> T:
+         return dcls(**{
+             snake_case(k): v
+             for k, v in obj.items()
+         })
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class ArtifactCacheEntry:
+         cache_key: ta.Optional[str]
+         scope: ta.Optional[str]
+         cache_version: ta.Optional[str]
+         creation_time: ta.Optional[str]
+         archive_location: ta.Optional[str]
+
+     @dc.dataclass(frozen=True)
+     class ArtifactCacheList:
+         total_count: int
+         artifact_caches: ta.Optional[ta.Sequence['GithubCacheServiceV1.ArtifactCacheEntry']]
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class ReserveCacheRequest:
+         key: str
+         cache_size: ta.Optional[int]
+         version: ta.Optional[str] = None
+
+     @dc.dataclass(frozen=True)
+     class ReserveCacheResponse:
+         cache_id: int
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class CommitCacheRequest:
+         size: int
+
+     #
+
+     class CompressionMethod:
+         GZIP = 'gzip'
+         ZSTD_WITHOUT_LONG = 'zstd-without-long'
+         ZSTD = 'zstd'
+
+     @dc.dataclass(frozen=True)
+     class InternalCacheOptions:
+         compression_method: ta.Optional[str]  # CompressionMethod
+         enable_cross_os_archive: ta.Optional[bool]
+         cache_size: ta.Optional[int]
+
+
+ class GithubCacheServiceV2:
+     SERVICE_NAME = 'github.actions.results.api.v1.CacheService'
+
+     @dc.dataclass(frozen=True)
+     class Method:
+         name: str
+         request: type
+         response: type
+
+     #
+
+     class CacheScopePermission:
+         READ = 1
+         WRITE = 2
+         ALL = READ | WRITE
+
+     @dc.dataclass(frozen=True)
+     class CacheScope:
+         scope: str
+         permission: int  # CacheScopePermission
+
+     @dc.dataclass(frozen=True)
+     class CacheMetadata:
+         repository_id: int
+         scope: ta.Sequence['GithubCacheServiceV2.CacheScope']
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class CreateCacheEntryRequest:
+         key: str
+         version: str
+         metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+     @dc.dataclass(frozen=True)
+     class CreateCacheEntryResponse:
+         ok: bool
+         signed_upload_url: str
+
+     CREATE_CACHE_ENTRY_METHOD = Method(
+         'CreateCacheEntry',
+         CreateCacheEntryRequest,
+         CreateCacheEntryResponse,
+     )
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class FinalizeCacheEntryUploadRequest:
+         key: str
+         size_bytes: int
+         version: str
+         metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+     @dc.dataclass(frozen=True)
+     class FinalizeCacheEntryUploadResponse:
+         ok: bool
+         entry_id: str
+
+     FINALIZE_CACHE_ENTRY_METHOD = Method(
+         'FinalizeCacheEntryUpload',
+         FinalizeCacheEntryUploadRequest,
+         FinalizeCacheEntryUploadResponse,
+     )
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class GetCacheEntryDownloadUrlRequest:
+         key: str
+         restore_keys: ta.Sequence[str]
+         version: str
+         metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+     @dc.dataclass(frozen=True)
+     class GetCacheEntryDownloadUrlResponse:
+         ok: bool
+         signed_download_url: str
+         matched_key: str
+
+     GET_CACHE_ENTRY_DOWNLOAD_URL_METHOD = Method(
+         'GetCacheEntryDownloadURL',
+         GetCacheEntryDownloadUrlRequest,
+         GetCacheEntryDownloadUrlResponse,
+     )
+
+
+ ########################################
+ # ../utils.py
+
+
+ ##
+
+
+ def make_temp_file() -> str:
+     file_fd, file = tempfile.mkstemp()
+     os.close(file_fd)
+     return file
+
+
+ ##
+
+
+ def read_yaml_file(yaml_file: str) -> ta.Any:
+     yaml = __import__('yaml')
+
+     with open(yaml_file) as f:
+         return yaml.safe_load(f)
+
+
+ ##
+
+
+ def sha256_str(s: str) -> str:
+     return hashlib.sha256(s.encode('utf-8')).hexdigest()
+
+
+ ##
+
+
+ class LogTimingContext:
+     DEFAULT_LOG: ta.ClassVar[logging.Logger] = log
+
+     def __init__(
+         self,
+         description: str,
+         *,
+         log: ta.Optional[logging.Logger] = None,  # noqa
+         level: int = logging.DEBUG,
+     ) -> None:
+         super().__init__()
+
+         self._description = description
+         self._log = log if log is not None else self.DEFAULT_LOG
+         self._level = level
+
+     def set_description(self, description: str) -> 'LogTimingContext':
+         self._description = description
+         return self
+
+     _begin_time: float
+     _end_time: float
+
+     def __enter__(self) -> 'LogTimingContext':
+         self._begin_time = time.time()
+
+         self._log.log(self._level, f'Begin {self._description}')  # noqa
+
+         return self
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         self._end_time = time.time()
+
+         self._log.log(
+             self._level,
+             f'End {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
+         )
+
+
+ log_timing_context = LogTimingContext
+
+
+ ########################################
+ # ../../../omlish/argparse/cli.py
+ """
+ TODO:
+  - default command
+  - auto match all underscores to hyphens
+  - pre-run, post-run hooks
+  - exitstack?
+ """
+
+
+ ##
+
+
+ @dc.dataclass(eq=False)
+ class ArgparseArg:
+     args: ta.Sequence[ta.Any]
+     kwargs: ta.Mapping[str, ta.Any]
+     dest: ta.Optional[str] = None
+
+     def __get__(self, instance, owner=None):
+         if instance is None:
+             return self
+         return getattr(instance.args, self.dest)  # type: ignore
+
+
+ def argparse_arg(*args, **kwargs) -> ArgparseArg:
+     return ArgparseArg(args, kwargs)
+
+
+ #
+
+
+ @dc.dataclass(eq=False)
+ class ArgparseCmd:
+     name: str
+     fn: ArgparseCmdFn
+     args: ta.Sequence[ArgparseArg] = ()  # noqa
+
+     # _: dc.KW_ONLY
+
+     aliases: ta.Optional[ta.Sequence[str]] = None
+     parent: ta.Optional['ArgparseCmd'] = None
+     accepts_unknown: bool = False
+
+     def __post_init__(self) -> None:
+         def check_name(s: str) -> None:
+             check.isinstance(s, str)
+             check.not_in('_', s)
+             check.not_empty(s)
+         check_name(self.name)
+         check.not_isinstance(self.aliases, str)
+         for a in self.aliases or []:
+             check_name(a)
+
+         check.arg(callable(self.fn))
+         check.arg(all(isinstance(a, ArgparseArg) for a in self.args))
+         check.isinstance(self.parent, (ArgparseCmd, type(None)))
+         check.isinstance(self.accepts_unknown, bool)
+
+         functools.update_wrapper(self, self.fn)
+
+     def __get__(self, instance, owner=None):
+         if instance is None:
+             return self
+         return dc.replace(self, fn=self.fn.__get__(instance, owner))  # noqa
+
+     def __call__(self, *args, **kwargs) -> ta.Optional[int]:
+         return self.fn(*args, **kwargs)
+
+
+ def argparse_cmd(
+     *args: ArgparseArg,
+     name: ta.Optional[str] = None,
+     aliases: ta.Optional[ta.Iterable[str]] = None,
+     parent: ta.Optional[ArgparseCmd] = None,
+     accepts_unknown: bool = False,
+ ) -> ta.Any:  # ta.Callable[[ArgparseCmdFn], ArgparseCmd]:  # FIXME
+     for arg in args:
+         check.isinstance(arg, ArgparseArg)
+     check.isinstance(name, (str, type(None)))
+     check.isinstance(parent, (ArgparseCmd, type(None)))
+     check.not_isinstance(aliases, str)
+
+     def inner(fn):
+         return ArgparseCmd(
+             (name if name is not None else fn.__name__).replace('_', '-'),
+             fn,
+             args,
+             aliases=tuple(aliases) if aliases is not None else None,
+             parent=parent,
+             accepts_unknown=accepts_unknown,
+         )
+
+     return inner
+
+
+ ##
+
+
+ def _get_argparse_arg_ann_kwargs(ann: ta.Any) -> ta.Mapping[str, ta.Any]:
+     if ann is str:
+         return {}
+     elif ann is int:
+         return {'type': int}
+     elif ann is bool:
+         return {'action': 'store_true'}
+     elif ann is list:
+         return {'action': 'append'}
+     elif is_optional_alias(ann):
+         return _get_argparse_arg_ann_kwargs(get_optional_alias_arg(ann))
+     else:
+         raise TypeError(ann)
+
+
+ class _ArgparseCliAnnotationBox:
+     def __init__(self, annotations: ta.Mapping[str, ta.Any]) -> None:
+         super().__init__()
+         self.__annotations__ = annotations  # type: ignore
+
+
+ class ArgparseCli:
+     def __init__(self, argv: ta.Optional[ta.Sequence[str]] = None) -> None:
+         super().__init__()
+
+         self._argv = argv if argv is not None else sys.argv[1:]
+
+         self._args, self._unknown_args = self.get_parser().parse_known_args(self._argv)
+
+     #
+
+     def __init_subclass__(cls, **kwargs: ta.Any) -> None:
+         super().__init_subclass__(**kwargs)
+
+         ns = cls.__dict__
+         objs = {}
+         mro = cls.__mro__[::-1]
+         for bns in [bcls.__dict__ for bcls in reversed(mro)] + [ns]:
+             bseen = set()  # type: ignore
+             for k, v in bns.items():
+                 if isinstance(v, (ArgparseCmd, ArgparseArg)):
+                     check.not_in(v, bseen)
+                     bseen.add(v)
+                     objs[k] = v
+                 elif k in objs:
+                     del [k]
+
+         #
+
+         anns = ta.get_type_hints(_ArgparseCliAnnotationBox({
+             **{k: v for bcls in reversed(mro) for k, v in getattr(bcls, '__annotations__', {}).items()},
+             **ns.get('__annotations__', {}),
+         }), globalns=ns.get('__globals__', {}))
+
+         #
+
+         if '_parser' in ns:
+             parser = check.isinstance(ns['_parser'], argparse.ArgumentParser)
+         else:
+             parser = argparse.ArgumentParser()
+             setattr(cls, '_parser', parser)
+
+         #
+
+         subparsers = parser.add_subparsers()
+
+         for att, obj in objs.items():
+             if isinstance(obj, ArgparseCmd):
+                 if obj.parent is not None:
+                     raise NotImplementedError
+
+                 for cn in [obj.name, *(obj.aliases or [])]:
+                     subparser = subparsers.add_parser(cn)
+
+                     for arg in (obj.args or []):
+                         if (
+                                 len(arg.args) == 1 and
+                                 isinstance(arg.args[0], str) and
+                                 not (n := check.isinstance(arg.args[0], str)).startswith('-') and
+                                 'metavar' not in arg.kwargs
+                         ):
+                             subparser.add_argument(
+                                 n.replace('-', '_'),
+                                 **arg.kwargs,
+                                 metavar=n,
+                             )
+                         else:
+                             subparser.add_argument(*arg.args, **arg.kwargs)
+
+                     subparser.set_defaults(_cmd=obj)
+
+             elif isinstance(obj, ArgparseArg):
+                 if att in anns:
+                     ann_kwargs = _get_argparse_arg_ann_kwargs(anns[att])
+                     obj.kwargs = {**ann_kwargs, **obj.kwargs}
+
+                 if not obj.dest:
+                     if 'dest' in obj.kwargs:
+                         obj.dest = obj.kwargs['dest']
+                     else:
+                         obj.dest = obj.kwargs['dest'] = att  # type: ignore
+
+                 parser.add_argument(*obj.args, **obj.kwargs)
+
+             else:
+                 raise TypeError(obj)
+
+     #
+
+     _parser: ta.ClassVar[argparse.ArgumentParser]
+
+     @classmethod
+     def get_parser(cls) -> argparse.ArgumentParser:
+         return cls._parser
+
+     @property
+     def argv(self) -> ta.Sequence[str]:
+         return self._argv
+
+     @property
+     def args(self) -> argparse.Namespace:
+         return self._args
+
+     @property
+     def unknown_args(self) -> ta.Sequence[str]:
+         return self._unknown_args
+
+     #
+
+     def _bind_cli_cmd(self, cmd: ArgparseCmd) -> ta.Callable:
+         return cmd.__get__(self, type(self))
+
+     def prepare_cli_run(self) -> ta.Optional[ta.Callable]:
+         cmd = getattr(self.args, '_cmd', None)
+
+         if self._unknown_args and not (cmd is not None and cmd.accepts_unknown):
+             msg = f'unrecognized arguments: {" ".join(self._unknown_args)}'
+             if (parser := self.get_parser()).exit_on_error:  # type: ignore
+                 parser.error(msg)
+             else:
+                 raise argparse.ArgumentError(None, msg)
+
+         if cmd is None:
              self.get_parser().print_help()
              return None

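Note: ShellCache.PutFileCmdContext gives writers transactional semantics - data streams into an `_<name>.incomplete` file that is renamed into place on commit and unlinked on abort. A sketch of a round trip through the new DirectoryShellCache (cache directory and key are illustrative):

    cache = DirectoryShellCache(DirectoryFileCache('/tmp/ci-cache'))

    with cache.put_file_cmd('reqs-abc123') as put:
        # put.cmd is "cat > .../_reqs-abc123.incomplete"; splice a producer in
        # front of it, mirroring how save_docker_tar_cmd pipes `docker save`:
        dc.replace(put.cmd, s=f'echo hello | {put.cmd.s}').run(subprocesses.check_call)
    # clean exit -> commit() -> os.replace() publishes the file atomically

    get = cache.get_file_cmd('reqs-abc123')    # ShellCmd('cat ...'), or None on miss
    data = get.run(subprocesses.check_output)  # b'hello\n'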
@@ -1118,32 +1744,208 @@ def attr_setting(obj, attr, val, *, default=None): # noqa
  ##


- class aclosing(contextlib.AbstractAsyncContextManager):  # noqa
-     def __init__(self, thing):
-         self.thing = thing
+ class aclosing(contextlib.AbstractAsyncContextManager):  # noqa
+     def __init__(self, thing):
+         self.thing = thing
+
+     async def __aenter__(self):
+         return self.thing
+
+     async def __aexit__(self, *exc_info):
+         await self.thing.aclose()
+
+
+ ########################################
+ # ../../../omlish/lite/runtime.py
+
+
+ @cached_nullary
+ def is_debugger_attached() -> bool:
+     return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
+
+
+ LITE_REQUIRED_PYTHON_VERSION = (3, 8)
+
+
+ def check_lite_runtime_version() -> None:
+     if sys.version_info < LITE_REQUIRED_PYTHON_VERSION:
+         raise OSError(f'Requires python {LITE_REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
+
+
+ ########################################
+ # ../../../omlish/logs/json.py
+ """
+ TODO:
+  - translate json keys
+ """
+
+
+ class JsonLogFormatter(logging.Formatter):
+     KEYS: ta.Mapping[str, bool] = {
+         'name': False,
+         'msg': False,
+         'args': False,
+         'levelname': False,
+         'levelno': False,
+         'pathname': False,
+         'filename': False,
+         'module': False,
+         'exc_info': True,
+         'exc_text': True,
+         'stack_info': True,
+         'lineno': False,
+         'funcName': False,
+         'created': False,
+         'msecs': False,
+         'relativeCreated': False,
+         'thread': False,
+         'threadName': False,
+         'processName': False,
+         'process': False,
+     }
+
+     def __init__(
+         self,
+         *args: ta.Any,
+         json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
+         **kwargs: ta.Any,
+     ) -> None:
+         super().__init__(*args, **kwargs)
+
+         if json_dumps is None:
+             json_dumps = json_dumps_compact
+         self._json_dumps = json_dumps
+
+     def format(self, record: logging.LogRecord) -> str:
+         dct = {
+             k: v
+             for k, o in self.KEYS.items()
+             for v in [getattr(record, k)]
+             if not (o and v is None)
+         }
+         return self._json_dumps(dct)
+
+
+ ########################################
+ # ../../../omlish/logs/standard.py
+ """
+ TODO:
+  - structured
+  - prefixed
+  - debug
+  - optional noisy? noisy will never be lite - some kinda configure_standard callback mechanism?
+ """
+
+
+ ##
+
+
+ STANDARD_LOG_FORMAT_PARTS = [
+     ('asctime', '%(asctime)-15s'),
+     ('process', 'pid=%(process)-6s'),
+     ('thread', 'tid=%(thread)x'),
+     ('levelname', '%(levelname)s'),
+     ('name', '%(name)s'),
+     ('separator', '::'),
+     ('message', '%(message)s'),
+ ]
+
+
+ class StandardLogFormatter(logging.Formatter):
+     @staticmethod
+     def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
+         return ' '.join(v for k, v in parts)
+
+     converter = datetime.datetime.fromtimestamp  # type: ignore
+
+     def formatTime(self, record, datefmt=None):
+         ct = self.converter(record.created)  # type: ignore
+         if datefmt:
+             return ct.strftime(datefmt)  # noqa
+         else:
+             t = ct.strftime('%Y-%m-%d %H:%M:%S')
+             return '%s.%03d' % (t, record.msecs)  # noqa
+
+
+ ##
+
+
+ class StandardConfiguredLogHandler(ProxyLogHandler):
+     def __init_subclass__(cls, **kwargs):
+         raise TypeError('This class serves only as a marker and should not be subclassed.')
+
+
+ ##
+
+
+ @contextlib.contextmanager
+ def _locking_logging_module_lock() -> ta.Iterator[None]:
+     if hasattr(logging, '_acquireLock'):
+         logging._acquireLock()  # noqa
+         try:
+             yield
+         finally:
+             logging._releaseLock()  # type: ignore  # noqa

-     async def __aenter__(self):
-         return self.thing
+     elif hasattr(logging, '_lock'):
+         # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
+         with logging._lock:  # noqa
+             yield

-     async def __aexit__(self, *exc_info):
-         await self.thing.aclose()
+     else:
+         raise Exception("Can't find lock in logging module")


- ########################################
- # ../../../omlish/lite/runtime.py
+ def configure_standard_logging(
+     level: ta.Union[int, str] = logging.INFO,
+     *,
+     json: bool = False,
+     target: ta.Optional[logging.Logger] = None,
+     force: bool = False,
+     handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
+ ) -> ta.Optional[StandardConfiguredLogHandler]:
+     with _locking_logging_module_lock():
+         if target is None:
+             target = logging.root

+         #

- @cached_nullary
- def is_debugger_attached() -> bool:
-     return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
+         if not force:
+             if any(isinstance(h, StandardConfiguredLogHandler) for h in list(target.handlers)):
+                 return None

+         #

- LITE_REQUIRED_PYTHON_VERSION = (3, 8)
+         if handler_factory is not None:
+             handler = handler_factory()
+         else:
+             handler = logging.StreamHandler()
+
+         #

+         formatter: logging.Formatter
+         if json:
+             formatter = JsonLogFormatter()
+         else:
+             formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
+         handler.setFormatter(formatter)

- def check_lite_runtime_version() -> None:
-     if sys.version_info < LITE_REQUIRED_PYTHON_VERSION:
-         raise OSError(f'Requires python {LITE_REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
+         #
+
+         handler.addFilter(TidLogFilter())
+
+         #
+
+         target.addHandler(handler)
+
+         #
+
+         if level is not None:
+             target.setLevel(level)
+
+         #
+
+         return StandardConfiguredLogHandler(handler)


  ########################################
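Note: configure_standard_logging() is the lite analogue of logging.basicConfig - under the logging module lock it installs a formatted stream handler with the TID filter on the target logger and hands it back wrapped in the StandardConfiguredLogHandler marker. A sketch of typical entry-point use (the rendered output line is approximate):

    configure_standard_logging('INFO')
    log.info('starting')  # 2025-01-01 00:00:00.000 pid=123    tid=1a2b INFO __main__ :: starting

    # Or JSON lines, e.g. for CI log ingestion:
    configure_standard_logging('INFO', json=True, force=True)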
@@ -1513,7 +2315,7 @@ class DockerComposeRun(ExitStacked):

          image: str

-         run_cmd: ta.Sequence[str]
+         cmd: ShellCmd

          #

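Note: callers migrating off run_cmd now pass one shell string (plus env vars to forward as `-e` flags) instead of an argv list. A hypothetical construction - only the fields visible in this diff are used, and the nested `DockerComposeRun.Config` shape plus all literal values are assumptions:

    run = DockerComposeRun(DockerComposeRun.Config(
        compose_file='docker-compose.yml',           # illustrative paths/names
        service='ci',
        image=image_id,
        cmd=ShellCmd('pytest -q', env={'CI': '1'}),  # env keys become `-e CI`
        no_dependency_cleanup=False,
    ))
    run.run()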
@@ -1523,9 +2325,11 @@ class DockerComposeRun(ExitStacked):

          #

-         def __post_init__(self) -> None:
-             check.not_isinstance(self.run_cmd, str)
+         no_dependency_cleanup: bool = False
+
+         #

+         def __post_init__(self) -> None:
              check.not_isinstance(self.run_options, str)

      def __init__(self, cfg: Config) -> None:
@@ -1597,196 +2401,560 @@ class DockerComposeRun(ExitStacked):
1597
2401
  for l in out_service.get('links', [])
1598
2402
  ]
1599
2403
 
1600
- #
2404
+ #
2405
+
2406
+ depends_on = in_service.get('depends_on', [])
2407
+
2408
+ for dep_service, in_dep_service_dct in list(in_services.items()):
2409
+ if dep_service not in depends_on:
2410
+ continue
2411
+
2412
+ out_dep_service: dict = dict(in_dep_service_dct)
2413
+ out_services[dep_service] = out_dep_service
2414
+
2415
+ out_dep_service['ports'] = []
2416
+
2417
+ #
2418
+
2419
+ return out
2420
+
2421
+ @cached_nullary
2422
+ def rewrite_compose_file(self) -> str:
2423
+ in_dct = read_yaml_file(self._cfg.compose_file)
2424
+
2425
+ out_dct = self._rewrite_compose_dct(in_dct)
2426
+
2427
+ #
2428
+
2429
+ out_compose_file = make_temp_file()
2430
+ self._enter_context(defer(lambda: os.unlink(out_compose_file))) # noqa
2431
+
2432
+ compose_json = json_dumps_pretty(out_dct)
2433
+
2434
+ with open(out_compose_file, 'w') as f:
2435
+ f.write(compose_json)
2436
+
2437
+ return out_compose_file
2438
+
2439
+ #
2440
+
2441
+ def _cleanup_dependencies(self) -> None:
2442
+ subprocesses.check_call(
2443
+ 'docker',
2444
+ 'compose',
2445
+ '-f', self.rewrite_compose_file(),
2446
+ 'down',
2447
+ )
2448
+
2449
+ def run(self) -> None:
2450
+ self.tag_image()
2451
+
2452
+ compose_file = self.rewrite_compose_file()
2453
+
2454
+ with contextlib.ExitStack() as es:
2455
+ if not self._cfg.no_dependency_cleanup:
2456
+ es.enter_context(defer(self._cleanup_dependencies)) # noqa
2457
+
2458
+ sh_cmd = ' '.join([
2459
+ 'docker',
2460
+ 'compose',
2461
+ '-f', compose_file,
2462
+ 'run',
2463
+ '--rm',
2464
+ *itertools.chain.from_iterable(
2465
+ ['-e', k]
2466
+ for k in (self._cfg.cmd.env or [])
2467
+ ),
2468
+ *(self._cfg.run_options or []),
2469
+ self._cfg.service,
2470
+ 'sh', '-c', shlex.quote(self._cfg.cmd.s),
2471
+ ])
2472
+
2473
+ run_cmd = dc.replace(self._cfg.cmd, s=sh_cmd)
2474
+
2475
+ run_cmd.run(
2476
+ subprocesses.check_call,
2477
+ **self._subprocess_kwargs,
2478
+ )
2479
+
2480
+
2481
+ ########################################
2482
+ # ../docker.py
2483
+ """
2484
+ TODO:
2485
+ - some less stupid Dockerfile hash
2486
+ - doesn't change too much though
2487
+ """
2488
+
2489
+
2490
+ ##
2491
+
2492
+
2493
+ def build_docker_file_hash(docker_file: str) -> str:
2494
+ with open(docker_file) as f:
2495
+ contents = f.read()
2496
+
2497
+ return sha256_str(contents)
2498
+
2499
+
2500
+ ##
2501
+
2502
+
2503
+ def read_docker_tar_image_tag(tar_file: str) -> str:
2504
+ with tarfile.open(tar_file) as tf:
2505
+ with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
2506
+ m = mf.read()
2507
+
2508
+ manifests = json.loads(m.decode('utf-8'))
2509
+ manifest = check.single(manifests)
2510
+ tag = check.non_empty_str(check.single(manifest['RepoTags']))
2511
+ return tag
2512
+
2513
+
2514
+ def read_docker_tar_image_id(tar_file: str) -> str:
2515
+ with tarfile.open(tar_file) as tf:
2516
+ with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
2517
+ i = mf.read()
2518
+
2519
+ index = json.loads(i.decode('utf-8'))
2520
+ manifest = check.single(index['manifests'])
2521
+ image_id = check.non_empty_str(manifest['digest'])
2522
+ return image_id
2523
+
2524
+
2525
+ ##
2526
+
2527
+
2528
+ def is_docker_image_present(image: str) -> bool:
2529
+ out = subprocesses.check_output(
2530
+ 'docker',
2531
+ 'images',
2532
+ '--format', 'json',
2533
+ image,
2534
+ )
2535
+
2536
+ out_s = out.decode('utf-8').strip()
2537
+ if not out_s:
2538
+ return False
2539
+
2540
+ json.loads(out_s) # noqa
2541
+ return True
2542
+
2543
+
2544
+ def pull_docker_image(
2545
+ image: str,
2546
+ ) -> None:
2547
+ subprocesses.check_call(
2548
+ 'docker',
2549
+ 'pull',
2550
+ image,
2551
+ )
2552
+
2553
+
2554
+ def build_docker_image(
2555
+ docker_file: str,
2556
+ *,
2557
+ cwd: ta.Optional[str] = None,
2558
+ ) -> str:
2559
+ id_file = make_temp_file()
2560
+ with defer(lambda: os.unlink(id_file)):
2561
+ subprocesses.check_call(
2562
+ 'docker',
2563
+ 'build',
2564
+ '-f', os.path.abspath(docker_file),
2565
+ '--iidfile', id_file,
2566
+ '--squash',
2567
+ '.',
2568
+ **(dict(cwd=cwd) if cwd is not None else {}),
2569
+ )
2570
+
2571
+ with open(id_file) as f:
2572
+ image_id = check.single(f.read().strip().splitlines()).strip()
2573
+
2574
+ return image_id
2575
+
2576
+
2577
+ ##
2578
+
2579
+
2580
+ def save_docker_tar_cmd(
2581
+ image: str,
2582
+ output_cmd: ShellCmd,
2583
+ ) -> None:
2584
+ cmd = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
2585
+ cmd.run(subprocesses.check_call)
2586
+
2587
+
2588
+ def save_docker_tar(
2589
+ image: str,
2590
+ tar_file: str,
2591
+ ) -> None:
2592
+ return save_docker_tar_cmd(
2593
+ image,
2594
+ ShellCmd(f'cat > {shlex.quote(tar_file)}'),
2595
+ )
2596
+
2597
+
2598
+ #
2599
+
2600
+
2601
+ def load_docker_tar_cmd(
2602
+ input_cmd: ShellCmd,
2603
+ ) -> str:
2604
+ cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')
2605
+
2606
+ out = cmd.run(subprocesses.check_output).decode()
2607
+
2608
+ line = check.single(out.strip().splitlines())
2609
+ loaded = line.partition(':')[2].strip()
2610
+ return loaded
2611
+
2612
+
2613
+ def load_docker_tar(
2614
+ tar_file: str,
2615
+ ) -> str:
2616
+ return load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
2617
+
2618
+
2619
+ ########################################
2620
+ # ../github/cache.py
2621
+
2622
+
2623
+ ##
2624
+
2625
+
2626
+ class GithubV1CacheShellClient:
2627
+ BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
2628
+ AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN' # noqa
2629
+
2630
+ def __init__(
2631
+ self,
2632
+ *,
2633
+ base_url: ta.Optional[str] = None,
2634
+ auth_token: ta.Optional[str] = None,
2635
+ ) -> None:
2636
+ super().__init__()
2637
+
2638
+ if base_url is None:
2639
+ base_url = os.environ[self.BASE_URL_ENV_KEY]
2640
+ self._base_url = check.non_empty_str(base_url)
2641
+
2642
+ if auth_token is None:
2643
+ auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
2644
+ self._auth_token = auth_token
2645
+
2646
+ self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)
2647
+
2648
+ #
2649
+
2650
+ _MISSING = object()
2651
+
2652
+ def build_headers(
2653
+ self,
2654
+ *,
2655
+ auth_token: ta.Any = _MISSING,
2656
+ content_type: ta.Optional[str] = None,
2657
+ ) -> ta.Dict[str, str]:
2658
+ dct = {
2659
+ 'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
2660
+ }
2661
+
2662
+ if auth_token is self._MISSING:
2663
+ auth_token = self._auth_token
2664
+ if auth_token:
2665
+ dct['Authorization'] = f'Bearer {auth_token}'
2666
+
2667
+ if content_type is not None:
2668
+ dct['Content-Type'] = content_type
2669
+
2670
+ return dct
2671
+
2672
+ #
2673
+
2674
+ HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN' # noqa
2675
+
2676
+ def build_curl_cmd(
2677
+ self,
2678
+ method: str,
2679
+ url: str,
2680
+ *,
2681
+ json_content: bool = False,
2682
+ content_type: ta.Optional[str] = None,
2683
+ ) -> ShellCmd:
2684
+ if content_type is None and json_content:
2685
+ content_type = 'application/json'
2686
+
2687
+ env = {}
2688
+
2689
+ header_auth_token: ta.Optional[str]
2690
+ if self._auth_token:
2691
+ env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
2692
+ header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
2693
+ else:
2694
+ header_auth_token = None
2695
+
2696
+ hdrs = self.build_headers(
2697
+ auth_token=header_auth_token,
2698
+ content_type=content_type,
2699
+ )
2700
+
2701
+ url = f'{self._service_url}/{url}'
1601
2702
 
1602
- depends_on = in_service.get('depends_on', [])
2703
+ cmd = ' '.join([
2704
+ 'curl',
2705
+ '-s',
2706
+ '-X', method,
2707
+ url,
2708
+ *[f'-H "{k}: {v}"' for k, v in hdrs.items()],
2709
+ ])
1603
2710
 
1604
- for dep_service, in_dep_service_dct in list(in_services.items()):
1605
- if dep_service not in depends_on:
1606
- continue
2711
+ return ShellCmd(
2712
+ cmd,
2713
+ env=env,
2714
+ )
1607
2715
 
1608
- out_dep_service: dict = dict(in_dep_service_dct)
1609
- out_services[dep_service] = out_dep_service
2716
+ def build_post_json_curl_cmd(
2717
+ self,
2718
+ url: str,
2719
+ obj: ta.Any,
2720
+ **kwargs: ta.Any,
2721
+ ) -> ShellCmd:
2722
+ curl_cmd = self.build_curl_cmd(
2723
+ 'POST',
2724
+ url,
2725
+ json_content=True,
2726
+ **kwargs,
2727
+ )
1610
2728
 
1611
- out_dep_service['ports'] = []
2729
+ obj_json = json_dumps_compact(obj)
1612
2730
 
1613
- #
2731
+ return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')
1614
2732
 
1615
- return out
2733
+ #
1616
2734
 
1617
- @cached_nullary
1618
- def rewrite_compose_file(self) -> str:
1619
- in_dct = read_yaml_file(self._cfg.compose_file)
2735
+ @dc.dataclass()
2736
+ class CurlError(RuntimeError):
2737
+ status_code: int
2738
+ body: ta.Optional[bytes]
1620
2739
 
1621
- out_dct = self._rewrite_compose_dct(in_dct)
2740
+ def __str__(self) -> str:
2741
+ return repr(self)
1622
2742
 
1623
- #
2743
+ @dc.dataclass(frozen=True)
2744
+ class CurlResult:
2745
+ status_code: int
2746
+ body: ta.Optional[bytes]
2747
+
2748
+ def as_error(self) -> 'GithubV1CacheShellClient.CurlError':
2749
+ return GithubV1CacheShellClient.CurlError(
2750
+ status_code=self.status_code,
2751
+ body=self.body,
2752
+ )
1624
2753
 
1625
- out_compose_file = make_temp_file()
1626
- self._enter_context(defer(lambda: os.unlink(out_compose_file))) # noqa
2754
+ def run_curl_cmd(
2755
+ self,
2756
+ cmd: ShellCmd,
2757
+ *,
2758
+ raise_: bool = False,
2759
+ ) -> CurlResult:
2760
+ out_file = make_temp_file()
2761
+ with defer(lambda: os.unlink(out_file)):
2762
+ run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
1627
2763
 
1628
- compose_json = json_dumps_pretty(out_dct)
2764
+ out_json_bytes = run_cmd.run(subprocesses.check_output)
1629
2765
 
1630
- with open(out_compose_file, 'w') as f:
1631
- f.write(compose_json)
2766
+ out_json = json.loads(out_json_bytes.decode())
2767
+ status_code = check.isinstance(out_json['response_code'], int)
1632
2768
 
1633
- return out_compose_file
2769
+ with open(out_file, 'rb') as f:
2770
+ body = f.read()
1634
2771
 
1635
- #
2772
+ result = self.CurlResult(
2773
+ status_code=status_code,
2774
+ body=body,
2775
+ )
1636
2776
 
1637
- def run(self) -> None:
1638
- self.tag_image()
2777
+ if raise_ and (500 <= status_code <= 600):
2778
+ raise result.as_error()
1639
2779
 
1640
- compose_file = self.rewrite_compose_file()
2780
+ return result
1641
2781
 
1642
- try:
1643
- subprocesses.check_call(
1644
- 'docker',
1645
- 'compose',
1646
- '-f', compose_file,
1647
- 'run',
1648
- '--rm',
1649
- *self._cfg.run_options or [],
1650
- self._cfg.service,
1651
- *self._cfg.run_cmd,
1652
- **self._subprocess_kwargs,
1653
- )
2782
+ def run_json_curl_cmd(
2783
+ self,
2784
+ cmd: ShellCmd,
2785
+ *,
2786
+ success_status_codes: ta.Optional[ta.Container[int]] = None,
2787
+ ) -> ta.Optional[ta.Any]:
2788
+ result = self.run_curl_cmd(cmd, raise_=True)
1654
2789
 
1655
- finally:
1656
- subprocesses.check_call(
1657
- 'docker',
1658
- 'compose',
1659
- '-f', compose_file,
1660
- 'down',
1661
- )
2790
+ if success_status_codes is not None:
2791
+ is_success = result.status_code in success_status_codes
2792
+ else:
2793
+ is_success = 200 <= result.status_code < 300
1662
2794
 
2795
+ if is_success:
2796
+ if not (body := result.body):
2797
+ return None
2798
+ return json.loads(body.decode('utf-8-sig'))
1663
2799
 
1664
- ########################################
1665
- # ../dockertars.py
1666
- """
1667
- TODO:
1668
- - some less stupid Dockerfile hash
1669
- - doesn't change too much though
1670
- """
2800
+ elif result.status_code == 404:
2801
+ return None
1671
2802
 
2803
+ else:
2804
+ raise result.as_error()
1672
2805
 
1673
- ##
2806
+ #
1674
2807
 
2808
+ def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
2809
+ return self.build_curl_cmd(
2810
+ 'GET',
2811
+ f'cache?keys={key}',
2812
+ )
1675
2813
 
1676
- def build_docker_file_hash(docker_file: str) -> str:
1677
- with open(docker_file) as f:
1678
- contents = f.read()
2814
+ def run_get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1.ArtifactCacheEntry]:
2815
+ curl_cmd = self.build_get_entry_curl_cmd(key)
1679
2816
 
1680
- return sha256_str(contents)
2817
+ obj = self.run_json_curl_cmd(
2818
+ curl_cmd,
2819
+ success_status_codes=[200, 204],
2820
+ )
2821
+ if obj is None:
2822
+ return None
1681
2823
 
2824
+ return GithubCacheServiceV1.dataclass_from_json(
2825
+ GithubCacheServiceV1.ArtifactCacheEntry,
2826
+ obj,
2827
+ )
1682
2828
 
1683
- ##
2829
+ #
1684
2830
 
2831
+ def build_download_get_entry_cmd(
2832
+ self,
2833
+ entry: GithubCacheServiceV1.ArtifactCacheEntry,
2834
+ out_file: str,
2835
+ ) -> ShellCmd:
2836
+ return ShellCmd(' '.join([
2837
+ 'aria2c',
2838
+ '-x', '4',
2839
+ '-o', out_file,
2840
+ check.non_empty_str(entry.archive_location),
2841
+ ]))
2842
+
2843
+ def download_get_entry(
2844
+ self,
2845
+ entry: GithubCacheServiceV1.ArtifactCacheEntry,
2846
+ out_file: str,
2847
+ ) -> None:
2848
+ dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
2849
+ dl_cmd.run(subprocesses.check_call)
1685
2850
 
1686
- def read_docker_tar_image_tag(tar_file: str) -> str:
1687
- with tarfile.open(tar_file) as tf:
1688
- with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
1689
- m = mf.read()
2851
+ #
1690
2852
 
1691
- manifests = json.loads(m.decode('utf-8'))
1692
- manifest = check.single(manifests)
1693
- tag = check.non_empty_str(check.single(manifest['RepoTags']))
1694
- return tag
2853
+ def upload_cache_entry(
2854
+ self,
2855
+ key: str,
2856
+ in_file: str,
2857
+ ) -> None:
2858
+ check.state(os.path.isfile(in_file))
1695
2859
 
2860
+ file_size = os.stat(in_file).st_size
1696
2861
 
1697
- def read_docker_tar_image_id(tar_file: str) -> str:
1698
- with tarfile.open(tar_file) as tf:
1699
- with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
1700
- i = mf.read()
2862
+ reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
2863
+ key=key,
2864
+ cache_size=file_size,
2865
+ )
2866
+ reserve_cmd = self.build_post_json_curl_cmd(
2867
+ 'caches',
2868
+ GithubCacheServiceV1.dataclass_to_json(reserve_req),
2869
+ )
2870
+ reserve_resp_obj: ta.Any = check.not_none(self.run_json_curl_cmd(
2871
+ reserve_cmd,
2872
+ success_status_codes=[201],
2873
+ ))
2874
+ reserve_resp = GithubCacheServiceV1.dataclass_from_json( # noqa
2875
+ GithubCacheServiceV1.ReserveCacheResponse,
2876
+ reserve_resp_obj,
2877
+ )
1701
2878
 
1702
- index = json.loads(i.decode('utf-8'))
1703
- manifest = check.single(index['manifests'])
1704
- image_id = check.non_empty_str(manifest['digest'])
1705
- return image_id
2879
+ raise NotImplementedError
1706
2880
 
1707
2881
 
1708
2882
  ##
1709
2883
 
1710
2884
 
1711
- def is_docker_image_present(image: str) -> bool:
1712
- out = subprocesses.check_output(
1713
- 'docker',
1714
- 'images',
1715
- '--format', 'json',
1716
- image,
1717
- )
2885
+ class GithubShellCache(ShellCache):
2886
+ def __init__(
2887
+ self,
2888
+ dir: str, # noqa
2889
+ *,
2890
+ client: ta.Optional[GithubV1CacheShellClient] = None,
2891
+ ) -> None:
2892
+ super().__init__()
1718
2893
 
1719
- out_s = out.decode('utf-8').strip()
1720
- if not out_s:
1721
- return False
2894
+ self._dir = check.not_none(dir)
1722
2895
 
1723
- json.loads(out_s) # noqa
1724
- return True
2896
+ if client is None:
2897
+ client = GithubV1CacheShellClient()
2898
+ self._client = client
1725
2899
 
2900
+ self._local = DirectoryFileCache(self._dir)
1726
2901
 
1727
- ##
2902
+ def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
2903
+ local_file = self._local.get_cache_file_path(key)
2904
+ if os.path.exists(local_file):
2905
+ return ShellCmd(f'cat {shlex.quote(local_file)}')
1728
2906
 
2907
+ if (entry := self._client.run_get_entry(key)) is None:
2908
+ return None
1729
2909
 
1730
- def pull_docker_tar(
1731
- image: str,
1732
- tar_file: str,
1733
- ) -> None:
1734
- subprocesses.check_call(
1735
- 'docker',
1736
- 'pull',
1737
- image,
1738
- )
2910
+ tmp_file = self._local.format_incomplete_file(local_file)
2911
+ try:
2912
+ self._client.download_get_entry(entry, tmp_file)
1739
2913
 
1740
- subprocesses.check_call(
1741
- 'docker',
1742
- 'save',
1743
- image,
1744
- '-o', tar_file,
1745
- )
2914
+ os.replace(tmp_file, local_file)
1746
2915
 
2916
+ except BaseException: # noqa
2917
+ os.unlink(tmp_file)
1747
2918
 
1748
- def build_docker_tar(
1749
- docker_file: str,
1750
- tar_file: str,
1751
- *,
1752
- cwd: ta.Optional[str] = None,
1753
- ) -> str:
1754
- id_file = make_temp_file()
1755
- with defer(lambda: os.unlink(id_file)):
1756
- subprocesses.check_call(
1757
- 'docker',
1758
- 'build',
1759
- '-f', os.path.abspath(docker_file),
1760
- '--iidfile', id_file,
1761
- '--squash',
1762
- '.',
1763
- **(dict(cwd=cwd) if cwd is not None else {}),
1764
- )
2919
+ raise
1765
2920
 
1766
- with open(id_file) as f:
1767
- image_id = check.single(f.read().strip().splitlines()).strip()
2921
+ return ShellCmd(f'cat {shlex.quote(local_file)}')
1768
2922
 
1769
- subprocesses.check_call(
1770
- 'docker',
1771
- 'save',
1772
- image_id,
1773
- '-o', tar_file,
1774
- )
2923
+ class _PutFileCmdContext(ShellCache.PutFileCmdContext): # noqa
2924
+ def __init__(
2925
+ self,
2926
+ owner: 'GithubShellCache',
2927
+ key: str,
2928
+ tmp_file: str,
2929
+ local_file: str,
2930
+ ) -> None:
2931
+ super().__init__()
1775
2932
 
1776
- return image_id
2933
+ self._owner = owner
2934
+ self._key = key
2935
+ self._tmp_file = tmp_file
2936
+ self._local_file = local_file
1777
2937
 
2938
+ @property
2939
+ def cmd(self) -> ShellCmd:
2940
+ return ShellCmd(f'cat > {shlex.quote(self._tmp_file)}')
1778
2941
 
1779
- ##
2942
+ def _commit(self) -> None:
2943
+ os.replace(self._tmp_file, self._local_file)
1780
2944
 
2945
+ self._owner._client.upload_cache_entry(self._key, self._local_file) # noqa
1781
2946
 
1782
- def load_docker_tar(
1783
- tar_file: str,
1784
- ) -> None:
1785
- subprocesses.check_call(
1786
- 'docker',
1787
- 'load',
1788
- '-i', tar_file,
1789
- )
2947
+ def _abort(self) -> None:
2948
+ os.unlink(self._tmp_file)
2949
+
2950
+ def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
2951
+ local_file = self._local.get_cache_file_path(key, make_dirs=True)
2952
+ return self._PutFileCmdContext(
2953
+ self,
2954
+ key,
2955
+ self._local.format_incomplete_file(local_file),
2956
+ local_file,
2957
+ )
1790
2958
 
1791
2959
 
1792
2960
  ########################################
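
The methods above give GithubV1CacheShellClient a complete read path (a curl-built lookup followed by an aria2c download of archive_location), while upload_cache_entry reserves an entry and then stops at raise NotImplementedError. A minimal sketch of the read path, assuming the amalgamated script is importable as omdev.scripts.ci, that aria2c is on PATH, and that the GitHub Actions cache environment the client presumably reads is present; the key is hypothetical:

    # Sketch only: exercise the read path added above.
    from omdev.scripts.ci import GithubV1CacheShellClient

    client = GithubV1CacheShellClient()

    entry = client.run_get_entry('docker-foo')  # hypothetical cache key
    if entry is not None:
        # download_get_entry shells out to aria2c via ShellCmd.run().
        client.download_get_entry(entry, '/tmp/docker-foo.tar.zst')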
@@ -1845,6 +3013,7 @@ def download_requirements(
     subprocesses.check_call(
         'docker',
         'run',
+        '--rm',
         '-i',
         '-v', f'{os.path.abspath(requirements_dir)}:/requirements',
         '-v', f'{requirements_txt_dir}:/requirements_txt',
@@ -1852,10 +3021,10 @@ def download_requirements(
         'pip',
         'download',
         '-d', '/requirements',
-        *itertools.chain.from_iterable([
+        *itertools.chain.from_iterable(
             ['-r', f'/requirements_txt/{os.path.basename(rt)}']
             for rt in requirements_txts
-        ]),
+        ),
     )
 
 
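The hunk above also swaps the list passed to itertools.chain.from_iterable for a generator expression; the flags produced are identical, the intermediate list is just never materialized. A self-contained check (the requirements paths are made up):

    import itertools
    import os.path

    requirements_txts = ['reqs/a.txt', 'reqs/b.txt']

    # Same flags as the old list-comprehension form, built lazily.
    flags = list(itertools.chain.from_iterable(
        ['-r', f'/requirements_txt/{os.path.basename(rt)}']
        for rt in requirements_txts
    ))
    assert flags == ['-r', '/requirements_txt/a.txt', '-r', '/requirements_txt/b.txt']
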
@@ -1863,9 +3032,6 @@ def download_requirements(
 # ../ci.py
 
 
-##
-
-
 class Ci(ExitStacked):
     FILE_NAME_HASH_LEN = 16
 
@@ -1878,8 +3044,12 @@ class Ci(ExitStacked):
         compose_file: str
         service: str
 
+        cmd: ShellCmd
+
         requirements_txts: ta.Optional[ta.Sequence[str]] = None
 
+        always_pull: bool = False
+
         def __post_init__(self) -> None:
             check.not_isinstance(self.requirements_txts, str)
 
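Ci.Config now carries the in-container command as a ShellCmd (cmd) rather than a hard-coded pytest invocation, plus an always_pull toggle. A sketch of constructing the extended Config, using the classes defined in this file with hypothetical paths:

    cfg = Ci.Config(
        project_dir='.',
        docker_file='docker/Dockerfile',    # hypothetical path
        compose_file='docker/compose.yml',  # hypothetical path
        service='ci',
        cmd=ShellCmd('python3 -m pytest -svv test.py'),
        requirements_txts=['requirements.txt'],
        always_pull=False,
    )
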
@@ -1887,40 +3057,61 @@ class Ci(ExitStacked):
             self,
             cfg: Config,
             *,
+            shell_cache: ta.Optional[ShellCache] = None,
             file_cache: ta.Optional[FileCache] = None,
     ) -> None:
         super().__init__()
 
         self._cfg = cfg
+        self._shell_cache = shell_cache
         self._file_cache = file_cache
 
     #
 
-    def load_docker_image(self, image: str) -> None:
-        if is_docker_image_present(image):
+    def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
+        if self._shell_cache is None:
+            return None
+
+        get_cache_cmd = self._shell_cache.get_file_cmd(key)
+        if get_cache_cmd is None:
+            return None
+
+        get_cache_cmd = dc.replace(get_cache_cmd, s=f'{get_cache_cmd.s} | zstd -cd --long')  # noqa
+
+        return load_docker_tar_cmd(get_cache_cmd)
+
+    def _save_cache_docker_image(self, key: str, image: str) -> None:
+        if self._shell_cache is None:
+            return
+
+        with self._shell_cache.put_file_cmd(key) as put_cache:
+            put_cache_cmd = put_cache.cmd
+
+            put_cache_cmd = dc.replace(put_cache_cmd, s=f'zstd | {put_cache_cmd.s}')
+
+            save_docker_tar_cmd(image, put_cache_cmd)
+
+    #
+
+    def _load_docker_image(self, image: str) -> None:
+        if not self._cfg.always_pull and is_docker_image_present(image):
             return
 
         dep_suffix = image
         for c in '/:.-_':
             dep_suffix = dep_suffix.replace(c, '-')
 
-        tar_file_name = f'docker-{dep_suffix}.tar'
-
-        if self._file_cache is not None and (cache_tar_file := self._file_cache.get_file(tar_file_name)):
-            load_docker_tar(cache_tar_file)
+        cache_key = f'docker-{dep_suffix}'
+        if self._load_cache_docker_image(cache_key) is not None:
             return
 
-        temp_dir = tempfile.mkdtemp()
-        with defer(lambda: shutil.rmtree(temp_dir)):
-            temp_tar_file = os.path.join(temp_dir, tar_file_name)
+        pull_docker_image(image)
 
-            pull_docker_tar(
-                image,
-                temp_tar_file,
-            )
+        self._save_cache_docker_image(cache_key, image)
 
-            if self._file_cache is not None:
-                self._file_cache.put_file(temp_tar_file)
+    def load_docker_image(self, image: str) -> None:
+        with log_timing_context(f'Load docker image: {image}'):
+            self._load_docker_image(image)
 
     @cached_nullary
     def load_compose_service_dependencies(self) -> None:
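
Image caching is now expressed as shell pipelines: the cache-read command gets '| zstd -cd --long' appended before load_docker_tar_cmd consumes it, and the cache-write command gets 'zstd |' prefixed. Since ShellCmd is a frozen dataclass, the composition is just dc.replace on its s field; in isolation (the 'cat ...' command stands in for whatever ShellCache.get_file_cmd() returns):

    import dataclasses as dc

    get_cache_cmd = ShellCmd('cat /tmp/cache/docker-foo')
    # Append the decompression stage, exactly as _load_cache_docker_image does.
    get_cache_cmd = dc.replace(get_cache_cmd, s=f'{get_cache_cmd.s} | zstd -cd --long')
    assert get_cache_cmd.s == 'cat /tmp/cache/docker-foo | zstd -cd --long'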
@@ -1934,46 +3125,46 @@ class Ci(ExitStacked):
 
     #
 
-    @cached_nullary
-    def build_ci_image(self) -> str:
+    def _resolve_ci_image(self) -> str:
         docker_file_hash = build_docker_file_hash(self._cfg.docker_file)[:self.FILE_NAME_HASH_LEN]
 
-        tar_file_name = f'ci-{docker_file_hash}.tar'
-
-        if self._file_cache is not None and (cache_tar_file := self._file_cache.get_file(tar_file_name)):
-            image_id = read_docker_tar_image_id(cache_tar_file)
-            load_docker_tar(cache_tar_file)
-            return image_id
+        cache_key = f'ci-{docker_file_hash}'
+        if (cache_image_id := self._load_cache_docker_image(cache_key)) is not None:
+            return cache_image_id
 
-        temp_dir = tempfile.mkdtemp()
-        with defer(lambda: shutil.rmtree(temp_dir)):
-            temp_tar_file = os.path.join(temp_dir, tar_file_name)
+        image_id = build_docker_image(
+            self._cfg.docker_file,
+            cwd=self._cfg.project_dir,
+        )
 
-            image_id = build_docker_tar(
-                self._cfg.docker_file,
-                temp_tar_file,
-                cwd=self._cfg.project_dir,
-            )
+        self._save_cache_docker_image(cache_key, image_id)
 
-            if self._file_cache is not None:
-                self._file_cache.put_file(temp_tar_file)
+        return image_id
 
+    @cached_nullary
+    def resolve_ci_image(self) -> str:
+        with log_timing_context('Resolve ci image') as ltc:
+            image_id = self._resolve_ci_image()
+            ltc.set_description(f'Resolve ci image: {image_id}')
             return image_id
 
     #
 
-    @cached_nullary
-    def build_requirements_dir(self) -> str:
-        requirements_txts = check.not_none(self._cfg.requirements_txts)
+    def _resolve_requirements_dir(self) -> str:
+        requirements_txts = [
+            os.path.join(self._cfg.project_dir, rf)
+            for rf in check.not_none(self._cfg.requirements_txts)
+        ]
 
         requirements_hash = build_requirements_hash(requirements_txts)[:self.FILE_NAME_HASH_LEN]
 
-        tar_file_name = f'requirements-{requirements_hash}.tar'
+        tar_file_key = f'requirements-{requirements_hash}'
+        tar_file_name = f'{tar_file_key}.tar'
 
         temp_dir = tempfile.mkdtemp()
         self._enter_context(defer(lambda: shutil.rmtree(temp_dir)))  # noqa
 
-        if self._file_cache is not None and (cache_tar_file := self._file_cache.get_file(tar_file_name)):
+        if self._file_cache is not None and (cache_tar_file := self._file_cache.get_file(tar_file_key)):
             with tarfile.open(cache_tar_file) as tar:
                 tar.extractall(path=temp_dir)  # noqa
 
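build_ci_image is split into an unwrapped worker (_resolve_ci_image) and a memoized, timed public entry point. The wrapper pattern in isolation, using the cached_nullary and log_timing_context helpers bundled into this script ('abc123' stands in for the real work):

    @cached_nullary
    def resolve_thing() -> str:
        with log_timing_context('Resolve thing') as ltc:
            thing = 'abc123'
            ltc.set_description(f'Resolve thing: {thing}')
            return thing

    assert resolve_thing() == resolve_thing()  # timed body runs only once
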
@@ -1983,7 +3174,7 @@ class Ci(ExitStacked):
         os.makedirs(temp_requirements_dir)
 
         download_requirements(
-            self.build_ci_image(),
+            self.resolve_ci_image(),
             temp_requirements_dir,
             requirements_txts,
         )
@@ -1998,21 +3189,20 @@ class Ci(ExitStacked):
                         arcname=requirement_file,
                     )
 
-            self._file_cache.put_file(temp_tar_file)
+            self._file_cache.put_file(os.path.basename(tar_file_key), temp_tar_file)
 
         return temp_requirements_dir
 
-    #
-
-    def run(self) -> None:
-        self.load_compose_service_dependencies()
-
-        ci_image = self.build_ci_image()
-
-        requirements_dir = self.build_requirements_dir()
+    @cached_nullary
+    def resolve_requirements_dir(self) -> str:
+        with log_timing_context('Resolve requirements dir') as ltc:
+            requirements_dir = self._resolve_requirements_dir()
+            ltc.set_description(f'Resolve requirements dir: {requirements_dir}')
+            return requirements_dir
 
-        #
+    #
 
+    def _run_compose_(self) -> None:
         setup_cmds = [
             'pip install --root-user-action ignore --find-links /requirements --no-index uv',
             (
@@ -2023,40 +3213,74 @@ class Ci(ExitStacked):
 
         #
 
-        test_cmds = [
-            '(cd /project && python3 -m pytest -svv test.py)',
-        ]
+        ci_cmd = dc.replace(self._cfg.cmd, s=' && '.join([
+            *setup_cmds,
+            f'({self._cfg.cmd.s})',
+        ]))
 
         #
 
-        bash_src = ' && '.join([
-            *setup_cmds,
-            *test_cmds,
-        ])
-
         with DockerComposeRun(DockerComposeRun.Config(
-            compose_file=self._cfg.compose_file,
-            service=self._cfg.service,
+                compose_file=self._cfg.compose_file,
+                service=self._cfg.service,
 
-            image=ci_image,
+                image=self.resolve_ci_image(),
 
-            run_cmd=['bash', '-c', bash_src],
+                cmd=ci_cmd,
 
-            run_options=[
-                '-v', f'{os.path.abspath(self._cfg.project_dir)}:/project',
-                '-v', f'{os.path.abspath(requirements_dir)}:/requirements',
-            ],
+                run_options=[
+                    '-v', f'{os.path.abspath(self._cfg.project_dir)}:/project',
+                    '-v', f'{os.path.abspath(self.resolve_requirements_dir())}:/requirements',
+                ],
 
-            cwd=self._cfg.project_dir,
+                cwd=self._cfg.project_dir,
         )) as ci_compose_run:
             ci_compose_run.run()
 
+    def _run_compose(self) -> None:
+        with log_timing_context('Run compose'):
+            self._run_compose_()
+
+    #
+
+    def run(self) -> None:
+        self.load_compose_service_dependencies()
+
+        self.resolve_ci_image()
+
+        self.resolve_requirements_dir()
+
+        self._run_compose()
+
 
 ########################################
-# cli.py
+# ../github/cli.py
+"""
+See:
+ - https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
+"""
 
 
-##
+class GithubCli(ArgparseCli):
+    @argparse_cmd(
+        argparse_arg('key'),
+    )
+    def get_cache_entry(self) -> None:
+        shell_client = GithubV1CacheShellClient()
+        entry = shell_client.run_get_entry(self.args.key)
+        if entry is None:
+            return
+        print(json_dumps_pretty(dc.asdict(entry)))  # noqa
+
+    @argparse_cmd(
+        argparse_arg('repository-id'),
+    )
+    def list_cache_entries(self) -> None:
+        raise NotImplementedError
+
+
+########################################
+# cli.py
 
 
 class CiCli(ArgparseCli):
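
Within _run_compose_ above, the configured cmd is spliced into the setup chain via dc.replace rather than joining hard-coded test strings. With one setup command and the default cmd wired up by cli.py below, the composed string works out to:

    setup_cmds = ['pip install --root-user-action ignore --find-links /requirements --no-index uv']
    cmd = ShellCmd(' && '.join(['cd /project', 'python3 -m pytest -svv test.py']))

    ci_s = ' && '.join([*setup_cmds, f'({cmd.s})'])
    assert ci_s == (
        'pip install --root-user-action ignore --find-links /requirements --no-index uv'
        ' && (cd /project && python3 -m pytest -svv test.py)'
    )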
@@ -2087,23 +3311,32 @@ class CiCli(ArgparseCli):
 
     #
 
+    @argparse_cmd(
+        accepts_unknown=True,
+    )
+    def github(self) -> ta.Optional[int]:
+        return GithubCli(self.unknown_args).cli_run()
+
+    #
+
     @argparse_cmd(
         argparse_arg('project-dir'),
         argparse_arg('service'),
         argparse_arg('--docker-file'),
         argparse_arg('--compose-file'),
         argparse_arg('-r', '--requirements-txt', action='append'),
+        argparse_arg('--github-cache', action='store_true'),
         argparse_arg('--cache-dir'),
+        argparse_arg('--always-pull', action='store_true'),
     )
     async def run(self) -> None:
-        await asyncio.sleep(1)
-
         project_dir = self.args.project_dir
         docker_file = self.args.docker_file
         compose_file = self.args.compose_file
         service = self.args.service
         requirements_txts = self.args.requirements_txt
         cache_dir = self.args.cache_dir
+        always_pull = self.args.always_pull
 
         #
 
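The new github subcommand just forwards its unparsed arguments to GithubCli, as 'return GithubCli(self.unknown_args).cli_run()' above shows. A hypothetical direct use (the dashed 'get-cache-entry' spelling is an assumption about how argparse_cmd derives command names from method names):

    rc = GithubCli(['get-cache-entry', 'docker-foo']).cli_run()
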
@@ -2113,7 +3346,7 @@ class CiCli(ArgparseCli):
 
         def find_alt_file(*alts: str) -> ta.Optional[str]:
             for alt in alts:
-                alt_file = os.path.join(project_dir, alt)
+                alt_file = os.path.abspath(os.path.join(project_dir, alt))
                 if os.path.isfile(alt_file):
                     return alt_file
             return None
@@ -2128,10 +3361,16 @@ class CiCli(ArgparseCli):
         check.state(os.path.isfile(docker_file))
 
         if compose_file is None:
-            compose_file = find_alt_file(
-                'docker/compose.yml',
-                'compose.yml',
-            )
+            compose_file = find_alt_file(*[
+                f'{f}.{x}'
+                for f in [
+                    'docker/docker-compose',
+                    'docker/compose',
+                    'docker-compose',
+                    'compose',
+                ]
+                for x in ['yaml', 'yml']
+            ])
         check.state(os.path.isfile(compose_file))
 
         if not requirements_txts:
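
The compose-file fallback is now the product of four base names and two extensions. Expanded, the nested comprehension tries these eight candidates in order, and find_alt_file() returns the first that exists:

    candidates = [
        f'{f}.{x}'
        for f in ['docker/docker-compose', 'docker/compose', 'docker-compose', 'compose']
        for x in ['yaml', 'yml']
    ]
    assert candidates == [
        'docker/docker-compose.yaml', 'docker/docker-compose.yml',
        'docker/compose.yaml', 'docker/compose.yml',
        'docker-compose.yaml', 'docker-compose.yml',
        'compose.yaml', 'compose.yml',
    ]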
@@ -2149,24 +3388,44 @@
 
         #
 
+        shell_cache: ta.Optional[ShellCache] = None
         file_cache: ta.Optional[FileCache] = None
         if cache_dir is not None:
             if not os.path.exists(cache_dir):
                 os.makedirs(cache_dir)
             check.state(os.path.isdir(cache_dir))
-            file_cache = DirectoryFileCache(cache_dir)
+
+            directory_file_cache = DirectoryFileCache(cache_dir)
+
+            file_cache = directory_file_cache
+
+            if self.args.github_cache:
+                shell_cache = GithubShellCache(cache_dir)
+            else:
+                shell_cache = DirectoryShellCache(directory_file_cache)
 
         #
 
         with Ci(
             Ci.Config(
                 project_dir=project_dir,
+
                 docker_file=docker_file,
+
                 compose_file=compose_file,
                 service=service,
+
                 requirements_txts=requirements_txts,
+
+                cmd=ShellCmd(' && '.join([
+                    'cd /project',
+                    'python3 -m pytest -svv test.py',
+                ])),
+
+                always_pull=always_pull,
             ),
             file_cache=file_cache,
+            shell_cache=shell_cache,
         ) as ci:
             ci.run()
 
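Both cache flavors are rooted in the same --cache-dir: the plain path wraps DirectoryFileCache in DirectoryShellCache, while --github-cache selects GithubShellCache, which uses that directory as a local front for the remote Actions cache. The selection in isolation (the directory and flag value are made up):

    cache_dir = '/tmp/omdev-ci-cache'
    use_github_cache = False  # stands in for self.args.github_cache

    directory_file_cache = DirectoryFileCache(cache_dir)
    shell_cache: ShellCache = (
        GithubShellCache(cache_dir)
        if use_github_cache
        else DirectoryShellCache(directory_file_cache)
    )
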
@@ -2176,6 +3435,8 @@ async def _async_main() -> ta.Optional[int]:
 
 
 def _main() -> None:
+    configure_standard_logging('DEBUG')
+
     sys.exit(rc if isinstance(rc := asyncio.run(_async_main()), int) else 0)
 