omdev-0.0.0.dev213-py3-none-any.whl → omdev-0.0.0.dev215-py3-none-any.whl

omdev/scripts/ci.py CHANGED
@@ -3,7 +3,7 @@
 # @omlish-lite
 # @omlish-script
 # @omlish-amalg-output ../ci/cli.py
-# ruff: noqa: N802 UP006 UP007 UP036
+# ruff: noqa: N802 TC003 UP006 UP007 UP036
 """
 Inputs:
 - requirements.txt
@@ -12,7 +12,7 @@ Inputs:
 
 ==
 
-./python -m ci run --cache-dir ci/cache ci/project omlish-ci
+./python -m omdev.ci run --cache-dir omdev/ci/tests/cache omdev/ci/tests/project omlish-ci
 """
 import abc
 import argparse
@@ -25,6 +25,7 @@ import dataclasses as dc
 import datetime
 import functools
 import hashlib
+import http.client
 import inspect
 import itertools
 import json
@@ -42,6 +43,7 @@ import time
 import types
 import typing as ta
 import urllib.parse
+import urllib.request
 
 
 ########################################
@@ -57,12 +59,12 @@ if sys.version_info < (3, 8):
 # shell.py
 T = ta.TypeVar('T')
 
+# ../../omlish/asyncs/asyncio/asyncio.py
+CallableT = ta.TypeVar('CallableT', bound=ta.Callable)
+
 # ../../omlish/asyncs/asyncio/timeouts.py
 AwaitableT = ta.TypeVar('AwaitableT', bound=ta.Awaitable)
 
-# ../../omlish/lite/cached.py
-CallableT = ta.TypeVar('CallableT', bound=ta.Callable)
-
 # ../../omlish/lite/check.py
 SizedT = ta.TypeVar('SizedT', bound=ta.Sized)
 CheckMessage = ta.Union[str, ta.Callable[..., ta.Optional[str]], None]  # ta.TypeAlias
@@ -82,6 +84,34 @@ AsyncExitStackedT = ta.TypeVar('AsyncExitStackedT', bound='AsyncExitStacked')
 SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias
 
 
+########################################
+# ../consts.py
+
+
+CI_CACHE_VERSION = 1
+
+
+########################################
+# ../github/env.py
+
+
+@dc.dataclass(frozen=True)
+class GithubEnvVar:
+    k: str
+
+    def __call__(self) -> ta.Optional[str]:
+        return os.environ.get(self.k)
+
+
+GITHUB_ENV_VARS: ta.Set[GithubEnvVar] = set()
+
+
+def register_github_env_var(k: str) -> GithubEnvVar:
+    ev = GithubEnvVar(k)
+    GITHUB_ENV_VARS.add(ev)
+    return ev
+
+
 ########################################
 # ../shell.py
 
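Note: the new `GithubEnvVar` registry exists so the CLI can report every GitHub Actions variable the tool consults (see `GithubCli.list_referenced_env_vars` later in this diff). A minimal sketch of the intended usage — the variable name below is illustrative, not one this module registers:

EXAMPLE_ENV_VAR = register_github_env_var('GITHUB_WORKSPACE')  # hypothetical registration

def workspace_or_cwd() -> str:
    # A GithubEnvVar is callable and reads os.environ lazily, at call time.
    return EXAMPLE_ENV_VAR() or os.getcwd()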
@@ -120,6 +150,71 @@ class ShellCmd:
         )
 
 
+########################################
+# ../../../omlish/asyncs/asyncio/asyncio.py
+
+
+def asyncio_once(fn: CallableT) -> CallableT:
+    future = None
+
+    @functools.wraps(fn)
+    async def inner(*args, **kwargs):
+        nonlocal future
+        if not future:
+            future = asyncio.create_task(fn(*args, **kwargs))
+        return await future
+
+    return ta.cast(CallableT, inner)
+
+
+def drain_tasks(loop=None):
+    if loop is None:
+        loop = asyncio.get_running_loop()
+
+    while loop._ready or loop._scheduled:  # noqa
+        loop._run_once()  # noqa
+
+
+@contextlib.contextmanager
+def draining_asyncio_tasks() -> ta.Iterator[None]:
+    loop = asyncio.get_running_loop()
+    try:
+        yield
+    finally:
+        if loop is not None:
+            drain_tasks(loop)  # noqa
+
+
+async def asyncio_wait_concurrent(
+        coros: ta.Iterable[ta.Awaitable[T]],
+        concurrency: ta.Union[int, asyncio.Semaphore],
+        *,
+        return_when: ta.Any = asyncio.FIRST_EXCEPTION,
+) -> ta.List[T]:
+    if isinstance(concurrency, asyncio.Semaphore):
+        semaphore = concurrency
+    elif isinstance(concurrency, int):
+        semaphore = asyncio.Semaphore(concurrency)
+    else:
+        raise TypeError(concurrency)
+
+    async def limited_task(coro):
+        async with semaphore:
+            return await coro
+
+    tasks = [asyncio.create_task(limited_task(coro)) for coro in coros]
+    done, pending = await asyncio.wait(tasks, return_when=return_when)
+
+    for task in pending:
+        task.cancel()
+
+    for task in done:
+        if task.exception():
+            raise task.exception()  # type: ignore
+
+    return [task.result() for task in done]
+
+
 ########################################
 # ../../../omlish/asyncs/asyncio/timeouts.py
 
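Note: `asyncio_wait_concurrent` is the concurrency primitive the new cache client is built on: it wraps each coroutine in a semaphore-guarded task, waits with `asyncio.FIRST_EXCEPTION`, cancels stragglers, and re-raises the first failure. A self-contained usage sketch (the toy coroutine is not from the module):

import asyncio

async def work(i: int) -> int:
    await asyncio.sleep(0.01)  # stand-in for real I/O
    return i * 2

async def main() -> None:
    # No more than 4 of the 16 coroutines run at once; results are unordered.
    results = await asyncio_wait_concurrent([work(i) for i in range(16)], 4)
    print(sorted(results))

asyncio.run(main())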
@@ -999,170 +1094,183 @@ class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
 
 
 ########################################
-# ../cache.py
+# ../../../omlish/os/files.py
 
 
-##
+def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
+    if exist_ok:
+        # First try to bump modification time
+        # Implementation note: GNU touch uses the UTIME_NOW option of the utimensat() / futimens() functions.
+        try:
+            os.utime(self, None)
+        except OSError:
+            pass
+        else:
+            return
+    flags = os.O_CREAT | os.O_WRONLY
+    if not exist_ok:
+        flags |= os.O_EXCL
+    fd = os.open(self, flags, mode)
+    os.close(fd)
 
 
-@abc.abstractmethod
-class FileCache(abc.ABC):
-    @abc.abstractmethod
-    def get_file(self, key: str) -> ta.Optional[str]:
-        raise NotImplementedError
+def unlink_if_exists(path: str) -> None:
+    try:
+        os.unlink(path)
+    except FileNotFoundError:
+        pass
 
-    @abc.abstractmethod
-    def put_file(self, key: str, file_path: str) -> ta.Optional[str]:
-        raise NotImplementedError
 
+@contextlib.contextmanager
+def unlinking_if_exists(path: str) -> ta.Iterator[None]:
+    try:
+        yield
+    finally:
+        unlink_if_exists(path)
 
-#
 
+########################################
+# ../cache.py
 
-class DirectoryFileCache(FileCache):
-    def __init__(self, dir: str) -> None:  # noqa
-        super().__init__()
 
-        self._dir = dir
+##
 
-    #
 
-    def get_cache_file_path(
+@abc.abstractmethod
+class FileCache(abc.ABC):
+    def __init__(
             self,
-            key: str,
             *,
-            make_dirs: bool = False,
-    ) -> str:
-        if make_dirs:
-            os.makedirs(self._dir, exist_ok=True)
-        return os.path.join(self._dir, key)
-
-    def format_incomplete_file(self, f: str) -> str:
-        return os.path.join(os.path.dirname(f), f'_{os.path.basename(f)}.incomplete')
-
-    #
-
-    def get_file(self, key: str) -> ta.Optional[str]:
-        cache_file_path = self.get_cache_file_path(key)
-        if not os.path.exists(cache_file_path):
-            return None
-        return cache_file_path
-
-    def put_file(self, key: str, file_path: str) -> None:
-        cache_file_path = self.get_cache_file_path(key, make_dirs=True)
-        shutil.copyfile(file_path, cache_file_path)
+            version: int = CI_CACHE_VERSION,
+    ) -> None:
+        super().__init__()
 
+        check.isinstance(version, int)
+        check.arg(version >= 0)
+        self._version = version
 
-##
+    @property
+    def version(self) -> int:
+        return self._version
 
+    #
 
-class ShellCache(abc.ABC):
     @abc.abstractmethod
-    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+    def get_file(self, key: str) -> ta.Awaitable[ta.Optional[str]]:
         raise NotImplementedError
 
-    class PutFileCmdContext(abc.ABC):
-        def __init__(self) -> None:
-            super().__init__()
-
-            self._state: ta.Literal['open', 'committed', 'aborted'] = 'open'
-
-        @property
-        def state(self) -> ta.Literal['open', 'committed', 'aborted']:
-            return self._state
-
-        #
+    @abc.abstractmethod
+    def put_file(
+            self,
+            key: str,
+            file_path: str,
+            *,
+            steal: bool = False,
+    ) -> ta.Awaitable[str]:
+        raise NotImplementedError
 
-        @property
-        @abc.abstractmethod
-        def cmd(self) -> ShellCmd:
-            raise NotImplementedError
 
-        #
+#
 
-        def __enter__(self):
-            return self
 
-        def __exit__(self, exc_type, exc_val, exc_tb):
-            if exc_val is None:
-                self.commit()
-            else:
-                self.abort()
+class DirectoryFileCache(FileCache):
+    def __init__(
+            self,
+            dir: str,  # noqa
+            *,
+            no_create: bool = False,
+            no_purge: bool = False,
+            **kwargs: ta.Any,
+    ) -> None:  # noqa
+        super().__init__(**kwargs)
 
-        #
+        self._dir = dir
+        self._no_create = no_create
+        self._no_purge = no_purge
 
-        @abc.abstractmethod
-        def _commit(self) -> None:
-            raise NotImplementedError
+    #
 
-        def commit(self) -> None:
-            if self._state == 'committed':
-                return
-            elif self._state == 'open':
-                self._commit()
-                self._state = 'committed'
-            else:
-                raise RuntimeError(self._state)
+    VERSION_FILE_NAME = '.ci-cache-version'
 
-        #
+    @cached_nullary
+    def setup_dir(self) -> None:
+        version_file = os.path.join(self._dir, self.VERSION_FILE_NAME)
 
-        @abc.abstractmethod
-        def _abort(self) -> None:
-            raise NotImplementedError
+        if self._no_create:
+            check.state(os.path.isdir(self._dir))
 
-        def abort(self) -> None:
-            if self._state == 'aborted':
-                return
-            elif self._state == 'open':
-                self._abort()
-                self._state = 'committed'
-            else:
-                raise RuntimeError(self._state)
+        elif not os.path.isdir(self._dir):
+            os.makedirs(self._dir)
+            with open(version_file, 'w') as f:
+                f.write(str(self._version))
+            return
 
-    @abc.abstractmethod
-    def put_file_cmd(self, key: str) -> PutFileCmdContext:
-        raise NotImplementedError
+        with open(version_file) as f:
+            dir_version = int(f.read().strip())
 
+        if dir_version == self._version:
+            return
 
-#
+        if self._no_purge:
+            raise RuntimeError(f'{dir_version=} != {self._version=}')
 
+        dirs = [n for n in sorted(os.listdir(self._dir)) if os.path.isdir(os.path.join(self._dir, n))]
+        if dirs:
+            raise RuntimeError(
+                f'Refusing to remove stale cache dir {self._dir!r} '
+                f'due to present directories: {", ".join(dirs)}',
+            )
 
-class DirectoryShellCache(ShellCache):
-    def __init__(self, dfc: DirectoryFileCache) -> None:
-        super().__init__()
+        for n in sorted(os.listdir(self._dir)):
+            if n.startswith('.'):
+                continue
+            fp = os.path.join(self._dir, n)
+            check.state(os.path.isfile(fp))
+            log.debug('Purging stale cache file: %s', fp)
+            os.unlink(fp)
 
-        self._dfc = dfc
+        os.unlink(version_file)
 
-    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
-        f = self._dfc.get_file(key)
-        if f is None:
-            return None
-        return ShellCmd(f'cat {shlex.quote(f)}')
+        with open(version_file, 'w') as f:
+            f.write(str(self._version))
 
-    class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
-        def __init__(self, tf: str, f: str) -> None:
-            super().__init__()
+    #
 
-            self._tf = tf
-            self._f = f
+    def get_cache_file_path(
+            self,
+            key: str,
+    ) -> str:
+        self.setup_dir()
+        return os.path.join(self._dir, key)
 
-        @property
-        def cmd(self) -> ShellCmd:
-            return ShellCmd(f'cat > {shlex.quote(self._tf)}')
+    def format_incomplete_file(self, f: str) -> str:
+        return os.path.join(os.path.dirname(f), f'_{os.path.basename(f)}.incomplete')
 
-        def _commit(self) -> None:
-            os.replace(self._tf, self._f)
+    #
 
-        def _abort(self) -> None:
-            os.unlink(self._tf)
+    async def get_file(self, key: str) -> ta.Optional[str]:
+        cache_file_path = self.get_cache_file_path(key)
+        if not os.path.exists(cache_file_path):
+            return None
+        return cache_file_path
 
-    def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
-        f = self._dfc.get_cache_file_path(key, make_dirs=True)
-        return self._PutFileCmdContext(self._dfc.format_incomplete_file(f), f)
+    async def put_file(
+            self,
+            key: str,
+            file_path: str,
+            *,
+            steal: bool = False,
+    ) -> str:
+        cache_file_path = self.get_cache_file_path(key)
+        if steal:
+            shutil.move(file_path, cache_file_path)
+        else:
+            shutil.copyfile(file_path, cache_file_path)
+        return cache_file_path
 
 
 ########################################
-# ../github/cacheapi.py
+# ../github/api.py
 """
 export FILE_SIZE=$(stat --format="%s" $FILE)
 
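Note: `DirectoryFileCache` is now versioned: `setup_dir` writes a `.ci-cache-version` marker on first use, and on a version mismatch it either raises (`no_purge=True`) or deletes the stale flat files before rewriting the marker. A hedged sketch of the expected round trip (the temp paths are illustrative):

import asyncio
import os
import tempfile

async def demo() -> None:
    cache = DirectoryFileCache(os.path.join(tempfile.mkdtemp(), 'cache'))

    src = os.path.join(tempfile.mkdtemp(), 'artifact.bin')
    with open(src, 'wb') as f:
        f.write(b'data')

    stored = await cache.put_file('artifact', src, steal=True)  # move, don't copy
    assert await cache.get_file('artifact') == stored

asyncio.run(demo())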
@@ -1363,16 +1471,27 @@ class GithubCacheServiceV2:
 
 
 ########################################
-# ../utils.py
+# ../github/bootstrap.py
+"""
+sudo rm -rf \
+    /usr/local/.ghcup \
+    /opt/hostedtoolcache \
 
+/usr/local/.ghcup 6.4G, 3391250 files
+/opt/hostedtoolcache 8.0G, 14843980 files
+/usr/local/lib/android 6.4G, 17251667 files
+"""
 
-##
 
+GITHUB_ACTIONS_ENV_VAR = register_github_env_var('GITHUB_ACTIONS')
+
+
+def is_in_github_actions() -> bool:
+    return GITHUB_ACTIONS_ENV_VAR() is not None
 
-def make_temp_file() -> str:
-    file_fd, file = tempfile.mkstemp()
-    os.close(file_fd)
-    return file
+
+########################################
+# ../utils.py
 
 
 ##
@@ -1421,7 +1540,7 @@ class LogTimingContext:
     def __enter__(self) -> 'LogTimingContext':
         self._begin_time = time.time()
 
-        self._log.log(self._level, f'Begin {self._description}')  # noqa
+        self._log.log(self._level, f'Begin : {self._description}')  # noqa
 
         return self
 
@@ -1430,7 +1549,7 @@ class LogTimingContext:
 
         self._log.log(
             self._level,
-            f'End {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
+            f'End : {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
         )
 
 
@@ -1908,92 +2027,613 @@ class JsonLogFormatter(logging.Formatter):
 
 
 ########################################
-# ../../../omlish/logs/standard.py
-"""
-TODO:
- - structured
- - prefixed
- - debug
- - optional noisy? noisy will never be lite - some kinda configure_standard callback mechanism?
-"""
+# ../../../omlish/os/temp.py
 
 
-##
+def make_temp_file(**kwargs: ta.Any) -> str:
+    file_fd, file = tempfile.mkstemp(**kwargs)
+    os.close(file_fd)
+    return file
 
 
-STANDARD_LOG_FORMAT_PARTS = [
-    ('asctime', '%(asctime)-15s'),
-    ('process', 'pid=%(process)-6s'),
-    ('thread', 'tid=%(thread)x'),
-    ('levelname', '%(levelname)s'),
-    ('name', '%(name)s'),
-    ('separator', '::'),
-    ('message', '%(message)s'),
-]
+@contextlib.contextmanager
+def temp_file_context(**kwargs: ta.Any) -> ta.Iterator[str]:
+    path = make_temp_file(**kwargs)
+    try:
+        yield path
+    finally:
+        unlink_if_exists(path)
 
 
-class StandardLogFormatter(logging.Formatter):
-    @staticmethod
-    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
-        return ' '.join(v for k, v in parts)
+@contextlib.contextmanager
+def temp_dir_context(
+        root_dir: ta.Optional[str] = None,
+        **kwargs: ta.Any,
+) -> ta.Iterator[str]:
+    path = tempfile.mkdtemp(dir=root_dir, **kwargs)
+    try:
+        yield path
+    finally:
+        shutil.rmtree(path, ignore_errors=True)
 
-    converter = datetime.datetime.fromtimestamp  # type: ignore
 
-    def formatTime(self, record, datefmt=None):
-        ct = self.converter(record.created)  # type: ignore
-        if datefmt:
-            return ct.strftime(datefmt)  # noqa
-        else:
-            t = ct.strftime('%Y-%m-%d %H:%M:%S')
-            return '%s.%03d' % (t, record.msecs)  # noqa
+@contextlib.contextmanager
+def temp_named_file_context(
+        root_dir: ta.Optional[str] = None,
+        cleanup: bool = True,
+        **kwargs: ta.Any,
+) -> ta.Iterator[tempfile._TemporaryFileWrapper]:  # noqa
+    with tempfile.NamedTemporaryFile(dir=root_dir, delete=False, **kwargs) as f:
+        try:
+            yield f
+        finally:
+            if cleanup:
+                shutil.rmtree(f.name, ignore_errors=True)
+
+
+########################################
+# ../github/client.py
 
 
 ##
 
 
-class StandardConfiguredLogHandler(ProxyLogHandler):
-    def __init_subclass__(cls, **kwargs):
-        raise TypeError('This class serves only as a marker and should not be subclassed.')
+class GithubCacheClient(abc.ABC):
+    class Entry(abc.ABC):  # noqa
+        pass
+
+    @abc.abstractmethod
+    def get_entry(self, key: str) -> ta.Awaitable[ta.Optional[Entry]]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def download_file(self, entry: Entry, out_file: str) -> ta.Awaitable[None]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def upload_file(self, key: str, in_file: str) -> ta.Awaitable[None]:
+        raise NotImplementedError
 
 
 ##
 
 
-@contextlib.contextmanager
-def _locking_logging_module_lock() -> ta.Iterator[None]:
-    if hasattr(logging, '_acquireLock'):
-        logging._acquireLock()  # noqa
-        try:
-            yield
-        finally:
-            logging._releaseLock()  # type: ignore  # noqa
+class GithubCacheServiceV1BaseClient(GithubCacheClient, abc.ABC):
+    BASE_URL_ENV_VAR = register_github_env_var('ACTIONS_CACHE_URL')
+    AUTH_TOKEN_ENV_VAR = register_github_env_var('ACTIONS_RUNTIME_TOKEN')  # noqa
 
-    elif hasattr(logging, '_lock'):
-        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
-        with logging._lock:  # noqa
-            yield
+    KEY_SUFFIX_ENV_VAR = register_github_env_var('GITHUB_RUN_ID')
 
-    else:
-        raise Exception("Can't find lock in logging module")
+    #
 
+    def __init__(
+            self,
+            *,
+            base_url: ta.Optional[str] = None,
+            auth_token: ta.Optional[str] = None,
 
-def configure_standard_logging(
-        level: ta.Union[int, str] = logging.INFO,
-        *,
-        json: bool = False,
-        target: ta.Optional[logging.Logger] = None,
-        force: bool = False,
-        handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
-) -> ta.Optional[StandardConfiguredLogHandler]:
-    with _locking_logging_module_lock():
-        if target is None:
-            target = logging.root
+            key_prefix: ta.Optional[str] = None,
+            key_suffix: ta.Optional[str] = None,
 
-        #
+            cache_version: int = CI_CACHE_VERSION,
 
-        if not force:
-            if any(isinstance(h, StandardConfiguredLogHandler) for h in list(target.handlers)):
-                return None
+            loop: ta.Optional[asyncio.AbstractEventLoop] = None,
+    ) -> None:
+        super().__init__()
+
+        #
+
+        if base_url is None:
+            base_url = check.non_empty_str(self.BASE_URL_ENV_VAR())
+        self._service_url = GithubCacheServiceV1.get_service_url(base_url)
+
+        if auth_token is None:
+            auth_token = self.AUTH_TOKEN_ENV_VAR()
+        self._auth_token = auth_token
+
+        #
+
+        self._key_prefix = key_prefix
+
+        if key_suffix is None:
+            key_suffix = self.KEY_SUFFIX_ENV_VAR()
+        self._key_suffix = check.non_empty_str(key_suffix)
+
+        #
+
+        self._cache_version = check.isinstance(cache_version, int)
+
+        #
+
+        self._given_loop = loop
+
+    #
+
+    def _get_loop(self) -> asyncio.AbstractEventLoop:
+        if (loop := self._given_loop) is not None:
+            return loop
+        return asyncio.get_event_loop()
+
+    #
+
+    def build_request_headers(
+            self,
+            headers: ta.Optional[ta.Mapping[str, str]] = None,
+            *,
+            content_type: ta.Optional[str] = None,
+            json_content: bool = False,
+    ) -> ta.Dict[str, str]:
+        dct = {
+            'Accept': ';'.join([
+                'application/json',
+                f'api-version={GithubCacheServiceV1.API_VERSION}',
+            ]),
+        }
+
+        if (auth_token := self._auth_token):
+            dct['Authorization'] = f'Bearer {auth_token}'
+
+        if content_type is None and json_content:
+            content_type = 'application/json'
+        if content_type is not None:
+            dct['Content-Type'] = content_type
+
+        if headers:
+            dct.update(headers)
+
+        return dct
+
+    #
+
+    def load_json_bytes(self, b: ta.Optional[bytes]) -> ta.Optional[ta.Any]:
+        if not b:
+            return None
+        return json.loads(b.decode('utf-8-sig'))
+
+    #
+
+    async def send_url_request(
+            self,
+            req: urllib.request.Request,
+    ) -> ta.Tuple[http.client.HTTPResponse, ta.Optional[bytes]]:
+        def run_sync():
+            with urllib.request.urlopen(req) as resp:  # noqa
+                body = resp.read()
+            return (resp, body)
+
+        return await self._get_loop().run_in_executor(None, run_sync)  # noqa
+
+    #
+
+    @dc.dataclass()
+    class ServiceRequestError(RuntimeError):
+        status_code: int
+        body: ta.Optional[bytes]
+
+        def __str__(self) -> str:
+            return repr(self)
+
+    async def send_service_request(
+            self,
+            path: str,
+            *,
+            method: ta.Optional[str] = None,
+            headers: ta.Optional[ta.Mapping[str, str]] = None,
+            content_type: ta.Optional[str] = None,
+            content: ta.Optional[bytes] = None,
+            json_content: ta.Optional[ta.Any] = None,
+            success_status_codes: ta.Optional[ta.Container[int]] = None,
+    ) -> ta.Optional[ta.Any]:
+        url = f'{self._service_url}/{path}'
+
+        if content is not None and json_content is not None:
+            raise RuntimeError('Must not pass both content and json_content')
+        elif json_content is not None:
+            content = json_dumps_compact(json_content).encode('utf-8')
+            header_json_content = True
+        else:
+            header_json_content = False
+
+        if method is None:
+            method = 'POST' if content is not None else 'GET'
+
+        #
+
+        req = urllib.request.Request(  # noqa
+            url,
+            method=method,
+            headers=self.build_request_headers(
+                headers,
+                content_type=content_type,
+                json_content=header_json_content,
+            ),
+            data=content,
+        )
+
+        resp, body = await self.send_url_request(req)
+
+        #
+
+        if success_status_codes is not None:
+            is_success = resp.status in success_status_codes
+        else:
+            is_success = (200 <= resp.status <= 300)
+        if not is_success:
+            raise self.ServiceRequestError(resp.status, body)
+
+        return self.load_json_bytes(body)
+
+    #
+
+    KEY_PART_SEPARATOR = '--'
+
+    def fix_key(self, s: str, partial_suffix: bool = False) -> str:
+        return self.KEY_PART_SEPARATOR.join([
+            *([self._key_prefix] if self._key_prefix else []),
+            s,
+            ('' if partial_suffix else self._key_suffix),
+        ])
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class Entry(GithubCacheClient.Entry):
+        artifact: GithubCacheServiceV1.ArtifactCacheEntry
+
+    #
+
+    def build_get_entry_url_path(self, *keys: str) -> str:
+        qp = dict(
+            keys=','.join(urllib.parse.quote_plus(k) for k in keys),
+            version=str(self._cache_version),
+        )
+
+        return '?'.join([
+            'cache',
+            '&'.join([
+                f'{k}={v}'
+                for k, v in qp.items()
+            ]),
+        ])
+
+    GET_ENTRY_SUCCESS_STATUS_CODES = (200, 204)
+
+
+##
+
+
+class GithubCacheServiceV1Client(GithubCacheServiceV1BaseClient):
+    DEFAULT_CONCURRENCY = 4
+
+    DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024
+
+    def __init__(
+            self,
+            *,
+            concurrency: int = DEFAULT_CONCURRENCY,
+            chunk_size: int = DEFAULT_CHUNK_SIZE,
+            **kwargs: ta.Any,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        check.arg(concurrency > 0)
+        self._concurrency = concurrency
+
+        check.arg(chunk_size > 0)
+        self._chunk_size = chunk_size
+
+    #
+
+    async def get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1BaseClient.Entry]:
+        obj = await self.send_service_request(
+            self.build_get_entry_url_path(self.fix_key(key, partial_suffix=True)),
+        )
+        if obj is None:
+            return None
+
+        return self.Entry(GithubCacheServiceV1.dataclass_from_json(
+            GithubCacheServiceV1.ArtifactCacheEntry,
+            obj,
+        ))
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class _DownloadChunk:
+        key: str
+        url: str
+        out_file: str
+        offset: int
+        size: int
+
+    async def _download_file_chunk_urllib(self, chunk: _DownloadChunk) -> None:
+        req = urllib.request.Request(  # noqa
+            chunk.url,
+            headers={
+                'Range': f'bytes={chunk.offset}-{chunk.offset + chunk.size - 1}',
+            },
+        )
+
+        _, buf_ = await self.send_url_request(req)
+
+        buf = check.not_none(buf_)
+        check.equal(len(buf), chunk.size)
+
+        #
+
+        def write_sync():
+            with open(chunk.out_file, 'r+b') as f:  # noqa
+                f.seek(chunk.offset, os.SEEK_SET)
+                f.write(buf)
+
+        await self._get_loop().run_in_executor(None, write_sync)  # noqa
+
+    # async def _download_file_chunk_curl(self, chunk: _DownloadChunk) -> None:
+    #     async with contextlib.AsyncExitStack() as es:
+    #         f = open(chunk.out_file, 'r+b')
+    #         f.seek(chunk.offset, os.SEEK_SET)
+    #
+    #         tmp_file = es.enter_context(temp_file_context())  # noqa
+    #
+    #         proc = await es.enter_async_context(asyncio_subprocesses.popen(
+    #             'curl',
+    #             '-s',
+    #             '-w', '%{json}',
+    #             '-H', f'Range: bytes={chunk.offset}-{chunk.offset + chunk.size - 1}',
+    #             chunk.url,
+    #             output=subprocess.PIPE,
+    #         ))
+    #
+    #         futs = asyncio.gather(
+    #
+    #         )
+    #
+    #         await proc.wait()
+    #
+    #         with open(tmp_file, 'r') as f:  # noqa
+    #             curl_json = tmp_file.read()
+    #
+    #     curl_res = json.loads(curl_json.decode().strip())
+    #
+    #     status_code = check.isinstance(curl_res['response_code'], int)
+    #
+    #     if not (200 <= status_code <= 300):
+    #         raise RuntimeError(f'Curl chunk download {chunk} failed: {curl_res}')
+
+    async def _download_file_chunk(self, chunk: _DownloadChunk) -> None:
+        with log_timing_context(
+                'Downloading github cache '
+                f'key {chunk.key} '
+                f'file {chunk.out_file} '
+                f'chunk {chunk.offset} - {chunk.offset + chunk.size}',
+        ):
+            await self._download_file_chunk_urllib(chunk)
+
+    async def _download_file(self, entry: GithubCacheServiceV1BaseClient.Entry, out_file: str) -> None:
+        key = check.non_empty_str(entry.artifact.cache_key)
+        url = check.non_empty_str(entry.artifact.archive_location)
+
+        head_resp, _ = await self.send_url_request(urllib.request.Request(  # noqa
+            url,
+            method='HEAD',
+        ))
+        file_size = int(head_resp.headers['Content-Length'])
+
+        #
+
+        with open(out_file, 'xb') as f:  # noqa
+            f.truncate(file_size)
+
+        #
+
+        download_tasks = []
+        chunk_size = self._chunk_size
+        for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
+            offset = i * chunk_size
+            size = min(chunk_size, file_size - offset)
+            chunk = self._DownloadChunk(
+                key,
+                url,
+                out_file,
+                offset,
+                size,
+            )
+            download_tasks.append(self._download_file_chunk(chunk))
+
+        await asyncio_wait_concurrent(download_tasks, self._concurrency)
+
+    async def download_file(self, entry: GithubCacheClient.Entry, out_file: str) -> None:
+        entry1 = check.isinstance(entry, self.Entry)
+        with log_timing_context(
+                'Downloading github cache '
+                f'key {entry1.artifact.cache_key} '
+                f'version {entry1.artifact.cache_version} '
+                f'to {out_file}',
+        ):
+            await self._download_file(entry1, out_file)
+
+    #
+
+    async def _upload_file_chunk(
+            self,
+            key: str,
+            cache_id: int,
+            in_file: str,
+            offset: int,
+            size: int,
+    ) -> None:
+        with log_timing_context(
+                f'Uploading github cache {key} '
+                f'file {in_file} '
+                f'chunk {offset} - {offset + size}',
+        ):
+            with open(in_file, 'rb') as f:  # noqa
+                f.seek(offset)
+                buf = f.read(size)
+
+            check.equal(len(buf), size)
+
+            await self.send_service_request(
+                f'caches/{cache_id}',
+                method='PATCH',
+                content_type='application/octet-stream',
+                headers={
+                    'Content-Range': f'bytes {offset}-{offset + size - 1}/*',
+                },
+                content=buf,
+                success_status_codes=[204],
+            )
+
+    async def _upload_file(self, key: str, in_file: str) -> None:
+        fixed_key = self.fix_key(key)
+
+        check.state(os.path.isfile(in_file))
+
+        file_size = os.stat(in_file).st_size
+
+        #
+
+        reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
+            key=fixed_key,
+            cache_size=file_size,
+            version=str(self._cache_version),
+        )
+        reserve_resp_obj = await self.send_service_request(
+            'caches',
+            json_content=GithubCacheServiceV1.dataclass_to_json(reserve_req),
+            success_status_codes=[201],
+        )
+        reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
+            GithubCacheServiceV1.ReserveCacheResponse,
+            reserve_resp_obj,
+        )
+        cache_id = check.isinstance(reserve_resp.cache_id, int)
+
+        log.debug(f'Github cache file {os.path.basename(in_file)} got id {cache_id}')  # noqa
+
+        #
+
+        upload_tasks = []
+        chunk_size = self._chunk_size
+        for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
+            offset = i * chunk_size
+            size = min(chunk_size, file_size - offset)
+            upload_tasks.append(self._upload_file_chunk(
+                fixed_key,
+                cache_id,
+                in_file,
+                offset,
+                size,
+            ))
+
+        await asyncio_wait_concurrent(upload_tasks, self._concurrency)
+
+        #
+
+        commit_req = GithubCacheServiceV1.CommitCacheRequest(
+            size=file_size,
+        )
+        await self.send_service_request(
+            f'caches/{cache_id}',
+            json_content=GithubCacheServiceV1.dataclass_to_json(commit_req),
+            success_status_codes=[204],
+        )
+
+    async def upload_file(self, key: str, in_file: str) -> None:
+        with log_timing_context(
+                f'Uploading github cache file {os.path.basename(in_file)} '
+                f'key {key}',
+        ):
+            await self._upload_file(key, in_file)
+
+
+########################################
+# ../../../omlish/logs/standard.py
+"""
+TODO:
+ - structured
+ - prefixed
+ - debug
+ - optional noisy? noisy will never be lite - some kinda configure_standard callback mechanism?
+"""
+
+
+##
+
+
+STANDARD_LOG_FORMAT_PARTS = [
+    ('asctime', '%(asctime)-15s'),
+    ('process', 'pid=%(process)-6s'),
+    ('thread', 'tid=%(thread)x'),
+    ('levelname', '%(levelname)s'),
+    ('name', '%(name)s'),
+    ('separator', '::'),
+    ('message', '%(message)s'),
+]
+
+
+class StandardLogFormatter(logging.Formatter):
+    @staticmethod
+    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
+        return ' '.join(v for k, v in parts)
+
+    converter = datetime.datetime.fromtimestamp  # type: ignore
+
+    def formatTime(self, record, datefmt=None):
+        ct = self.converter(record.created)  # type: ignore
+        if datefmt:
+            return ct.strftime(datefmt)  # noqa
+        else:
+            t = ct.strftime('%Y-%m-%d %H:%M:%S')
+            return '%s.%03d' % (t, record.msecs)  # noqa
+
+
+##
+
+
+class StandardConfiguredLogHandler(ProxyLogHandler):
+    def __init_subclass__(cls, **kwargs):
+        raise TypeError('This class serves only as a marker and should not be subclassed.')
+
+
+##
+
+
+@contextlib.contextmanager
+def _locking_logging_module_lock() -> ta.Iterator[None]:
+    if hasattr(logging, '_acquireLock'):
+        logging._acquireLock()  # noqa
+        try:
+            yield
+        finally:
+            logging._releaseLock()  # type: ignore  # noqa
+
+    elif hasattr(logging, '_lock'):
+        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
+        with logging._lock:  # noqa
+            yield
+
+    else:
+        raise Exception("Can't find lock in logging module")
+
+
+def configure_standard_logging(
+        level: ta.Union[int, str] = logging.INFO,
+        *,
+        json: bool = False,
+        target: ta.Optional[logging.Logger] = None,
+        force: bool = False,
+        handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
+) -> ta.Optional[StandardConfiguredLogHandler]:
+    with _locking_logging_module_lock():
+        if target is None:
+            target = logging.root
+
+        #
+
+        if not force:
+            if any(isinstance(h, StandardConfiguredLogHandler) for h in list(target.handlers)):
+                return None
 
         #
 
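Note: both `_download_file` and `_upload_file` above split the payload into `self._chunk_size` ranges with the same ceil-division loop, then fan the chunk coroutines out through `asyncio_wait_concurrent`. The arithmetic, restated standalone for clarity:

import typing as ta

def chunk_ranges(file_size: int, chunk_size: int) -> ta.List[ta.Tuple[int, int]]:
    # Mirrors the loop bodies above: (offset, size) pairs covering the file,
    # with a short final chunk when the size is not an exact multiple.
    n = (file_size // chunk_size) + (1 if file_size % chunk_size else 0)
    return [(i * chunk_size, min(chunk_size, file_size - i * chunk_size)) for i in range(n)]

assert chunk_ranges(70, 32) == [(0, 32), (32, 32), (64, 6)]
assert chunk_ranges(64, 32) == [(0, 32), (32, 32)]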
@@ -2358,201 +2998,97 @@ class AbstractAsyncSubprocesses(BaseSubprocesses):
 
 
 ########################################
-# ../github/curl.py
+# ../github/cache.py
 
 
 ##
 
 
-class GithubServiceCurlClient:
+class GithubFileCache(FileCache):
     def __init__(
             self,
-            service_url: str,
-            auth_token: ta.Optional[str] = None,
+            dir: str,  # noqa
             *,
-            api_version: ta.Optional[str] = None,
+            client: ta.Optional[GithubCacheClient] = None,
+            **kwargs: ta.Any,
     ) -> None:
-        super().__init__()
-
-        self._service_url = check.non_empty_str(service_url)
-        self._auth_token = auth_token
-        self._api_version = api_version
-
-    #
-
-    _MISSING = object()
-
-    def build_headers(
-            self,
-            headers: ta.Optional[ta.Mapping[str, str]] = None,
-            *,
-            auth_token: ta.Any = _MISSING,
-            content_type: ta.Optional[str] = None,
-    ) -> ta.Dict[str, str]:
-        dct = {
-            'Accept': ';'.join([
-                'application/json',
-                *([f'api-version={self._api_version}'] if self._api_version else []),
-            ]),
-        }
-
-        if auth_token is self._MISSING:
-            auth_token = self._auth_token
-        if auth_token:
-            dct['Authorization'] = f'Bearer {auth_token}'
-
-        if content_type is not None:
-            dct['Content-Type'] = content_type
-
-        if headers:
-            dct.update(headers)
-
-        return dct
-
-    #
-
-    HEADER_AUTH_TOKEN_ENV_KEY_PREFIX = '_GITHUB_SERVICE_AUTH_TOKEN'  # noqa
-
-    @property
-    def header_auth_token_env_key(self) -> str:
-        return f'{self.HEADER_AUTH_TOKEN_ENV_KEY_PREFIX}_{id(self)}'
-
-    def build_cmd(
-            self,
-            method: str,
-            url: str,
-            *,
-            json_content: bool = False,
-            content_type: ta.Optional[str] = None,
-            headers: ta.Optional[ta.Dict[str, str]] = None,
-    ) -> ShellCmd:
-        if content_type is None and json_content:
-            content_type = 'application/json'
-
-        env = {}
+        super().__init__(**kwargs)
 
-        header_auth_token: ta.Optional[str]
-        if self._auth_token:
-            header_env_key = self.header_auth_token_env_key
-            env[header_env_key] = self._auth_token
-            header_auth_token = f'${header_env_key}'
-        else:
-            header_auth_token = None
-
-        built_hdrs = self.build_headers(
-            headers,
-            auth_token=header_auth_token,
-            content_type=content_type,
-        )
-
-        url = f'{self._service_url}/{url}'
-
-        cmd = ' '.join([
-            'curl',
-            '-s',
-            '-X', method,
-            url,
-            *[f'-H "{k}: {v}"' for k, v in built_hdrs.items()],
-        ])
+        self._dir = check.not_none(dir)
 
-        return ShellCmd(
-            cmd,
-            env=env,
-        )
+        if client is None:
+            client = GithubCacheServiceV1Client(
+                cache_version=self._version,
+            )
+        self._client: GithubCacheClient = client
 
-    def build_post_json_cmd(
-            self,
-            url: str,
-            obj: ta.Any,
-            **kwargs: ta.Any,
-    ) -> ShellCmd:
-        curl_cmd = self.build_cmd(
-            'POST',
-            url,
-            json_content=True,
-            **kwargs,
+        self._local = DirectoryFileCache(
+            self._dir,
+            version=self._version,
         )
 
-        obj_json = json_dumps_compact(obj)
-
-        return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')
-
-    #
+    async def get_file(self, key: str) -> ta.Optional[str]:
+        local_file = self._local.get_cache_file_path(key)
+        if os.path.exists(local_file):
+            return local_file
 
-    @dc.dataclass()
-    class Error(RuntimeError):
-        status_code: int
-        body: ta.Optional[bytes]
+        if (entry := await self._client.get_entry(key)) is None:
+            return None
 
-        def __str__(self) -> str:
-            return repr(self)
+        tmp_file = self._local.format_incomplete_file(local_file)
+        with unlinking_if_exists(tmp_file):
+            await self._client.download_file(entry, tmp_file)
 
-    @dc.dataclass(frozen=True)
-    class Result:
-        status_code: int
-        body: ta.Optional[bytes]
+            os.replace(tmp_file, local_file)
 
-        def as_error(self) -> 'GithubServiceCurlClient.Error':
-            return GithubServiceCurlClient.Error(
-                status_code=self.status_code,
-                body=self.body,
-            )
+        return local_file
 
-    def run_cmd(
+    async def put_file(
             self,
-            cmd: ShellCmd,
+            key: str,
+            file_path: str,
             *,
-            raise_: bool = False,
-            **subprocess_kwargs: ta.Any,
-    ) -> Result:
-        out_file = make_temp_file()
-        with defer(lambda: os.unlink(out_file)):
-            run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
-
-            out_json_bytes = run_cmd.run(
-                subprocesses.check_output,
-                **subprocess_kwargs,
-            )
-
-            out_json = json.loads(out_json_bytes.decode())
-            status_code = check.isinstance(out_json['response_code'], int)
-
-            with open(out_file, 'rb') as f:
-                body = f.read()
+            steal: bool = False,
+    ) -> str:
+        cache_file_path = await self._local.put_file(
+            key,
+            file_path,
+            steal=steal,
+        )
 
-            result = self.Result(
-                status_code=status_code,
-                body=body,
-            )
+        await self._client.upload_file(key, cache_file_path)
 
-            if raise_ and (500 <= status_code <= 600):
-                raise result.as_error()
+        return cache_file_path
 
-            return result
 
-    def run_json_cmd(
-            self,
-            cmd: ShellCmd,
-            *,
-            success_status_codes: ta.Optional[ta.Container[int]] = None,
-    ) -> ta.Optional[ta.Any]:
-        result = self.run_cmd(cmd, raise_=True)
+########################################
+# ../github/cli.py
+"""
+See:
+ - https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
+"""
 
-        if success_status_codes is not None:
-            is_success = result.status_code in success_status_codes
-        else:
-            is_success = 200 <= result.status_code < 300
 
-        if is_success:
-            if not (body := result.body):
-                return None
-            return json.loads(body.decode('utf-8-sig'))
+class GithubCli(ArgparseCli):
+    @argparse_cmd()
+    def list_referenced_env_vars(self) -> None:
+        print('\n'.join(sorted(ev.k for ev in GITHUB_ENV_VARS)))
 
-        elif result.status_code == 404:
-            return None
+    @argparse_cmd(
+        argparse_arg('key'),
+    )
+    async def get_cache_entry(self) -> None:
+        client = GithubCacheServiceV1Client()
+        entry = await client.get_entry(self.args.key)
+        if entry is None:
+            return
+        print(json_dumps_pretty(dc.asdict(entry)))  # noqa
 
-        else:
-            raise result.as_error()
+    @argparse_cmd(
+        argparse_arg('repository-id'),
+    )
+    def list_cache_entries(self) -> None:
+        raise NotImplementedError
 
 
 ########################################
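Note: `GithubFileCache` layers the HTTP client over a local `DirectoryFileCache`: hits are served from disk, misses are downloaded to an `.incomplete` temp name and atomically renamed into place, and puts land locally before being uploaded. A hedged usage sketch — it assumes the `ACTIONS_CACHE_URL`, `ACTIONS_RUNTIME_TOKEN`, and `GITHUB_RUN_ID` environment variables that the V1 client reads, and the key is illustrative:

import asyncio

async def restore_or_none() -> None:
    cache = GithubFileCache('ci/cache')  # local spill directory
    if (path := await cache.get_file('docker-base-image')) is not None:
        print('restored to', path)

asyncio.run(restore_or_none())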
@@ -2937,11 +3473,6 @@ class DockerComposeRun(AsyncExitStacked):
             if k in out_service:
                 del out_service[k]
 
-        out_service['links'] = [
-            f'{l}:{l}' if ':' not in l else l
-            for l in out_service.get('links', [])
-        ]
-
         #
 
         if not self._cfg.no_dependencies:
@@ -2958,7 +3489,6 @@ class DockerComposeRun(AsyncExitStacked):
 
         else:
             out_service['depends_on'] = []
-            out_service['links'] = []
 
         #
 
@@ -3049,444 +3579,134 @@ def read_docker_tar_image_tag(tar_file: str) -> str:
3049
3579
  with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
3050
3580
  m = mf.read()
3051
3581
 
3052
- manifests = json.loads(m.decode('utf-8'))
3053
- manifest = check.single(manifests)
3054
- tag = check.non_empty_str(check.single(manifest['RepoTags']))
3055
- return tag
3056
-
3057
-
3058
- def read_docker_tar_image_id(tar_file: str) -> str:
3059
- with tarfile.open(tar_file) as tf:
3060
- with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
3061
- i = mf.read()
3062
-
3063
- index = json.loads(i.decode('utf-8'))
3064
- manifest = check.single(index['manifests'])
3065
- image_id = check.non_empty_str(manifest['digest'])
3066
- return image_id
3067
-
3068
-
3069
- ##
3070
-
3071
-
3072
- async def is_docker_image_present(image: str) -> bool:
3073
- out = await asyncio_subprocesses.check_output(
3074
- 'docker',
3075
- 'images',
3076
- '--format', 'json',
3077
- image,
3078
- )
3079
-
3080
- out_s = out.decode('utf-8').strip()
3081
- if not out_s:
3082
- return False
3083
-
3084
- json.loads(out_s) # noqa
3085
- return True
3086
-
3087
-
3088
- async def pull_docker_image(
3089
- image: str,
3090
- ) -> None:
3091
- await asyncio_subprocesses.check_call(
3092
- 'docker',
3093
- 'pull',
3094
- image,
3095
- )
3096
-
3097
-
3098
- async def build_docker_image(
3099
- docker_file: str,
3100
- *,
3101
- tag: ta.Optional[str] = None,
3102
- cwd: ta.Optional[str] = None,
3103
- ) -> str:
3104
- id_file = make_temp_file()
3105
- with defer(lambda: os.unlink(id_file)):
3106
- await asyncio_subprocesses.check_call(
3107
- 'docker',
3108
- 'build',
3109
- '-f', os.path.abspath(docker_file),
3110
- '--iidfile', id_file,
3111
- '--squash',
3112
- *(['--tag', tag] if tag is not None else []),
3113
- '.',
3114
- **(dict(cwd=cwd) if cwd is not None else {}),
3115
- )
3116
-
3117
- with open(id_file) as f: # noqa
3118
- image_id = check.single(f.read().strip().splitlines()).strip()
3119
-
3120
- return image_id
3121
-
3122
-
3123
- async def tag_docker_image(image: str, tag: str) -> None:
3124
- await asyncio_subprocesses.check_call(
3125
- 'docker',
3126
- 'tag',
3127
- image,
3128
- tag,
3129
- )
3130
-
3131
-
3132
- async def delete_docker_tag(tag: str) -> None:
3133
- await asyncio_subprocesses.check_call(
3134
- 'docker',
3135
- 'rmi',
3136
- tag,
3137
- )
3138
-
3139
-
3140
- ##
3141
-
3142
-
3143
- async def save_docker_tar_cmd(
3144
- image: str,
3145
- output_cmd: ShellCmd,
3146
- ) -> None:
3147
- cmd = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
3148
- await cmd.run(asyncio_subprocesses.check_call)
3149
-
3150
-
3151
- async def save_docker_tar(
3152
- image: str,
3153
- tar_file: str,
3154
- ) -> None:
3155
- return await save_docker_tar_cmd(
3156
- image,
3157
- ShellCmd(f'cat > {shlex.quote(tar_file)}'),
3158
- )
3159
-
3160
-
3161
- #
3162
-
3163
-
3164
- async def load_docker_tar_cmd(
3165
- input_cmd: ShellCmd,
3166
- ) -> str:
3167
- cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')
3168
-
3169
- out = (await cmd.run(asyncio_subprocesses.check_output)).decode()
3170
-
3171
- line = check.single(out.strip().splitlines())
3172
- loaded = line.partition(':')[2].strip()
3173
- return loaded
3174
-
3175
-
3176
- async def load_docker_tar(
3177
- tar_file: str,
3178
- ) -> str:
3179
- return await load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
3180
-
3181
-
3182
- ########################################
3183
- # ../github/cache.py
3184
-
3185
-
3186
- ##
3187
-
3188
-
3189
- class GithubCacheShellClient(abc.ABC):
3190
- class Entry(abc.ABC): # noqa
3191
- pass
3192
-
3193
- @abc.abstractmethod
3194
- def run_get_entry(self, key: str) -> ta.Optional[Entry]:
3195
- raise NotImplementedError
3196
-
3197
- @abc.abstractmethod
3198
- def download_get_entry(self, entry: Entry, out_file: str) -> None:
3199
- raise NotImplementedError
3200
-
3201
- @abc.abstractmethod
3202
- def upload_cache_entry(self, key: str, in_file: str) -> None:
3203
- raise NotImplementedError
3204
-
3205
-
3206
- #
3207
-
3208
-
3209
- class GithubCacheServiceV1ShellClient(GithubCacheShellClient):
3210
- BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
3211
- AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN' # noqa
3212
-
3213
- KEY_SUFFIX_ENV_KEY = 'GITHUB_RUN_ID'
3214
-
3215
- CACHE_VERSION: ta.ClassVar[int] = 1
3216
-
3217
- #
3218
-
3219
- def __init__(
3220
- self,
3221
- *,
3222
- base_url: ta.Optional[str] = None,
3223
- auth_token: ta.Optional[str] = None,
3224
-
3225
- key_prefix: ta.Optional[str] = None,
3226
- key_suffix: ta.Optional[str] = None,
3227
- ) -> None:
3228
- super().__init__()
3229
-
3230
- #
3231
-
3232
- if base_url is None:
3233
- base_url = os.environ[self.BASE_URL_ENV_KEY]
3234
- service_url = GithubCacheServiceV1.get_service_url(base_url)
3235
-
3236
- if auth_token is None:
3237
- auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
3238
-
3239
- self._curl = GithubServiceCurlClient(
3240
- service_url,
3241
- auth_token,
3242
- api_version=GithubCacheServiceV1.API_VERSION,
3243
- )
3244
-
3245
- #
3246
-
3247
- self._key_prefix = key_prefix
3248
-
3249
- if key_suffix is None:
3250
- key_suffix = os.environ[self.KEY_SUFFIX_ENV_KEY]
3251
- self._key_suffix = check.non_empty_str(key_suffix)
3252
-
3253
- #
3254
-
3255
- KEY_PART_SEPARATOR = '--'
3256
-
3257
- def fix_key(self, s: str) -> str:
3258
- return self.KEY_PART_SEPARATOR.join([
3259
- *([self._key_prefix] if self._key_prefix else []),
3260
- s,
3261
- self._key_suffix,
3262
- ])
3263
-
3264
- #
3265
-
3266
- @dc.dataclass(frozen=True)
3267
- class Entry(GithubCacheShellClient.Entry):
3268
- artifact: GithubCacheServiceV1.ArtifactCacheEntry
3269
-
3270
- #
3271
-
3272
- def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
3273
- fixed_key = self.fix_key(key)
3274
-
3275
- qp = dict(
3276
- keys=fixed_key,
3277
- version=str(self.CACHE_VERSION),
3278
- )
3279
-
3280
- return self._curl.build_cmd(
3281
- 'GET',
3282
- shlex.quote('?'.join([
3283
- 'cache',
3284
- '&'.join([
3285
- f'{k}={urllib.parse.quote_plus(v)}'
3286
- for k, v in qp.items()
3287
- ]),
3288
- ])),
3289
- )
3290
-
3291
- def run_get_entry(self, key: str) -> ta.Optional[Entry]:
3292
- fixed_key = self.fix_key(key)
3293
- curl_cmd = self.build_get_entry_curl_cmd(fixed_key)
3294
-
3295
- obj = self._curl.run_json_cmd(
3296
- curl_cmd,
3297
- success_status_codes=[200, 204],
3298
- )
3299
- if obj is None:
3300
- return None
3301
-
3302
- return self.Entry(GithubCacheServiceV1.dataclass_from_json(
3303
- GithubCacheServiceV1.ArtifactCacheEntry,
3304
- obj,
3305
- ))
3306
-
3307
- #
3308
-
3309
- def build_download_get_entry_cmd(self, entry: Entry, out_file: str) -> ShellCmd:
3310
- return ShellCmd(' '.join([
3311
- 'aria2c',
3312
- '-x', '4',
3313
- '-o', out_file,
3314
- check.non_empty_str(entry.artifact.archive_location),
3315
- ]))
3316
-
3317
- def download_get_entry(self, entry: GithubCacheShellClient.Entry, out_file: str) -> None:
3318
- dl_cmd = self.build_download_get_entry_cmd(
3319
- check.isinstance(entry, GithubCacheServiceV1ShellClient.Entry),
3320
- out_file,
3321
- )
3322
- dl_cmd.run(subprocesses.check_call)
3323
-
3324
- #
3325
-
3326
- def upload_cache_entry(self, key: str, in_file: str) -> None:
3327
- fixed_key = self.fix_key(key)
3328
-
3329
- check.state(os.path.isfile(in_file))
3330
-
3331
- file_size = os.stat(in_file).st_size
3332
-
3333
- #
3334
-
3335
- reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
3336
- key=fixed_key,
3337
- cache_size=file_size,
3338
- version=str(self.CACHE_VERSION),
3339
- )
3340
- reserve_cmd = self._curl.build_post_json_cmd(
3341
- 'caches',
3342
- GithubCacheServiceV1.dataclass_to_json(reserve_req),
3343
- )
3344
- reserve_resp_obj: ta.Any = check.not_none(self._curl.run_json_cmd(
3345
- reserve_cmd,
3346
- success_status_codes=[201],
3347
- ))
3348
- reserve_resp = GithubCacheServiceV1.dataclass_from_json( # noqa
3349
- GithubCacheServiceV1.ReserveCacheResponse,
3350
- reserve_resp_obj,
3351
- )
3352
- cache_id = check.isinstance(reserve_resp.cache_id, int)
3353
-
3354
- #
3582
+ manifests = json.loads(m.decode('utf-8'))
3583
+ manifest = check.single(manifests)
3584
+ tag = check.non_empty_str(check.single(manifest['RepoTags']))
3585
+ return tag
3355
3586
 
3356
- tmp_file = make_temp_file()
3357
3587
 
3358
- print(f'{file_size=}')
3359
- num_written = 0
3360
- chunk_size = 32 * 1024 * 1024
3361
- for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
3362
- ofs = i * chunk_size
3363
- sz = min(chunk_size, file_size - ofs)
3588
+ def read_docker_tar_image_id(tar_file: str) -> str:
3589
+ with tarfile.open(tar_file) as tf:
3590
+ with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
3591
+ i = mf.read()
3364
3592
 
3365
- patch_cmd = self._curl.build_cmd(
3366
- 'PATCH',
3367
- f'caches/{cache_id}',
3368
- content_type='application/octet-stream',
3369
- headers={
3370
- 'Content-Range': f'bytes {ofs}-{ofs + sz - 1}/*',
3371
- },
3372
- )
3593
+ index = json.loads(i.decode('utf-8'))
3594
+ manifest = check.single(index['manifests'])
3595
+ image_id = check.non_empty_str(manifest['digest'])
3596
+ return image_id
3373
3597
 
3374
- #
3375
3598
 
3376
- # patch_data_cmd = dc.replace(patch_cmd, s=' | '.join([
3377
- # f'dd if={in_file} bs={chunk_size} skip={i} count=1 status=none',
3378
- # f'{patch_cmd.s} --data-binary -',
3379
- # ]))
3380
- # print(f'{patch_data_cmd.s=}')
3381
- # patch_result = self._curl.run_cmd(patch_data_cmd, raise_=True)
3599
+ ##
3382
3600
 
3383
- #
3384
3601
 
3385
- with open(in_file, 'rb') as f:
3386
- f.seek(ofs)
3387
- buf = f.read(sz)
3388
- with open(tmp_file, 'wb') as f:
3389
- f.write(buf)
3390
- num_written += len(buf)
3391
- print(f'{num_written=}')
3392
- patch_data_cmd = dc.replace(patch_cmd, s=f'{patch_cmd.s} --data-binary @{tmp_file}')
3393
- print(f'{patch_data_cmd.s=}')
3394
- patch_result = self._curl.run_cmd(patch_data_cmd, raise_=True)
3602
+ async def is_docker_image_present(image: str) -> bool:
3603
+ out = await asyncio_subprocesses.check_output(
3604
+ 'docker',
3605
+ 'images',
3606
+ '--format', 'json',
3607
+ image,
3608
+ )
3609
+
3610
+ out_s = out.decode('utf-8').strip()
3611
+ if not out_s:
3612
+ return False
3395
3613
 
3396
- #
3614
+ json.loads(out_s) # noqa
3615
+ return True
3397
3616
 
3398
- check.equal(patch_result.status_code, 204)
3399
- ofs += sz
3400
3617
 
3401
- #
3618
+ async def pull_docker_image(
3619
+ image: str,
3620
+ ) -> None:
3621
+ await asyncio_subprocesses.check_call(
3622
+ 'docker',
3623
+ 'pull',
3624
+ image,
3625
+ )
3402
3626
 
3403
- commit_req = GithubCacheServiceV1.CommitCacheRequest(
3404
- size=file_size,
3405
- )
3406
- commit_cmd = self._curl.build_post_json_cmd(
3407
- f'caches/{cache_id}',
3408
- GithubCacheServiceV1.dataclass_to_json(commit_req),
3409
- )
3410
- commit_result = self._curl.run_cmd(commit_cmd, raise_=True)
3411
- check.equal(commit_result.status_code, 204)
3412
3627
 
3628
+ async def build_docker_image(
3629
+ docker_file: str,
3630
+ *,
3631
+ tag: ta.Optional[str] = None,
3632
+ cwd: ta.Optional[str] = None,
3633
+ run_options: ta.Optional[ta.Sequence[str]] = None,
3634
+ ) -> str:
3635
+ with temp_file_context() as id_file:
3636
+ await asyncio_subprocesses.check_call(
3637
+ 'docker',
3638
+ 'build',
3639
+ '-f', os.path.abspath(docker_file),
3640
+ '--iidfile', id_file,
3641
+ *(['--tag', tag] if tag is not None else []),
3642
+ *(run_options or []),
3643
+ '.',
3644
+ **(dict(cwd=cwd) if cwd is not None else {}),
3645
+ )
3413
3646
 
3414
- ##
3647
+ with open(id_file) as f: # noqa
3648
+ image_id = check.single(f.read().strip().splitlines()).strip()
3415
3649
 
3650
+ return image_id
3416
3651
 
3417
- class GithubShellCache(ShellCache):
3418
- def __init__(
3419
- self,
3420
- dir: str, # noqa
3421
- *,
3422
- client: ta.Optional[GithubCacheShellClient] = None,
3423
- ) -> None:
3424
- super().__init__()
3425
3652
 
3426
- self._dir = check.not_none(dir)
3653
+ async def tag_docker_image(image: str, tag: str) -> None:
3654
+ await asyncio_subprocesses.check_call(
3655
+ 'docker',
3656
+ 'tag',
3657
+ image,
3658
+ tag,
3659
+ )
3427
3660
 
3428
- if client is None:
3429
- client = GithubCacheServiceV1ShellClient()
3430
- self._client: GithubCacheShellClient = client
3431
3661
 
3432
- self._local = DirectoryFileCache(self._dir)
3662
+ async def delete_docker_tag(tag: str) -> None:
3663
+ await asyncio_subprocesses.check_call(
3664
+ 'docker',
3665
+ 'rmi',
3666
+ tag,
3667
+ )
3433
3668
 
3434
- def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
3435
- local_file = self._local.get_cache_file_path(key)
3436
- if os.path.exists(local_file):
3437
- return ShellCmd(f'cat {shlex.quote(local_file)}')
3438
3669
 
3439
- if (entry := self._client.run_get_entry(key)) is None:
3440
- return None
3670
+ ##
3441
3671
 
3442
- tmp_file = self._local.format_incomplete_file(local_file)
3443
- try:
3444
- self._client.download_get_entry(entry, tmp_file)
3445
3672
 
3446
- os.replace(tmp_file, local_file)
3673
+ async def save_docker_tar_cmd(
3674
+ image: str,
3675
+ output_cmd: ShellCmd,
3676
+ ) -> None:
3677
+ cmd = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
3678
+ await cmd.run(asyncio_subprocesses.check_call)
3447
3679
 
3448
- except BaseException: # noqa
3449
- os.unlink(tmp_file)
3450
3680
 
3451
- raise
3681
+ async def save_docker_tar(
3682
+ image: str,
3683
+ tar_file: str,
3684
+ ) -> None:
3685
+ return await save_docker_tar_cmd(
3686
+ image,
3687
+ ShellCmd(f'cat > {shlex.quote(tar_file)}'),
3688
+ )
3452
3689
 
-         return ShellCmd(f'cat {shlex.quote(local_file)}')

-     class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
-         def __init__(
-                 self,
-                 owner: 'GithubShellCache',
-                 key: str,
-                 tmp_file: str,
-                 local_file: str,
-         ) -> None:
-             super().__init__()
+ #

-             self._owner = owner
-             self._key = key
-             self._tmp_file = tmp_file
-             self._local_file = local_file

-         @property
-         def cmd(self) -> ShellCmd:
-             return ShellCmd(f'cat > {shlex.quote(self._tmp_file)}')
+ async def load_docker_tar_cmd(
+         input_cmd: ShellCmd,
+ ) -> str:
+     cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')

-         def _commit(self) -> None:
-             os.replace(self._tmp_file, self._local_file)
+     out = (await cmd.run(asyncio_subprocesses.check_output)).decode()

-             self._owner._client.upload_cache_entry(self._key, self._local_file)  # noqa
+     line = check.single(out.strip().splitlines())
+     loaded = line.partition(':')[2].strip()
+     return loaded

-         def _abort(self) -> None:
-             os.unlink(self._tmp_file)

-     def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
-         local_file = self._local.get_cache_file_path(key, make_dirs=True)
-         return self._PutFileCmdContext(
-             self,
-             key,
-             self._local.format_incomplete_file(local_file),
-             local_file,
-         )
+ async def load_docker_tar(
+         tar_file: str,
+ ) -> str:
+     return await load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
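`load_docker_tar_cmd` recovers the loaded image reference by parsing `docker load`'s single line of output, e.g. 'Loaded image: repo/name:tag'. Partitioning on the first ':' keeps any further colons (the tag separator) inside the result. The parsing step in isolation, under the assumption of exactly one output line:

def parse_docker_load_line(out: str) -> str:
    lines = out.strip().splitlines()
    if len(lines) != 1:
        raise ValueError(f'expected one line, got: {out!r}')
    # Partition at the *first* ':' - the remainder may itself contain ':'.
    return lines[0].partition(':')[2].strip()


assert parse_docker_load_line('Loaded image: foo/bar:latest\n') == 'foo/bar:latest'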
 
  ########################################
@@ -3494,7 +3714,7 @@ class GithubShellCache(ShellCache):

  class Ci(AsyncExitStacked):
-     FILE_NAME_HASH_LEN = 16
+     KEY_HASH_LEN = 16

      @dc.dataclass(frozen=True)
      class Config:
@@ -3507,6 +3727,8 @@ class Ci(AsyncExitStacked):

          cmd: ShellCmd

+         #
+
          requirements_txts: ta.Optional[ta.Sequence[str]] = None

          always_pull: bool = False
@@ -3514,6 +3736,10 @@ class Ci(AsyncExitStacked):

          no_dependencies: bool = False

+         run_options: ta.Optional[ta.Sequence[str]] = None
+
+         #
+
          def __post_init__(self) -> None:
              check.not_isinstance(self.requirements_txts, str)
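`Ci.Config` now carries per-run docker options end to end (the CLI below assembles them from repeated `-e`/`-v` flags). A hypothetical construction, assuming the `project_dir`/`docker_file`/`compose_file`/`service` fields implied by the `self._cfg.*` accesses in the methods that follow:

cfg = Ci.Config(
    project_dir='.',
    docker_file='Dockerfile',
    compose_file='compose.yml',
    service='omlish-ci',
    cmd=ShellCmd('python3 -m pytest -svv test.py'),
    requirements_txts=['requirements-dev.txt'],
    run_options=['-e', 'CI=1', '-v', '/tmp/cache:/cache'],
)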
 
@@ -3521,42 +3747,15 @@
              self,
              cfg: Config,
              *,
-             shell_cache: ta.Optional[ShellCache] = None,
              file_cache: ta.Optional[FileCache] = None,
      ) -> None:
          super().__init__()

          self._cfg = cfg
-         self._shell_cache = shell_cache
          self._file_cache = file_cache

          #

-     async def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
-         if self._shell_cache is None:
-             return None
-
-         get_cache_cmd = self._shell_cache.get_file_cmd(key)
-         if get_cache_cmd is None:
-             return None
-
-         get_cache_cmd = dc.replace(get_cache_cmd, s=f'{get_cache_cmd.s} | zstd -cd --long')  # noqa
-
-         return await load_docker_tar_cmd(get_cache_cmd)
-
-     async def _save_cache_docker_image(self, key: str, image: str) -> None:
-         if self._shell_cache is None:
-             return
-
-         with self._shell_cache.put_file_cmd(key) as put_cache:
-             put_cache_cmd = put_cache.cmd
-
-             put_cache_cmd = dc.replace(put_cache_cmd, s=f'zstd | {put_cache_cmd.s}')
-
-             await save_docker_tar_cmd(image, put_cache_cmd)
-
-     #
-
      async def _load_docker_image(self, image: str) -> None:
          if not self._cfg.always_pull and (await is_docker_image_present(image)):
              return
@@ -3577,24 +3776,38 @@
          with log_timing_context(f'Load docker image: {image}'):
              await self._load_docker_image(image)

-     @async_cached_nullary
-     async def load_compose_service_dependencies(self) -> None:
-         deps = get_compose_service_dependencies(
-             self._cfg.compose_file,
-             self._cfg.service,
-         )
+     #

-         for dep_image in deps.values():
-             await self.load_docker_image(dep_image)
+     async def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
+         if self._file_cache is None:
+             return None

-     #
+         cache_file = await self._file_cache.get_file(key)
+         if cache_file is None:
+             return None

-     @cached_nullary
-     def docker_file_hash(self) -> str:
-         return build_docker_file_hash(self._cfg.docker_file)[:self.FILE_NAME_HASH_LEN]
+         get_cache_cmd = ShellCmd(f'cat {cache_file} | zstd -cd --long')

-     async def _resolve_ci_image(self) -> str:
-         cache_key = f'ci-{self.docker_file_hash()}'
+         return await load_docker_tar_cmd(get_cache_cmd)
+
+     async def _save_cache_docker_image(self, key: str, image: str) -> None:
+         if self._file_cache is None:
+             return
+
+         with temp_file_context() as tmp_file:
+             write_tmp_cmd = ShellCmd(f'zstd > {tmp_file}')
+
+             await save_docker_tar_cmd(image, write_tmp_cmd)
+
+             await self._file_cache.put_file(key, tmp_file, steal=True)
+
+     #
+
+     async def _resolve_docker_image(
+             self,
+             cache_key: str,
+             build_and_tag: ta.Callable[[str], ta.Awaitable[str]],
+     ) -> str:
          image_tag = f'{self._cfg.service}:{cache_key}'

          if not self._cfg.always_build and (await is_docker_image_present(image_tag)):
@@ -3607,21 +3820,35 @@
              )
              return image_tag

-         image_id = await build_docker_image(
-             self._cfg.docker_file,
-             tag=image_tag,
-             cwd=self._cfg.project_dir,
-         )
+         image_id = await build_and_tag(image_tag)

          await self._save_cache_docker_image(cache_key, image_id)

          return image_tag

+     #
+
+     @cached_nullary
+     def docker_file_hash(self) -> str:
+         return build_docker_file_hash(self._cfg.docker_file)[:self.KEY_HASH_LEN]
+
+     async def _resolve_ci_base_image(self) -> str:
+         async def build_and_tag(image_tag: str) -> str:
+             return await build_docker_image(
+                 self._cfg.docker_file,
+                 tag=image_tag,
+                 cwd=self._cfg.project_dir,
+             )
+
+         cache_key = f'ci-base-{self.docker_file_hash()}'
+
+         return await self._resolve_docker_image(cache_key, build_and_tag)
+
      @async_cached_nullary
-     async def resolve_ci_image(self) -> str:
-         with log_timing_context('Resolve ci image') as ltc:
-             image_id = await self._resolve_ci_image()
-             ltc.set_description(f'Resolve ci image: {image_id}')
+     async def resolve_ci_base_image(self) -> str:
+         with log_timing_context('Resolve ci base image') as ltc:
+             image_id = await self._resolve_ci_base_image()
+             ltc.set_description(f'Resolve ci base image: {image_id}')
              return image_id

      #
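`_resolve_docker_image` factors the resolve cycle - local tag check, cache restore, build, cache save - out of each image flavor; callers provide only a cache key and a `build_and_tag` coroutine and receive a `{service}:{cache_key}` tag. A stripped-down sketch of that control flow, with hypothetical stand-ins for the docker and cache calls (the cache-restore branch is partly elided by the hunk boundaries above and inferred from the surrounding helpers):

import typing as ta


async def resolve_image(
        service: str,
        cache_key: str,
        build_and_tag: ta.Callable[[str], ta.Awaitable[str]],
        *,
        is_present: ta.Callable[[str], ta.Awaitable[bool]],
        load_cached: ta.Callable[[str], ta.Awaitable[ta.Optional[str]]],
        save_cached: ta.Callable[[str, str], ta.Awaitable[None]],
) -> str:
    image_tag = f'{service}:{cache_key}'

    if await is_present(image_tag):  # already built and tagged locally
        return image_tag

    if (image_id := await load_cached(cache_key)) is not None:
        # Restored from the file cache; re-tagging the loaded image is
        # elided in this sketch.
        return image_tag

    image_id = await build_and_tag(image_tag)
    await save_cached(cache_key, image_id)
    return image_tag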
@@ -3635,82 +3862,85 @@

      @cached_nullary
      def requirements_hash(self) -> str:
-         return build_requirements_hash(self.requirements_txts())[:self.FILE_NAME_HASH_LEN]
-
-     async def _resolve_requirements_dir(self) -> str:
-         tar_file_key = f'requirements-{self.docker_file_hash()}-{self.requirements_hash()}'
-         tar_file_name = f'{tar_file_key}.tar'
-
-         temp_dir = tempfile.mkdtemp()
-         self._enter_context(defer(lambda: shutil.rmtree(temp_dir)))  # noqa
-
-         if self._file_cache is not None and (cache_tar_file := self._file_cache.get_file(tar_file_key)):
-             with tarfile.open(cache_tar_file) as tar:
-                 tar.extractall(path=temp_dir)  # noqa
+         return build_requirements_hash(self.requirements_txts())[:self.KEY_HASH_LEN]

-             return temp_dir
-
-         temp_requirements_dir = os.path.join(temp_dir, 'requirements')
-         os.makedirs(temp_requirements_dir)
+     async def _resolve_ci_image(self) -> str:
+         async def build_and_tag(image_tag: str) -> str:
+             base_image = await self.resolve_ci_base_image()
+
+             setup_cmds = [
+                 ' '.join([
+                     'pip install',
+                     '--no-cache-dir',
+                     '--root-user-action ignore',
+                     'uv',
+                 ]),
+                 ' '.join([
+                     'uv pip install',
+                     '--no-cache',
+                     '--index-strategy unsafe-best-match',
+                     '--system',
+                     *[f'-r /project/{rf}' for rf in self._cfg.requirements_txts or []],
+                 ]),
+             ]
+             setup_cmd = ' && '.join(setup_cmds)

-         download_requirements(
-             await self.resolve_ci_image(),
-             temp_requirements_dir,
-             self.requirements_txts(),
-         )
+             docker_file_lines = [
+                 f'FROM {base_image}',
+                 'RUN mkdir /project',
+                 *[f'COPY {rf} /project/{rf}' for rf in self._cfg.requirements_txts or []],
+                 f'RUN {setup_cmd}',
+                 'RUN rm /project/*',
+                 'WORKDIR /project',
+             ]

-         if self._file_cache is not None:
-             temp_tar_file = os.path.join(temp_dir, tar_file_name)
+             with temp_file_context() as docker_file:
+                 with open(docker_file, 'w') as f:  # noqa
+                     f.write('\n'.join(docker_file_lines))

-             with tarfile.open(temp_tar_file, 'w') as tar:
-                 for requirement_file in os.listdir(temp_requirements_dir):
-                     tar.add(
-                         os.path.join(temp_requirements_dir, requirement_file),
-                         arcname=requirement_file,
-                     )
+                 return await build_docker_image(
+                     docker_file,
+                     tag=image_tag,
+                     cwd=self._cfg.project_dir,
+                 )

-             self._file_cache.put_file(os.path.basename(tar_file_key), temp_tar_file)
+         cache_key = f'ci-{self.docker_file_hash()}-{self.requirements_hash()}'

-         return temp_requirements_dir
+         return await self._resolve_docker_image(cache_key, build_and_tag)
3910
 
3676
3911
  @async_cached_nullary
3677
- async def resolve_requirements_dir(self) -> str:
3678
- with log_timing_context('Resolve requirements dir') as ltc:
3679
- requirements_dir = await self._resolve_requirements_dir()
3680
- ltc.set_description(f'Resolve requirements dir: {requirements_dir}')
3681
- return requirements_dir
3912
+ async def resolve_ci_image(self) -> str:
3913
+ with log_timing_context('Resolve ci image') as ltc:
3914
+ image_id = await self._resolve_ci_image()
3915
+ ltc.set_description(f'Resolve ci image: {image_id}')
3916
+ return image_id
3682
3917
 
3683
3918
  #
3684
3919
 
3685
- async def _run_compose_(self) -> None:
3686
- setup_cmds = [
3687
- 'pip install --root-user-action ignore --find-links /requirements --no-index uv',
3688
- (
3689
- 'uv pip install --system --find-links /requirements ' +
3690
- ' '.join(f'-r /project/{rf}' for rf in self._cfg.requirements_txts or [])
3691
- ),
3692
- ]
3693
-
3694
- #
3920
+ @async_cached_nullary
3921
+ async def load_dependencies(self) -> None:
3922
+ deps = get_compose_service_dependencies(
3923
+ self._cfg.compose_file,
3924
+ self._cfg.service,
3925
+ )
3695
3926
 
3696
- ci_cmd = dc.replace(self._cfg.cmd, s=' && '.join([
3697
- *setup_cmds,
3698
- f'({self._cfg.cmd.s})',
3699
- ]))
3927
+ for dep_image in deps.values():
3928
+ await self.load_docker_image(dep_image)
3700
3929
 
3701
- #
3930
+ #
3702
3931
 
3932
+ async def _run_compose_(self) -> None:
3703
3933
  async with DockerComposeRun(DockerComposeRun.Config(
3704
3934
  compose_file=self._cfg.compose_file,
3705
3935
  service=self._cfg.service,
3706
3936
 
3707
3937
  image=await self.resolve_ci_image(),
3708
3938
 
3709
- cmd=ci_cmd,
3939
+ cmd=self._cfg.cmd,
3710
3940
 
3711
3941
  run_options=[
3712
3942
  '-v', f'{os.path.abspath(self._cfg.project_dir)}:/project',
3713
- '-v', f'{os.path.abspath(await self.resolve_requirements_dir())}:/requirements',
3943
+ *(self._cfg.run_options or []),
3714
3944
  ],
3715
3945
 
3716
3946
  cwd=self._cfg.project_dir,
@@ -3726,41 +3956,13 @@ class Ci(AsyncExitStacked):
3726
3956
  #
3727
3957
 
3728
3958
  async def run(self) -> None:
3729
- await self.load_compose_service_dependencies()
3730
-
3731
3959
  await self.resolve_ci_image()
3732
3960
 
3733
- await self.resolve_requirements_dir()
3961
+ await self.load_dependencies()
3734
3962
 
3735
3963
  await self._run_compose()
3736
3964
 
3737
3965
 
3738
- ########################################
3739
- # ../github/cli.py
3740
- """
3741
- See:
3742
- - https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
3743
- """
3744
-
3745
-
3746
- class GithubCli(ArgparseCli):
3747
- @argparse_cmd(
3748
- argparse_arg('key'),
3749
- )
3750
- def get_cache_entry(self) -> None:
3751
- shell_client = GithubCacheServiceV1ShellClient()
3752
- entry = shell_client.run_get_entry(self.args.key)
3753
- if entry is None:
3754
- return
3755
- print(json_dumps_pretty(dc.asdict(entry))) # noqa
3756
-
3757
- @argparse_cmd(
3758
- argparse_arg('repository-id'),
3759
- )
3760
- def list_cache_entries(self) -> None:
3761
- raise NotImplementedError
3762
-
3763
-
3764
3966
  ########################################
3765
3967
  # cli.py
3766
3968
 
@@ -3796,8 +3998,8 @@ class CiCli(ArgparseCli):
3796
3998
  @argparse_cmd(
3797
3999
  accepts_unknown=True,
3798
4000
  )
3799
- def github(self) -> ta.Optional[int]:
3800
- return GithubCli(self.unknown_args).cli_run()
4001
+ async def github(self) -> ta.Optional[int]:
4002
+ return await GithubCli(self.unknown_args).async_cli_run()
3801
4003
 
3802
4004
  #
3803
4005
 
@@ -3808,13 +4010,20 @@ class CiCli(ArgparseCli):
3808
4010
  argparse_arg('--compose-file'),
3809
4011
  argparse_arg('-r', '--requirements-txt', action='append'),
3810
4012
 
3811
- argparse_arg('--github-cache', action='store_true'),
3812
4013
  argparse_arg('--cache-dir'),
3813
4014
 
4015
+ argparse_arg('--github', action='store_true'),
4016
+ argparse_arg('--github-detect', action='store_true'),
4017
+
3814
4018
  argparse_arg('--always-pull', action='store_true'),
3815
4019
  argparse_arg('--always-build', action='store_true'),
3816
4020
 
3817
4021
  argparse_arg('--no-dependencies', action='store_true'),
4022
+
4023
+ argparse_arg('-e', '--env', action='append'),
4024
+ argparse_arg('-v', '--volume', action='append'),
4025
+
4026
+ argparse_arg('cmd', nargs=argparse.REMAINDER),
3818
4027
  )
3819
4028
  async def run(self) -> None:
3820
4029
  project_dir = self.args.project_dir
@@ -3825,6 +4034,11 @@ class CiCli(ArgparseCli):
3825
4034
 
3826
4035
  #
3827
4036
 
4037
+ cmd = ' '.join(self.args.cmd)
4038
+ check.non_empty_str(cmd)
4039
+
4040
+ #
4041
+
3828
4042
  check.state(os.path.isdir(project_dir))
3829
4043
 
3830
4044
  #
@@ -3833,6 +4047,7 @@ class CiCli(ArgparseCli):
3833
4047
  for alt in alts:
3834
4048
  alt_file = os.path.abspath(os.path.join(project_dir, alt))
3835
4049
  if os.path.isfile(alt_file):
4050
+ log.debug('Using %s', alt_file)
3836
4051
  return alt_file
3837
4052
  return None
3838
4053
 
@@ -3866,6 +4081,7 @@ class CiCli(ArgparseCli):
3866
4081
  'requirements-ci.txt',
3867
4082
  ]:
3868
4083
  if os.path.exists(os.path.join(project_dir, rf)):
4084
+ log.debug('Using %s', rf)
3869
4085
  requirements_txts.append(rf)
3870
4086
  else:
3871
4087
  for rf in requirements_txts:
@@ -3873,21 +4089,34 @@ class CiCli(ArgparseCli):
3873
4089
 
3874
4090
  #
3875
4091
 
3876
- shell_cache: ta.Optional[ShellCache] = None
4092
+ github = self.args.github
4093
+ if not github and self.args.github_detect:
4094
+ github = is_in_github_actions()
4095
+ if github:
4096
+ log.debug('Github detected')
4097
+
4098
+ #
4099
+
3877
4100
  file_cache: ta.Optional[FileCache] = None
3878
4101
  if cache_dir is not None:
3879
- if not os.path.exists(cache_dir):
3880
- os.makedirs(cache_dir)
3881
- check.state(os.path.isdir(cache_dir))
3882
-
3883
- directory_file_cache = DirectoryFileCache(cache_dir)
4102
+ cache_dir = os.path.abspath(cache_dir)
4103
+ log.debug('Using cache dir %s', cache_dir)
4104
+ if github:
4105
+ file_cache = GithubFileCache(cache_dir)
4106
+ else:
4107
+ file_cache = DirectoryFileCache(cache_dir)
3884
4108
 
3885
- file_cache = directory_file_cache
4109
+ #
3886
4110
 
3887
- if self.args.github_cache:
3888
- shell_cache = GithubShellCache(cache_dir)
3889
- else:
3890
- shell_cache = DirectoryShellCache(directory_file_cache)
4111
+ run_options: ta.List[str] = []
4112
+ for run_arg, run_arg_vals in [
4113
+ ('-e', self.args.env or []),
4114
+ ('-v', self.args.volume or []),
4115
+ ]:
4116
+ run_options.extend(itertools.chain.from_iterable(
4117
+ [run_arg, run_arg_val]
4118
+ for run_arg_val in run_arg_vals
4119
+ ))
3891
4120
 
3892
4121
  #
3893
4122
 
@@ -3902,18 +4131,16 @@ class CiCli(ArgparseCli):
3902
4131
 
3903
4132
  requirements_txts=requirements_txts,
3904
4133
 
3905
- cmd=ShellCmd(' && '.join([
3906
- 'cd /project',
3907
- 'python3 -m pytest -svv test.py',
3908
- ])),
4134
+ cmd=ShellCmd(cmd),
3909
4135
 
3910
4136
  always_pull=self.args.always_pull,
3911
4137
  always_build=self.args.always_build,
3912
4138
 
3913
4139
  no_dependencies=self.args.no_dependencies,
4140
+
4141
+ run_options=run_options,
3914
4142
  ),
3915
4143
  file_cache=file_cache,
3916
- shell_cache=shell_cache,
3917
4144
  ) as ci:
3918
4145
  await ci.run()
3919
4146
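End to end, the rewired CLI path now reduces to: pick a `FileCache` implementation by environment, flatten the docker run options, and hand both to `Ci`. A condensed sketch of that wiring, assuming the classes defined earlier in this file (`run_ci` is a hypothetical condensation of `CiCli.run`):

async def run_ci(cfg: Ci.Config, cache_dir: str, github: bool) -> None:
    file_cache: FileCache
    if github:
        file_cache = GithubFileCache(cache_dir)
    else:
        file_cache = DirectoryFileCache(cache_dir)

    async with Ci(cfg, file_cache=file_cache) as ci:
        await ci.run()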