omdev 0.0.0.dev212__py3-none-any.whl → 0.0.0.dev214__py3-none-any.whl

omdev/scripts/ci.py CHANGED
@@ -3,7 +3,7 @@
  # @omlish-lite
  # @omlish-script
  # @omlish-amalg-output ../ci/cli.py
- # ruff: noqa: N802 UP006 UP007 UP036
+ # ruff: noqa: N802 TC003 UP006 UP007 UP036
  """
  Inputs:
  - requirements.txt
@@ -12,17 +12,20 @@ Inputs:

  ==

- ./python -m ci run --cache-dir ci/cache ci/project omlish-ci
+ ./python -m omdev.ci run --cache-dir omdev/ci/tests/cache omdev/ci/tests/project omlish-ci
  """
  import abc
  import argparse
  import asyncio
+ import asyncio.base_subprocess
+ import asyncio.subprocess
  import collections
  import contextlib
  import dataclasses as dc
  import datetime
  import functools
  import hashlib
+ import http.client
  import inspect
  import itertools
  import json
@@ -39,6 +42,8 @@ import threading
  import time
  import types
  import typing as ta
+ import urllib.parse
+ import urllib.request


  ########################################
@@ -54,9 +59,12 @@ if sys.version_info < (3, 8):
  # shell.py
  T = ta.TypeVar('T')

- # ../../omlish/lite/cached.py
+ # ../../omlish/asyncs/asyncio/asyncio.py
  CallableT = ta.TypeVar('CallableT', bound=ta.Callable)

+ # ../../omlish/asyncs/asyncio/timeouts.py
+ AwaitableT = ta.TypeVar('AwaitableT', bound=ta.Awaitable)
+
  # ../../omlish/lite/check.py
  SizedT = ta.TypeVar('SizedT', bound=ta.Sized)
  CheckMessage = ta.Union[str, ta.Callable[..., ta.Optional[str]], None]  # ta.TypeAlias
@@ -70,11 +78,40 @@ ArgparseCmdFn = ta.Callable[[], ta.Optional[int]]  # ta.TypeAlias

  # ../../omlish/lite/contextmanagers.py
  ExitStackedT = ta.TypeVar('ExitStackedT', bound='ExitStacked')
+ AsyncExitStackedT = ta.TypeVar('AsyncExitStackedT', bound='AsyncExitStacked')

  # ../../omlish/subprocesses.py
  SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias


+ ########################################
+ # ../consts.py
+
+
+ CI_CACHE_VERSION = 1
+
+
+ ########################################
+ # ../github/env.py
+
+
+ @dc.dataclass(frozen=True)
+ class GithubEnvVar:
+     k: str
+
+     def __call__(self) -> ta.Optional[str]:
+         return os.environ.get(self.k)
+
+
+ GITHUB_ENV_VARS: ta.Set[GithubEnvVar] = set()
+
+
+ def register_github_env_var(k: str) -> GithubEnvVar:
+     ev = GithubEnvVar(k)
+     GITHUB_ENV_VARS.add(ev)
+     return ev
+
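+ # Usage sketch (hypothetical key choice): the returned GithubEnvVar reads the
+ # environment lazily, and registration makes the key enumerable via
+ # GITHUB_ENV_VARS, e.g.:
+ #
+ #   MY_SHA_VAR = register_github_env_var('GITHUB_SHA')  # hypothetical usage
+ #   sha = MY_SHA_VAR()  # None if unset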
+
  ########################################
  # ../shell.py

@@ -113,6 +150,84 @@ class ShellCmd:
          )


+ ########################################
+ # ../../../omlish/asyncs/asyncio/asyncio.py
+
+
+ def asyncio_once(fn: CallableT) -> CallableT:
+     future = None
+
+     @functools.wraps(fn)
+     async def inner(*args, **kwargs):
+         nonlocal future
+         if not future:
+             future = asyncio.create_task(fn(*args, **kwargs))
+         return await future
+
+     return ta.cast(CallableT, inner)
+
+
+ def drain_tasks(loop=None):
+     if loop is None:
+         loop = asyncio.get_running_loop()
+
+     while loop._ready or loop._scheduled:  # noqa
+         loop._run_once()  # noqa
+
+
+ @contextlib.contextmanager
+ def draining_asyncio_tasks() -> ta.Iterator[None]:
+     loop = asyncio.get_running_loop()
+     try:
+         yield
+     finally:
+         if loop is not None:
+             drain_tasks(loop)  # noqa
+
+
+ async def asyncio_wait_concurrent(
+         coros: ta.Iterable[ta.Awaitable[T]],
+         concurrency: ta.Union[int, asyncio.Semaphore],
+         *,
+         return_when: ta.Any = asyncio.FIRST_EXCEPTION,
+ ) -> ta.List[T]:
+     if isinstance(concurrency, asyncio.Semaphore):
+         semaphore = concurrency
+     elif isinstance(concurrency, int):
+         semaphore = asyncio.Semaphore(concurrency)
+     else:
+         raise TypeError(concurrency)
+
+     async def limited_task(coro):
+         async with semaphore:
+             return await coro
+
+     tasks = [asyncio.create_task(limited_task(coro)) for coro in coros]
+     done, pending = await asyncio.wait(tasks, return_when=return_when)
+
+     for task in pending:
+         task.cancel()
+
+     for task in done:
+         if task.exception():
+             raise task.exception()  # type: ignore
+
+     return [task.result() for task in done]
+
+
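+ # Usage sketch ('fetch' is a hypothetical coroutine): run at most 4 awaitables
+ # at once, cancelling the rest and re-raising on the first failure:
+ #
+ #   results = await asyncio_wait_concurrent([fetch(u) for u in urls], 4)
+
+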
+ ########################################
+ # ../../../omlish/asyncs/asyncio/timeouts.py
+
+
+ def asyncio_maybe_timeout(
+         fut: AwaitableT,
+         timeout: ta.Optional[float] = None,
+ ) -> AwaitableT:
+     if timeout is not None:
+         fut = asyncio.wait_for(fut, timeout)  # type: ignore
+     return fut
+
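+ # Usage sketch: wraps the awaitable in asyncio.wait_for when a timeout is
+ # given, and is otherwise a no-op passthrough, e.g.:
+ #
+ #   out = await asyncio_maybe_timeout(proc.wait(), timeout=10.0)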
+
  ########################################
  # ../../../omlish/lite/cached.py

@@ -208,6 +323,17 @@ class Checks:

      #

+     def register_on_raise_breakpoint_if_env_var_set(self, key: str) -> None:
+         import os
+
+         def on_raise(exc: Exception) -> None:  # noqa
+             if key in os.environ:
+                 breakpoint()  # noqa
+
+         self.register_on_raise(on_raise)
+
+     #
+
      def set_exception_factory(self, factory: CheckExceptionFactory) -> None:
          self._exception_factory = factory

@@ -523,6 +649,18 @@ class Checks:

          return v

+     def not_equal(self, v: T, o: ta.Any, msg: CheckMessage = None) -> T:
+         if o == v:
+             self._raise(
+                 ValueError,
+                 'Must not be equal',
+                 msg,
+                 Checks._ArgsKwargs(v, o),
+                 render_fmt='%s == %s',
+             )
+
+         return v
+
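+     # Usage sketch: like the other checks, not_equal returns the value on
+     # success so calls can be inlined, e.g. check.not_equal(port, 0) raises
+     # ValueError when port == 0 ('port' is a hypothetical variable).
+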
      def is_(self, v: T, o: ta.Any, msg: CheckMessage = None) -> T:
          if o is not v:
              self._raise(
@@ -956,170 +1094,183 @@ class ProxyLogHandler(ProxyLogFilterer, logging.Handler):


  ########################################
- # ../cache.py
+ # ../../../omlish/os/files.py


- ##
+ def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
+     if exist_ok:
+         # First try to bump modification time
+         # Implementation note: GNU touch uses the UTIME_NOW option of the utimensat() / futimens() functions.
+         try:
+             os.utime(self, None)
+         except OSError:
+             pass
+         else:
+             return
+     flags = os.O_CREAT | os.O_WRONLY
+     if not exist_ok:
+         flags |= os.O_EXCL
+     fd = os.open(self, flags, mode)
+     os.close(fd)


- @abc.abstractmethod
- class FileCache(abc.ABC):
-     @abc.abstractmethod
-     def get_file(self, key: str) -> ta.Optional[str]:
-         raise NotImplementedError
+ def unlink_if_exists(path: str) -> None:
+     try:
+         os.unlink(path)
+     except FileNotFoundError:
+         pass

-     @abc.abstractmethod
-     def put_file(self, key: str, file_path: str) -> ta.Optional[str]:
-         raise NotImplementedError

+ @contextlib.contextmanager
+ def unlinking_if_exists(path: str) -> ta.Iterator[None]:
+     try:
+         yield
+     finally:
+         unlink_if_exists(path)

- #

+ ########################################
+ # ../cache.py

- class DirectoryFileCache(FileCache):
-     def __init__(self, dir: str) -> None:  # noqa
-         super().__init__()

-         self._dir = dir
+ ##

-     #

-     def get_cache_file_path(
+ @abc.abstractmethod
+ class FileCache(abc.ABC):
+     def __init__(
          self,
-         key: str,
          *,
-         make_dirs: bool = False,
-     ) -> str:
-         if make_dirs:
-             os.makedirs(self._dir, exist_ok=True)
-         return os.path.join(self._dir, key)
-
-     def format_incomplete_file(self, f: str) -> str:
-         return os.path.join(os.path.dirname(f), f'_{os.path.basename(f)}.incomplete')
-
-     #
-
-     def get_file(self, key: str) -> ta.Optional[str]:
-         cache_file_path = self.get_cache_file_path(key)
-         if not os.path.exists(cache_file_path):
-             return None
-         return cache_file_path
-
-     def put_file(self, key: str, file_path: str) -> None:
-         cache_file_path = self.get_cache_file_path(key, make_dirs=True)
-         shutil.copyfile(file_path, cache_file_path)
+         version: int = CI_CACHE_VERSION,
+     ) -> None:
+         super().__init__()

+         check.isinstance(version, int)
+         check.arg(version >= 0)
+         self._version = version

- ##
+     @property
+     def version(self) -> int:
+         return self._version

+     #

- class ShellCache(abc.ABC):
      @abc.abstractmethod
-     def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+     def get_file(self, key: str) -> ta.Awaitable[ta.Optional[str]]:
          raise NotImplementedError

-     class PutFileCmdContext(abc.ABC):
-         def __init__(self) -> None:
-             super().__init__()
-
-             self._state: ta.Literal['open', 'committed', 'aborted'] = 'open'
-
-         @property
-         def state(self) -> ta.Literal['open', 'committed', 'aborted']:
-             return self._state
-
-         #
+     @abc.abstractmethod
+     def put_file(
+             self,
+             key: str,
+             file_path: str,
+             *,
+             steal: bool = False,
+     ) -> ta.Awaitable[str]:
+         raise NotImplementedError

-         @property
-         @abc.abstractmethod
-         def cmd(self) -> ShellCmd:
-             raise NotImplementedError

-         #
+ #

-         def __enter__(self):
-             return self

-         def __exit__(self, exc_type, exc_val, exc_tb):
-             if exc_val is None:
-                 self.commit()
-             else:
-                 self.abort()
+ class DirectoryFileCache(FileCache):
+     def __init__(
+             self,
+             dir: str,  # noqa
+             *,
+             no_create: bool = False,
+             no_purge: bool = False,
+             **kwargs: ta.Any,
+     ) -> None:  # noqa
+         super().__init__(**kwargs)

-         #
+         self._dir = dir
+         self._no_create = no_create
+         self._no_purge = no_purge

-         @abc.abstractmethod
-         def _commit(self) -> None:
-             raise NotImplementedError
+     #

-         def commit(self) -> None:
-             if self._state == 'committed':
-                 return
-             elif self._state == 'open':
-                 self._commit()
-                 self._state = 'committed'
-             else:
-                 raise RuntimeError(self._state)
+     VERSION_FILE_NAME = '.ci-cache-version'

-         #
+     @cached_nullary
+     def setup_dir(self) -> None:
+         version_file = os.path.join(self._dir, self.VERSION_FILE_NAME)

-         @abc.abstractmethod
-         def _abort(self) -> None:
-             raise NotImplementedError
+         if self._no_create:
+             check.state(os.path.isdir(self._dir))

-         def abort(self) -> None:
-             if self._state == 'aborted':
-                 return
-             elif self._state == 'open':
-                 self._abort()
-                 self._state = 'committed'
-             else:
-                 raise RuntimeError(self._state)
+         elif not os.path.isdir(self._dir):
+             os.makedirs(self._dir)
+             with open(version_file, 'w') as f:
+                 f.write(str(self._version))
+             return

-     @abc.abstractmethod
-     def put_file_cmd(self, key: str) -> PutFileCmdContext:
-         raise NotImplementedError
+         with open(version_file) as f:
+             dir_version = int(f.read().strip())

+         if dir_version == self._version:
+             return

- #
+         if self._no_purge:
+             raise RuntimeError(f'{dir_version=} != {self._version=}')

+         dirs = [n for n in sorted(os.listdir(self._dir)) if os.path.isdir(os.path.join(self._dir, n))]
+         if dirs:
+             raise RuntimeError(
+                 f'Refusing to remove stale cache dir {self._dir!r} '
+                 f'due to present directories: {", ".join(dirs)}',
+             )

- class DirectoryShellCache(ShellCache):
-     def __init__(self, dfc: DirectoryFileCache) -> None:
-         super().__init__()
+         for n in sorted(os.listdir(self._dir)):
+             if n.startswith('.'):
+                 continue
+             fp = os.path.join(self._dir, n)
+             check.state(os.path.isfile(fp))
+             log.debug('Purging stale cache file: %s', fp)
+             os.unlink(fp)

-         self._dfc = dfc
+         os.unlink(version_file)

-     def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
-         f = self._dfc.get_file(key)
-         if f is None:
-             return None
-         return ShellCmd(f'cat {shlex.quote(f)}')
+         with open(version_file, 'w') as f:
+             f.write(str(self._version))

-     class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
-         def __init__(self, tf: str, f: str) -> None:
-             super().__init__()
+     #

-             self._tf = tf
-             self._f = f
+     def get_cache_file_path(
+             self,
+             key: str,
+     ) -> str:
+         self.setup_dir()
+         return os.path.join(self._dir, key)

-         @property
-         def cmd(self) -> ShellCmd:
-             return ShellCmd(f'cat > {shlex.quote(self._tf)}')
+     def format_incomplete_file(self, f: str) -> str:
+         return os.path.join(os.path.dirname(f), f'_{os.path.basename(f)}.incomplete')

-         def _commit(self) -> None:
-             os.replace(self._tf, self._f)
+     #

-         def _abort(self) -> None:
-             os.unlink(self._tf)
+     async def get_file(self, key: str) -> ta.Optional[str]:
+         cache_file_path = self.get_cache_file_path(key)
+         if not os.path.exists(cache_file_path):
+             return None
+         return cache_file_path

-     def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
-         f = self._dfc.get_cache_file_path(key, make_dirs=True)
-         return self._PutFileCmdContext(self._dfc.format_incomplete_file(f), f)
+     async def put_file(
+             self,
+             key: str,
+             file_path: str,
+             *,
+             steal: bool = False,
+     ) -> str:
+         cache_file_path = self.get_cache_file_path(key)
+         if steal:
+             shutil.move(file_path, cache_file_path)
+         else:
+             shutil.copyfile(file_path, cache_file_path)
+         return cache_file_path
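+
+     # Usage sketch (hypothetical paths): a versioned on-disk cache whose
+     # setup_dir() purges stale flat files when the stored version changes:
+     #
+     #   cache = DirectoryFileCache('/tmp/ci-cache')  # hypothetical dir
+     #   path = await cache.put_file('k', '/tmp/artifact.tar')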


  ########################################
- # ../github/cacheapi.py
+ # ../github/api.py
  """
  export FILE_SIZE=$(stat --format="%s" $FILE)

@@ -1205,7 +1356,7 @@ class GithubCacheServiceV1:
      @dc.dataclass(frozen=True)
      class ReserveCacheRequest:
          key: str
-         cache_size: ta.Optional[int]
+         cache_size: ta.Optional[int] = None
          version: ta.Optional[str] = None

      @dc.dataclass(frozen=True)
@@ -1320,16 +1471,27 @@ class GithubCacheServiceV2:


  ########################################
- # ../utils.py
+ # ../github/bootstrap.py
+ """
+ sudo rm -rf \
+     /usr/local/.ghcup \
+     /opt/hostedtoolcache \

+ /usr/local/.ghcup 6.4G, 3391250 files
+ /opt/hostedtoolcache 8.0G, 14843980 files
+ /usr/local/lib/android 6.4G, 17251667 files
+ """

- ##

+ GITHUB_ACTIONS_ENV_VAR = register_github_env_var('GITHUB_ACTIONS')

- def make_temp_file() -> str:
-     file_fd, file = tempfile.mkstemp()
-     os.close(file_fd)
-     return file
+
+ def is_in_github_actions() -> bool:
+     return GITHUB_ACTIONS_ENV_VAR() is not None
+
+
+ ########################################
+ # ../utils.py


  ##
@@ -1378,7 +1540,7 @@ class LogTimingContext:
      def __enter__(self) -> 'LogTimingContext':
          self._begin_time = time.time()

-         self._log.log(self._level, f'Begin {self._description}')  # noqa
+         self._log.log(self._level, f'Begin : {self._description}')  # noqa

          return self

@@ -1387,7 +1549,7 @@ class LogTimingContext:

          self._log.log(
              self._level,
-             f'End {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
+             f'End : {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
          )


@@ -1713,6 +1875,33 @@ class ExitStacked:
          return es.enter_context(cm)


+ class AsyncExitStacked:
+     _exit_stack: ta.Optional[contextlib.AsyncExitStack] = None
+
+     async def __aenter__(self: AsyncExitStackedT) -> AsyncExitStackedT:
+         check.state(self._exit_stack is None)
+         es = self._exit_stack = contextlib.AsyncExitStack()
+         await es.__aenter__()
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         if (es := self._exit_stack) is None:
+             return None
+         await self._async_exit_contexts()
+         return await es.__aexit__(exc_type, exc_val, exc_tb)
+
+     async def _async_exit_contexts(self) -> None:
+         pass
+
+     def _enter_context(self, cm: ta.ContextManager[T]) -> T:
+         es = check.not_none(self._exit_stack)
+         return es.enter_context(cm)
+
+     async def _enter_async_context(self, cm: ta.AsyncContextManager[T]) -> T:
+         es = check.not_none(self._exit_stack)
+         return await es.enter_async_context(cm)
+
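+ # Usage sketch: the async counterpart of ExitStacked - subclasses enter
+ # contexts through _enter_async_context inside `async with`; DockerComposeRun
+ # below is converted to subclass it.
+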
+
  ##


@@ -1724,6 +1913,17 @@ def defer(fn: ta.Callable) -> ta.Generator[ta.Callable, None, None]:
          fn()


+ @contextlib.asynccontextmanager
+ async def adefer(fn: ta.Callable) -> ta.AsyncGenerator[ta.Callable, None]:
+     try:
+         yield fn
+     finally:
+         await fn()
+
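+ # Usage sketch: like defer, but fn must be an async callable - it is called
+ # and awaited on exit, as DockerComposeRun.run does below with
+ # adefer(self._cleanup_dependencies).
+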
+
+ ##
+
+
  @contextlib.contextmanager
  def attr_setting(obj, attr, val, *, default=None):  # noqa
      not_set = object()
@@ -1827,55 +2027,576 @@ class JsonLogFormatter(logging.Formatter):


  ########################################
- # ../../../omlish/logs/standard.py
- """
- TODO:
-  - structured
-  - prefixed
-  - debug
-  - optional noisy? noisy will never be lite - some kinda configure_standard callback mechanism?
- """
+ # ../../../omlish/os/temp.py


- ##
+ def make_temp_file(**kwargs: ta.Any) -> str:
+     file_fd, file = tempfile.mkstemp(**kwargs)
+     os.close(file_fd)
+     return file


- STANDARD_LOG_FORMAT_PARTS = [
-     ('asctime', '%(asctime)-15s'),
-     ('process', 'pid=%(process)-6s'),
-     ('thread', 'tid=%(thread)x'),
-     ('levelname', '%(levelname)s'),
-     ('name', '%(name)s'),
-     ('separator', '::'),
-     ('message', '%(message)s'),
- ]
+ @contextlib.contextmanager
+ def temp_file_context(**kwargs: ta.Any) -> ta.Iterator[str]:
+     path = make_temp_file(**kwargs)
+     try:
+         yield path
+     finally:
+         unlink_if_exists(path)


- class StandardLogFormatter(logging.Formatter):
-     @staticmethod
-     def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
-         return ' '.join(v for k, v in parts)
+ @contextlib.contextmanager
+ def temp_dir_context(
+         root_dir: ta.Optional[str] = None,
+         **kwargs: ta.Any,
+ ) -> ta.Iterator[str]:
+     path = tempfile.mkdtemp(dir=root_dir, **kwargs)
+     try:
+         yield path
+     finally:
+         shutil.rmtree(path, ignore_errors=True)

-     converter = datetime.datetime.fromtimestamp  # type: ignore

-     def formatTime(self, record, datefmt=None):
-         ct = self.converter(record.created)  # type: ignore
-         if datefmt:
-             return ct.strftime(datefmt)  # noqa
-         else:
-             t = ct.strftime('%Y-%m-%d %H:%M:%S')
-             return '%s.%03d' % (t, record.msecs)  # noqa
+ @contextlib.contextmanager
+ def temp_named_file_context(
+         root_dir: ta.Optional[str] = None,
+         cleanup: bool = True,
+         **kwargs: ta.Any,
+ ) -> ta.Iterator[tempfile._TemporaryFileWrapper]:  # noqa
+     with tempfile.NamedTemporaryFile(dir=root_dir, delete=False, **kwargs) as f:
+         try:
+             yield f
+         finally:
+             if cleanup:
+                 shutil.rmtree(f.name, ignore_errors=True)
+
+
+ ########################################
+ # ../github/client.py


  ##


- class StandardConfiguredLogHandler(ProxyLogHandler):
-     def __init_subclass__(cls, **kwargs):
-         raise TypeError('This class serves only as a marker and should not be subclassed.')
+ class GithubCacheClient(abc.ABC):
+     class Entry(abc.ABC):  # noqa
+         pass

+     @abc.abstractmethod
+     def get_entry(self, key: str) -> ta.Awaitable[ta.Optional[Entry]]:
+         raise NotImplementedError

- ##
+     @abc.abstractmethod
+     def download_file(self, entry: Entry, out_file: str) -> ta.Awaitable[None]:
+         raise NotImplementedError
+
+     @abc.abstractmethod
+     def upload_file(self, key: str, in_file: str) -> ta.Awaitable[None]:
+         raise NotImplementedError
+
+
+ ##
+
+
+ class GithubCacheServiceV1BaseClient(GithubCacheClient, abc.ABC):
+     BASE_URL_ENV_VAR = register_github_env_var('ACTIONS_CACHE_URL')
+     AUTH_TOKEN_ENV_VAR = register_github_env_var('ACTIONS_RUNTIME_TOKEN')  # noqa
+
+     KEY_SUFFIX_ENV_VAR = register_github_env_var('GITHUB_RUN_ID')
+
+     #
+
+     def __init__(
+             self,
+             *,
+             base_url: ta.Optional[str] = None,
+             auth_token: ta.Optional[str] = None,
+
+             key_prefix: ta.Optional[str] = None,
+             key_suffix: ta.Optional[str] = None,
+
+             cache_version: int = CI_CACHE_VERSION,
+
+             loop: ta.Optional[asyncio.AbstractEventLoop] = None,
+     ) -> None:
+         super().__init__()
+
+         #
+
+         if base_url is None:
+             base_url = check.non_empty_str(self.BASE_URL_ENV_VAR())
+         self._service_url = GithubCacheServiceV1.get_service_url(base_url)
+
+         if auth_token is None:
+             auth_token = self.AUTH_TOKEN_ENV_VAR()
+         self._auth_token = auth_token
+
+         #
+
+         self._key_prefix = key_prefix
+
+         if key_suffix is None:
+             key_suffix = self.KEY_SUFFIX_ENV_VAR()
+         self._key_suffix = check.non_empty_str(key_suffix)
+
+         #
+
+         self._cache_version = check.isinstance(cache_version, int)
+
+         #
+
+         self._given_loop = loop
+
+     #
+
+     def _get_loop(self) -> asyncio.AbstractEventLoop:
+         if (loop := self._given_loop) is not None:
+             return loop
+         return asyncio.get_event_loop()
+
+     #
+
+     def build_request_headers(
+             self,
+             headers: ta.Optional[ta.Mapping[str, str]] = None,
+             *,
+             content_type: ta.Optional[str] = None,
+             json_content: bool = False,
+     ) -> ta.Dict[str, str]:
+         dct = {
+             'Accept': ';'.join([
+                 'application/json',
+                 f'api-version={GithubCacheServiceV1.API_VERSION}',
+             ]),
+         }
+
+         if (auth_token := self._auth_token):
+             dct['Authorization'] = f'Bearer {auth_token}'
+
+         if content_type is None and json_content:
+             content_type = 'application/json'
+         if content_type is not None:
+             dct['Content-Type'] = content_type
+
+         if headers:
+             dct.update(headers)
+
+         return dct
+
+     #
+
+     def load_json_bytes(self, b: ta.Optional[bytes]) -> ta.Optional[ta.Any]:
+         if not b:
+             return None
+         return json.loads(b.decode('utf-8-sig'))
+
+     #
+
+     async def send_url_request(
+             self,
+             req: urllib.request.Request,
+     ) -> ta.Tuple[http.client.HTTPResponse, ta.Optional[bytes]]:
+         def run_sync():
+             with urllib.request.urlopen(req) as resp:  # noqa
+                 body = resp.read()
+             return (resp, body)
+
+         return await self._get_loop().run_in_executor(None, run_sync)  # noqa
+
+     #
+
+     @dc.dataclass()
+     class ServiceRequestError(RuntimeError):
+         status_code: int
+         body: ta.Optional[bytes]
+
+         def __str__(self) -> str:
+             return repr(self)
+
+     async def send_service_request(
+             self,
+             path: str,
+             *,
+             method: ta.Optional[str] = None,
+             headers: ta.Optional[ta.Mapping[str, str]] = None,
+             content_type: ta.Optional[str] = None,
+             content: ta.Optional[bytes] = None,
+             json_content: ta.Optional[ta.Any] = None,
+             success_status_codes: ta.Optional[ta.Container[int]] = None,
+     ) -> ta.Optional[ta.Any]:
+         url = f'{self._service_url}/{path}'
+
+         if content is not None and json_content is not None:
+             raise RuntimeError('Must not pass both content and json_content')
+         elif json_content is not None:
+             content = json_dumps_compact(json_content).encode('utf-8')
+             header_json_content = True
+         else:
+             header_json_content = False
+
+         if method is None:
+             method = 'POST' if content is not None else 'GET'
+
+         #
+
+         req = urllib.request.Request(  # noqa
+             url,
+             method=method,
+             headers=self.build_request_headers(
+                 headers,
+                 content_type=content_type,
+                 json_content=header_json_content,
+             ),
+             data=content,
+         )
+
+         resp, body = await self.send_url_request(req)
+
+         #
+
+         if success_status_codes is not None:
+             is_success = resp.status in success_status_codes
+         else:
+             is_success = (200 <= resp.status <= 300)
+         if not is_success:
+             raise self.ServiceRequestError(resp.status, body)
+
+         return self.load_json_bytes(body)
+
+     #
+
+     KEY_PART_SEPARATOR = '--'
+
+     def fix_key(self, s: str, partial_suffix: bool = False) -> str:
+         return self.KEY_PART_SEPARATOR.join([
+             *([self._key_prefix] if self._key_prefix else []),
+             s,
+             ('' if partial_suffix else self._key_suffix),
+         ])
+
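+     # Key layout sketch: with key_prefix 'ci' and GITHUB_RUN_ID '123',
+     # fix_key('deps') gives 'ci--deps--123', while
+     # fix_key('deps', partial_suffix=True) gives 'ci--deps--' (a prefix
+     # usable for partial key matching).
+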
+     #
+
+     @dc.dataclass(frozen=True)
+     class Entry(GithubCacheClient.Entry):
+         artifact: GithubCacheServiceV1.ArtifactCacheEntry
+
+     #
+
+     def build_get_entry_url_path(self, *keys: str) -> str:
+         qp = dict(
+             keys=','.join(urllib.parse.quote_plus(k) for k in keys),
+             version=str(self._cache_version),
+         )
+
+         return '?'.join([
+             'cache',
+             '&'.join([
+                 f'{k}={v}'
+                 for k, v in qp.items()
+             ]),
+         ])
+
+     GET_ENTRY_SUCCESS_STATUS_CODES = (200, 204)
+
+
+ ##
+
+
+ class GithubCacheServiceV1Client(GithubCacheServiceV1BaseClient):
+     DEFAULT_CONCURRENCY = 4
+
+     DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024
+
+     def __init__(
+             self,
+             *,
+             concurrency: int = DEFAULT_CONCURRENCY,
+             chunk_size: int = DEFAULT_CHUNK_SIZE,
+             **kwargs: ta.Any,
+     ) -> None:
+         super().__init__(**kwargs)
+
+         check.arg(concurrency > 0)
+         self._concurrency = concurrency
+
+         check.arg(chunk_size > 0)
+         self._chunk_size = chunk_size
+
+     #
+
+     async def get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1BaseClient.Entry]:
+         obj = await self.send_service_request(
+             self.build_get_entry_url_path(self.fix_key(key, partial_suffix=True)),
+         )
+         if obj is None:
+             return None
+
+         return self.Entry(GithubCacheServiceV1.dataclass_from_json(
+             GithubCacheServiceV1.ArtifactCacheEntry,
+             obj,
+         ))
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class _DownloadChunk:
+         key: str
+         url: str
+         out_file: str
+         offset: int
+         size: int
+
+     async def _download_file_chunk_urllib(self, chunk: _DownloadChunk) -> None:
+         req = urllib.request.Request(  # noqa
+             chunk.url,
+             headers={
+                 'Range': f'bytes={chunk.offset}-{chunk.offset + chunk.size - 1}',
+             },
+         )
+
+         _, buf_ = await self.send_url_request(req)
+
+         buf = check.not_none(buf_)
+         check.equal(len(buf), chunk.size)
+
+         #
+
+         def write_sync():
+             with open(chunk.out_file, 'r+b') as f:  # noqa
+                 f.seek(chunk.offset, os.SEEK_SET)
+                 f.write(buf)
+
+         await self._get_loop().run_in_executor(None, write_sync)  # noqa
+
+     # async def _download_file_chunk_curl(self, chunk: _DownloadChunk) -> None:
+     #     async with contextlib.AsyncExitStack() as es:
+     #         f = open(chunk.out_file, 'r+b')
+     #         f.seek(chunk.offset, os.SEEK_SET)
+     #
+     #         tmp_file = es.enter_context(temp_file_context())  # noqa
+     #
+     #         proc = await es.enter_async_context(asyncio_subprocesses.popen(
+     #             'curl',
+     #             '-s',
+     #             '-w', '%{json}',
+     #             '-H', f'Range: bytes={chunk.offset}-{chunk.offset + chunk.size - 1}',
+     #             chunk.url,
+     #             output=subprocess.PIPE,
+     #         ))
+     #
+     #         futs = asyncio.gather(
+     #
+     #         )
+     #
+     #         await proc.wait()
+     #
+     #         with open(tmp_file, 'r') as f:  # noqa
+     #             curl_json = tmp_file.read()
+     #
+     #     curl_res = json.loads(curl_json.decode().strip())
+     #
+     #     status_code = check.isinstance(curl_res['response_code'], int)
+     #
+     #     if not (200 <= status_code <= 300):
+     #         raise RuntimeError(f'Curl chunk download {chunk} failed: {curl_res}')
+
+     async def _download_file_chunk(self, chunk: _DownloadChunk) -> None:
+         with log_timing_context(
+                 'Downloading github cache '
+                 f'key {chunk.key} '
+                 f'file {chunk.out_file} '
+                 f'chunk {chunk.offset} - {chunk.offset + chunk.size}',
+         ):
+             await self._download_file_chunk_urllib(chunk)
+
+     async def _download_file(self, entry: GithubCacheServiceV1BaseClient.Entry, out_file: str) -> None:
+         key = check.non_empty_str(entry.artifact.cache_key)
+         url = check.non_empty_str(entry.artifact.archive_location)
+
+         head_resp, _ = await self.send_url_request(urllib.request.Request(  # noqa
+             url,
+             method='HEAD',
+         ))
+         file_size = int(head_resp.headers['Content-Length'])
+
+         #
+
+         with open(out_file, 'xb') as f:  # noqa
+             f.truncate(file_size)
+
+         #
+
+         download_tasks = []
+         chunk_size = self._chunk_size
+         for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
+             offset = i * chunk_size
+             size = min(chunk_size, file_size - offset)
+             chunk = self._DownloadChunk(
+                 key,
+                 url,
+                 out_file,
+                 offset,
+                 size,
+             )
+             download_tasks.append(self._download_file_chunk(chunk))
+
+         await asyncio_wait_concurrent(download_tasks, self._concurrency)
+
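+         # Chunk-count arithmetic, worked through: with the default 32 MiB
+         # chunks, an 80 MiB file yields (80 // 32) + 1 = 3 chunks of 32, 32,
+         # and 16 MiB, each fetched with its own HTTP Range header and written
+         # at its own offset.
+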
+     async def download_file(self, entry: GithubCacheClient.Entry, out_file: str) -> None:
+         entry1 = check.isinstance(entry, self.Entry)
+         with log_timing_context(
+                 'Downloading github cache '
+                 f'key {entry1.artifact.cache_key} '
+                 f'version {entry1.artifact.cache_version} '
+                 f'to {out_file}',
+         ):
+             await self._download_file(entry1, out_file)
+
+     #
+
+     async def _upload_file_chunk(
+             self,
+             key: str,
+             cache_id: int,
+             in_file: str,
+             offset: int,
+             size: int,
+     ) -> None:
+         with log_timing_context(
+                 f'Uploading github cache {key} '
+                 f'file {in_file} '
+                 f'chunk {offset} - {offset + size}',
+         ):
+             with open(in_file, 'rb') as f:  # noqa
+                 f.seek(offset)
+                 buf = f.read(size)
+
+             check.equal(len(buf), size)
+
+             await self.send_service_request(
+                 f'caches/{cache_id}',
+                 method='PATCH',
+                 content_type='application/octet-stream',
+                 headers={
+                     'Content-Range': f'bytes {offset}-{offset + size - 1}/*',
+                 },
+                 content=buf,
+                 success_status_codes=[204],
+             )
+
+     async def _upload_file(self, key: str, in_file: str) -> None:
+         fixed_key = self.fix_key(key)
+
+         check.state(os.path.isfile(in_file))
+
+         file_size = os.stat(in_file).st_size
+
+         #
+
+         reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
+             key=fixed_key,
+             cache_size=file_size,
+             version=str(self._cache_version),
+         )
+         reserve_resp_obj = await self.send_service_request(
+             'caches',
+             json_content=GithubCacheServiceV1.dataclass_to_json(reserve_req),
+             success_status_codes=[201],
+         )
+         reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
+             GithubCacheServiceV1.ReserveCacheResponse,
+             reserve_resp_obj,
+         )
+         cache_id = check.isinstance(reserve_resp.cache_id, int)
+
+         log.debug(f'Github cache file {os.path.basename(in_file)} got id {cache_id}')  # noqa
+
+         #
+
+         upload_tasks = []
+         chunk_size = self._chunk_size
+         for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
+             offset = i * chunk_size
+             size = min(chunk_size, file_size - offset)
+             upload_tasks.append(self._upload_file_chunk(
+                 fixed_key,
+                 cache_id,
+                 in_file,
+                 offset,
+                 size,
+             ))
+
+         await asyncio_wait_concurrent(upload_tasks, self._concurrency)
+
+         #
+
+         commit_req = GithubCacheServiceV1.CommitCacheRequest(
+             size=file_size,
+         )
+         await self.send_service_request(
+             f'caches/{cache_id}',
+             json_content=GithubCacheServiceV1.dataclass_to_json(commit_req),
+             success_status_codes=[204],
+         )
+
+     async def upload_file(self, key: str, in_file: str) -> None:
+         with log_timing_context(
+                 f'Uploading github cache file {os.path.basename(in_file)} '
+                 f'key {key}',
+         ):
+             await self._upload_file(key, in_file)
+
+
+ ########################################
+ # ../../../omlish/logs/standard.py
+ """
+ TODO:
+  - structured
+  - prefixed
+  - debug
+  - optional noisy? noisy will never be lite - some kinda configure_standard callback mechanism?
+ """
+
+
+ ##
+
+
+ STANDARD_LOG_FORMAT_PARTS = [
+     ('asctime', '%(asctime)-15s'),
+     ('process', 'pid=%(process)-6s'),
+     ('thread', 'tid=%(thread)x'),
+     ('levelname', '%(levelname)s'),
+     ('name', '%(name)s'),
+     ('separator', '::'),
+     ('message', '%(message)s'),
+ ]
+
+
+ class StandardLogFormatter(logging.Formatter):
+     @staticmethod
+     def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
+         return ' '.join(v for k, v in parts)
+
+     converter = datetime.datetime.fromtimestamp  # type: ignore
+
+     def formatTime(self, record, datefmt=None):
+         ct = self.converter(record.created)  # type: ignore
+         if datefmt:
+             return ct.strftime(datefmt)  # noqa
+         else:
+             t = ct.strftime('%Y-%m-%d %H:%M:%S')
+             return '%s.%03d' % (t, record.msecs)  # noqa
+
+
+ ##
+
+
+ class StandardConfiguredLogHandler(ProxyLogHandler):
+     def __init_subclass__(cls, **kwargs):
+         raise TypeError('This class serves only as a marker and should not be subclassed.')
+
+
+ ##


  @contextlib.contextmanager
@@ -2276,6 +2997,395 @@ class AbstractAsyncSubprocesses(BaseSubprocesses):
          return ret.decode().strip()


+ ########################################
+ # ../github/cache.py
+
+
+ ##
+
+
+ class GithubFileCache(FileCache):
+     def __init__(
+             self,
+             dir: str,  # noqa
+             *,
+             client: ta.Optional[GithubCacheClient] = None,
+             **kwargs: ta.Any,
+     ) -> None:
+         super().__init__(**kwargs)
+
+         self._dir = check.not_none(dir)
+
+         if client is None:
+             client = GithubCacheServiceV1Client(
+                 cache_version=self._version,
+             )
+         self._client: GithubCacheClient = client
+
+         self._local = DirectoryFileCache(
+             self._dir,
+             version=self._version,
+         )
+
+     async def get_file(self, key: str) -> ta.Optional[str]:
+         local_file = self._local.get_cache_file_path(key)
+         if os.path.exists(local_file):
+             return local_file
+
+         if (entry := await self._client.get_entry(key)) is None:
+             return None
+
+         tmp_file = self._local.format_incomplete_file(local_file)
+         with unlinking_if_exists(tmp_file):
+             await self._client.download_file(entry, tmp_file)
+
+             os.replace(tmp_file, local_file)
+
+         return local_file
+
+     async def put_file(
+             self,
+             key: str,
+             file_path: str,
+             *,
+             steal: bool = False,
+     ) -> str:
+         cache_file_path = await self._local.put_file(
+             key,
+             file_path,
+             steal=steal,
+         )
+
+         await self._client.upload_file(key, cache_file_path)
+
+         return cache_file_path
+
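+ # Flow sketch: get_file prefers the local directory cache and falls back to
+ # the remote Github cache, downloading into an '.incomplete' temp file and
+ # renaming into place; put_file writes locally first, then uploads.
+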
+
+ ########################################
+ # ../github/cli.py
+ """
+ See:
+  - https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
+ """
+
+
+ class GithubCli(ArgparseCli):
+     @argparse_cmd()
+     def list_referenced_env_vars(self) -> None:
+         print('\n'.join(sorted(ev.k for ev in GITHUB_ENV_VARS)))
+
+     @argparse_cmd(
+         argparse_arg('key'),
+     )
+     async def get_cache_entry(self) -> None:
+         client = GithubCacheServiceV1Client()
+         entry = await client.get_entry(self.args.key)
+         if entry is None:
+             return
+         print(json_dumps_pretty(dc.asdict(entry)))  # noqa
+
+     @argparse_cmd(
+         argparse_arg('repository-id'),
+     )
+     def list_cache_entries(self) -> None:
+         raise NotImplementedError
+
+
+ ########################################
+ # ../requirements.py
+ """
+ TODO:
+  - pip compile lol
+   - but still support git+ stuff
+  - req.txt format aware hash
+   - more than just whitespace
+  - pyproject req rewriting
+  - download_requirements bootstrap off prev? not worth the dl?
+   - big deps (torch) change less, probably worth it
+  - follow embedded -r automatically like pyp
+ """
+
+
+ ##
+
+
+ def build_requirements_hash(
+         requirements_txts: ta.Sequence[str],
+ ) -> str:
+     txt_file_contents: dict = {}
+
+     for txt_file in requirements_txts:
+         txt_file_name = os.path.basename(txt_file)
+         check.not_in(txt_file_name, txt_file_contents)
+         with open(txt_file) as f:
+             txt_contents = f.read()
+         txt_file_contents[txt_file_name] = txt_contents
+
+     #
+
+     lines = []
+     for txt_file, txt_contents in sorted(txt_file_contents.items()):
+         txt_hash = sha256_str(txt_contents)
+         lines.append(f'{txt_file}={txt_hash}')
+
+     return sha256_str('\n'.join(lines))
+
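+ # Hash layout sketch (hypothetical digests): each file contributes a line
+ # 'basename=sha256(contents)', the lines are sorted by basename, and the
+ # joined lines are hashed again, e.g. sha256_str('a.txt=<h1>\nb.txt=<h2>').
+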
+
+ ##
+
+
+ def download_requirements(
+         image: str,
+         requirements_dir: str,
+         requirements_txts: ta.Sequence[str],
+ ) -> None:
+     requirements_txt_dir = tempfile.mkdtemp()
+     with defer(lambda: shutil.rmtree(requirements_txt_dir)):
+         for rt in requirements_txts:
+             shutil.copyfile(rt, os.path.join(requirements_txt_dir, os.path.basename(rt)))
+
+         subprocesses.check_call(
+             'docker',
+             'run',
+             '--rm',
+             '-i',
+             '-v', f'{os.path.abspath(requirements_dir)}:/requirements',
+             '-v', f'{requirements_txt_dir}:/requirements_txt',
+             image,
+             'pip',
+             'download',
+             '-d', '/requirements',
+             *itertools.chain.from_iterable(
+                 ['-r', f'/requirements_txt/{os.path.basename(rt)}']
+                 for rt in requirements_txts
+             ),
+         )
+
+
+ ########################################
+ # ../../../omlish/asyncs/asyncio/subprocesses.py
+
+
+ ##
+
+
+ class AsyncioProcessCommunicator:
+     def __init__(
+             self,
+             proc: asyncio.subprocess.Process,
+             loop: ta.Optional[ta.Any] = None,
+             *,
+             log: ta.Optional[logging.Logger] = None,
+     ) -> None:
+         super().__init__()
+
+         if loop is None:
+             loop = asyncio.get_running_loop()
+
+         self._proc = proc
+         self._loop = loop
+         self._log = log
+
+         self._transport: asyncio.base_subprocess.BaseSubprocessTransport = check.isinstance(
+             proc._transport,  # type: ignore  # noqa
+             asyncio.base_subprocess.BaseSubprocessTransport,
+         )
+
+     @property
+     def _debug(self) -> bool:
+         return self._loop.get_debug()
+
+     async def _feed_stdin(self, input: bytes) -> None:  # noqa
+         stdin = check.not_none(self._proc.stdin)
+         try:
+             if input is not None:
+                 stdin.write(input)
+                 if self._debug and self._log is not None:
+                     self._log.debug('%r communicate: feed stdin (%s bytes)', self, len(input))
+
+             await stdin.drain()
+
+         except (BrokenPipeError, ConnectionResetError) as exc:
+             # communicate() ignores BrokenPipeError and ConnectionResetError. write() and drain() can raise these
+             # exceptions.
+             if self._debug and self._log is not None:
+                 self._log.debug('%r communicate: stdin got %r', self, exc)
+
+         if self._debug and self._log is not None:
+             self._log.debug('%r communicate: close stdin', self)
+
+         stdin.close()
+
+     async def _noop(self) -> None:
+         return None
+
+     async def _read_stream(self, fd: int) -> bytes:
+         transport: ta.Any = check.not_none(self._transport.get_pipe_transport(fd))
+
+         if fd == 2:
+             stream = check.not_none(self._proc.stderr)
+         else:
+             check.equal(fd, 1)
+             stream = check.not_none(self._proc.stdout)
+
+         if self._debug and self._log is not None:
+             name = 'stdout' if fd == 1 else 'stderr'
+             self._log.debug('%r communicate: read %s', self, name)
+
+         output = await stream.read()
+
+         if self._debug and self._log is not None:
+             name = 'stdout' if fd == 1 else 'stderr'
+             self._log.debug('%r communicate: close %s', self, name)
+
+         transport.close()
+
+         return output
+
+     class Communication(ta.NamedTuple):
+         stdout: ta.Optional[bytes]
+         stderr: ta.Optional[bytes]
+
+     async def _communicate(
+             self,
+             input: ta.Any = None,  # noqa
+     ) -> Communication:
+         stdin_fut: ta.Any
+         if self._proc.stdin is not None:
+             stdin_fut = self._feed_stdin(input)
+         else:
+             stdin_fut = self._noop()
+
+         stdout_fut: ta.Any
+         if self._proc.stdout is not None:
+             stdout_fut = self._read_stream(1)
+         else:
+             stdout_fut = self._noop()
+
+         stderr_fut: ta.Any
+         if self._proc.stderr is not None:
+             stderr_fut = self._read_stream(2)
+         else:
+             stderr_fut = self._noop()
+
+         stdin_res, stdout_res, stderr_res = await asyncio.gather(stdin_fut, stdout_fut, stderr_fut)
+
+         await self._proc.wait()
+
+         return AsyncioProcessCommunicator.Communication(stdout_res, stderr_res)
+
+     async def communicate(
+             self,
+             input: ta.Any = None,  # noqa
+             timeout: ta.Optional[float] = None,
+     ) -> Communication:
+         return await asyncio_maybe_timeout(self._communicate(input), timeout)
+
+
+ ##
+
+
+ class AsyncioSubprocesses(AbstractAsyncSubprocesses):
+     async def communicate(
+             self,
+             proc: asyncio.subprocess.Process,
+             input: ta.Any = None,  # noqa
+             timeout: ta.Optional[float] = None,
+     ) -> ta.Tuple[ta.Optional[bytes], ta.Optional[bytes]]:
+         return await AsyncioProcessCommunicator(proc).communicate(input, timeout)  # noqa
+
+     #
+
+     @contextlib.asynccontextmanager
+     async def popen(
+             self,
+             *cmd: str,
+             shell: bool = False,
+             timeout: ta.Optional[float] = None,
+             **kwargs: ta.Any,
+     ) -> ta.AsyncGenerator[asyncio.subprocess.Process, None]:
+         fac: ta.Any
+         if shell:
+             fac = functools.partial(
+                 asyncio.create_subprocess_shell,
+                 check.single(cmd),
+             )
+         else:
+             fac = functools.partial(
+                 asyncio.create_subprocess_exec,
+                 *cmd,
+             )
+
+         with self.prepare_and_wrap( *cmd, shell=shell, **kwargs) as (cmd, kwargs):  # noqa
+             proc: asyncio.subprocess.Process = await fac(**kwargs)
+             try:
+                 yield proc
+
+             finally:
+                 await asyncio_maybe_timeout(proc.wait(), timeout)
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class RunOutput:
+         proc: asyncio.subprocess.Process
+         stdout: ta.Optional[bytes]
+         stderr: ta.Optional[bytes]
+
+     async def run(
+             self,
+             *cmd: str,
+             input: ta.Any = None,  # noqa
+             timeout: ta.Optional[float] = None,
+             check: bool = False,  # noqa
+             capture_output: ta.Optional[bool] = None,
+             **kwargs: ta.Any,
+     ) -> RunOutput:
+         if capture_output:
+             kwargs.setdefault('stdout', subprocess.PIPE)
+             kwargs.setdefault('stderr', subprocess.PIPE)
+
+         proc: asyncio.subprocess.Process
+         async with self.popen(*cmd, **kwargs) as proc:
+             stdout, stderr = await self.communicate(proc, input, timeout)
+
+         if check and proc.returncode:
+             raise subprocess.CalledProcessError(
+                 proc.returncode,
+                 cmd,
+                 output=stdout,
+                 stderr=stderr,
+             )
+
+         return self.RunOutput(
+             proc,
+             stdout,
+             stderr,
+         )
+
+     #
+
+     async def check_call(
+             self,
+             *cmd: str,
+             stdout: ta.Any = sys.stderr,
+             **kwargs: ta.Any,
+     ) -> None:
+         with self.prepare_and_wrap(*cmd, stdout=stdout, check=True, **kwargs) as (cmd, kwargs):  # noqa
+             await self.run(*cmd, **kwargs)
+
+     async def check_output(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> bytes:
+         with self.prepare_and_wrap(*cmd, stdout=subprocess.PIPE, check=True, **kwargs) as (cmd, kwargs):  # noqa
+             return check.not_none((await self.run(*cmd, **kwargs)).stdout)
+
+
+ asyncio_subprocesses = AsyncioSubprocesses()
+
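+ # Usage sketch: the module-level singleton mirrors the synchronous
+ # 'subprocesses' helpers, e.g.:
+ #
+ #   out = await asyncio_subprocesses.check_output('docker', '--version')
+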
3388
+
2279
3389
  ########################################
2280
3390
  # ../compose.py
2281
3391
  """
@@ -2307,7 +3417,7 @@ def get_compose_service_dependencies(
2307
3417
  ##
2308
3418
 
2309
3419
 
2310
- class DockerComposeRun(ExitStacked):
3420
+ class DockerComposeRun(AsyncExitStacked):
2311
3421
  @dc.dataclass(frozen=True)
2312
3422
  class Config:
2313
3423
  compose_file: str
@@ -2325,6 +3435,7 @@ class DockerComposeRun(ExitStacked):
2325
3435
 
2326
3436
  #
2327
3437
 
3438
+ no_dependencies: bool = False
2328
3439
  no_dependency_cleanup: bool = False
2329
3440
 
2330
3441
  #
@@ -2343,40 +3454,6 @@ class DockerComposeRun(ExitStacked):
2343
3454
 
2344
3455
  #
2345
3456
 
2346
- @property
2347
- def image_tag(self) -> str:
2348
- pfx = 'sha256:'
2349
- if (image := self._cfg.image).startswith(pfx):
2350
- image = image[len(pfx):]
2351
-
2352
- return f'{self._cfg.service}:{image}'
2353
-
2354
- @cached_nullary
2355
- def tag_image(self) -> str:
2356
- image_tag = self.image_tag
2357
-
2358
- subprocesses.check_call(
2359
- 'docker',
2360
- 'tag',
2361
- self._cfg.image,
2362
- image_tag,
2363
- **self._subprocess_kwargs,
2364
- )
2365
-
2366
- def delete_tag() -> None:
2367
- subprocesses.check_call(
2368
- 'docker',
2369
- 'rmi',
2370
- image_tag,
2371
- **self._subprocess_kwargs,
2372
- )
2373
-
2374
- self._enter_context(defer(delete_tag)) # noqa
2375
-
2376
- return image_tag
2377
-
2378
- #
2379
-
2380
3457
  def _rewrite_compose_dct(self, in_dct: ta.Dict[str, ta.Any]) -> ta.Dict[str, ta.Any]:
2381
3458
  out = dict(in_dct)
2382
3459
 
@@ -2390,29 +3467,28 @@ class DockerComposeRun(ExitStacked):
2390
3467
  in_service: dict = in_services[self._cfg.service]
2391
3468
  out_services[self._cfg.service] = out_service = dict(in_service)
2392
3469
 
2393
- out_service['image'] = self.image_tag
3470
+ out_service['image'] = self._cfg.image
2394
3471
 
2395
3472
  for k in ['build', 'platform']:
2396
3473
  if k in out_service:
2397
- del out_service[k]
2398
-
2399
- out_service['links'] = [
2400
- f'{l}:{l}' if ':' not in l else l
2401
- for l in out_service.get('links', [])
2402
- ]
3474
+ del out_service[k]
2403
3475
 
2404
3476
  #
2405
3477
 
2406
- depends_on = in_service.get('depends_on', [])
3478
+ if not self._cfg.no_dependencies:
3479
+ depends_on = in_service.get('depends_on', [])
2407
3480
 
2408
- for dep_service, in_dep_service_dct in list(in_services.items()):
2409
- if dep_service not in depends_on:
2410
- continue
3481
+ for dep_service, in_dep_service_dct in list(in_services.items()):
3482
+ if dep_service not in depends_on:
3483
+ continue
2411
3484
 
2412
- out_dep_service: dict = dict(in_dep_service_dct)
2413
- out_services[dep_service] = out_dep_service
3485
+ out_dep_service: dict = dict(in_dep_service_dct)
3486
+ out_services[dep_service] = out_dep_service
2414
3487
 
2415
- out_dep_service['ports'] = []
3488
+ out_dep_service['ports'] = []
3489
+
3490
+ else:
3491
+ out_service['depends_on'] = []
2416
3492
 
2417
3493
  #
2418
3494
 
@@ -2438,22 +3514,20 @@ class DockerComposeRun(ExitStacked):
2438
3514
 
2439
3515
  #
2440
3516
 
2441
- def _cleanup_dependencies(self) -> None:
2442
- subprocesses.check_call(
3517
+ async def _cleanup_dependencies(self) -> None:
3518
+ await asyncio_subprocesses.check_call(
2443
3519
  'docker',
2444
3520
  'compose',
2445
3521
  '-f', self.rewrite_compose_file(),
2446
3522
  'down',
2447
3523
  )
2448
3524
 
2449
- def run(self) -> None:
2450
- self.tag_image()
2451
-
3525
+ async def run(self) -> None:
2452
3526
  compose_file = self.rewrite_compose_file()
2453
3527
 
2454
- with contextlib.ExitStack() as es:
2455
- if not self._cfg.no_dependency_cleanup:
2456
- es.enter_context(defer(self._cleanup_dependencies)) # noqa
3528
+ async with contextlib.AsyncExitStack() as es:
3529
+ if not (self._cfg.no_dependencies or self._cfg.no_dependency_cleanup):
3530
+ await es.enter_async_context(adefer(self._cleanup_dependencies)) # noqa
2457
3531
 
2458
3532
  sh_cmd = ' '.join([
2459
3533
  'docker',
@@ -2472,8 +3546,8 @@ class DockerComposeRun(ExitStacked):
2472
3546
 
2473
3547
  run_cmd = dc.replace(self._cfg.cmd, s=sh_cmd)
2474
3548
 
2475
- run_cmd.run(
2476
- subprocesses.check_call,
3549
+ await run_cmd.run(
3550
+ asyncio_subprocesses.check_call,
2477
3551
  **self._subprocess_kwargs,
2478
3552
  )
2479
3553
 
@@ -2505,535 +3579,142 @@ def read_docker_tar_image_tag(tar_file: str) -> str:
2505
3579
  with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
2506
3580
  m = mf.read()
2507
3581
 
2508
- manifests = json.loads(m.decode('utf-8'))
2509
- manifest = check.single(manifests)
2510
- tag = check.non_empty_str(check.single(manifest['RepoTags']))
2511
- return tag
2512
-
2513
-
2514
- def read_docker_tar_image_id(tar_file: str) -> str:
2515
- with tarfile.open(tar_file) as tf:
2516
- with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
2517
- i = mf.read()
2518
-
2519
- index = json.loads(i.decode('utf-8'))
2520
- manifest = check.single(index['manifests'])
2521
- image_id = check.non_empty_str(manifest['digest'])
2522
- return image_id
2523
-
2524
-
2525
- ##
2526
-
2527
-
2528
- def is_docker_image_present(image: str) -> bool:
2529
- out = subprocesses.check_output(
2530
- 'docker',
2531
- 'images',
2532
- '--format', 'json',
2533
- image,
2534
- )
2535
-
2536
- out_s = out.decode('utf-8').strip()
2537
- if not out_s:
2538
- return False
2539
-
2540
- json.loads(out_s) # noqa
2541
- return True
2542
-
2543
-
2544
- def pull_docker_image(
2545
- image: str,
2546
- ) -> None:
2547
- subprocesses.check_call(
2548
- 'docker',
2549
- 'pull',
2550
- image,
2551
- )
2552
-
2553
-
2554
- def build_docker_image(
2555
- docker_file: str,
2556
- *,
2557
- cwd: ta.Optional[str] = None,
2558
- ) -> str:
2559
- id_file = make_temp_file()
2560
- with defer(lambda: os.unlink(id_file)):
2561
- subprocesses.check_call(
2562
- 'docker',
2563
- 'build',
2564
- '-f', os.path.abspath(docker_file),
2565
- '--iidfile', id_file,
2566
- '--squash',
2567
- '.',
2568
- **(dict(cwd=cwd) if cwd is not None else {}),
2569
- )
2570
-
2571
- with open(id_file) as f:
2572
- image_id = check.single(f.read().strip().splitlines()).strip()
2573
-
2574
- return image_id
2575
-
2576
-
2577
- ##
2578
-
2579
-
2580
- def save_docker_tar_cmd(
2581
- image: str,
2582
- output_cmd: ShellCmd,
2583
- ) -> None:
2584
- cmd = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
2585
- cmd.run(subprocesses.check_call)
2586
-
2587
-
2588
- def save_docker_tar(
2589
- image: str,
2590
- tar_file: str,
2591
- ) -> None:
2592
- return save_docker_tar_cmd(
2593
- image,
2594
- ShellCmd(f'cat > {shlex.quote(tar_file)}'),
2595
- )
2596
-
2597
-
2598
- #
2599
-
2600
-
2601
- def load_docker_tar_cmd(
2602
- input_cmd: ShellCmd,
2603
- ) -> str:
2604
- cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')
2605
-
2606
- out = cmd.run(subprocesses.check_output).decode()
2607
-
2608
- line = check.single(out.strip().splitlines())
2609
- loaded = line.partition(':')[2].strip()
2610
- return loaded
2611
-
2612
-
2613
- def load_docker_tar(
2614
- tar_file: str,
2615
- ) -> str:
2616
- return load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
2617
-
2618
-
- ########################################
- # ../github/cache.py
-
-
- ##
-
-
- class GithubV1CacheShellClient:
-     BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
-     AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa
-
-     def __init__(
-             self,
-             *,
-             base_url: ta.Optional[str] = None,
-             auth_token: ta.Optional[str] = None,
-     ) -> None:
-         super().__init__()
-
-         if base_url is None:
-             base_url = os.environ[self.BASE_URL_ENV_KEY]
-         self._base_url = check.non_empty_str(base_url)
-
-         if auth_token is None:
-             auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
-         self._auth_token = auth_token
-
-         self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)
-
-     #
-
-     _MISSING = object()
-
-     def build_headers(
-             self,
-             *,
-             auth_token: ta.Any = _MISSING,
-             content_type: ta.Optional[str] = None,
-     ) -> ta.Dict[str, str]:
-         dct = {
-             'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
-         }
-
-         if auth_token is self._MISSING:
-             auth_token = self._auth_token
-         if auth_token:
-             dct['Authorization'] = f'Bearer {auth_token}'
-
-         if content_type is not None:
-             dct['Content-Type'] = content_type
-
-         return dct
-
-     #
-
-     HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN'  # noqa
-
-     def build_curl_cmd(
-             self,
-             method: str,
-             url: str,
-             *,
-             json_content: bool = False,
-             content_type: ta.Optional[str] = None,
-     ) -> ShellCmd:
-         if content_type is None and json_content:
-             content_type = 'application/json'
-
-         env = {}
-
-         header_auth_token: ta.Optional[str]
-         if self._auth_token:
-             env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
-             header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
-         else:
-             header_auth_token = None
-
-         hdrs = self.build_headers(
-             auth_token=header_auth_token,
-             content_type=content_type,
-         )
-
-         url = f'{self._service_url}/{url}'
-
-         cmd = ' '.join([
-             'curl',
-             '-s',
-             '-X', method,
-             url,
-             *[f'-H "{k}: {v}"' for k, v in hdrs.items()],
-         ])
-
-         return ShellCmd(
-             cmd,
-             env=env,
-         )
-
-     def build_post_json_curl_cmd(
-             self,
-             url: str,
-             obj: ta.Any,
-             **kwargs: ta.Any,
-     ) -> ShellCmd:
-         curl_cmd = self.build_curl_cmd(
-             'POST',
-             url,
-             json_content=True,
-             **kwargs,
-         )
-
-         obj_json = json_dumps_compact(obj)
-
-         return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')
-
-     #
-
-     @dc.dataclass()
-     class CurlError(RuntimeError):
-         status_code: int
-         body: ta.Optional[bytes]
-
-         def __str__(self) -> str:
-             return repr(self)
-
-     @dc.dataclass(frozen=True)
-     class CurlResult:
-         status_code: int
-         body: ta.Optional[bytes]
-
-         def as_error(self) -> 'GithubV1CacheShellClient.CurlError':
-             return GithubV1CacheShellClient.CurlError(
-                 status_code=self.status_code,
-                 body=self.body,
-             )
-
-     def run_curl_cmd(
-             self,
-             cmd: ShellCmd,
-             *,
-             raise_: bool = False,
-     ) -> CurlResult:
-         out_file = make_temp_file()
-         with defer(lambda: os.unlink(out_file)):
-             run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
-
-             out_json_bytes = run_cmd.run(subprocesses.check_output)
-
-             out_json = json.loads(out_json_bytes.decode())
-             status_code = check.isinstance(out_json['response_code'], int)
-
-             with open(out_file, 'rb') as f:
-                 body = f.read()
-
-             result = self.CurlResult(
-                 status_code=status_code,
-                 body=body,
-             )
-
-             if raise_ and (500 <= status_code <= 600):
-                 raise result.as_error()
-
-             return result
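Note: run_curl_cmd above leans on curl's '-w %{json}' write-out (curl 7.70+), which emits curl's transfer variables, including 'response_code', as a single JSON object on stdout while '-o' diverts the response body to a file. A hand-runnable sketch of the same trick (illustrative only, not the package's API):

    import json
    import subprocess
    import tempfile

    def curl_fetch(url: str):
        with tempfile.NamedTemporaryFile() as out:
            # Body goes to the temp file; stdout carries only the write-out JSON.
            res = subprocess.run(
                ['curl', '-s', '-o', out.name, '-w', '%{json}', url],
                stdout=subprocess.PIPE,
                check=True,
            )
            status = int(json.loads(res.stdout.decode())['response_code'])
            return status, out.read()
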
-
-     def run_json_curl_cmd(
-             self,
-             cmd: ShellCmd,
-             *,
-             success_status_codes: ta.Optional[ta.Container[int]] = None,
-     ) -> ta.Optional[ta.Any]:
-         result = self.run_curl_cmd(cmd, raise_=True)
-
-         if success_status_codes is not None:
-             is_success = result.status_code in success_status_codes
-         else:
-             is_success = 200 <= result.status_code < 300
-
-         if is_success:
-             if not (body := result.body):
-                 return None
-             return json.loads(body.decode('utf-8-sig'))
-
-         elif result.status_code == 404:
-             return None
-
-         else:
-             raise result.as_error()
-
-     #
-
-     def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
-         return self.build_curl_cmd(
-             'GET',
-             f'cache?keys={key}',
-         )
-
-     def run_get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1.ArtifactCacheEntry]:
-         curl_cmd = self.build_get_entry_curl_cmd(key)
-
-         obj = self.run_json_curl_cmd(
-             curl_cmd,
-             success_status_codes=[200, 204],
-         )
-         if obj is None:
-             return None
-
-         return GithubCacheServiceV1.dataclass_from_json(
-             GithubCacheServiceV1.ArtifactCacheEntry,
-             obj,
-         )
-
-     #
-
-     def build_download_get_entry_cmd(
-             self,
-             entry: GithubCacheServiceV1.ArtifactCacheEntry,
-             out_file: str,
-     ) -> ShellCmd:
-         return ShellCmd(' '.join([
-             'aria2c',
-             '-x', '4',
-             '-o', out_file,
-             check.non_empty_str(entry.archive_location),
-         ]))
-
-     def download_get_entry(
-             self,
-             entry: GithubCacheServiceV1.ArtifactCacheEntry,
-             out_file: str,
-     ) -> None:
-         dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
-         dl_cmd.run(subprocesses.check_call)
-
-     #
-
-     def upload_cache_entry(
-             self,
-             key: str,
-             in_file: str,
-     ) -> None:
-         check.state(os.path.isfile(in_file))
-
-         file_size = os.stat(in_file).st_size
-
-         reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
-             key=key,
-             cache_size=file_size,
-         )
-         reserve_cmd = self.build_post_json_curl_cmd(
-             'caches',
-             GithubCacheServiceV1.dataclass_to_json(reserve_req),
-         )
-         reserve_resp_obj: ta.Any = check.not_none(self.run_json_curl_cmd(
-             reserve_cmd,
-             success_status_codes=[201],
-         ))
-         reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
-             GithubCacheServiceV1.ReserveCacheResponse,
-             reserve_resp_obj,
-         )
-
-         raise NotImplementedError
-
-
- ##
-
-
- class GithubShellCache(ShellCache):
-     def __init__(
-             self,
-             dir: str,  # noqa
-             *,
-             client: ta.Optional[GithubV1CacheShellClient] = None,
-     ) -> None:
-         super().__init__()
+     manifests = json.loads(m.decode('utf-8'))
+     manifest = check.single(manifests)
+     tag = check.non_empty_str(check.single(manifest['RepoTags']))
+     return tag
 
-         self._dir = check.not_none(dir)
 
-         if client is None:
-             client = GithubV1CacheShellClient()
-         self._client = client
+ def read_docker_tar_image_id(tar_file: str) -> str:
+     with tarfile.open(tar_file) as tf:
+         with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
+             i = mf.read()
 
-         self._local = DirectoryFileCache(self._dir)
+     index = json.loads(i.decode('utf-8'))
+     manifest = check.single(index['manifests'])
+     image_id = check.non_empty_str(manifest['digest'])
+     return image_id
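Note: the readers added here pull metadata straight out of a 'docker save' tarball: 'manifest.json' is the Docker-format manifest list whose single entry's 'RepoTags' carries the tag, and the OCI-layout 'index.json' (written by newer Docker versions) carries the image id as manifests[0]['digest']. A condensed sketch of reading both by hand (illustrative; assumes a tar recent enough to contain 'index.json'):

    import json
    import tarfile

    def tar_metadata(tar_file: str):
        with tarfile.open(tar_file) as tf:
            manifest = json.load(tf.extractfile('manifest.json'))[0]  # Docker manifest list
            index = json.load(tf.extractfile('index.json'))  # OCI image index
        return manifest['RepoTags'][0], index['manifests'][0]['digest']
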
 
-     def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
-         local_file = self._local.get_cache_file_path(key)
-         if os.path.exists(local_file):
-             return ShellCmd(f'cat {shlex.quote(local_file)}')
 
-         if (entry := self._client.run_get_entry(key)) is None:
-             return None
+ ##
 
-         tmp_file = self._local.format_incomplete_file(local_file)
-         try:
-             self._client.download_get_entry(entry, tmp_file)
 
-             os.replace(tmp_file, local_file)
+ async def is_docker_image_present(image: str) -> bool:
+     out = await asyncio_subprocesses.check_output(
+         'docker',
+         'images',
+         '--format', 'json',
+         image,
+     )
 
-         except BaseException:  # noqa
-             os.unlink(tmp_file)
+     out_s = out.decode('utf-8').strip()
+     if not out_s:
+         return False
 
-             raise
+     json.loads(out_s)  # noqa
+     return True
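Note: 'docker images --format json' (the bare 'json' format alias exists in newer Docker CLIs) prints one JSON object per matching image and nothing at all when there is no match, which is what the presence check above keys off; the json.loads call is only a sanity check on the output. Equivalent synchronous sketch (illustrative):

    import subprocess

    def image_present(image: str) -> bool:
        out = subprocess.run(
            ['docker', 'images', '--format', 'json', image],
            stdout=subprocess.PIPE,
            check=True,
        ).stdout.decode().strip()
        return bool(out)  # empty output <=> image absent
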
 
-         return ShellCmd(f'cat {shlex.quote(local_file)}')
 
-     class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
-         def __init__(
-                 self,
-                 owner: 'GithubShellCache',
-                 key: str,
-                 tmp_file: str,
-                 local_file: str,
-         ) -> None:
-             super().__init__()
+ async def pull_docker_image(
+         image: str,
+ ) -> None:
+     await asyncio_subprocesses.check_call(
+         'docker',
+         'pull',
+         image,
+     )
 
-             self._owner = owner
-             self._key = key
-             self._tmp_file = tmp_file
-             self._local_file = local_file
 
-         @property
-         def cmd(self) -> ShellCmd:
-             return ShellCmd(f'cat > {shlex.quote(self._tmp_file)}')
+ async def build_docker_image(
+         docker_file: str,
+         *,
+         tag: ta.Optional[str] = None,
+         cwd: ta.Optional[str] = None,
+         run_options: ta.Optional[ta.Sequence[str]] = None,
+ ) -> str:
+     with temp_file_context() as id_file:
+         await asyncio_subprocesses.check_call(
+             'docker',
+             'build',
+             '-f', os.path.abspath(docker_file),
+             '--iidfile', id_file,
+             *(['--tag', tag] if tag is not None else []),
+             *(run_options or []),
+             '.',
+             **(dict(cwd=cwd) if cwd is not None else {}),
+         )
 
-         def _commit(self) -> None:
-             os.replace(self._tmp_file, self._local_file)
+         with open(id_file) as f:  # noqa
+             image_id = check.single(f.read().strip().splitlines()).strip()
 
-             self._owner._client.upload_cache_entry(self._key, self._local_file)  # noqa
+         return image_id
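Note: '--iidfile' makes 'docker build' write the built image's id to the given path, which is how build_docker_image can return an id even when no '--tag' is supplied. Hypothetical usage of the helper as added above (the Dockerfile path and tag are assumptions, not taken from the package):

    import asyncio

    async def demo() -> None:
        image_id = await build_docker_image(
            'Dockerfile',
            tag='myproj:dev',
            cwd='.',
        )
        print(image_id)  # e.g. 'sha256:...'

    # asyncio.run(demo())
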
 
-         def _abort(self) -> None:
-             os.unlink(self._tmp_file)
 
-     def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
-         local_file = self._local.get_cache_file_path(key, make_dirs=True)
-         return self._PutFileCmdContext(
-             self,
-             key,
-             self._local.format_incomplete_file(local_file),
-             local_file,
-         )
+ async def tag_docker_image(image: str, tag: str) -> None:
+     await asyncio_subprocesses.check_call(
+         'docker',
+         'tag',
+         image,
+         tag,
+     )
 
 
- ########################################
- # ../requirements.py
- """
- TODO:
-  - pip compile lol
-   - but still support git+ stuff
-  - req.txt format aware hash
-   - more than just whitespace
-  - pyproject req rewriting
-  - download_requirements bootstrap off prev? not worth the dl?
-   - big deps (torch) change less, probably worth it
-  - follow embedded -r automatically like pyp
- """
+ async def delete_docker_tag(tag: str) -> None:
+     await asyncio_subprocesses.check_call(
+         'docker',
+         'rmi',
+         tag,
+     )
 
 
  ##
 
 
- def build_requirements_hash(
-         requirements_txts: ta.Sequence[str],
- ) -> str:
-     txt_file_contents: dict = {}
+ async def save_docker_tar_cmd(
+         image: str,
+         output_cmd: ShellCmd,
+ ) -> None:
+     cmd = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
+     await cmd.run(asyncio_subprocesses.check_call)
 
-     for txt_file in requirements_txts:
-         txt_file_name = os.path.basename(txt_file)
-         check.not_in(txt_file_name, txt_file_contents)
-         with open(txt_file) as f:
-             txt_contents = f.read()
-         txt_file_contents[txt_file_name] = txt_contents
 
-     #
+ async def save_docker_tar(
+         image: str,
+         tar_file: str,
+ ) -> None:
+     return await save_docker_tar_cmd(
+         image,
+         ShellCmd(f'cat > {shlex.quote(tar_file)}'),
+     )
 
-     lines = []
-     for txt_file, txt_contents in sorted(txt_file_contents.items()):
-         txt_hash = sha256_str(txt_contents)
-         lines.append(f'{txt_file}={txt_hash}')
 
-     return sha256_str('\n'.join(lines))
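Note: the removed build_requirements_hash keyed caches on requirements content: one 'basename=sha256(contents)' line per file, sorted by file name, then hashed again. A self-contained sketch of that scheme with sha256_str spelled out via hashlib (sorting the formatted lines stands in for sorting the dict items):

    import hashlib
    import os

    def requirements_hash(paths) -> str:
        sha = lambda s: hashlib.sha256(s.encode()).hexdigest()  # noqa
        lines = sorted(
            f'{os.path.basename(p)}={sha(open(p).read())}'
            for p in paths
        )
        return sha('\n'.join(lines))
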
+ #
 
 
- ##
+ async def load_docker_tar_cmd(
+         input_cmd: ShellCmd,
+ ) -> str:
+     cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')
 
+     out = (await cmd.run(asyncio_subprocesses.check_output)).decode()
 
- def download_requirements(
-         image: str,
-         requirements_dir: str,
-         requirements_txts: ta.Sequence[str],
- ) -> None:
-     requirements_txt_dir = tempfile.mkdtemp()
-     with defer(lambda: shutil.rmtree(requirements_txt_dir)):
-         for rt in requirements_txts:
-             shutil.copyfile(rt, os.path.join(requirements_txt_dir, os.path.basename(rt)))
+     line = check.single(out.strip().splitlines())
+     loaded = line.partition(':')[2].strip()
+     return loaded
 
-         subprocesses.check_call(
-             'docker',
-             'run',
-             '--rm',
-             '-i',
-             '-v', f'{os.path.abspath(requirements_dir)}:/requirements',
-             '-v', f'{requirements_txt_dir}:/requirements_txt',
-             image,
-             'pip',
-             'download',
-             '-d', '/requirements',
-             *itertools.chain.from_iterable(
-                 ['-r', f'/requirements_txt/{os.path.basename(rt)}']
-                 for rt in requirements_txts
-             ),
-         )
+
+ async def load_docker_tar(
+         tar_file: str,
+ ) -> str:
+     return await load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
 
 
  ########################################
  # ../ci.py
 
 
- class Ci(ExitStacked):
-     FILE_NAME_HASH_LEN = 16
+ class Ci(AsyncExitStacked):
+     KEY_HASH_LEN = 16
 
      @dc.dataclass(frozen=True)
      class Config:
@@ -3046,9 +3727,18 @@ class Ci(ExitStacked):
 
          cmd: ShellCmd
 
+         #
+
          requirements_txts: ta.Optional[ta.Sequence[str]] = None
 
          always_pull: bool = False
+         always_build: bool = False
+
+         no_dependencies: bool = False
+
+         run_options: ta.Optional[ta.Sequence[str]] = None
+
+         #
 
          def __post_init__(self) -> None:
              check.not_isinstance(self.requirements_txts, str)
@@ -3057,44 +3747,17 @@ class Ci(ExitStacked):
              self,
              cfg: Config,
              *,
-             shell_cache: ta.Optional[ShellCache] = None,
              file_cache: ta.Optional[FileCache] = None,
      ) -> None:
          super().__init__()
 
          self._cfg = cfg
-         self._shell_cache = shell_cache
          self._file_cache = file_cache
 
      #
 
-     def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
-         if self._shell_cache is None:
-             return None
-
-         get_cache_cmd = self._shell_cache.get_file_cmd(key)
-         if get_cache_cmd is None:
-             return None
-
-         get_cache_cmd = dc.replace(get_cache_cmd, s=f'{get_cache_cmd.s} | zstd -cd --long')  # noqa
-
-         return load_docker_tar_cmd(get_cache_cmd)
-
-     def _save_cache_docker_image(self, key: str, image: str) -> None:
-         if self._shell_cache is None:
-             return
-
-         with self._shell_cache.put_file_cmd(key) as put_cache:
-             put_cache_cmd = put_cache.cmd
-
-             put_cache_cmd = dc.replace(put_cache_cmd, s=f'zstd | {put_cache_cmd.s}')
-
-             save_docker_tar_cmd(image, put_cache_cmd)
-
-     #
-
-     def _load_docker_image(self, image: str) -> None:
-         if not self._cfg.always_pull and is_docker_image_present(image):
+     async def _load_docker_image(self, image: str) -> None:
+         if not self._cfg.always_pull and (await is_docker_image_present(image)):
              return
 
          dep_suffix = image
@@ -3102,181 +3765,202 @@ class Ci(ExitStacked):
              dep_suffix = dep_suffix.replace(c, '-')
 
          cache_key = f'docker-{dep_suffix}'
-         if self._load_cache_docker_image(cache_key) is not None:
+         if (await self._load_cache_docker_image(cache_key)) is not None:
              return
 
-         pull_docker_image(image)
+         await pull_docker_image(image)
 
-         self._save_cache_docker_image(cache_key, image)
+         await self._save_cache_docker_image(cache_key, image)
 
-     def load_docker_image(self, image: str) -> None:
+     async def load_docker_image(self, image: str) -> None:
          with log_timing_context(f'Load docker image: {image}'):
-             self._load_docker_image(image)
+             await self._load_docker_image(image)
 
-     @cached_nullary
-     def load_compose_service_dependencies(self) -> None:
-         deps = get_compose_service_dependencies(
-             self._cfg.compose_file,
-             self._cfg.service,
-         )
+     #
 
-         for dep_image in deps.values():
-             self.load_docker_image(dep_image)
+     async def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
+         if self._file_cache is None:
+             return None
 
-     #
+         cache_file = await self._file_cache.get_file(key)
+         if cache_file is None:
+             return None
 
-     def _resolve_ci_image(self) -> str:
-         docker_file_hash = build_docker_file_hash(self._cfg.docker_file)[:self.FILE_NAME_HASH_LEN]
+         get_cache_cmd = ShellCmd(f'cat {cache_file} | zstd -cd --long')
 
-         cache_key = f'ci-{docker_file_hash}'
-         if (cache_image_id := self._load_cache_docker_image(cache_key)) is not None:
-             return cache_image_id
+         return await load_docker_tar_cmd(get_cache_cmd)
 
-         image_id = build_docker_image(
-             self._cfg.docker_file,
-             cwd=self._cfg.project_dir,
-         )
+     async def _save_cache_docker_image(self, key: str, image: str) -> None:
+         if self._file_cache is None:
+             return
 
-         self._save_cache_docker_image(cache_key, image_id)
+         with temp_file_context() as tmp_file:
+             write_tmp_cmd = ShellCmd(f'zstd > {tmp_file}')
 
-         return image_id
+             await save_docker_tar_cmd(image, write_tmp_cmd)
 
-     @cached_nullary
-     def resolve_ci_image(self) -> str:
-         with log_timing_context('Resolve ci image') as ltc:
-             image_id = self._resolve_ci_image()
-             ltc.set_description(f'Resolve ci image: {image_id}')
-             return image_id
+             await self._file_cache.put_file(key, tmp_file, steal=True)
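Note: image caching is now file-based rather than shell-cache-based: saving pipes 'docker save' through zstd into a temp file handed to the FileCache (steal=True, judging from the call site, lets the cache move rather than copy it), and loading pipes 'cat <file> | zstd -cd --long' back into 'docker load'. The round-trip, condensed into one illustrative coroutine built on the helpers added earlier in this diff:

    async def cache_roundtrip(image: str, cache_file: str) -> str:
        # Compress a saved image into the cache file...
        await save_docker_tar_cmd(image, ShellCmd(f'zstd > {cache_file}'))
        # ...then decompress and load it back, returning the loaded reference.
        return await load_docker_tar_cmd(ShellCmd(f'cat {cache_file} | zstd -cd --long'))
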
 
      #
 
-     def _resolve_requirements_dir(self) -> str:
-         requirements_txts = [
-             os.path.join(self._cfg.project_dir, rf)
-             for rf in check.not_none(self._cfg.requirements_txts)
-         ]
+     async def _resolve_docker_image(
+             self,
+             cache_key: str,
+             build_and_tag: ta.Callable[[str], ta.Awaitable[str]],
+     ) -> str:
+         image_tag = f'{self._cfg.service}:{cache_key}'
 
-         requirements_hash = build_requirements_hash(requirements_txts)[:self.FILE_NAME_HASH_LEN]
+         if not self._cfg.always_build and (await is_docker_image_present(image_tag)):
+             return image_tag
+
+         if (cache_image_id := await self._load_cache_docker_image(cache_key)) is not None:
+             await tag_docker_image(
+                 cache_image_id,
+                 image_tag,
+             )
+             return image_tag
 
-         tar_file_key = f'requirements-{requirements_hash}'
-         tar_file_name = f'{tar_file_key}.tar'
+         image_id = await build_and_tag(image_tag)
 
-         temp_dir = tempfile.mkdtemp()
-         self._enter_context(defer(lambda: shutil.rmtree(temp_dir)))  # noqa
+         await self._save_cache_docker_image(cache_key, image_id)
 
-         if self._file_cache is not None and (cache_tar_file := self._file_cache.get_file(tar_file_key)):
-             with tarfile.open(cache_tar_file) as tar:
-                 tar.extractall(path=temp_dir)  # noqa
+         return image_tag
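Note: _resolve_docker_image fixes the lookup order shared by both resolvers below: (1) an existing local '{service}:{cache_key}' tag, unless always_build is set; (2) a file-cache hit, re-tagged locally; (3) an actual build via the build_and_tag callback, whose result is written back to the cache. Condensed sketch (load_from_cache/save_to_cache are stand-ins for the private methods above, and the service name is hypothetical):

    async def resolve(cache_key, build_and_tag, *, always_build=False):
        tag = f'my-service:{cache_key}'
        if not always_build and await is_docker_image_present(tag):
            return tag  # 1. already tagged locally
        if (image_id := await load_from_cache(cache_key)) is not None:
            await tag_docker_image(image_id, tag)  # 2. cache hit -> re-tag
            return tag
        await save_to_cache(cache_key, await build_and_tag(tag))  # 3. build, backfill cache
        return tag
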
 
-             return temp_dir
+     #
 
-         temp_requirements_dir = os.path.join(temp_dir, 'requirements')
-         os.makedirs(temp_requirements_dir)
+     @cached_nullary
+     def docker_file_hash(self) -> str:
+         return build_docker_file_hash(self._cfg.docker_file)[:self.KEY_HASH_LEN]
+
+     async def _resolve_ci_base_image(self) -> str:
+         async def build_and_tag(image_tag: str) -> str:
+             return await build_docker_image(
+                 self._cfg.docker_file,
+                 tag=image_tag,
+                 cwd=self._cfg.project_dir,
+             )
 
-         download_requirements(
-             self.resolve_ci_image(),
-             temp_requirements_dir,
-             requirements_txts,
-         )
+         cache_key = f'ci-base-{self.docker_file_hash()}'
 
-         if self._file_cache is not None:
-             temp_tar_file = os.path.join(temp_dir, tar_file_name)
+         return await self._resolve_docker_image(cache_key, build_and_tag)
 
-             with tarfile.open(temp_tar_file, 'w') as tar:
-                 for requirement_file in os.listdir(temp_requirements_dir):
-                     tar.add(
-                         os.path.join(temp_requirements_dir, requirement_file),
-                         arcname=requirement_file,
-                     )
+     @async_cached_nullary
+     async def resolve_ci_base_image(self) -> str:
+         with log_timing_context('Resolve ci base image') as ltc:
+             image_id = await self._resolve_ci_base_image()
+             ltc.set_description(f'Resolve ci base image: {image_id}')
+             return image_id
 
-             self._file_cache.put_file(os.path.basename(tar_file_key), temp_tar_file)
+     #
 
-         return temp_requirements_dir
+     @cached_nullary
+     def requirements_txts(self) -> ta.Sequence[str]:
+         return [
+             os.path.join(self._cfg.project_dir, rf)
+             for rf in check.not_none(self._cfg.requirements_txts)
+         ]
 
      @cached_nullary
-     def resolve_requirements_dir(self) -> str:
-         with log_timing_context('Resolve requirements dir') as ltc:
-             requirements_dir = self._resolve_requirements_dir()
-             ltc.set_description(f'Resolve requirements dir: {requirements_dir}')
-             return requirements_dir
+     def requirements_hash(self) -> str:
+         return build_requirements_hash(self.requirements_txts())[:self.KEY_HASH_LEN]
+
+     async def _resolve_ci_image(self) -> str:
+         async def build_and_tag(image_tag: str) -> str:
+             base_image = await self.resolve_ci_base_image()
+
+             setup_cmds = [
+                 ' '.join([
+                     'pip install',
+                     '--no-cache-dir',
+                     '--root-user-action ignore',
+                     'uv',
+                 ]),
+                 ' '.join([
+                     'uv pip install',
+                     '--no-cache',
+                     '--index-strategy unsafe-best-match',
+                     '--system',
+                     *[f'-r /project/{rf}' for rf in self._cfg.requirements_txts or []],
+                 ]),
+             ]
+             setup_cmd = ' && '.join(setup_cmds)
+
+             docker_file_lines = [
+                 f'FROM {base_image}',
+                 'RUN mkdir /project',
+                 *[f'COPY {rf} /project/{rf}' for rf in self._cfg.requirements_txts or []],
+                 f'RUN {setup_cmd}',
+                 'RUN rm /project/*',
+                 'WORKDIR /project',
+             ]
+
+             with temp_file_context() as docker_file:
+                 with open(docker_file, 'w') as f:  # noqa
+                     f.write('\n'.join(docker_file_lines))
+
+                 return await build_docker_image(
+                     docker_file,
+                     tag=image_tag,
+                     cwd=self._cfg.project_dir,
+                 )
+
+         cache_key = f'ci-{self.docker_file_hash()}-{self.requirements_hash()}'
+
+         return await self._resolve_docker_image(cache_key, build_and_tag)
+
+     @async_cached_nullary
+     async def resolve_ci_image(self) -> str:
+         with log_timing_context('Resolve ci image') as ltc:
+             image_id = await self._resolve_ci_image()
+             ltc.set_description(f'Resolve ci image: {image_id}')
+             return image_id
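Note: the ci image is now baked as a layer on top of the base image instead of installing from a '/requirements' volume at run time: _resolve_ci_image writes a throwaway Dockerfile that COPYs the requirements files in, installs them with uv, then deletes them. For a hypothetical config with requirements_txts=['requirements.txt'], the generated file would read roughly:

    FROM <base-image-tag>
    RUN mkdir /project
    COPY requirements.txt /project/requirements.txt
    RUN pip install --no-cache-dir --root-user-action ignore uv && uv pip install --no-cache --index-strategy unsafe-best-match --system -r /project/requirements.txt
    RUN rm /project/*
    WORKDIR /project
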
 
      #
 
-     def _run_compose_(self) -> None:
-         setup_cmds = [
-             'pip install --root-user-action ignore --find-links /requirements --no-index uv',
-             (
-                 'uv pip install --system --find-links /requirements ' +
-                 ' '.join(f'-r /project/{rf}' for rf in self._cfg.requirements_txts or [])
-             ),
-         ]
-
-         #
+     @async_cached_nullary
+     async def load_dependencies(self) -> None:
+         deps = get_compose_service_dependencies(
+             self._cfg.compose_file,
+             self._cfg.service,
+         )
 
-         ci_cmd = dc.replace(self._cfg.cmd, s=' && '.join([
-             *setup_cmds,
-             f'({self._cfg.cmd.s})',
-         ]))
+         for dep_image in deps.values():
+             await self.load_docker_image(dep_image)
 
-         #
+     #
 
-         with DockerComposeRun(DockerComposeRun.Config(
+     async def _run_compose_(self) -> None:
+         async with DockerComposeRun(DockerComposeRun.Config(
              compose_file=self._cfg.compose_file,
              service=self._cfg.service,
 
-             image=self.resolve_ci_image(),
+             image=await self.resolve_ci_image(),
 
-             cmd=ci_cmd,
+             cmd=self._cfg.cmd,
 
              run_options=[
                  '-v', f'{os.path.abspath(self._cfg.project_dir)}:/project',
-                 '-v', f'{os.path.abspath(self.resolve_requirements_dir())}:/requirements',
+                 *(self._cfg.run_options or []),
              ],
 
              cwd=self._cfg.project_dir,
+
+             no_dependencies=self._cfg.no_dependencies,
          )) as ci_compose_run:
-             ci_compose_run.run()
+             await ci_compose_run.run()
 
-     def _run_compose(self) -> None:
+     async def _run_compose(self) -> None:
          with log_timing_context('Run compose'):
-             self._run_compose_()
+             await self._run_compose_()
 
      #
 
-     def run(self) -> None:
-         self.load_compose_service_dependencies()
-
-         self.resolve_ci_image()
-
-         self.resolve_requirements_dir()
-
-         self._run_compose()
-
-
- ########################################
- # ../github/cli.py
- """
- See:
-  - https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
- """
-
+     async def run(self) -> None:
+         await self.resolve_ci_image()
 
- class GithubCli(ArgparseCli):
-     @argparse_cmd(
-         argparse_arg('key'),
-     )
-     def get_cache_entry(self) -> None:
-         shell_client = GithubV1CacheShellClient()
-         entry = shell_client.run_get_entry(self.args.key)
-         if entry is None:
-             return
-         print(json_dumps_pretty(dc.asdict(entry)))  # noqa
+         await self.load_dependencies()
 
-     @argparse_cmd(
-         argparse_arg('repository-id'),
-     )
-     def list_cache_entries(self) -> None:
-         raise NotImplementedError
+         await self._run_compose()
 
 
  ########################################
@@ -3314,8 +3998,8 @@ class CiCli(ArgparseCli):
      @argparse_cmd(
          accepts_unknown=True,
      )
-     def github(self) -> ta.Optional[int]:
-         return GithubCli(self.unknown_args).cli_run()
+     async def github(self) -> ta.Optional[int]:
+         return await GithubCli(self.unknown_args).async_cli_run()
 
      #
 
@@ -3325,18 +4009,33 @@ class CiCli(ArgparseCli):
          argparse_arg('--docker-file'),
          argparse_arg('--compose-file'),
          argparse_arg('-r', '--requirements-txt', action='append'),
-         argparse_arg('--github-cache', action='store_true'),
+
          argparse_arg('--cache-dir'),
+
+         argparse_arg('--github', action='store_true'),
+         argparse_arg('--github-detect', action='store_true'),
+
          argparse_arg('--always-pull', action='store_true'),
+         argparse_arg('--always-build', action='store_true'),
+
+         argparse_arg('--no-dependencies', action='store_true'),
+
+         argparse_arg('-e', '--env', action='append'),
+         argparse_arg('-v', '--volume', action='append'),
+
+         argparse_arg('cmd', nargs=argparse.REMAINDER),
      )
      async def run(self) -> None:
          project_dir = self.args.project_dir
          docker_file = self.args.docker_file
          compose_file = self.args.compose_file
-         service = self.args.service
          requirements_txts = self.args.requirements_txt
          cache_dir = self.args.cache_dir
-         always_pull = self.args.always_pull
+
+         #
+
+         cmd = ' '.join(self.args.cmd)
+         check.non_empty_str(cmd)
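Note: the command to run is now passed as trailing arguments (argparse.REMAINDER) instead of the hard-coded pytest line removed further below. Combining the module docstring's sample paths with that previously hard-coded test command, an invocation would look roughly like (hypothetical, untested):

    ./python -m omdev.ci run \
        --cache-dir omdev/ci/tests/cache \
        omdev/ci/tests/project omlish-ci \
        python3 -m pytest -svv test.py
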
 
          #
 
@@ -3348,6 +4047,7 @@ class CiCli(ArgparseCli):
              for alt in alts:
                  alt_file = os.path.abspath(os.path.join(project_dir, alt))
                  if os.path.isfile(alt_file):
+                     log.debug('Using %s', alt_file)
                      return alt_file
              return None
 
@@ -3381,6 +4081,7 @@ class CiCli(ArgparseCli):
                  'requirements-ci.txt',
              ]:
                  if os.path.exists(os.path.join(project_dir, rf)):
+                     log.debug('Using %s', rf)
                      requirements_txts.append(rf)
          else:
              for rf in requirements_txts:
@@ -3388,46 +4089,60 @@ class CiCli(ArgparseCli):
 
          #
 
-         shell_cache: ta.Optional[ShellCache] = None
+         github = self.args.github
+         if not github and self.args.github_detect:
+             github = is_in_github_actions()
+         if github:
+             log.debug('Github detected')
+
+         #
+
          file_cache: ta.Optional[FileCache] = None
          if cache_dir is not None:
-             if not os.path.exists(cache_dir):
-                 os.makedirs(cache_dir)
-             check.state(os.path.isdir(cache_dir))
-
-             directory_file_cache = DirectoryFileCache(cache_dir)
+             cache_dir = os.path.abspath(cache_dir)
+             log.debug('Using cache dir %s', cache_dir)
+             if github:
+                 file_cache = GithubFileCache(cache_dir)
+             else:
+                 file_cache = DirectoryFileCache(cache_dir)
 
-             file_cache = directory_file_cache
+         #
 
-             if self.args.github_cache:
-                 shell_cache = GithubShellCache(cache_dir)
-             else:
-                 shell_cache = DirectoryShellCache(directory_file_cache)
+         run_options: ta.List[str] = []
+         for run_arg, run_arg_vals in [
+             ('-e', self.args.env or []),
+             ('-v', self.args.volume or []),
+         ]:
+             run_options.extend(itertools.chain.from_iterable(
+                 [run_arg, run_arg_val]
+                 for run_arg_val in run_arg_vals
+             ))
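Note: each repeated '-e/--env' and '-v/--volume' flag is flattened into a docker-style option pair and forwarded through Ci.Config.run_options to the compose run. Self-contained illustration of the flattening (argument values are hypothetical):

    import itertools

    env, volume = ['A=1', 'B=2'], ['/h:/c']
    run_options = list(itertools.chain.from_iterable(
        [flag, val]
        for flag, vals in [('-e', env), ('-v', volume)]
        for val in vals
    ))
    assert run_options == ['-e', 'A=1', '-e', 'B=2', '-v', '/h:/c']
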
 
          #
 
-         with Ci(
+         async with Ci(
              Ci.Config(
                  project_dir=project_dir,
 
                  docker_file=docker_file,
 
                  compose_file=compose_file,
-                 service=service,
+                 service=self.args.service,
 
                  requirements_txts=requirements_txts,
 
-                 cmd=ShellCmd(' && '.join([
-                     'cd /project',
-                     'python3 -m pytest -svv test.py',
-                 ])),
+                 cmd=ShellCmd(cmd),
+
+                 always_pull=self.args.always_pull,
+                 always_build=self.args.always_build,
+
+                 no_dependencies=self.args.no_dependencies,
 
-                 always_pull=always_pull,
+                 run_options=run_options,
              ),
              file_cache=file_cache,
-             shell_cache=shell_cache,
          ) as ci:
-             ci.run()
+             await ci.run()
 
 
  async def _async_main() -> ta.Optional[int]: