omdev 0.0.0.dev212__py3-none-any.whl → 0.0.0.dev213__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omdev/cc/cdeps.py +34 -1
- omdev/cc/cdeps.toml +19 -2
- omdev/cc/cli.py +13 -1
- omdev/ci/ci.py +71 -48
- omdev/ci/cli.py +12 -6
- omdev/ci/compose.py +26 -55
- omdev/ci/docker.py +35 -16
- omdev/ci/github/cache.py +153 -184
- omdev/ci/github/cacheapi.py +1 -1
- omdev/ci/github/cli.py +2 -2
- omdev/ci/github/curl.py +209 -0
- omdev/git/shallow.py +1 -1
- omdev/scripts/ci.py +936 -448
- omdev/scripts/interp.py +23 -0
- omdev/scripts/pyproject.py +23 -0
- omdev/tokens/tokenizert.py +1 -3
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev213.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev213.dist-info}/RECORD +22 -21
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev213.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev213.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev213.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev213.dist-info}/top_level.txt +0 -0
omdev/scripts/ci.py
CHANGED
@@ -17,6 +17,8 @@ Inputs:
 import abc
 import argparse
 import asyncio
+import asyncio.base_subprocess
+import asyncio.subprocess
 import collections
 import contextlib
 import dataclasses as dc
@@ -39,6 +41,7 @@ import threading
 import time
 import types
 import typing as ta
+import urllib.parse
 
 
 ########################################
@@ -54,6 +57,9 @@ if sys.version_info < (3, 8):
 # shell.py
 T = ta.TypeVar('T')
 
+# ../../omlish/asyncs/asyncio/timeouts.py
+AwaitableT = ta.TypeVar('AwaitableT', bound=ta.Awaitable)
+
 # ../../omlish/lite/cached.py
 CallableT = ta.TypeVar('CallableT', bound=ta.Callable)
 
@@ -70,6 +76,7 @@ ArgparseCmdFn = ta.Callable[[], ta.Optional[int]]  # ta.TypeAlias
 
 # ../../omlish/lite/contextmanagers.py
 ExitStackedT = ta.TypeVar('ExitStackedT', bound='ExitStacked')
+AsyncExitStackedT = ta.TypeVar('AsyncExitStackedT', bound='AsyncExitStacked')
 
 # ../../omlish/subprocesses.py
 SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias
@@ -113,6 +120,19 @@ class ShellCmd:
         )
 
 
+########################################
+# ../../../omlish/asyncs/asyncio/timeouts.py
+
+
+def asyncio_maybe_timeout(
+        fut: AwaitableT,
+        timeout: ta.Optional[float] = None,
+) -> AwaitableT:
+    if timeout is not None:
+        fut = asyncio.wait_for(fut, timeout)  # type: ignore
+    return fut
+
+
 ########################################
 # ../../../omlish/lite/cached.py
 
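Note: the vendored asyncio_maybe_timeout helper simply defers to asyncio.wait_for when (and only when) a timeout is given. A minimal standalone sketch of the same behavior; the demo harness below is illustrative and not part of the package:

    import asyncio
    import typing as ta

    AwaitableT = ta.TypeVar('AwaitableT', bound=ta.Awaitable)


    def asyncio_maybe_timeout(
            fut: AwaitableT,
            timeout: ta.Optional[float] = None,
    ) -> AwaitableT:
        # Wrap only when a timeout is requested; otherwise pass the awaitable through unchanged.
        if timeout is not None:
            fut = asyncio.wait_for(fut, timeout)  # type: ignore
        return fut


    async def _demo() -> None:
        await asyncio_maybe_timeout(asyncio.sleep(0))  # no timeout: runs to completion
        try:
            await asyncio_maybe_timeout(asyncio.sleep(10), timeout=0.01)
        except asyncio.TimeoutError:
            pass  # the wrapped awaitable timed out after 0.01s


    asyncio.run(_demo())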
@@ -208,6 +228,17 @@ class Checks:
 
     #
 
+    def register_on_raise_breakpoint_if_env_var_set(self, key: str) -> None:
+        import os
+
+        def on_raise(exc: Exception) -> None:  # noqa
+            if key in os.environ:
+                breakpoint()  # noqa
+
+        self.register_on_raise(on_raise)
+
+    #
+
     def set_exception_factory(self, factory: CheckExceptionFactory) -> None:
         self._exception_factory = factory
 
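Note: the new register_on_raise_breakpoint_if_env_var_set hook checks the environment variable at raise time, not at registration time, so it can be registered unconditionally. A sketch of the same pattern outside the Checks class, using a hypothetical module-level registry:

    import os
    import typing as ta

    _on_raise_fns: ta.List[ta.Callable[[Exception], None]] = []


    def register_on_raise(fn: ta.Callable[[Exception], None]) -> None:
        _on_raise_fns.append(fn)


    def register_on_raise_breakpoint_if_env_var_set(key: str) -> None:
        def on_raise(exc: Exception) -> None:
            if key in os.environ:  # evaluated when a check fails, not at registration
                breakpoint()

        register_on_raise(on_raise)


    def fail(msg: str) -> None:
        exc = ValueError(msg)
        for fn in _on_raise_fns:
            fn(exc)
        raise exc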
@@ -523,6 +554,18 @@ class Checks:
 
         return v
 
+    def not_equal(self, v: T, o: ta.Any, msg: CheckMessage = None) -> T:
+        if o == v:
+            self._raise(
+                ValueError,
+                'Must not be equal',
+                msg,
+                Checks._ArgsKwargs(v, o),
+                render_fmt='%s == %s',
+            )
+
+        return v
+
     def is_(self, v: T, o: ta.Any, msg: CheckMessage = None) -> T:
         if o is not v:
             self._raise(
@@ -1205,7 +1248,7 @@ class GithubCacheServiceV1:
     @dc.dataclass(frozen=True)
     class ReserveCacheRequest:
         key: str
-        cache_size: ta.Optional[int]
+        cache_size: ta.Optional[int] = None
         version: ta.Optional[str] = None
 
     @dc.dataclass(frozen=True)
@@ -1713,6 +1756,33 @@ class ExitStacked:
         return es.enter_context(cm)
 
 
+class AsyncExitStacked:
+    _exit_stack: ta.Optional[contextlib.AsyncExitStack] = None
+
+    async def __aenter__(self: AsyncExitStackedT) -> AsyncExitStackedT:
+        check.state(self._exit_stack is None)
+        es = self._exit_stack = contextlib.AsyncExitStack()
+        await es.__aenter__()
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        if (es := self._exit_stack) is None:
+            return None
+        await self._async_exit_contexts()
+        return await es.__aexit__(exc_type, exc_val, exc_tb)
+
+    async def _async_exit_contexts(self) -> None:
+        pass
+
+    def _enter_context(self, cm: ta.ContextManager[T]) -> T:
+        es = check.not_none(self._exit_stack)
+        return es.enter_context(cm)
+
+    async def _enter_async_context(self, cm: ta.AsyncContextManager[T]) -> T:
+        es = check.not_none(self._exit_stack)
+        return await es.enter_async_context(cm)
+
+
 ##
 
 
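Note: AsyncExitStacked mirrors the existing ExitStacked base, swapping contextlib.ExitStack for contextlib.AsyncExitStack so a subclass can own async context managers for its full lifetime. A condensed usage sketch (the Worker/resource names are hypothetical, and the real class's check.* guards are elided):

    import asyncio
    import contextlib
    import typing as ta


    class AsyncExitStacked:
        _exit_stack: ta.Optional[contextlib.AsyncExitStack] = None

        async def __aenter__(self):
            es = self._exit_stack = contextlib.AsyncExitStack()
            await es.__aenter__()
            return self

        async def __aexit__(self, exc_type, exc_val, exc_tb):
            if (es := self._exit_stack) is None:
                return None
            return await es.__aexit__(exc_type, exc_val, exc_tb)

        async def _enter_async_context(self, cm):
            return await self._exit_stack.enter_async_context(cm)


    @contextlib.asynccontextmanager
    async def resource(log: list):
        log.append('open')
        try:
            yield 'handle'
        finally:
            log.append('close')


    class Worker(AsyncExitStacked):
        async def setup(self, log: list) -> None:
            # Anything entered here is released when the instance's
            # 'async with' block exits.
            self.handle = await self._enter_async_context(resource(log))


    async def _demo() -> None:
        log: list = []
        async with Worker() as w:
            await w.setup(log)
        assert log == ['open', 'close']


    asyncio.run(_demo())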
@@ -1724,6 +1794,17 @@ def defer(fn: ta.Callable) -> ta.Generator[ta.Callable, None, None]:
         fn()
 
 
+@contextlib.asynccontextmanager
+async def adefer(fn: ta.Callable) -> ta.AsyncGenerator[ta.Callable, None]:
+    try:
+        yield fn
+    finally:
+        await fn()
+
+
+##
+
+
 @contextlib.contextmanager
 def attr_setting(obj, attr, val, *, default=None):  # noqa
     not_set = object()
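Note: adefer is the async counterpart of the existing defer() helper — the supplied callable is awaited on the way out of the block. This is what later lets DockerComposeRun schedule its async dependency cleanup on an AsyncExitStack. A short standalone sketch:

    import asyncio
    import contextlib
    import typing as ta


    @contextlib.asynccontextmanager
    async def adefer(fn: ta.Callable) -> ta.AsyncGenerator[ta.Callable, None]:
        try:
            yield fn
        finally:
            await fn()  # awaited on exit, even on error


    async def _demo() -> None:
        events: list = []

        async def cleanup() -> None:
            events.append('cleaned up')

        async with contextlib.AsyncExitStack() as es:
            await es.enter_async_context(adefer(cleanup))
            events.append('body')

        assert events == ['body', 'cleaned up']


    asyncio.run(_demo())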
@@ -2277,154 +2358,619 @@ class AbstractAsyncSubprocesses(BaseSubprocesses):
 
 
 ########################################
-# ../
-"""
-TODO:
- - fix rmi - only when not referenced anymore
-"""
+# ../github/curl.py
 
 
 ##
 
 
-
-
-
-
-
+class GithubServiceCurlClient:
+    def __init__(
+            self,
+            service_url: str,
+            auth_token: ta.Optional[str] = None,
+            *,
+            api_version: ta.Optional[str] = None,
+    ) -> None:
+        super().__init__()
 
-
-
+        self._service_url = check.non_empty_str(service_url)
+        self._auth_token = auth_token
+        self._api_version = api_version
 
-
-    for dep_service in service_dct.get('depends_on', []):
-        dep_service_dct = services[dep_service]
-        out[dep_service] = dep_service_dct['image']
+    #
 
-
+    _MISSING = object()
 
+    def build_headers(
+            self,
+            headers: ta.Optional[ta.Mapping[str, str]] = None,
+            *,
+            auth_token: ta.Any = _MISSING,
+            content_type: ta.Optional[str] = None,
+    ) -> ta.Dict[str, str]:
+        dct = {
+            'Accept': ';'.join([
+                'application/json',
+                *([f'api-version={self._api_version}'] if self._api_version else []),
+            ]),
+        }
 
-
+        if auth_token is self._MISSING:
+            auth_token = self._auth_token
+        if auth_token:
+            dct['Authorization'] = f'Bearer {auth_token}'
 
+        if content_type is not None:
+            dct['Content-Type'] = content_type
 
-
-
-    class Config:
-        compose_file: str
-        service: str
+        if headers:
+            dct.update(headers)
 
-
+        return dct
 
-
+    #
 
-
+    HEADER_AUTH_TOKEN_ENV_KEY_PREFIX = '_GITHUB_SERVICE_AUTH_TOKEN'  # noqa
 
-
+    @property
+    def header_auth_token_env_key(self) -> str:
+        return f'{self.HEADER_AUTH_TOKEN_ENV_KEY_PREFIX}_{id(self)}'
 
-
+    def build_cmd(
+            self,
+            method: str,
+            url: str,
+            *,
+            json_content: bool = False,
+            content_type: ta.Optional[str] = None,
+            headers: ta.Optional[ta.Dict[str, str]] = None,
+    ) -> ShellCmd:
+        if content_type is None and json_content:
+            content_type = 'application/json'
 
-
+        env = {}
 
-
+        header_auth_token: ta.Optional[str]
+        if self._auth_token:
+            header_env_key = self.header_auth_token_env_key
+            env[header_env_key] = self._auth_token
+            header_auth_token = f'${header_env_key}'
+        else:
+            header_auth_token = None
 
-
+        built_hdrs = self.build_headers(
+            headers,
+            auth_token=header_auth_token,
+            content_type=content_type,
+        )
 
-
-        check.not_isinstance(self.run_options, str)
+        url = f'{self._service_url}/{url}'
 
-
-
+        cmd = ' '.join([
+            'curl',
+            '-s',
+            '-X', method,
+            url,
+            *[f'-H "{k}: {v}"' for k, v in built_hdrs.items()],
+        ])
 
-
+        return ShellCmd(
+            cmd,
+            env=env,
+        )
 
-
-
-
+    def build_post_json_cmd(
+            self,
+            url: str,
+            obj: ta.Any,
+            **kwargs: ta.Any,
+    ) -> ShellCmd:
+        curl_cmd = self.build_cmd(
+            'POST',
+            url,
+            json_content=True,
+            **kwargs,
+        )
 
-
+        obj_json = json_dumps_compact(obj)
 
-
-    def image_tag(self) -> str:
-        pfx = 'sha256:'
-        if (image := self._cfg.image).startswith(pfx):
-            image = image[len(pfx):]
+        return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')
 
-
+    #
 
-    @
-
-
+    @dc.dataclass()
+    class Error(RuntimeError):
+        status_code: int
+        body: ta.Optional[bytes]
 
-
-
-            'tag',
-            self._cfg.image,
-            image_tag,
-            **self._subprocess_kwargs,
-        )
+        def __str__(self) -> str:
+            return repr(self)
 
-
-
-
-
-            image_tag,
-            **self._subprocess_kwargs,
-        )
+    @dc.dataclass(frozen=True)
+    class Result:
+        status_code: int
+        body: ta.Optional[bytes]
 
-        self
+        def as_error(self) -> 'GithubServiceCurlClient.Error':
+            return GithubServiceCurlClient.Error(
+                status_code=self.status_code,
+                body=self.body,
+            )
 
-
+    def run_cmd(
+            self,
+            cmd: ShellCmd,
+            *,
+            raise_: bool = False,
+            **subprocess_kwargs: ta.Any,
+    ) -> Result:
+        out_file = make_temp_file()
+        with defer(lambda: os.unlink(out_file)):
+            run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
 
-
+            out_json_bytes = run_cmd.run(
+                subprocesses.check_output,
+                **subprocess_kwargs,
+            )
 
-
-
+            out_json = json.loads(out_json_bytes.decode())
+            status_code = check.isinstance(out_json['response_code'], int)
 
-
+            with open(out_file, 'rb') as f:
+                body = f.read()
 
-
-
+            result = self.Result(
+                status_code=status_code,
+                body=body,
+            )
 
-
+        if raise_ and (500 <= status_code <= 600):
+            raise result.as_error()
 
-
-        out_services[self._cfg.service] = out_service = dict(in_service)
+        return result
 
-
+    def run_json_cmd(
+            self,
+            cmd: ShellCmd,
+            *,
+            success_status_codes: ta.Optional[ta.Container[int]] = None,
+    ) -> ta.Optional[ta.Any]:
+        result = self.run_cmd(cmd, raise_=True)
 
-
-
-
+        if success_status_codes is not None:
+            is_success = result.status_code in success_status_codes
+        else:
+            is_success = 200 <= result.status_code < 300
 
-
-
-
-
+        if is_success:
+            if not (body := result.body):
+                return None
+            return json.loads(body.decode('utf-8-sig'))
 
-
+        elif result.status_code == 404:
+            return None
 
-
+        else:
+            raise result.as_error()
 
-            for dep_service, in_dep_service_dct in list(in_services.items()):
-                if dep_service not in depends_on:
-                    continue
 
-
-
|
-
|
2558
|
+
########################################
|
2559
|
+
# ../requirements.py
|
2560
|
+
"""
|
2561
|
+
TODO:
|
2562
|
+
- pip compile lol
|
2563
|
+
- but still support git+ stuff
|
2564
|
+
- req.txt format aware hash
|
2565
|
+
- more than just whitespace
|
2566
|
+
- pyproject req rewriting
|
2567
|
+
- download_requirements bootstrap off prev? not worth the dl?
|
2568
|
+
- big deps (torch) change less, probably worth it
|
2569
|
+
- follow embedded -r automatically like pyp
|
2570
|
+
"""
|
2414
2571
|
|
2415
|
-
out_dep_service['ports'] = []
|
2416
2572
|
|
2417
|
-
|
2573
|
+
##
|
2418
2574
|
|
2419
|
-
return out
|
2420
2575
|
|
2421
|
-
|
2422
|
-
|
2423
|
-
|
2576
|
+
def build_requirements_hash(
|
2577
|
+
requirements_txts: ta.Sequence[str],
|
2578
|
+
) -> str:
|
2579
|
+
txt_file_contents: dict = {}
|
2424
2580
|
|
2425
|
-
|
2581
|
+
for txt_file in requirements_txts:
|
2582
|
+
txt_file_name = os.path.basename(txt_file)
|
2583
|
+
check.not_in(txt_file_name, txt_file_contents)
|
2584
|
+
with open(txt_file) as f:
|
2585
|
+
txt_contents = f.read()
|
2586
|
+
txt_file_contents[txt_file_name] = txt_contents
|
2426
2587
|
|
2427
|
-
|
2588
|
+
#
|
2589
|
+
|
2590
|
+
lines = []
|
2591
|
+
for txt_file, txt_contents in sorted(txt_file_contents.items()):
|
2592
|
+
txt_hash = sha256_str(txt_contents)
|
2593
|
+
lines.append(f'{txt_file}={txt_hash}')
|
2594
|
+
|
2595
|
+
return sha256_str('\n'.join(lines))
|
2596
|
+
|
2597
|
+
|
2598
|
+
##
|
2599
|
+
|
2600
|
+
|
2601
|
+
def download_requirements(
|
2602
|
+
image: str,
|
2603
|
+
requirements_dir: str,
|
2604
|
+
requirements_txts: ta.Sequence[str],
|
2605
|
+
) -> None:
|
2606
|
+
requirements_txt_dir = tempfile.mkdtemp()
|
2607
|
+
with defer(lambda: shutil.rmtree(requirements_txt_dir)):
|
2608
|
+
for rt in requirements_txts:
|
2609
|
+
shutil.copyfile(rt, os.path.join(requirements_txt_dir, os.path.basename(rt)))
|
2610
|
+
|
2611
|
+
subprocesses.check_call(
|
2612
|
+
'docker',
|
2613
|
+
'run',
|
2614
|
+
'--rm',
|
2615
|
+
'-i',
|
2616
|
+
'-v', f'{os.path.abspath(requirements_dir)}:/requirements',
|
2617
|
+
'-v', f'{requirements_txt_dir}:/requirements_txt',
|
2618
|
+
image,
|
2619
|
+
'pip',
|
2620
|
+
'download',
|
2621
|
+
'-d', '/requirements',
|
2622
|
+
*itertools.chain.from_iterable(
|
2623
|
+
['-r', f'/requirements_txt/{os.path.basename(rt)}']
|
2624
|
+
for rt in requirements_txts
|
2625
|
+
),
|
2626
|
+
)
|
2627
|
+
|
2628
|
+
|
+########################################
+# ../../../omlish/asyncs/asyncio/subprocesses.py
+
+
+##
+
+
+class AsyncioProcessCommunicator:
+    def __init__(
+            self,
+            proc: asyncio.subprocess.Process,
+            loop: ta.Optional[ta.Any] = None,
+            *,
+            log: ta.Optional[logging.Logger] = None,
+    ) -> None:
+        super().__init__()
+
+        if loop is None:
+            loop = asyncio.get_running_loop()
+
+        self._proc = proc
+        self._loop = loop
+        self._log = log
+
+        self._transport: asyncio.base_subprocess.BaseSubprocessTransport = check.isinstance(
+            proc._transport,  # type: ignore  # noqa
+            asyncio.base_subprocess.BaseSubprocessTransport,
+        )
+
+    @property
+    def _debug(self) -> bool:
+        return self._loop.get_debug()
+
+    async def _feed_stdin(self, input: bytes) -> None:  # noqa
+        stdin = check.not_none(self._proc.stdin)
+        try:
+            if input is not None:
+                stdin.write(input)
+                if self._debug and self._log is not None:
+                    self._log.debug('%r communicate: feed stdin (%s bytes)', self, len(input))
+
+            await stdin.drain()
+
+        except (BrokenPipeError, ConnectionResetError) as exc:
+            # communicate() ignores BrokenPipeError and ConnectionResetError. write() and drain() can raise these
+            # exceptions.
+            if self._debug and self._log is not None:
+                self._log.debug('%r communicate: stdin got %r', self, exc)
+
+        if self._debug and self._log is not None:
+            self._log.debug('%r communicate: close stdin', self)
+
+        stdin.close()
+
+    async def _noop(self) -> None:
+        return None
+
+    async def _read_stream(self, fd: int) -> bytes:
+        transport: ta.Any = check.not_none(self._transport.get_pipe_transport(fd))
+
+        if fd == 2:
+            stream = check.not_none(self._proc.stderr)
+        else:
+            check.equal(fd, 1)
+            stream = check.not_none(self._proc.stdout)
+
+        if self._debug and self._log is not None:
+            name = 'stdout' if fd == 1 else 'stderr'
+            self._log.debug('%r communicate: read %s', self, name)
+
+        output = await stream.read()
+
+        if self._debug and self._log is not None:
+            name = 'stdout' if fd == 1 else 'stderr'
+            self._log.debug('%r communicate: close %s', self, name)
+
+        transport.close()
+
+        return output
+
+    class Communication(ta.NamedTuple):
+        stdout: ta.Optional[bytes]
+        stderr: ta.Optional[bytes]
+
+    async def _communicate(
+            self,
+            input: ta.Any = None,  # noqa
+    ) -> Communication:
+        stdin_fut: ta.Any
+        if self._proc.stdin is not None:
+            stdin_fut = self._feed_stdin(input)
+        else:
+            stdin_fut = self._noop()
+
+        stdout_fut: ta.Any
+        if self._proc.stdout is not None:
+            stdout_fut = self._read_stream(1)
+        else:
+            stdout_fut = self._noop()
+
+        stderr_fut: ta.Any
+        if self._proc.stderr is not None:
+            stderr_fut = self._read_stream(2)
+        else:
+            stderr_fut = self._noop()
+
+        stdin_res, stdout_res, stderr_res = await asyncio.gather(stdin_fut, stdout_fut, stderr_fut)
+
+        await self._proc.wait()
+
+        return AsyncioProcessCommunicator.Communication(stdout_res, stderr_res)
+
+    async def communicate(
+            self,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+    ) -> Communication:
+        return await asyncio_maybe_timeout(self._communicate(input), timeout)
+
+
+##
+
+
+class AsyncioSubprocesses(AbstractAsyncSubprocesses):
+    async def communicate(
+            self,
+            proc: asyncio.subprocess.Process,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+    ) -> ta.Tuple[ta.Optional[bytes], ta.Optional[bytes]]:
+        return await AsyncioProcessCommunicator(proc).communicate(input, timeout)  # noqa
+
+    #
+
+    @contextlib.asynccontextmanager
+    async def popen(
+            self,
+            *cmd: str,
+            shell: bool = False,
+            timeout: ta.Optional[float] = None,
+            **kwargs: ta.Any,
+    ) -> ta.AsyncGenerator[asyncio.subprocess.Process, None]:
+        fac: ta.Any
+        if shell:
+            fac = functools.partial(
+                asyncio.create_subprocess_shell,
+                check.single(cmd),
+            )
+        else:
+            fac = functools.partial(
+                asyncio.create_subprocess_exec,
+                *cmd,
+            )
+
+        with self.prepare_and_wrap( *cmd, shell=shell, **kwargs) as (cmd, kwargs):  # noqa
+            proc: asyncio.subprocess.Process = await fac(**kwargs)
+            try:
+                yield proc
+
+            finally:
+                await asyncio_maybe_timeout(proc.wait(), timeout)
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class RunOutput:
+        proc: asyncio.subprocess.Process
+        stdout: ta.Optional[bytes]
+        stderr: ta.Optional[bytes]
+
+    async def run(
+            self,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,  # noqa
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> RunOutput:
+        if capture_output:
+            kwargs.setdefault('stdout', subprocess.PIPE)
+            kwargs.setdefault('stderr', subprocess.PIPE)
+
+        proc: asyncio.subprocess.Process
+        async with self.popen(*cmd, **kwargs) as proc:
+            stdout, stderr = await self.communicate(proc, input, timeout)
+
+        if check and proc.returncode:
+            raise subprocess.CalledProcessError(
+                proc.returncode,
+                cmd,
+                output=stdout,
+                stderr=stderr,
+            )
+
+        return self.RunOutput(
+            proc,
+            stdout,
+            stderr,
+        )
+
+    #
+
+    async def check_call(
+            self,
+            *cmd: str,
+            stdout: ta.Any = sys.stderr,
+            **kwargs: ta.Any,
+    ) -> None:
+        with self.prepare_and_wrap(*cmd, stdout=stdout, check=True, **kwargs) as (cmd, kwargs):  # noqa
+            await self.run(*cmd, **kwargs)
+
+    async def check_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bytes:
+        with self.prepare_and_wrap(*cmd, stdout=subprocess.PIPE, check=True, **kwargs) as (cmd, kwargs):  # noqa
+            return check.not_none((await self.run(*cmd, **kwargs)).stdout)
+
+
+asyncio_subprocesses = AsyncioSubprocesses()
+
+
+########################################
+# ../compose.py
+"""
+TODO:
+ - fix rmi - only when not referenced anymore
+"""
+
+
+##
+
+
+def get_compose_service_dependencies(
+        compose_file: str,
+        service: str,
+) -> ta.Dict[str, str]:
+    compose_dct = read_yaml_file(compose_file)
+
+    services = compose_dct['services']
+    service_dct = services[service]
+
+    out = {}
+    for dep_service in service_dct.get('depends_on', []):
+        dep_service_dct = services[dep_service]
+        out[dep_service] = dep_service_dct['image']
+
+    return out
+
+
+##
+
+
+class DockerComposeRun(AsyncExitStacked):
+    @dc.dataclass(frozen=True)
+    class Config:
+        compose_file: str
+        service: str
+
+        image: str
+
+        cmd: ShellCmd
+
+        #
+
+        run_options: ta.Optional[ta.Sequence[str]] = None
+
+        cwd: ta.Optional[str] = None
+
+        #
+
+        no_dependencies: bool = False
+        no_dependency_cleanup: bool = False
+
+        #
+
+        def __post_init__(self) -> None:
+            check.not_isinstance(self.run_options, str)
+
+    def __init__(self, cfg: Config) -> None:
+        super().__init__()
+
+        self._cfg = cfg
+
+        self._subprocess_kwargs = {
+            **(dict(cwd=self._cfg.cwd) if self._cfg.cwd is not None else {}),
+        }
+
+    #
+
+    def _rewrite_compose_dct(self, in_dct: ta.Dict[str, ta.Any]) -> ta.Dict[str, ta.Any]:
+        out = dict(in_dct)
+
+        #
+
+        in_services = in_dct['services']
+        out['services'] = out_services = {}
+
+        #
+
+        in_service: dict = in_services[self._cfg.service]
+        out_services[self._cfg.service] = out_service = dict(in_service)
+
+        out_service['image'] = self._cfg.image
+
+        for k in ['build', 'platform']:
+            if k in out_service:
+                del out_service[k]
+
+        out_service['links'] = [
+            f'{l}:{l}' if ':' not in l else l
+            for l in out_service.get('links', [])
+        ]
+
+        #
+
+        if not self._cfg.no_dependencies:
+            depends_on = in_service.get('depends_on', [])
+
+            for dep_service, in_dep_service_dct in list(in_services.items()):
+                if dep_service not in depends_on:
+                    continue
+
+                out_dep_service: dict = dict(in_dep_service_dct)
+                out_services[dep_service] = out_dep_service
+
+                out_dep_service['ports'] = []
+
+        else:
+            out_service['depends_on'] = []
+            out_service['links'] = []
+
+        #
+
+        return out
+
+    @cached_nullary
+    def rewrite_compose_file(self) -> str:
+        in_dct = read_yaml_file(self._cfg.compose_file)
+
+        out_dct = self._rewrite_compose_dct(in_dct)
+
+        #
 
         out_compose_file = make_temp_file()
         self._enter_context(defer(lambda: os.unlink(out_compose_file)))  # noqa
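Note: run_cmd() above relies on curl's '-o FILE -w %{json}' combination — the response body is written to a temp file while curl prints a JSON run report (including response_code) on stdout. A standalone sketch of just that mechanism, assuming a curl binary (7.70+ for %{json}) on PATH; the example function name is hypothetical:

    import json
    import os
    import subprocess
    import tempfile
    import typing as ta


    def curl_status_and_body(url: str) -> ta.Tuple[int, bytes]:
        fd, out_file = tempfile.mkstemp()
        os.close(fd)
        try:
            # Body goes to out_file; run stats (JSON) go to stdout.
            stats_raw = subprocess.check_output([
                'curl', '-s', '-X', 'GET',
                '-o', out_file,
                '-w', '%{json}',
                url,
            ])
            status_code = int(json.loads(stats_raw.decode())['response_code'])
            with open(out_file, 'rb') as f:
                body = f.read()
        finally:
            os.unlink(out_file)
        return status_code, body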
@@ -2438,22 +2984,20 @@ class DockerComposeRun(ExitStacked):
 
     #
 
-    def _cleanup_dependencies(self) -> None:
-
+    async def _cleanup_dependencies(self) -> None:
+        await asyncio_subprocesses.check_call(
             'docker',
             'compose',
             '-f', self.rewrite_compose_file(),
             'down',
         )
 
-    def run(self) -> None:
-        self.tag_image()
-
+    async def run(self) -> None:
         compose_file = self.rewrite_compose_file()
 
-        with contextlib.
-        if not self._cfg.no_dependency_cleanup:
-            es.
+        async with contextlib.AsyncExitStack() as es:
+            if not (self._cfg.no_dependencies or self._cfg.no_dependency_cleanup):
+                await es.enter_async_context(adefer(self._cleanup_dependencies))  # noqa
 
         sh_cmd = ' '.join([
             'docker',
@@ -2472,8 +3016,8 @@ class DockerComposeRun(ExitStacked):
 
         run_cmd = dc.replace(self._cfg.cmd, s=sh_cmd)
 
-        run_cmd.run(
-
+        await run_cmd.run(
+            asyncio_subprocesses.check_call,
             **self._subprocess_kwargs,
         )
 
@@ -2525,8 +3069,8 @@ def read_docker_tar_image_id(tar_file: str) -> str:
 ##
 
 
-def is_docker_image_present(image: str) -> bool:
-    out =
+async def is_docker_image_present(image: str) -> bool:
+    out = await asyncio_subprocesses.check_output(
         'docker',
         'images',
         '--format', 'json',
@@ -2541,333 +3085,263 @@ def is_docker_image_present(image: str) -> bool:
     return True
 
 
-def pull_docker_image(
+async def pull_docker_image(
         image: str,
 ) -> None:
-
+    await asyncio_subprocesses.check_call(
         'docker',
         'pull',
         image,
     )
 
 
-def build_docker_image(
+async def build_docker_image(
         docker_file: str,
         *,
+        tag: ta.Optional[str] = None,
        cwd: ta.Optional[str] = None,
 ) -> str:
     id_file = make_temp_file()
     with defer(lambda: os.unlink(id_file)):
-
+        await asyncio_subprocesses.check_call(
             'docker',
             'build',
             '-f', os.path.abspath(docker_file),
             '--iidfile', id_file,
             '--squash',
+            *(['--tag', tag] if tag is not None else []),
             '.',
             **(dict(cwd=cwd) if cwd is not None else {}),
         )
 
-    with open(id_file) as f:
+    with open(id_file) as f:  # noqa
         image_id = check.single(f.read().strip().splitlines()).strip()
 
     return image_id
 
 
+async def tag_docker_image(image: str, tag: str) -> None:
+    await asyncio_subprocesses.check_call(
+        'docker',
+        'tag',
+        image,
+        tag,
+    )
+
+
+async def delete_docker_tag(tag: str) -> None:
+    await asyncio_subprocesses.check_call(
+        'docker',
+        'rmi',
+        tag,
+    )
+
+
 ##
 
 
-def save_docker_tar_cmd(
+async def save_docker_tar_cmd(
         image: str,
         output_cmd: ShellCmd,
 ) -> None:
     cmd = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
-    cmd.run(
+    await cmd.run(asyncio_subprocesses.check_call)
 
 
-def save_docker_tar(
+async def save_docker_tar(
         image: str,
         tar_file: str,
 ) -> None:
-    return save_docker_tar_cmd(
+    return await save_docker_tar_cmd(
         image,
         ShellCmd(f'cat > {shlex.quote(tar_file)}'),
     )
 
 
-#
-
-
-def load_docker_tar_cmd(
-        input_cmd: ShellCmd,
-) -> str:
-    cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')
-
-    out = cmd.run(subprocesses.check_output).decode()
-
-    line = check.single(out.strip().splitlines())
-    loaded = line.partition(':')[2].strip()
-    return loaded
-
-
-def load_docker_tar(
-        tar_file: str,
-) -> str:
-    return load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
-
-
-########################################
-# ../github/cache.py
-
-
-##
-
-
-class GithubV1CacheShellClient:
-    BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
-    AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa
-
-    def __init__(
-            self,
-            *,
-            base_url: ta.Optional[str] = None,
-            auth_token: ta.Optional[str] = None,
-    ) -> None:
-        super().__init__()
-
-        if base_url is None:
-            base_url = os.environ[self.BASE_URL_ENV_KEY]
-        self._base_url = check.non_empty_str(base_url)
-
-        if auth_token is None:
-            auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
-        self._auth_token = auth_token
-
-        self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)
-
-    #
-
-    _MISSING = object()
-
-    def build_headers(
-            self,
-            *,
-            auth_token: ta.Any = _MISSING,
-            content_type: ta.Optional[str] = None,
-    ) -> ta.Dict[str, str]:
-        dct = {
-            'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
-        }
+#
 
-        if auth_token is self._MISSING:
-            auth_token = self._auth_token
-        if auth_token:
-            dct['Authorization'] = f'Bearer {auth_token}'
 
-
-
+async def load_docker_tar_cmd(
+        input_cmd: ShellCmd,
+) -> str:
+    cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')
 
-
+    out = (await cmd.run(asyncio_subprocesses.check_output)).decode()
 
-
+    line = check.single(out.strip().splitlines())
+    loaded = line.partition(':')[2].strip()
+    return loaded
 
-    HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN'  # noqa
 
-
-
-
-
-            *,
-            json_content: bool = False,
-            content_type: ta.Optional[str] = None,
-    ) -> ShellCmd:
-        if content_type is None and json_content:
-            content_type = 'application/json'
+async def load_docker_tar(
+        tar_file: str,
+) -> str:
+    return await load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
 
-        env = {}
 
-
-
-            env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
-            header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
-        else:
-            header_auth_token = None
|
+
########################################
|
3183
|
+
# ../github/cache.py
|
2695
3184
|
|
2696
|
-
hdrs = self.build_headers(
|
2697
|
-
auth_token=header_auth_token,
|
2698
|
-
content_type=content_type,
|
2699
|
-
)
|
2700
3185
|
|
2701
|
-
|
3186
|
+
##
|
2702
3187
|
|
2703
|
-
cmd = ' '.join([
|
2704
|
-
'curl',
|
2705
|
-
'-s',
|
2706
|
-
'-X', method,
|
2707
|
-
url,
|
2708
|
-
*[f'-H "{k}: {v}"' for k, v in hdrs.items()],
|
2709
|
-
])
|
2710
3188
|
|
2711
|
-
|
2712
|
-
|
2713
|
-
|
2714
|
-
)
|
3189
|
+
class GithubCacheShellClient(abc.ABC):
|
3190
|
+
class Entry(abc.ABC): # noqa
|
3191
|
+
pass
|
2715
3192
|
|
2716
|
-
|
2717
|
-
|
2718
|
-
|
2719
|
-
obj: ta.Any,
|
2720
|
-
**kwargs: ta.Any,
|
2721
|
-
) -> ShellCmd:
|
2722
|
-
curl_cmd = self.build_curl_cmd(
|
2723
|
-
'POST',
|
2724
|
-
url,
|
2725
|
-
json_content=True,
|
2726
|
-
**kwargs,
|
2727
|
-
)
|
3193
|
+
@abc.abstractmethod
|
3194
|
+
def run_get_entry(self, key: str) -> ta.Optional[Entry]:
|
3195
|
+
raise NotImplementedError
|
2728
3196
|
|
2729
|
-
|
3197
|
+
@abc.abstractmethod
|
3198
|
+
def download_get_entry(self, entry: Entry, out_file: str) -> None:
|
3199
|
+
raise NotImplementedError
|
2730
3200
|
|
2731
|
-
|
3201
|
+
@abc.abstractmethod
|
3202
|
+
def upload_cache_entry(self, key: str, in_file: str) -> None:
|
3203
|
+
raise NotImplementedError
|
2732
3204
|
|
2733
|
-
#
|
2734
3205
|
|
2735
|
-
|
2736
|
-
class CurlError(RuntimeError):
|
2737
|
-
status_code: int
|
2738
|
-
body: ta.Optional[bytes]
|
3206
|
+
#
|
2739
3207
|
|
2740
|
-
def __str__(self) -> str:
|
2741
|
-
return repr(self)
|
2742
3208
|
|
2743
|
-
|
2744
|
-
|
2745
|
-
|
2746
|
-
body: ta.Optional[bytes]
|
+class GithubCacheServiceV1ShellClient(GithubCacheShellClient):
+    BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
+    AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa
 
-
-
-
-
-
+    KEY_SUFFIX_ENV_KEY = 'GITHUB_RUN_ID'
+
+    CACHE_VERSION: ta.ClassVar[int] = 1
+
+    #
 
-    def
+    def __init__(
         self,
-        cmd: ShellCmd,
         *,
-
-
-        out_file = make_temp_file()
-        with defer(lambda: os.unlink(out_file)):
-            run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
+            base_url: ta.Optional[str] = None,
+            auth_token: ta.Optional[str] = None,
 
-
+            key_prefix: ta.Optional[str] = None,
+            key_suffix: ta.Optional[str] = None,
+    ) -> None:
+        super().__init__()
 
-
-        status_code = check.isinstance(out_json['response_code'], int)
+        #
 
-
-
+        if base_url is None:
+            base_url = os.environ[self.BASE_URL_ENV_KEY]
+        service_url = GithubCacheServiceV1.get_service_url(base_url)
 
-
-
-            body=body,
-        )
+        if auth_token is None:
+            auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
 
-
-
+        self._curl = GithubServiceCurlClient(
+            service_url,
+            auth_token,
+            api_version=GithubCacheServiceV1.API_VERSION,
+        )
 
-
+        #
 
-
-        self,
-        cmd: ShellCmd,
-        *,
-        success_status_codes: ta.Optional[ta.Container[int]] = None,
-    ) -> ta.Optional[ta.Any]:
-        result = self.run_curl_cmd(cmd, raise_=True)
+        self._key_prefix = key_prefix
 
-        if
-
-
-            is_success = 200 <= result.status_code < 300
+        if key_suffix is None:
+            key_suffix = os.environ[self.KEY_SUFFIX_ENV_KEY]
+        self._key_suffix = check.non_empty_str(key_suffix)
 
-
-            if not (body := result.body):
-                return None
-            return json.loads(body.decode('utf-8-sig'))
+        #
 
-
-            return None
+    KEY_PART_SEPARATOR = '--'
 
-
-
+    def fix_key(self, s: str) -> str:
+        return self.KEY_PART_SEPARATOR.join([
+            *([self._key_prefix] if self._key_prefix else []),
+            s,
+            self._key_suffix,
+        ])
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class Entry(GithubCacheShellClient.Entry):
+        artifact: GithubCacheServiceV1.ArtifactCacheEntry
 
     #
 
     def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
-
+        fixed_key = self.fix_key(key)
+
+        qp = dict(
+            keys=fixed_key,
+            version=str(self.CACHE_VERSION),
+        )
+
+        return self._curl.build_cmd(
             'GET',
-
+            shlex.quote('?'.join([
+                'cache',
+                '&'.join([
+                    f'{k}={urllib.parse.quote_plus(v)}'
+                    for k, v in qp.items()
+                ]),
+            ])),
         )
 
-    def run_get_entry(self, key: str) -> ta.Optional[
-
+    def run_get_entry(self, key: str) -> ta.Optional[Entry]:
+        fixed_key = self.fix_key(key)
+        curl_cmd = self.build_get_entry_curl_cmd(fixed_key)
 
-        obj = self.
+        obj = self._curl.run_json_cmd(
             curl_cmd,
             success_status_codes=[200, 204],
         )
         if obj is None:
             return None
 
-        return GithubCacheServiceV1.dataclass_from_json(
+        return self.Entry(GithubCacheServiceV1.dataclass_from_json(
             GithubCacheServiceV1.ArtifactCacheEntry,
             obj,
-        )
+        ))
 
     #
 
-    def build_download_get_entry_cmd(
-            self,
-            entry: GithubCacheServiceV1.ArtifactCacheEntry,
-            out_file: str,
-    ) -> ShellCmd:
+    def build_download_get_entry_cmd(self, entry: Entry, out_file: str) -> ShellCmd:
         return ShellCmd(' '.join([
             'aria2c',
             '-x', '4',
             '-o', out_file,
-            check.non_empty_str(entry.archive_location),
+            check.non_empty_str(entry.artifact.archive_location),
         ]))
 
-    def download_get_entry(
-
-            entry
-            out_file
-
-        dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
+    def download_get_entry(self, entry: GithubCacheShellClient.Entry, out_file: str) -> None:
+        dl_cmd = self.build_download_get_entry_cmd(
+            check.isinstance(entry, GithubCacheServiceV1ShellClient.Entry),
+            out_file,
+        )
         dl_cmd.run(subprocesses.check_call)
 
     #
 
-    def upload_cache_entry(
-
-
-            in_file: str,
-    ) -> None:
+    def upload_cache_entry(self, key: str, in_file: str) -> None:
+        fixed_key = self.fix_key(key)
+
         check.state(os.path.isfile(in_file))
 
         file_size = os.stat(in_file).st_size
 
+        #
+
         reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
-            key=
+            key=fixed_key,
             cache_size=file_size,
+            version=str(self.CACHE_VERSION),
         )
-        reserve_cmd = self.
+        reserve_cmd = self._curl.build_post_json_cmd(
            'caches',
            GithubCacheServiceV1.dataclass_to_json(reserve_req),
        )
-        reserve_resp_obj: ta.Any = check.not_none(self.
+        reserve_resp_obj: ta.Any = check.not_none(self._curl.run_json_cmd(
            reserve_cmd,
            success_status_codes=[201],
        ))
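Note: the reworked cache client namespaces every key with fix_key() — '<prefix>--<key>--<GITHUB_RUN_ID>' — and URL-encodes it into the 'cache?keys=...&version=...' query. A small standalone re-statement of that derivation (not an import from the package):

    import typing as ta
    import urllib.parse

    KEY_PART_SEPARATOR = '--'


    def fix_key(s: str, key_prefix: ta.Optional[str], key_suffix: str) -> str:
        return KEY_PART_SEPARATOR.join([
            *([key_prefix] if key_prefix else []),
            s,
            key_suffix,
        ])


    def get_entry_url_path(key: str, version: int) -> str:
        qp = dict(keys=key, version=str(version))
        return '?'.join([
            'cache',
            '&'.join(f'{k}={urllib.parse.quote_plus(v)}' for k, v in qp.items()),
        ])


    # fix_key('docker-abc', 'myrepo', '12345') == 'myrepo--docker-abc--12345'
    # get_entry_url_path('myrepo--docker-abc--12345', 1)
    #     == 'cache?keys=myrepo--docker-abc--12345&version=1'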
@@ -2875,8 +3349,66 @@ class GithubV1CacheShellClient:
             GithubCacheServiceV1.ReserveCacheResponse,
             reserve_resp_obj,
         )
+        cache_id = check.isinstance(reserve_resp.cache_id, int)
 
-
+        #
+
+        tmp_file = make_temp_file()
+
+        print(f'{file_size=}')
+        num_written = 0
+        chunk_size = 32 * 1024 * 1024
+        for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
+            ofs = i * chunk_size
+            sz = min(chunk_size, file_size - ofs)
+
+            patch_cmd = self._curl.build_cmd(
+                'PATCH',
+                f'caches/{cache_id}',
+                content_type='application/octet-stream',
+                headers={
+                    'Content-Range': f'bytes {ofs}-{ofs + sz - 1}/*',
+                },
+            )
+
+            #
+
+            # patch_data_cmd = dc.replace(patch_cmd, s=' | '.join([
+            #     f'dd if={in_file} bs={chunk_size} skip={i} count=1 status=none',
+            #     f'{patch_cmd.s} --data-binary -',
+            # ]))
+            # print(f'{patch_data_cmd.s=}')
+            # patch_result = self._curl.run_cmd(patch_data_cmd, raise_=True)
+
+            #
+
+            with open(in_file, 'rb') as f:
+                f.seek(ofs)
+                buf = f.read(sz)
+            with open(tmp_file, 'wb') as f:
+                f.write(buf)
+            num_written += len(buf)
+            print(f'{num_written=}')
+            patch_data_cmd = dc.replace(patch_cmd, s=f'{patch_cmd.s} --data-binary @{tmp_file}')
+            print(f'{patch_data_cmd.s=}')
+            patch_result = self._curl.run_cmd(patch_data_cmd, raise_=True)
+
+            #
+
+            check.equal(patch_result.status_code, 204)
+            ofs += sz
+
+        #
+
+        commit_req = GithubCacheServiceV1.CommitCacheRequest(
+            size=file_size,
+        )
+        commit_cmd = self._curl.build_post_json_cmd(
+            f'caches/{cache_id}',
+            GithubCacheServiceV1.dataclass_to_json(commit_req),
+        )
+        commit_result = self._curl.run_cmd(commit_cmd, raise_=True)
+        check.equal(commit_result.status_code, 204)
 
 
 ##
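Note: the new upload path reserves a cache entry, PATCHes the file up in 32 MiB chunks with 'Content-Range: bytes START-END/*' headers, then commits the total size. A sketch of just the chunking arithmetic used by the loop above (helper name is illustrative):

    import typing as ta


    def iter_chunks(
            file_size: int,
            chunk_size: int = 32 * 1024 * 1024,
    ) -> ta.Iterator[ta.Tuple[int, int, str]]:
        # Yields (offset, size, content_range) triples covering the whole file.
        n = (file_size // chunk_size) + (1 if file_size % chunk_size else 0)
        for i in range(n):
            ofs = i * chunk_size
            sz = min(chunk_size, file_size - ofs)
            yield ofs, sz, f'bytes {ofs}-{ofs + sz - 1}/*'


    # For a 70 MiB file this yields three ranges:
    #   bytes 0-33554431/*, bytes 33554432-67108863/*, bytes 67108864-73400319/*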
@@ -2887,15 +3419,15 @@ class GithubShellCache(ShellCache):
             self,
             dir: str,  # noqa
             *,
-            client: ta.Optional[
+            client: ta.Optional[GithubCacheShellClient] = None,
     ) -> None:
         super().__init__()
 
         self._dir = check.not_none(dir)
 
         if client is None:
-            client =
-        self._client = client
+            client = GithubCacheServiceV1ShellClient()
+        self._client: GithubCacheShellClient = client
 
         self._local = DirectoryFileCache(self._dir)
 
@@ -2957,82 +3489,11 @@ class GithubShellCache(ShellCache):
         )
 
 
-########################################
-# ../requirements.py
-"""
-TODO:
- - pip compile lol
-  - but still support git+ stuff
- - req.txt format aware hash
-  - more than just whitespace
- - pyproject req rewriting
- - download_requirements bootstrap off prev? not worth the dl?
-  - big deps (torch) change less, probably worth it
- - follow embedded -r automatically like pyp
-"""
-
-
-##
-
-
-def build_requirements_hash(
-        requirements_txts: ta.Sequence[str],
-) -> str:
-    txt_file_contents: dict = {}
-
-    for txt_file in requirements_txts:
-        txt_file_name = os.path.basename(txt_file)
-        check.not_in(txt_file_name, txt_file_contents)
-        with open(txt_file) as f:
-            txt_contents = f.read()
-        txt_file_contents[txt_file_name] = txt_contents
-
-    #
-
-    lines = []
-    for txt_file, txt_contents in sorted(txt_file_contents.items()):
-        txt_hash = sha256_str(txt_contents)
-        lines.append(f'{txt_file}={txt_hash}')
-
-    return sha256_str('\n'.join(lines))
-
-
-##
-
-
-def download_requirements(
-        image: str,
-        requirements_dir: str,
-        requirements_txts: ta.Sequence[str],
-) -> None:
-    requirements_txt_dir = tempfile.mkdtemp()
-    with defer(lambda: shutil.rmtree(requirements_txt_dir)):
-        for rt in requirements_txts:
-            shutil.copyfile(rt, os.path.join(requirements_txt_dir, os.path.basename(rt)))
-
-        subprocesses.check_call(
-            'docker',
-            'run',
-            '--rm',
-            '-i',
-            '-v', f'{os.path.abspath(requirements_dir)}:/requirements',
-            '-v', f'{requirements_txt_dir}:/requirements_txt',
-            image,
-            'pip',
-            'download',
-            '-d', '/requirements',
-            *itertools.chain.from_iterable(
-                ['-r', f'/requirements_txt/{os.path.basename(rt)}']
-                for rt in requirements_txts
-            ),
-        )
-
-
 ########################################
 # ../ci.py
 
 
-class Ci(
+class Ci(AsyncExitStacked):
     FILE_NAME_HASH_LEN = 16
 
     @dc.dataclass(frozen=True)
@@ -3049,6 +3510,9 @@ class Ci(ExitStacked):
         requirements_txts: ta.Optional[ta.Sequence[str]] = None
 
         always_pull: bool = False
+        always_build: bool = False
+
+        no_dependencies: bool = False
 
         def __post_init__(self) -> None:
             check.not_isinstance(self.requirements_txts, str)
@@ -3068,7 +3532,7 @@ class Ci(ExitStacked):
 
     #
 
-    def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
+    async def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
         if self._shell_cache is None:
             return None
 
@@ -3078,9 +3542,9 @@ class Ci(ExitStacked):
 
         get_cache_cmd = dc.replace(get_cache_cmd, s=f'{get_cache_cmd.s} | zstd -cd --long')  # noqa
 
-        return load_docker_tar_cmd(get_cache_cmd)
+        return await load_docker_tar_cmd(get_cache_cmd)
 
-    def _save_cache_docker_image(self, key: str, image: str) -> None:
+    async def _save_cache_docker_image(self, key: str, image: str) -> None:
         if self._shell_cache is None:
             return
 
@@ -3089,12 +3553,12 @@ class Ci(ExitStacked):
 
         put_cache_cmd = dc.replace(put_cache_cmd, s=f'zstd | {put_cache_cmd.s}')
 
-        save_docker_tar_cmd(image, put_cache_cmd)
+        await save_docker_tar_cmd(image, put_cache_cmd)
 
     #
 
-    def _load_docker_image(self, image: str) -> None:
-        if not self._cfg.always_pull and is_docker_image_present(image):
+    async def _load_docker_image(self, image: str) -> None:
+        if not self._cfg.always_pull and (await is_docker_image_present(image)):
             return
 
         dep_suffix = image
@@ -3102,63 +3566,79 @@ class Ci(ExitStacked):
             dep_suffix = dep_suffix.replace(c, '-')
 
         cache_key = f'docker-{dep_suffix}'
-        if self._load_cache_docker_image(cache_key) is not None:
+        if (await self._load_cache_docker_image(cache_key)) is not None:
             return
 
-        pull_docker_image(image)
+        await pull_docker_image(image)
 
-        self._save_cache_docker_image(cache_key, image)
+        await self._save_cache_docker_image(cache_key, image)
 
-    def load_docker_image(self, image: str) -> None:
+    async def load_docker_image(self, image: str) -> None:
         with log_timing_context(f'Load docker image: {image}'):
-            self._load_docker_image(image)
+            await self._load_docker_image(image)
 
-    @
-    def load_compose_service_dependencies(self) -> None:
+    @async_cached_nullary
+    async def load_compose_service_dependencies(self) -> None:
         deps = get_compose_service_dependencies(
             self._cfg.compose_file,
             self._cfg.service,
         )
 
         for dep_image in deps.values():
-            self.load_docker_image(dep_image)
+            await self.load_docker_image(dep_image)
 
     #
 
-
-
+    @cached_nullary
+    def docker_file_hash(self) -> str:
+        return build_docker_file_hash(self._cfg.docker_file)[:self.FILE_NAME_HASH_LEN]
+
+    async def _resolve_ci_image(self) -> str:
+        cache_key = f'ci-{self.docker_file_hash()}'
+        image_tag = f'{self._cfg.service}:{cache_key}'
+
+        if not self._cfg.always_build and (await is_docker_image_present(image_tag)):
+            return image_tag
 
-        cache_key
-
-
+        if (cache_image_id := await self._load_cache_docker_image(cache_key)) is not None:
+            await tag_docker_image(
+                cache_image_id,
+                image_tag,
+            )
+            return image_tag
 
-        image_id = build_docker_image(
+        image_id = await build_docker_image(
             self._cfg.docker_file,
+            tag=image_tag,
             cwd=self._cfg.project_dir,
         )
 
-        self._save_cache_docker_image(cache_key, image_id)
+        await self._save_cache_docker_image(cache_key, image_id)
 
-        return
+        return image_tag
 
-    @
-    def resolve_ci_image(self) -> str:
+    @async_cached_nullary
+    async def resolve_ci_image(self) -> str:
         with log_timing_context('Resolve ci image') as ltc:
-            image_id = self._resolve_ci_image()
+            image_id = await self._resolve_ci_image()
             ltc.set_description(f'Resolve ci image: {image_id}')
             return image_id
 
     #
 
-
-
+    @cached_nullary
+    def requirements_txts(self) -> ta.Sequence[str]:
+        return [
             os.path.join(self._cfg.project_dir, rf)
             for rf in check.not_none(self._cfg.requirements_txts)
         ]
 
-
+    @cached_nullary
+    def requirements_hash(self) -> str:
+        return build_requirements_hash(self.requirements_txts())[:self.FILE_NAME_HASH_LEN]
 
-
+    async def _resolve_requirements_dir(self) -> str:
+        tar_file_key = f'requirements-{self.docker_file_hash()}-{self.requirements_hash()}'
         tar_file_name = f'{tar_file_key}.tar'
 
         temp_dir = tempfile.mkdtemp()
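Note: _resolve_ci_image() now derives a content-addressed tag, '<service>:ci-<dockerfile-hash>', and tries the cheapest source first — an existing local tag, then a cache restore plus retag, then a full build. A sketch of the tag derivation; build_docker_file_hash is assumed here to be a sha256 of the Dockerfile bytes, since its real definition is outside this diff:

    import hashlib

    FILE_NAME_HASH_LEN = 16


    def build_docker_file_hash(docker_file: str) -> str:
        # Assumption: hash of the Dockerfile's raw contents.
        with open(docker_file, 'rb') as f:
            return hashlib.sha256(f.read()).hexdigest()


    def ci_image_tag(service: str, docker_file: str) -> str:
        # Same Dockerfile contents -> same tag across runs, so the mere presence
        # of the tag can short-circuit both cache restore and rebuild.
        cache_key = f'ci-{build_docker_file_hash(docker_file)[:FILE_NAME_HASH_LEN]}'
        return f'{service}:{cache_key}'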
@@ -3174,9 +3654,9 @@ class Ci(ExitStacked):
         os.makedirs(temp_requirements_dir)
 
         download_requirements(
-            self.resolve_ci_image(),
+            await self.resolve_ci_image(),
             temp_requirements_dir,
-            requirements_txts,
+            self.requirements_txts(),
         )
 
         if self._file_cache is not None:
@@ -3193,16 +3673,16 @@ class Ci(ExitStacked):
 
         return temp_requirements_dir
 
-    @
-    def resolve_requirements_dir(self) -> str:
+    @async_cached_nullary
+    async def resolve_requirements_dir(self) -> str:
         with log_timing_context('Resolve requirements dir') as ltc:
-            requirements_dir = self._resolve_requirements_dir()
+            requirements_dir = await self._resolve_requirements_dir()
             ltc.set_description(f'Resolve requirements dir: {requirements_dir}')
             return requirements_dir
 
     #
 
-    def _run_compose_(self) -> None:
+    async def _run_compose_(self) -> None:
         setup_cmds = [
             'pip install --root-user-action ignore --find-links /requirements --no-index uv',
             (
@@ -3220,37 +3700,39 @@ class Ci(ExitStacked):
 
         #
 
-        with DockerComposeRun(DockerComposeRun.Config(
+        async with DockerComposeRun(DockerComposeRun.Config(
             compose_file=self._cfg.compose_file,
             service=self._cfg.service,
 
-            image=self.resolve_ci_image(),
+            image=await self.resolve_ci_image(),
 
             cmd=ci_cmd,
 
             run_options=[
                 '-v', f'{os.path.abspath(self._cfg.project_dir)}:/project',
-                '-v', f'{os.path.abspath(self.resolve_requirements_dir())}:/requirements',
+                '-v', f'{os.path.abspath(await self.resolve_requirements_dir())}:/requirements',
             ],
 
             cwd=self._cfg.project_dir,
+
+            no_dependencies=self._cfg.no_dependencies,
         )) as ci_compose_run:
-            ci_compose_run.run()
+            await ci_compose_run.run()
 
-    def _run_compose(self) -> None:
+    async def _run_compose(self) -> None:
         with log_timing_context('Run compose'):
-            self._run_compose_()
+            await self._run_compose_()
 
     #
 
-    def run(self) -> None:
-        self.load_compose_service_dependencies()
+    async def run(self) -> None:
+        await self.load_compose_service_dependencies()
 
-        self.resolve_ci_image()
+        await self.resolve_ci_image()
 
-        self.resolve_requirements_dir()
+        await self.resolve_requirements_dir()
 
-        self._run_compose()
+        await self._run_compose()
 
 
 ########################################
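Note: with Ci now an AsyncExitStacked, driving it takes an 'async with' plus a single awaited run(), as the CLI hunks below show. A hypothetical wiring sketch — the config field values are placeholders, the optional file/shell caches are omitted, and Ci and ShellCmd are the classes defined in this script:

    import asyncio


    async def _run_ci() -> None:
        async with Ci(
            Ci.Config(
                project_dir='.',
                docker_file='Dockerfile',
                compose_file='docker-compose.yml',
                service='app',
                requirements_txts=['requirements.txt'],
                cmd=ShellCmd('python3 -m pytest -svv test.py'),
            ),
        ) as ci:
            await ci.run()


    # asyncio.run(_run_ci())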
@@ -3266,7 +3748,7 @@ class GithubCli(ArgparseCli):
         argparse_arg('key'),
     )
     def get_cache_entry(self) -> None:
-        shell_client =
+        shell_client = GithubCacheServiceV1ShellClient()
         entry = shell_client.run_get_entry(self.args.key)
         if entry is None:
             return
@@ -3325,18 +3807,21 @@ class CiCli(ArgparseCli):
         argparse_arg('--docker-file'),
         argparse_arg('--compose-file'),
         argparse_arg('-r', '--requirements-txt', action='append'),
+
         argparse_arg('--github-cache', action='store_true'),
         argparse_arg('--cache-dir'),
+
         argparse_arg('--always-pull', action='store_true'),
+        argparse_arg('--always-build', action='store_true'),
+
+        argparse_arg('--no-dependencies', action='store_true'),
     )
     async def run(self) -> None:
         project_dir = self.args.project_dir
         docker_file = self.args.docker_file
         compose_file = self.args.compose_file
-        service = self.args.service
         requirements_txts = self.args.requirements_txt
         cache_dir = self.args.cache_dir
-        always_pull = self.args.always_pull
 
         #
 
@@ -3406,14 +3891,14 @@ class CiCli(ArgparseCli):
 
         #
 
-        with Ci(
+        async with Ci(
             Ci.Config(
                 project_dir=project_dir,
 
                 docker_file=docker_file,
 
                 compose_file=compose_file,
-                service=service,
+                service=self.args.service,
 
                 requirements_txts=requirements_txts,
 
@@ -3422,12 +3907,15 @@ class CiCli(ArgparseCli):
                     'python3 -m pytest -svv test.py',
                 ])),
 
-                always_pull=always_pull,
+                always_pull=self.args.always_pull,
+                always_build=self.args.always_build,
+
+                no_dependencies=self.args.no_dependencies,
             ),
             file_cache=file_cache,
             shell_cache=shell_cache,
         ) as ci:
-            ci.run()
+            await ci.run()
 
 
 async def _async_main() -> ta.Optional[int]: