omdev 0.0.0.dev211__py3-none-any.whl → 0.0.0.dev213__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
- omdev/.manifests.json +15 -1
- omdev/__about__.py +0 -4
- omdev/amalg/gen.py +2 -3
- omdev/amalg/imports.py +4 -5
- omdev/amalg/manifests.py +7 -10
- omdev/amalg/resources.py +24 -27
- omdev/amalg/srcfiles.py +7 -10
- omdev/amalg/strip.py +4 -5
- omdev/amalg/types.py +1 -1
- omdev/amalg/typing.py +9 -8
- omdev/cc/cdeps.py +34 -1
- omdev/cc/cdeps.toml +19 -2
- omdev/cc/cli.py +13 -1
- omdev/ci/ci.py +71 -48
- omdev/ci/cli.py +22 -10
- omdev/ci/compose.py +30 -56
- omdev/ci/docker.py +35 -16
- omdev/ci/github/cache.py +153 -184
- omdev/ci/github/cacheapi.py +1 -1
- omdev/ci/github/cli.py +2 -2
- omdev/ci/github/curl.py +209 -0
- omdev/ci/requirements.py +2 -2
- omdev/git/shallow.py +1 -1
- omdev/scripts/ci.py +948 -451
- omdev/scripts/interp.py +23 -0
- omdev/scripts/pyproject.py +23 -0
- omdev/tokens/__init__.py +0 -0
- omdev/tokens/all.py +35 -0
- omdev/tokens/tokenizert.py +215 -0
- omdev/{tokens.py → tokens/utils.py} +6 -12
- omdev/tools/mkenv.py +131 -0
- omdev/tools/mkrelimp.py +4 -6
- {omdev-0.0.0.dev211.dist-info → omdev-0.0.0.dev213.dist-info}/METADATA +2 -5
- {omdev-0.0.0.dev211.dist-info → omdev-0.0.0.dev213.dist-info}/RECORD +38 -33
- {omdev-0.0.0.dev211.dist-info → omdev-0.0.0.dev213.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev211.dist-info → omdev-0.0.0.dev213.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev211.dist-info → omdev-0.0.0.dev213.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev211.dist-info → omdev-0.0.0.dev213.dist-info}/top_level.txt +0 -0
omdev/scripts/ci.py
CHANGED
@@ -17,6 +17,8 @@ Inputs:
 import abc
 import argparse
 import asyncio
+import asyncio.base_subprocess
+import asyncio.subprocess
 import collections
 import contextlib
 import dataclasses as dc
@@ -39,6 +41,7 @@ import threading
 import time
 import types
 import typing as ta
+import urllib.parse
 
 
 ########################################
@@ -54,6 +57,9 @@ if sys.version_info < (3, 8):
 # shell.py
 T = ta.TypeVar('T')
 
+# ../../omlish/asyncs/asyncio/timeouts.py
+AwaitableT = ta.TypeVar('AwaitableT', bound=ta.Awaitable)
+
 # ../../omlish/lite/cached.py
 CallableT = ta.TypeVar('CallableT', bound=ta.Callable)
 
@@ -70,6 +76,7 @@ ArgparseCmdFn = ta.Callable[[], ta.Optional[int]]  # ta.TypeAlias
 
 # ../../omlish/lite/contextmanagers.py
 ExitStackedT = ta.TypeVar('ExitStackedT', bound='ExitStacked')
+AsyncExitStackedT = ta.TypeVar('AsyncExitStackedT', bound='AsyncExitStacked')
 
 # ../../omlish/subprocesses.py
 SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias
@@ -113,6 +120,19 @@ class ShellCmd:
         )
 
 
+########################################
+# ../../../omlish/asyncs/asyncio/timeouts.py
+
+
+def asyncio_maybe_timeout(
+        fut: AwaitableT,
+        timeout: ta.Optional[float] = None,
+) -> AwaitableT:
+    if timeout is not None:
+        fut = asyncio.wait_for(fut, timeout)  # type: ignore
+    return fut
+
+
 ########################################
 # ../../../omlish/lite/cached.py
 
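Note: `asyncio_maybe_timeout` is deliberately a no-op when `timeout` is `None`. A minimal usage sketch (the sleeping coroutine is just an illustration):

    import asyncio

    async def _demo() -> None:
        # With a timeout the awaitable is wrapped in asyncio.wait_for and may
        # raise asyncio.TimeoutError; with timeout=None it is returned as-is.
        await asyncio_maybe_timeout(asyncio.sleep(0.01), timeout=1.0)
        await asyncio_maybe_timeout(asyncio.sleep(0.01))  # no wrapping

    asyncio.run(_demo())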
@@ -208,6 +228,17 @@ class Checks:
 
     #
 
+    def register_on_raise_breakpoint_if_env_var_set(self, key: str) -> None:
+        import os
+
+        def on_raise(exc: Exception) -> None:  # noqa
+            if key in os.environ:
+                breakpoint()  # noqa
+
+        self.register_on_raise(on_raise)
+
+    #
+
     def set_exception_factory(self, factory: CheckExceptionFactory) -> None:
         self._exception_factory = factory
 
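Note: the new hook drops into the debugger whenever a check fails while a chosen environment variable is set; a sketch (the variable name here is illustrative):

    # Any failing check.* call now triggers breakpoint() when
    # OMDEV_CHECK_BREAKPOINT is present in the environment.
    check.register_on_raise_breakpoint_if_env_var_set('OMDEV_CHECK_BREAKPOINT')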
@@ -523,6 +554,18 @@ class Checks:
 
         return v
 
+    def not_equal(self, v: T, o: ta.Any, msg: CheckMessage = None) -> T:
+        if o == v:
+            self._raise(
+                ValueError,
+                'Must not be equal',
+                msg,
+                Checks._ArgsKwargs(v, o),
+                render_fmt='%s == %s',
+            )
+
+        return v
+
     def is_(self, v: T, o: ta.Any, msg: CheckMessage = None) -> T:
         if o is not v:
             self._raise(
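Note: `not_equal` follows the same contract as the surrounding validators — it returns `v` unchanged on success and raises through the configured exception factory otherwise. A sketch, assuming `check` is the module-level `Checks` instance used throughout this script:

    port = check.not_equal(port, 0)  # returns port; raises ValueError if port == 0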
@@ -1205,7 +1248,7 @@ class GithubCacheServiceV1:
     @dc.dataclass(frozen=True)
     class ReserveCacheRequest:
         key: str
-        cache_size: ta.Optional[int]
+        cache_size: ta.Optional[int] = None
         version: ta.Optional[str] = None
 
     @dc.dataclass(frozen=True)
@@ -1713,6 +1756,33 @@ class ExitStacked:
         return es.enter_context(cm)
 
 
+class AsyncExitStacked:
+    _exit_stack: ta.Optional[contextlib.AsyncExitStack] = None
+
+    async def __aenter__(self: AsyncExitStackedT) -> AsyncExitStackedT:
+        check.state(self._exit_stack is None)
+        es = self._exit_stack = contextlib.AsyncExitStack()
+        await es.__aenter__()
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        if (es := self._exit_stack) is None:
+            return None
+        await self._async_exit_contexts()
+        return await es.__aexit__(exc_type, exc_val, exc_tb)
+
+    async def _async_exit_contexts(self) -> None:
+        pass
+
+    def _enter_context(self, cm: ta.ContextManager[T]) -> T:
+        es = check.not_none(self._exit_stack)
+        return es.enter_context(cm)
+
+    async def _enter_async_context(self, cm: ta.AsyncContextManager[T]) -> T:
+        es = check.not_none(self._exit_stack)
+        return await es.enter_async_context(cm)
+
+
 ##
 
 
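Note: `AsyncExitStacked` mirrors the existing `ExitStacked` on top of a `contextlib.AsyncExitStack`, so subclasses can mix sync and async resources. A minimal sketch (names illustrative):

    class Holder(AsyncExitStacked):
        async def open(self) -> None:
            # Sync CMs go through _enter_context, async CMs through
            # _enter_async_context; both unwind on __aexit__.
            self._f = self._enter_context(open('/tmp/example', 'w'))

    async def _use() -> None:
        async with Holder() as h:
            await h.open()
        # the file is closed here, when the stack unwinds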
@@ -1724,6 +1794,17 @@ def defer(fn: ta.Callable) -> ta.Generator[ta.Callable, None, None]:
         fn()
 
 
+@contextlib.asynccontextmanager
+async def adefer(fn: ta.Callable) -> ta.AsyncGenerator[ta.Callable, None]:
+    try:
+        yield fn
+    finally:
+        await fn()
+
+
+##
+
+
 @contextlib.contextmanager
 def attr_setting(obj, attr, val, *, default=None):  # noqa
     not_set = object()
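Note: `adefer` awaits its callback on exit, so the argument must be an async callable — this is how `DockerComposeRun.run` registers dependency cleanup later in this diff. A sketch:

    async def _cleanup() -> None:
        ...

    async def _demo() -> None:
        async with contextlib.AsyncExitStack() as es:
            await es.enter_async_context(adefer(_cleanup))
            ...  # _cleanup() is awaited when the stack unwinds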
@@ -2277,154 +2358,619 @@ class AbstractAsyncSubprocesses(BaseSubprocesses):
 
 
 ########################################
-# ../
-"""
-TODO:
- - fix rmi - only when not referenced anymore
-"""
+# ../github/curl.py
 
 
 ##
 
 
-
-
-
-
-
+class GithubServiceCurlClient:
+    def __init__(
+            self,
+            service_url: str,
+            auth_token: ta.Optional[str] = None,
+            *,
+            api_version: ta.Optional[str] = None,
+    ) -> None:
+        super().__init__()
 
-
-
+        self._service_url = check.non_empty_str(service_url)
+        self._auth_token = auth_token
+        self._api_version = api_version
 
-
-    for dep_service in service_dct.get('depends_on', []):
-        dep_service_dct = services[dep_service]
-        out[dep_service] = dep_service_dct['image']
+    #
 
-
+    _MISSING = object()
 
+    def build_headers(
+            self,
+            headers: ta.Optional[ta.Mapping[str, str]] = None,
+            *,
+            auth_token: ta.Any = _MISSING,
+            content_type: ta.Optional[str] = None,
+    ) -> ta.Dict[str, str]:
+        dct = {
+            'Accept': ';'.join([
+                'application/json',
+                *([f'api-version={self._api_version}'] if self._api_version else []),
+            ]),
+        }
 
-
+        if auth_token is self._MISSING:
+            auth_token = self._auth_token
+        if auth_token:
+            dct['Authorization'] = f'Bearer {auth_token}'
 
+        if content_type is not None:
+            dct['Content-Type'] = content_type
 
-
-
-    class Config:
-        compose_file: str
-        service: str
+        if headers:
+            dct.update(headers)
 
-
+        return dct
 
-
+    #
 
-
+    HEADER_AUTH_TOKEN_ENV_KEY_PREFIX = '_GITHUB_SERVICE_AUTH_TOKEN'  # noqa
 
-
+    @property
+    def header_auth_token_env_key(self) -> str:
+        return f'{self.HEADER_AUTH_TOKEN_ENV_KEY_PREFIX}_{id(self)}'
 
-
+    def build_cmd(
+            self,
+            method: str,
+            url: str,
+            *,
+            json_content: bool = False,
+            content_type: ta.Optional[str] = None,
+            headers: ta.Optional[ta.Dict[str, str]] = None,
+    ) -> ShellCmd:
+        if content_type is None and json_content:
+            content_type = 'application/json'
 
-
+        env = {}
 
-
+        header_auth_token: ta.Optional[str]
+        if self._auth_token:
+            header_env_key = self.header_auth_token_env_key
+            env[header_env_key] = self._auth_token
+            header_auth_token = f'${header_env_key}'
+        else:
+            header_auth_token = None
 
-
+        built_hdrs = self.build_headers(
+            headers,
+            auth_token=header_auth_token,
+            content_type=content_type,
+        )
 
-
-            check.not_isinstance(self.run_options, str)
+        url = f'{self._service_url}/{url}'
 
-
-
+        cmd = ' '.join([
+            'curl',
+            '-s',
+            '-X', method,
+            url,
+            *[f'-H "{k}: {v}"' for k, v in built_hdrs.items()],
+        ])
 
-
+        return ShellCmd(
+            cmd,
+            env=env,
+        )
 
-
-
-
+    def build_post_json_cmd(
+            self,
+            url: str,
+            obj: ta.Any,
+            **kwargs: ta.Any,
+    ) -> ShellCmd:
+        curl_cmd = self.build_cmd(
+            'POST',
+            url,
+            json_content=True,
+            **kwargs,
+        )
 
-
+        obj_json = json_dumps_compact(obj)
 
-
-    def image_tag(self) -> str:
-        pfx = 'sha256:'
-        if (image := self._cfg.image).startswith(pfx):
-            image = image[len(pfx):]
+        return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')
 
-
+    #
 
-    @
-
-
+    @dc.dataclass()
+    class Error(RuntimeError):
+        status_code: int
+        body: ta.Optional[bytes]
 
-
-
-            'tag',
-            self._cfg.image,
-            image_tag,
-            **self._subprocess_kwargs,
-        )
+        def __str__(self) -> str:
+            return repr(self)
 
-
-
-
-
-
-
+    @dc.dataclass(frozen=True)
+    class Result:
+        status_code: int
+        body: ta.Optional[bytes]
+
+        def as_error(self) -> 'GithubServiceCurlClient.Error':
+            return GithubServiceCurlClient.Error(
+                status_code=self.status_code,
+                body=self.body,
             )
 
-
+    def run_cmd(
+            self,
+            cmd: ShellCmd,
+            *,
+            raise_: bool = False,
+            **subprocess_kwargs: ta.Any,
+    ) -> Result:
+        out_file = make_temp_file()
+        with defer(lambda: os.unlink(out_file)):
+            run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
 
-
+            out_json_bytes = run_cmd.run(
+                subprocesses.check_output,
+                **subprocess_kwargs,
+            )
 
-
+            out_json = json.loads(out_json_bytes.decode())
+            status_code = check.isinstance(out_json['response_code'], int)
 
-
-
+            with open(out_file, 'rb') as f:
+                body = f.read()
 
-
+            result = self.Result(
+                status_code=status_code,
+                body=body,
+            )
 
-
-
+            if raise_ and (500 <= status_code <= 600):
+                raise result.as_error()
 
-
+            return result
 
-
-
+    def run_json_cmd(
+            self,
+            cmd: ShellCmd,
+            *,
+            success_status_codes: ta.Optional[ta.Container[int]] = None,
+    ) -> ta.Optional[ta.Any]:
+        result = self.run_cmd(cmd, raise_=True)
 
-
+        if success_status_codes is not None:
+            is_success = result.status_code in success_status_codes
+        else:
+            is_success = 200 <= result.status_code < 300
 
-
-        if
-
+        if is_success:
+            if not (body := result.body):
+                return None
+            return json.loads(body.decode('utf-8-sig'))
 
-
-
-            for l in out_service.get('links', [])
-        ]
+        elif result.status_code == 404:
+            return None
 
-
+        else:
+            raise result.as_error()
 
-        depends_on = in_service.get('depends_on', [])
 
-
-
-
+########################################
+# ../requirements.py
+"""
+TODO:
+ - pip compile lol
+  - but still support git+ stuff
+ - req.txt format aware hash
+  - more than just whitespace
+ - pyproject req rewriting
+ - download_requirements bootstrap off prev? not worth the dl?
+  - big deps (torch) change less, probably worth it
+ - follow embedded -r automatically like pyp
+"""
 
-            out_dep_service: dict = dict(in_dep_service_dct)
-            out_services[dep_service] = out_dep_service
 
-
+##
 
-    #
 
-
+def build_requirements_hash(
+        requirements_txts: ta.Sequence[str],
+) -> str:
+    txt_file_contents: dict = {}
 
-
-
-
+    for txt_file in requirements_txts:
+        txt_file_name = os.path.basename(txt_file)
+        check.not_in(txt_file_name, txt_file_contents)
+        with open(txt_file) as f:
+            txt_contents = f.read()
+        txt_file_contents[txt_file_name] = txt_contents
 
-
+    #
 
-
+    lines = []
+    for txt_file, txt_contents in sorted(txt_file_contents.items()):
+        txt_hash = sha256_str(txt_contents)
+        lines.append(f'{txt_file}={txt_hash}')
+
+    return sha256_str('\n'.join(lines))
+
+
+##
+
+
+def download_requirements(
+        image: str,
+        requirements_dir: str,
+        requirements_txts: ta.Sequence[str],
+) -> None:
+    requirements_txt_dir = tempfile.mkdtemp()
+    with defer(lambda: shutil.rmtree(requirements_txt_dir)):
+        for rt in requirements_txts:
+            shutil.copyfile(rt, os.path.join(requirements_txt_dir, os.path.basename(rt)))
+
+        subprocesses.check_call(
+            'docker',
+            'run',
+            '--rm',
+            '-i',
+            '-v', f'{os.path.abspath(requirements_dir)}:/requirements',
+            '-v', f'{requirements_txt_dir}:/requirements_txt',
+            image,
+            'pip',
+            'download',
+            '-d', '/requirements',
+            *itertools.chain.from_iterable(
+                ['-r', f'/requirements_txt/{os.path.basename(rt)}']
+                for rt in requirements_txts
+            ),
+        )
+
+
+########################################
+# ../../../omlish/asyncs/asyncio/subprocesses.py
+
+
+##
+
+
+class AsyncioProcessCommunicator:
+    def __init__(
+            self,
+            proc: asyncio.subprocess.Process,
+            loop: ta.Optional[ta.Any] = None,
+            *,
+            log: ta.Optional[logging.Logger] = None,
+    ) -> None:
+        super().__init__()
+
+        if loop is None:
+            loop = asyncio.get_running_loop()
+
+        self._proc = proc
+        self._loop = loop
+        self._log = log
+
+        self._transport: asyncio.base_subprocess.BaseSubprocessTransport = check.isinstance(
+            proc._transport,  # type: ignore  # noqa
+            asyncio.base_subprocess.BaseSubprocessTransport,
+        )
+
+    @property
+    def _debug(self) -> bool:
+        return self._loop.get_debug()
+
+    async def _feed_stdin(self, input: bytes) -> None:  # noqa
+        stdin = check.not_none(self._proc.stdin)
+        try:
+            if input is not None:
+                stdin.write(input)
+                if self._debug and self._log is not None:
+                    self._log.debug('%r communicate: feed stdin (%s bytes)', self, len(input))
+
+            await stdin.drain()
+
+        except (BrokenPipeError, ConnectionResetError) as exc:
+            # communicate() ignores BrokenPipeError and ConnectionResetError. write() and drain() can raise these
+            # exceptions.
+            if self._debug and self._log is not None:
+                self._log.debug('%r communicate: stdin got %r', self, exc)
+
+        if self._debug and self._log is not None:
+            self._log.debug('%r communicate: close stdin', self)
+
+        stdin.close()
+
+    async def _noop(self) -> None:
+        return None
+
+    async def _read_stream(self, fd: int) -> bytes:
+        transport: ta.Any = check.not_none(self._transport.get_pipe_transport(fd))
+
+        if fd == 2:
+            stream = check.not_none(self._proc.stderr)
+        else:
+            check.equal(fd, 1)
+            stream = check.not_none(self._proc.stdout)
+
+        if self._debug and self._log is not None:
+            name = 'stdout' if fd == 1 else 'stderr'
+            self._log.debug('%r communicate: read %s', self, name)
+
+        output = await stream.read()
+
+        if self._debug and self._log is not None:
+            name = 'stdout' if fd == 1 else 'stderr'
+            self._log.debug('%r communicate: close %s', self, name)
+
+        transport.close()
+
+        return output
+
+    class Communication(ta.NamedTuple):
+        stdout: ta.Optional[bytes]
+        stderr: ta.Optional[bytes]
+
+    async def _communicate(
+            self,
+            input: ta.Any = None,  # noqa
+    ) -> Communication:
+        stdin_fut: ta.Any
+        if self._proc.stdin is not None:
+            stdin_fut = self._feed_stdin(input)
+        else:
+            stdin_fut = self._noop()
+
+        stdout_fut: ta.Any
+        if self._proc.stdout is not None:
+            stdout_fut = self._read_stream(1)
+        else:
+            stdout_fut = self._noop()
+
+        stderr_fut: ta.Any
+        if self._proc.stderr is not None:
+            stderr_fut = self._read_stream(2)
+        else:
+            stderr_fut = self._noop()
+
+        stdin_res, stdout_res, stderr_res = await asyncio.gather(stdin_fut, stdout_fut, stderr_fut)
+
+        await self._proc.wait()
+
+        return AsyncioProcessCommunicator.Communication(stdout_res, stderr_res)
+
+    async def communicate(
+            self,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+    ) -> Communication:
+        return await asyncio_maybe_timeout(self._communicate(input), timeout)
+
+
+##
+
+
+class AsyncioSubprocesses(AbstractAsyncSubprocesses):
+    async def communicate(
+            self,
+            proc: asyncio.subprocess.Process,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+    ) -> ta.Tuple[ta.Optional[bytes], ta.Optional[bytes]]:
+        return await AsyncioProcessCommunicator(proc).communicate(input, timeout)  # noqa
+
+    #
+
+    @contextlib.asynccontextmanager
+    async def popen(
+            self,
+            *cmd: str,
+            shell: bool = False,
+            timeout: ta.Optional[float] = None,
+            **kwargs: ta.Any,
+    ) -> ta.AsyncGenerator[asyncio.subprocess.Process, None]:
+        fac: ta.Any
+        if shell:
+            fac = functools.partial(
+                asyncio.create_subprocess_shell,
+                check.single(cmd),
+            )
+        else:
+            fac = functools.partial(
+                asyncio.create_subprocess_exec,
+                *cmd,
+            )
+
+        with self.prepare_and_wrap( *cmd, shell=shell, **kwargs) as (cmd, kwargs):  # noqa
+            proc: asyncio.subprocess.Process = await fac(**kwargs)
+            try:
+                yield proc
+
+            finally:
+                await asyncio_maybe_timeout(proc.wait(), timeout)
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class RunOutput:
+        proc: asyncio.subprocess.Process
+        stdout: ta.Optional[bytes]
+        stderr: ta.Optional[bytes]
+
+    async def run(
+            self,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,  # noqa
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> RunOutput:
+        if capture_output:
+            kwargs.setdefault('stdout', subprocess.PIPE)
+            kwargs.setdefault('stderr', subprocess.PIPE)
+
+        proc: asyncio.subprocess.Process
+        async with self.popen(*cmd, **kwargs) as proc:
+            stdout, stderr = await self.communicate(proc, input, timeout)
+
+        if check and proc.returncode:
+            raise subprocess.CalledProcessError(
+                proc.returncode,
+                cmd,
+                output=stdout,
+                stderr=stderr,
+            )
+
+        return self.RunOutput(
+            proc,
+            stdout,
+            stderr,
+        )
+
+    #
+
+    async def check_call(
+            self,
+            *cmd: str,
+            stdout: ta.Any = sys.stderr,
+            **kwargs: ta.Any,
+    ) -> None:
+        with self.prepare_and_wrap(*cmd, stdout=stdout, check=True, **kwargs) as (cmd, kwargs):  # noqa
+            await self.run(*cmd, **kwargs)
+
+    async def check_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bytes:
+        with self.prepare_and_wrap(*cmd, stdout=subprocess.PIPE, check=True, **kwargs) as (cmd, kwargs):  # noqa
+            return check.not_none((await self.run(*cmd, **kwargs)).stdout)
+
+
+asyncio_subprocesses = AsyncioSubprocesses()
+
+
+########################################
+# ../compose.py
+"""
+TODO:
+ - fix rmi - only when not referenced anymore
+"""
+
+
+##
+
+
+def get_compose_service_dependencies(
+        compose_file: str,
+        service: str,
+) -> ta.Dict[str, str]:
+    compose_dct = read_yaml_file(compose_file)
+
+    services = compose_dct['services']
+    service_dct = services[service]
+
+    out = {}
+    for dep_service in service_dct.get('depends_on', []):
+        dep_service_dct = services[dep_service]
+        out[dep_service] = dep_service_dct['image']
+
+    return out
+
+
+##
+
+
+class DockerComposeRun(AsyncExitStacked):
+    @dc.dataclass(frozen=True)
+    class Config:
+        compose_file: str
+        service: str
+
+        image: str
+
+        cmd: ShellCmd
+
+        #
+
+        run_options: ta.Optional[ta.Sequence[str]] = None
+
+        cwd: ta.Optional[str] = None
+
+        #
+
+        no_dependencies: bool = False
+        no_dependency_cleanup: bool = False
+
+        #
+
+        def __post_init__(self) -> None:
+            check.not_isinstance(self.run_options, str)
+
+    def __init__(self, cfg: Config) -> None:
+        super().__init__()
+
+        self._cfg = cfg
+
+        self._subprocess_kwargs = {
+            **(dict(cwd=self._cfg.cwd) if self._cfg.cwd is not None else {}),
+        }
+
+    #
+
+    def _rewrite_compose_dct(self, in_dct: ta.Dict[str, ta.Any]) -> ta.Dict[str, ta.Any]:
+        out = dict(in_dct)
+
+        #
+
+        in_services = in_dct['services']
+        out['services'] = out_services = {}
+
+        #
+
+        in_service: dict = in_services[self._cfg.service]
+        out_services[self._cfg.service] = out_service = dict(in_service)
+
+        out_service['image'] = self._cfg.image
+
+        for k in ['build', 'platform']:
+            if k in out_service:
+                del out_service[k]
+
+        out_service['links'] = [
+            f'{l}:{l}' if ':' not in l else l
+            for l in out_service.get('links', [])
+        ]
+
+        #
+
+        if not self._cfg.no_dependencies:
+            depends_on = in_service.get('depends_on', [])
+
+            for dep_service, in_dep_service_dct in list(in_services.items()):
+                if dep_service not in depends_on:
+                    continue
+
+                out_dep_service: dict = dict(in_dep_service_dct)
+                out_services[dep_service] = out_dep_service
+
+                out_dep_service['ports'] = []
+
+        else:
+            out_service['depends_on'] = []
+            out_service['links'] = []
+
+        #
+
+        return out
+
+    @cached_nullary
+    def rewrite_compose_file(self) -> str:
+        in_dct = read_yaml_file(self._cfg.compose_file)
+
+        out_dct = self._rewrite_compose_dct(in_dct)
+
+        #
 
         out_compose_file = make_temp_file()
         self._enter_context(defer(lambda: os.unlink(out_compose_file)))  # noqa
@@ -2438,22 +2984,20 @@ class DockerComposeRun(ExitStacked):
 
     #
 
-    def _cleanup_dependencies(self) -> None:
-        subprocesses.check_call(
+    async def _cleanup_dependencies(self) -> None:
+        await asyncio_subprocesses.check_call(
             'docker',
             'compose',
             '-f', self.rewrite_compose_file(),
             'down',
         )
 
-    def run(self) -> None:
-        self.tag_image()
-
+    async def run(self) -> None:
         compose_file = self.rewrite_compose_file()
 
-        with contextlib.ExitStack() as es:
-            if not self._cfg.no_dependency_cleanup:
-                es.enter_context(defer(self._cleanup_dependencies))  # noqa
+        async with contextlib.AsyncExitStack() as es:
+            if not (self._cfg.no_dependencies or self._cfg.no_dependency_cleanup):
+                await es.enter_async_context(adefer(self._cleanup_dependencies))  # noqa
 
             sh_cmd = ' '.join([
                 'docker',
@@ -2461,7 +3005,10 @@ class DockerComposeRun(ExitStacked):
                 '-f', compose_file,
                 'run',
                 '--rm',
-                *itertools.chain.from_iterable(
+                *itertools.chain.from_iterable(
+                    ['-e', k]
+                    for k in (self._cfg.cmd.env or [])
+                ),
                 *(self._cfg.run_options or []),
                 self._cfg.service,
                 'sh', '-c', shlex.quote(self._cfg.cmd.s),
@@ -2469,8 +3016,8 @@ class DockerComposeRun(ExitStacked):
 
             run_cmd = dc.replace(self._cfg.cmd, s=sh_cmd)
 
-            run_cmd.run(
-                subprocesses.check_call,
+            await run_cmd.run(
+                asyncio_subprocesses.check_call,
                 **self._subprocess_kwargs,
             )
 
@@ -2522,8 +3069,8 @@ def read_docker_tar_image_id(tar_file: str) -> str:
 ##
 
 
-def is_docker_image_present(image: str) -> bool:
-    out = subprocesses.check_output(
+async def is_docker_image_present(image: str) -> bool:
+    out = await asyncio_subprocesses.check_output(
         'docker',
         'images',
         '--format', 'json',
@@ -2538,55 +3085,74 @@ def is_docker_image_present(image: str) -> bool:
     return True
 
 
-def pull_docker_image(
+async def pull_docker_image(
         image: str,
 ) -> None:
-    subprocesses.check_call(
+    await asyncio_subprocesses.check_call(
         'docker',
         'pull',
         image,
     )
 
 
-def build_docker_image(
+async def build_docker_image(
         docker_file: str,
         *,
+        tag: ta.Optional[str] = None,
         cwd: ta.Optional[str] = None,
 ) -> str:
     id_file = make_temp_file()
     with defer(lambda: os.unlink(id_file)):
-        subprocesses.check_call(
+        await asyncio_subprocesses.check_call(
             'docker',
             'build',
             '-f', os.path.abspath(docker_file),
             '--iidfile', id_file,
             '--squash',
+            *(['--tag', tag] if tag is not None else []),
             '.',
             **(dict(cwd=cwd) if cwd is not None else {}),
         )
 
-        with open(id_file) as f:
+        with open(id_file) as f:  # noqa
            image_id = check.single(f.read().strip().splitlines()).strip()
 
    return image_id
 
 
+async def tag_docker_image(image: str, tag: str) -> None:
+    await asyncio_subprocesses.check_call(
+        'docker',
+        'tag',
+        image,
+        tag,
+    )
+
+
+async def delete_docker_tag(tag: str) -> None:
+    await asyncio_subprocesses.check_call(
+        'docker',
+        'rmi',
+        tag,
+    )
+
+
 ##
 
 
-def save_docker_tar_cmd(
+async def save_docker_tar_cmd(
         image: str,
         output_cmd: ShellCmd,
 ) -> None:
     cmd = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
-    cmd.run(subprocesses.check_call)
+    await cmd.run(asyncio_subprocesses.check_call)
 
 
-def save_docker_tar(
+async def save_docker_tar(
         image: str,
         tar_file: str,
 ) -> None:
-    return save_docker_tar_cmd(
+    return await save_docker_tar_cmd(
         image,
         ShellCmd(f'cat > {shlex.quote(tar_file)}'),
     )
@@ -2595,276 +3161,187 @@ def save_docker_tar(
 #
 
 
-def load_docker_tar_cmd(
-        input_cmd: ShellCmd,
-) -> str:
-    cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')
-
-    out = cmd.run(subprocesses.check_output).decode()
-
-    line = check.single(out.strip().splitlines())
-    loaded = line.partition(':')[2].strip()
-    return loaded
-
-
-def load_docker_tar(
-        tar_file: str,
-) -> str:
-    return load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
-
-
-########################################
-# ../github/cache.py
-
-
-##
-
-
-class GithubV1CacheShellClient:
-    BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
-    AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa
-
-    def __init__(
-            self,
-            *,
-            base_url: ta.Optional[str] = None,
-            auth_token: ta.Optional[str] = None,
-    ) -> None:
-        super().__init__()
-
-        if base_url is None:
-            base_url = os.environ[self.BASE_URL_ENV_KEY]
-        self._base_url = check.non_empty_str(base_url)
-
-        if auth_token is None:
-            auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
-        self._auth_token = auth_token
-
-        self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)
-
-    #
-
-    _MISSING = object()
-
-    def build_headers(
-            self,
-            *,
-            auth_token: ta.Any = _MISSING,
-            content_type: ta.Optional[str] = None,
-    ) -> ta.Dict[str, str]:
-        dct = {
-            'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
-        }
-
-        if auth_token is self._MISSING:
-            auth_token = self._auth_token
-        if auth_token:
-            dct['Authorization'] = f'Bearer {auth_token}'
-
-        if content_type is not None:
-            dct['Content-Type'] = content_type
+async def load_docker_tar_cmd(
+        input_cmd: ShellCmd,
+) -> str:
+    cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')
 
-
+    out = (await cmd.run(asyncio_subprocesses.check_output)).decode()
 
-
+    line = check.single(out.strip().splitlines())
+    loaded = line.partition(':')[2].strip()
+    return loaded
 
-    HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN'  # noqa
 
-
-
-
-
-            *,
-            json_content: bool = False,
-            content_type: ta.Optional[str] = None,
-    ) -> ShellCmd:
-        if content_type is None and json_content:
-            content_type = 'application/json'
+async def load_docker_tar(
+        tar_file: str,
+) -> str:
+    return await load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
 
-        env = {}
 
-
-
-            env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
-            header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
-        else:
-            header_auth_token = None
+########################################
+# ../github/cache.py
 
-        hdrs = self.build_headers(
-            auth_token=header_auth_token,
-            content_type=content_type,
-        )
 
-
+##
 
-        cmd = ' '.join([
-            'curl',
-            '-s',
-            '-X', method,
-            url,
-            *[f'-H "{k}: {v}"' for k, v in hdrs.items()],
-        ])
 
-
-
-
-        )
+class GithubCacheShellClient(abc.ABC):
+    class Entry(abc.ABC):  # noqa
+        pass
 
-
-
-
-            obj: ta.Any,
-            **kwargs: ta.Any,
-    ) -> ShellCmd:
-        curl_cmd = self.build_curl_cmd(
-            'POST',
-            url,
-            json_content=True,
-            **kwargs,
-        )
+    @abc.abstractmethod
+    def run_get_entry(self, key: str) -> ta.Optional[Entry]:
+        raise NotImplementedError
 
-
+    @abc.abstractmethod
+    def download_get_entry(self, entry: Entry, out_file: str) -> None:
+        raise NotImplementedError
 
-
+    @abc.abstractmethod
+    def upload_cache_entry(self, key: str, in_file: str) -> None:
+        raise NotImplementedError
 
-    #
 
-
-    class CurlError(RuntimeError):
-        status_code: int
-        body: ta.Optional[bytes]
+#
 
-        def __str__(self) -> str:
-            return repr(self)
 
-
-
-
-        body: ta.Optional[bytes]
+class GithubCacheServiceV1ShellClient(GithubCacheShellClient):
+    BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
+    AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa
 
-
-
-
-
-
+    KEY_SUFFIX_ENV_KEY = 'GITHUB_RUN_ID'
+
+    CACHE_VERSION: ta.ClassVar[int] = 1
+
+    #
 
-    def run_curl_cmd(
+    def __init__(
             self,
-            cmd: ShellCmd,
             *,
-
-
-        out_file = make_temp_file()
-        with defer(lambda: os.unlink(out_file)):
-            run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
+            base_url: ta.Optional[str] = None,
+            auth_token: ta.Optional[str] = None,
 
-
+            key_prefix: ta.Optional[str] = None,
+            key_suffix: ta.Optional[str] = None,
+    ) -> None:
+        super().__init__()
 
-
-            status_code = check.isinstance(out_json['response_code'], int)
+        #
 
-
-
+        if base_url is None:
+            base_url = os.environ[self.BASE_URL_ENV_KEY]
+        service_url = GithubCacheServiceV1.get_service_url(base_url)
 
-
-
-                body=body,
-            )
+        if auth_token is None:
+            auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
 
-
-
+        self._curl = GithubServiceCurlClient(
+            service_url,
+            auth_token,
+            api_version=GithubCacheServiceV1.API_VERSION,
+        )
 
-
+        #
 
-
-            self,
-            cmd: ShellCmd,
-            *,
-            success_status_codes: ta.Optional[ta.Container[int]] = None,
-    ) -> ta.Optional[ta.Any]:
-        result = self.run_curl_cmd(cmd, raise_=True)
+        self._key_prefix = key_prefix
 
-        if success_status_codes is not None:
-
-
-            is_success = 200 <= result.status_code < 300
+        if key_suffix is None:
+            key_suffix = os.environ[self.KEY_SUFFIX_ENV_KEY]
+        self._key_suffix = check.non_empty_str(key_suffix)
 
-
-            if not (body := result.body):
-                return None
-            return json.loads(body.decode('utf-8-sig'))
+    #
 
-
-            return None
+    KEY_PART_SEPARATOR = '--'
 
-
-
+    def fix_key(self, s: str) -> str:
+        return self.KEY_PART_SEPARATOR.join([
+            *([self._key_prefix] if self._key_prefix else []),
+            s,
+            self._key_suffix,
+        ])
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class Entry(GithubCacheShellClient.Entry):
+        artifact: GithubCacheServiceV1.ArtifactCacheEntry
 
     #
 
     def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
-
+        fixed_key = self.fix_key(key)
+
+        qp = dict(
+            keys=fixed_key,
+            version=str(self.CACHE_VERSION),
+        )
+
+        return self._curl.build_cmd(
             'GET',
-
+            shlex.quote('?'.join([
+                'cache',
+                '&'.join([
+                    f'{k}={urllib.parse.quote_plus(v)}'
+                    for k, v in qp.items()
+                ]),
+            ])),
         )
 
-    def run_get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1.ArtifactCacheEntry]:
-
+    def run_get_entry(self, key: str) -> ta.Optional[Entry]:
+        fixed_key = self.fix_key(key)
+        curl_cmd = self.build_get_entry_curl_cmd(fixed_key)
 
-        obj = self.
+        obj = self._curl.run_json_cmd(
             curl_cmd,
             success_status_codes=[200, 204],
         )
         if obj is None:
             return None
 
-        return GithubCacheServiceV1.dataclass_from_json(
+        return self.Entry(GithubCacheServiceV1.dataclass_from_json(
             GithubCacheServiceV1.ArtifactCacheEntry,
             obj,
-        )
+        ))
 
     #
 
-    def build_download_get_entry_cmd(
-            self,
-            entry: GithubCacheServiceV1.ArtifactCacheEntry,
-            out_file: str,
-    ) -> ShellCmd:
+    def build_download_get_entry_cmd(self, entry: Entry, out_file: str) -> ShellCmd:
         return ShellCmd(' '.join([
             'aria2c',
             '-x', '4',
             '-o', out_file,
-            check.non_empty_str(entry.archive_location),
+            check.non_empty_str(entry.artifact.archive_location),
         ]))
 
-    def download_get_entry(
-            self,
-            entry: GithubCacheServiceV1.ArtifactCacheEntry,
-            out_file: str,
-    ) -> None:
-        dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
+    def download_get_entry(self, entry: GithubCacheShellClient.Entry, out_file: str) -> None:
+        dl_cmd = self.build_download_get_entry_cmd(
+            check.isinstance(entry, GithubCacheServiceV1ShellClient.Entry),
+            out_file,
+        )
         dl_cmd.run(subprocesses.check_call)
 
     #
 
-    def upload_cache_entry(
-            self,
-            key: str,
-            in_file: str,
-    ) -> None:
+    def upload_cache_entry(self, key: str, in_file: str) -> None:
+        fixed_key = self.fix_key(key)
+
         check.state(os.path.isfile(in_file))
 
         file_size = os.stat(in_file).st_size
 
+        #
+
         reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
-            key=key,
+            key=fixed_key,
             cache_size=file_size,
+            version=str(self.CACHE_VERSION),
         )
-        reserve_cmd = self.
+        reserve_cmd = self._curl.build_post_json_cmd(
             'caches',
             GithubCacheServiceV1.dataclass_to_json(reserve_req),
         )
-        reserve_resp_obj: ta.Any = check.not_none(self.
+        reserve_resp_obj: ta.Any = check.not_none(self._curl.run_json_cmd(
             reserve_cmd,
             success_status_codes=[201],
         ))
@@ -2872,8 +3349,66 @@ class GithubV1CacheShellClient:
             GithubCacheServiceV1.ReserveCacheResponse,
             reserve_resp_obj,
         )
+        cache_id = check.isinstance(reserve_resp.cache_id, int)
 
-
+        #
+
+        tmp_file = make_temp_file()
+
+        print(f'{file_size=}')
+        num_written = 0
+        chunk_size = 32 * 1024 * 1024
+        for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
+            ofs = i * chunk_size
+            sz = min(chunk_size, file_size - ofs)
+
+            patch_cmd = self._curl.build_cmd(
+                'PATCH',
+                f'caches/{cache_id}',
+                content_type='application/octet-stream',
+                headers={
+                    'Content-Range': f'bytes {ofs}-{ofs + sz - 1}/*',
+                },
+            )
+
+            #
+
+            # patch_data_cmd = dc.replace(patch_cmd, s=' | '.join([
+            #     f'dd if={in_file} bs={chunk_size} skip={i} count=1 status=none',
+            #     f'{patch_cmd.s} --data-binary -',
+            # ]))
+            # print(f'{patch_data_cmd.s=}')
+            # patch_result = self._curl.run_cmd(patch_data_cmd, raise_=True)
+
+            #
+
+            with open(in_file, 'rb') as f:
+                f.seek(ofs)
+                buf = f.read(sz)
+            with open(tmp_file, 'wb') as f:
+                f.write(buf)
+            num_written += len(buf)
+            print(f'{num_written=}')
+            patch_data_cmd = dc.replace(patch_cmd, s=f'{patch_cmd.s} --data-binary @{tmp_file}')
+            print(f'{patch_data_cmd.s=}')
+            patch_result = self._curl.run_cmd(patch_data_cmd, raise_=True)
+
+            #
+
+            check.equal(patch_result.status_code, 204)
+            ofs += sz
+
+        #
+
+        commit_req = GithubCacheServiceV1.CommitCacheRequest(
+            size=file_size,
+        )
+        commit_cmd = self._curl.build_post_json_cmd(
+            f'caches/{cache_id}',
+            GithubCacheServiceV1.dataclass_to_json(commit_req),
+        )
+        commit_result = self._curl.run_cmd(commit_cmd, raise_=True)
+        check.equal(commit_result.status_code, 204)
 
 
 ##
@@ -2884,15 +3419,15 @@ class GithubShellCache(ShellCache):
             self,
             dir: str,  # noqa
             *,
-            client: ta.Optional[GithubV1CacheShellClient] = None,
+            client: ta.Optional[GithubCacheShellClient] = None,
     ) -> None:
         super().__init__()
 
         self._dir = check.not_none(dir)
 
         if client is None:
-            client = GithubV1CacheShellClient()
-        self._client = client
+            client = GithubCacheServiceV1ShellClient()
+        self._client: GithubCacheShellClient = client
 
         self._local = DirectoryFileCache(self._dir)
 
@@ -2954,82 +3489,11 @@ class GithubShellCache(ShellCache):
         )
 
 
-########################################
-# ../requirements.py
-"""
-TODO:
- - pip compile lol
-  - but still support git+ stuff
- - req.txt format aware hash
-  - more than just whitespace
- - pyproject req rewriting
- - download_requirements bootstrap off prev? not worth the dl?
-  - big deps (torch) change less, probably worth it
- - follow embedded -r automatically like pyp
-"""
-
-
-##
-
-
-def build_requirements_hash(
-        requirements_txts: ta.Sequence[str],
-) -> str:
-    txt_file_contents: dict = {}
-
-    for txt_file in requirements_txts:
-        txt_file_name = os.path.basename(txt_file)
-        check.not_in(txt_file_name, txt_file_contents)
-        with open(txt_file) as f:
-            txt_contents = f.read()
-        txt_file_contents[txt_file_name] = txt_contents
-
-    #
-
-    lines = []
-    for txt_file, txt_contents in sorted(txt_file_contents.items()):
-        txt_hash = sha256_str(txt_contents)
-        lines.append(f'{txt_file}={txt_hash}')
-
-    return sha256_str('\n'.join(lines))
-
-
-##
-
-
-def download_requirements(
-        image: str,
-        requirements_dir: str,
-        requirements_txts: ta.Sequence[str],
-) -> None:
-    requirements_txt_dir = tempfile.mkdtemp()
-    with defer(lambda: shutil.rmtree(requirements_txt_dir)):
-        for rt in requirements_txts:
-            shutil.copyfile(rt, os.path.join(requirements_txt_dir, os.path.basename(rt)))
-
-        subprocesses.check_call(
-            'docker',
-            'run',
-            '--rm',
-            '-i',
-            '-v', f'{os.path.abspath(requirements_dir)}:/requirements',
-            '-v', f'{requirements_txt_dir}:/requirements_txt',
-            image,
-            'pip',
-            'download',
-            '-d', '/requirements',
-            *itertools.chain.from_iterable([
-                ['-r', f'/requirements_txt/{os.path.basename(rt)}']
-                for rt in requirements_txts
-            ]),
-        )
-
-
 ########################################
 # ../ci.py
 
 
-class Ci(ExitStacked):
+class Ci(AsyncExitStacked):
     FILE_NAME_HASH_LEN = 16
 
     @dc.dataclass(frozen=True)
@@ -3046,6 +3510,9 @@ class Ci(ExitStacked):
         requirements_txts: ta.Optional[ta.Sequence[str]] = None
 
         always_pull: bool = False
+        always_build: bool = False
+
+        no_dependencies: bool = False
 
         def __post_init__(self) -> None:
            check.not_isinstance(self.requirements_txts, str)
@@ -3065,7 +3532,7 @@ class Ci(ExitStacked):
 
     #
 
-    def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
+    async def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
         if self._shell_cache is None:
             return None
 
@@ -3075,9 +3542,9 @@ class Ci(ExitStacked):
 
         get_cache_cmd = dc.replace(get_cache_cmd, s=f'{get_cache_cmd.s} | zstd -cd --long')  # noqa
 
-        return load_docker_tar_cmd(get_cache_cmd)
+        return await load_docker_tar_cmd(get_cache_cmd)
 
-    def _save_cache_docker_image(self, key: str, image: str) -> None:
+    async def _save_cache_docker_image(self, key: str, image: str) -> None:
         if self._shell_cache is None:
             return
 
@@ -3086,12 +3553,12 @@ class Ci(ExitStacked):
 
         put_cache_cmd = dc.replace(put_cache_cmd, s=f'zstd | {put_cache_cmd.s}')
 
-        save_docker_tar_cmd(image, put_cache_cmd)
+        await save_docker_tar_cmd(image, put_cache_cmd)
 
     #
 
-    def _load_docker_image(self, image: str) -> None:
-        if not self._cfg.always_pull and is_docker_image_present(image):
+    async def _load_docker_image(self, image: str) -> None:
+        if not self._cfg.always_pull and (await is_docker_image_present(image)):
             return
 
         dep_suffix = image
@@ -3099,63 +3566,79 @@ class Ci(ExitStacked):
             dep_suffix = dep_suffix.replace(c, '-')
 
         cache_key = f'docker-{dep_suffix}'
-        if self._load_cache_docker_image(cache_key) is not None:
+        if (await self._load_cache_docker_image(cache_key)) is not None:
             return
 
-        pull_docker_image(image)
+        await pull_docker_image(image)
 
-        self._save_cache_docker_image(cache_key, image)
+        await self._save_cache_docker_image(cache_key, image)
 
-    def load_docker_image(self, image: str) -> None:
+    async def load_docker_image(self, image: str) -> None:
         with log_timing_context(f'Load docker image: {image}'):
-            self._load_docker_image(image)
+            await self._load_docker_image(image)
 
-    @cached_nullary
-    def load_compose_service_dependencies(self) -> None:
+    @async_cached_nullary
+    async def load_compose_service_dependencies(self) -> None:
         deps = get_compose_service_dependencies(
             self._cfg.compose_file,
             self._cfg.service,
         )
 
         for dep_image in deps.values():
-            self.load_docker_image(dep_image)
+            await self.load_docker_image(dep_image)
 
     #
 
-
-
+    @cached_nullary
+    def docker_file_hash(self) -> str:
+        return build_docker_file_hash(self._cfg.docker_file)[:self.FILE_NAME_HASH_LEN]
+
+    async def _resolve_ci_image(self) -> str:
+        cache_key = f'ci-{self.docker_file_hash()}'
+        image_tag = f'{self._cfg.service}:{cache_key}'
+
+        if not self._cfg.always_build and (await is_docker_image_present(image_tag)):
+            return image_tag
 
-        cache_key
-
-
+        if (cache_image_id := await self._load_cache_docker_image(cache_key)) is not None:
+            await tag_docker_image(
+                cache_image_id,
+                image_tag,
+            )
+            return image_tag
 
-        image_id = build_docker_image(
+        image_id = await build_docker_image(
             self._cfg.docker_file,
+            tag=image_tag,
             cwd=self._cfg.project_dir,
         )
 
-        self._save_cache_docker_image(cache_key, image_id)
+        await self._save_cache_docker_image(cache_key, image_id)
 
-        return image_id
+        return image_tag
 
-    @cached_nullary
-    def resolve_ci_image(self) -> str:
+    @async_cached_nullary
+    async def resolve_ci_image(self) -> str:
         with log_timing_context('Resolve ci image') as ltc:
-            image_id = self._resolve_ci_image()
+            image_id = await self._resolve_ci_image()
             ltc.set_description(f'Resolve ci image: {image_id}')
             return image_id
 
     #
 
-
-
+    @cached_nullary
+    def requirements_txts(self) -> ta.Sequence[str]:
+        return [
             os.path.join(self._cfg.project_dir, rf)
             for rf in check.not_none(self._cfg.requirements_txts)
         ]
 
-
+    @cached_nullary
+    def requirements_hash(self) -> str:
+        return build_requirements_hash(self.requirements_txts())[:self.FILE_NAME_HASH_LEN]
 
-
+    async def _resolve_requirements_dir(self) -> str:
+        tar_file_key = f'requirements-{self.docker_file_hash()}-{self.requirements_hash()}'
         tar_file_name = f'{tar_file_key}.tar'
 
         temp_dir = tempfile.mkdtemp()
@@ -3171,9 +3654,9 @@ class Ci(ExitStacked):
             os.makedirs(temp_requirements_dir)
 
             download_requirements(
-                self.resolve_ci_image(),
+                await self.resolve_ci_image(),
                 temp_requirements_dir,
-                requirements_txts,
+                self.requirements_txts(),
             )
 
             if self._file_cache is not None:
@@ -3190,16 +3673,16 @@ class Ci(ExitStacked):
 
         return temp_requirements_dir
 
-    @cached_nullary
-    def resolve_requirements_dir(self) -> str:
+    @async_cached_nullary
+    async def resolve_requirements_dir(self) -> str:
         with log_timing_context('Resolve requirements dir') as ltc:
-            requirements_dir = self._resolve_requirements_dir()
+            requirements_dir = await self._resolve_requirements_dir()
             ltc.set_description(f'Resolve requirements dir: {requirements_dir}')
             return requirements_dir
 
     #
 
-    def _run_compose_(self) -> None:
+    async def _run_compose_(self) -> None:
         setup_cmds = [
             'pip install --root-user-action ignore --find-links /requirements --no-index uv',
             (
@@ -3217,37 +3700,39 @@ class Ci(ExitStacked):
 
         #
 
-        with DockerComposeRun(DockerComposeRun.Config(
+        async with DockerComposeRun(DockerComposeRun.Config(
             compose_file=self._cfg.compose_file,
             service=self._cfg.service,
 
-            image=self.resolve_ci_image(),
+            image=await self.resolve_ci_image(),
 
             cmd=ci_cmd,
 
             run_options=[
                 '-v', f'{os.path.abspath(self._cfg.project_dir)}:/project',
-                '-v', f'{os.path.abspath(self.resolve_requirements_dir())}:/requirements',
+                '-v', f'{os.path.abspath(await self.resolve_requirements_dir())}:/requirements',
             ],
 
             cwd=self._cfg.project_dir,
+
+            no_dependencies=self._cfg.no_dependencies,
         )) as ci_compose_run:
-            ci_compose_run.run()
+            await ci_compose_run.run()
 
-    def _run_compose(self) -> None:
+    async def _run_compose(self) -> None:
         with log_timing_context('Run compose'):
-            self._run_compose_()
+            await self._run_compose_()
 
     #
 
-    def run(self) -> None:
-        self.load_compose_service_dependencies()
+    async def run(self) -> None:
+        await self.load_compose_service_dependencies()
 
-        self.resolve_ci_image()
+        await self.resolve_ci_image()
 
-        self.resolve_requirements_dir()
+        await self.resolve_requirements_dir()
 
-        self._run_compose()
+        await self._run_compose()
 
 
 ########################################
@@ -3263,7 +3748,7 @@ class GithubCli(ArgparseCli):
         argparse_arg('key'),
     )
     def get_cache_entry(self) -> None:
-        shell_client = GithubV1CacheShellClient()
+        shell_client = GithubCacheServiceV1ShellClient()
         entry = shell_client.run_get_entry(self.args.key)
         if entry is None:
             return
@@ -3322,18 +3807,21 @@ class CiCli(ArgparseCli):
         argparse_arg('--docker-file'),
         argparse_arg('--compose-file'),
         argparse_arg('-r', '--requirements-txt', action='append'),
+
         argparse_arg('--github-cache', action='store_true'),
         argparse_arg('--cache-dir'),
+
         argparse_arg('--always-pull', action='store_true'),
+        argparse_arg('--always-build', action='store_true'),
+
+        argparse_arg('--no-dependencies', action='store_true'),
     )
     async def run(self) -> None:
         project_dir = self.args.project_dir
         docker_file = self.args.docker_file
         compose_file = self.args.compose_file
-        service = self.args.service
         requirements_txts = self.args.requirements_txt
         cache_dir = self.args.cache_dir
-        always_pull = self.args.always_pull
 
         #
 
@@ -3358,10 +3846,16 @@ class CiCli(ArgparseCli):
         check.state(os.path.isfile(docker_file))
 
         if compose_file is None:
-            compose_file = find_alt_file(
-                '
-
-
+            compose_file = find_alt_file(*[
+                f'{f}.{x}'
+                for f in [
+                    'docker/docker-compose',
+                    'docker/compose',
+                    'docker-compose',
+                    'compose',
+                ]
+                for x in ['yaml', 'yml']
+            ])
         check.state(os.path.isfile(compose_file))
 
         if not requirements_txts:
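Note: the rewritten compose-file lookup generates eight candidates, tried in `find_alt_file`'s argument order; equivalently:

    candidates = [
        f'{f}.{x}'
        for f in ['docker/docker-compose', 'docker/compose', 'docker-compose', 'compose']
        for x in ['yaml', 'yml']
    ]
    # ['docker/docker-compose.yaml', 'docker/docker-compose.yml', ..., 'compose.yml']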
@@ -3397,14 +3891,14 @@ class CiCli(ArgparseCli):
 
         #
 
-        with Ci(
+        async with Ci(
             Ci.Config(
                 project_dir=project_dir,
 
                 docker_file=docker_file,
 
                 compose_file=compose_file,
-                service=service,
+                service=self.args.service,
 
                 requirements_txts=requirements_txts,
 
@@ -3413,12 +3907,15 @@ class CiCli(ArgparseCli):
                     'python3 -m pytest -svv test.py',
                 ])),
 
-                always_pull=always_pull,
+                always_pull=self.args.always_pull,
+                always_build=self.args.always_build,
+
+                no_dependencies=self.args.no_dependencies,
             ),
             file_cache=file_cache,
             shell_cache=shell_cache,
         ) as ci:
-            ci.run()
+            await ci.run()
 
 
 async def _async_main() -> ta.Optional[int]: