omdev 0.0.0.dev212__py3-none-any.whl → 0.0.0.dev214__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omdev/.manifests.json +1 -1
- omdev/cc/cdeps.py +34 -1
- omdev/cc/cdeps.toml +19 -2
- omdev/cc/cli.py +13 -1
- omdev/ci/__init__.py +1 -0
- omdev/ci/cache.py +100 -121
- omdev/ci/ci.py +161 -136
- omdev/ci/cli.py +62 -30
- omdev/ci/compose.py +26 -62
- omdev/ci/consts.py +1 -0
- omdev/ci/docker.py +39 -22
- omdev/ci/github/{cacheapi.py → api.py} +1 -2
- omdev/ci/github/bootstrap.py +8 -1
- omdev/ci/github/cache.py +36 -320
- omdev/ci/github/cli.py +9 -5
- omdev/ci/github/client.py +492 -0
- omdev/ci/github/env.py +21 -0
- omdev/ci/requirements.py +0 -1
- omdev/ci/shell.py +0 -1
- omdev/ci/utils.py +2 -14
- omdev/git/shallow.py +1 -1
- omdev/scripts/ci.py +1602 -887
- omdev/scripts/interp.py +23 -0
- omdev/scripts/pyproject.py +23 -0
- omdev/tokens/tokenizert.py +1 -3
- omdev/tools/docker.py +6 -0
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev214.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev214.dist-info}/RECORD +32 -29
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev214.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev214.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev214.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev212.dist-info → omdev-0.0.0.dev214.dist-info}/top_level.txt +0 -0
omdev/ci/github/cache.py
CHANGED
@@ -1,355 +1,71 @@
 # ruff: noqa: UP006 UP007
-
-import dataclasses as dc
-import json
-import os
-import shlex
+import os.path
 import typing as ta

 from omlish.lite.check import check
-from omlish.
-from omlish.lite.json import json_dumps_compact
-from omlish.subprocesses import subprocesses
+from omlish.os.files import unlinking_if_exists

 from ..cache import DirectoryFileCache
-from ..cache import
-from
-from
-from .cacheapi import GithubCacheServiceV1
+from ..cache import FileCache
+from .client import GithubCacheClient
+from .client import GithubCacheServiceV1Client


 ##


-class GithubV1CacheShellClient:
-    BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
-    AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa
-
-    def __init__(
-            self,
-            *,
-            base_url: ta.Optional[str] = None,
-            auth_token: ta.Optional[str] = None,
-    ) -> None:
-        super().__init__()
-
-        if base_url is None:
-            base_url = os.environ[self.BASE_URL_ENV_KEY]
-        self._base_url = check.non_empty_str(base_url)
-
-        if auth_token is None:
-            auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
-        self._auth_token = auth_token
-
-        self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)
-
-    #
-
-    _MISSING = object()
-
-    def build_headers(
-            self,
-            *,
-            auth_token: ta.Any = _MISSING,
-            content_type: ta.Optional[str] = None,
-    ) -> ta.Dict[str, str]:
-        dct = {
-            'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
-        }
-
-        if auth_token is self._MISSING:
-            auth_token = self._auth_token
-        if auth_token:
-            dct['Authorization'] = f'Bearer {auth_token}'
-
-        if content_type is not None:
-            dct['Content-Type'] = content_type
-
-        return dct
-
-    #
-
-    HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN'  # noqa
-
-    def build_curl_cmd(
-            self,
-            method: str,
-            url: str,
-            *,
-            json_content: bool = False,
-            content_type: ta.Optional[str] = None,
-    ) -> ShellCmd:
-        if content_type is None and json_content:
-            content_type = 'application/json'
-
-        env = {}
-
-        header_auth_token: ta.Optional[str]
-        if self._auth_token:
-            env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
-            header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
-        else:
-            header_auth_token = None
-
-        hdrs = self.build_headers(
-            auth_token=header_auth_token,
-            content_type=content_type,
-        )
-
-        url = f'{self._service_url}/{url}'
-
-        cmd = ' '.join([
-            'curl',
-            '-s',
-            '-X', method,
-            url,
-            *[f'-H "{k}: {v}"' for k, v in hdrs.items()],
-        ])
-
-        return ShellCmd(
-            cmd,
-            env=env,
-        )
-
-    def build_post_json_curl_cmd(
-            self,
-            url: str,
-            obj: ta.Any,
-            **kwargs: ta.Any,
-    ) -> ShellCmd:
-        curl_cmd = self.build_curl_cmd(
-            'POST',
-            url,
-            json_content=True,
-            **kwargs,
-        )
-
-        obj_json = json_dumps_compact(obj)
-
-        return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')
-
-    #
-
-    @dc.dataclass()
-    class CurlError(RuntimeError):
-        status_code: int
-        body: ta.Optional[bytes]
-
-        def __str__(self) -> str:
-            return repr(self)
-
-    @dc.dataclass(frozen=True)
-    class CurlResult:
-        status_code: int
-        body: ta.Optional[bytes]
-
-        def as_error(self) -> 'GithubV1CacheShellClient.CurlError':
-            return GithubV1CacheShellClient.CurlError(
-                status_code=self.status_code,
-                body=self.body,
-            )
-
-    def run_curl_cmd(
-            self,
-            cmd: ShellCmd,
-            *,
-            raise_: bool = False,
-    ) -> CurlResult:
-        out_file = make_temp_file()
-        with defer(lambda: os.unlink(out_file)):
-            run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
-
-            out_json_bytes = run_cmd.run(subprocesses.check_output)
-
-            out_json = json.loads(out_json_bytes.decode())
-            status_code = check.isinstance(out_json['response_code'], int)
-
-            with open(out_file, 'rb') as f:
-                body = f.read()
-
-            result = self.CurlResult(
-                status_code=status_code,
-                body=body,
-            )
-
-        if raise_ and (500 <= status_code <= 600):
-            raise result.as_error()
-
-        return result
-
-    def run_json_curl_cmd(
-            self,
-            cmd: ShellCmd,
-            *,
-            success_status_codes: ta.Optional[ta.Container[int]] = None,
-    ) -> ta.Optional[ta.Any]:
-        result = self.run_curl_cmd(cmd, raise_=True)
-
-        if success_status_codes is not None:
-            is_success = result.status_code in success_status_codes
-        else:
-            is_success = 200 <= result.status_code < 300
-
-        if is_success:
-            if not (body := result.body):
-                return None
-            return json.loads(body.decode('utf-8-sig'))
-
-        elif result.status_code == 404:
-            return None
-
-        else:
-            raise result.as_error()
-
-    #
-
-    def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
-        return self.build_curl_cmd(
-            'GET',
-            f'cache?keys={key}',
-        )
-
-    def run_get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1.ArtifactCacheEntry]:
-        curl_cmd = self.build_get_entry_curl_cmd(key)
-
-        obj = self.run_json_curl_cmd(
-            curl_cmd,
-            success_status_codes=[200, 204],
-        )
-        if obj is None:
-            return None
-
-        return GithubCacheServiceV1.dataclass_from_json(
-            GithubCacheServiceV1.ArtifactCacheEntry,
-            obj,
-        )
-
-    #
-
-    def build_download_get_entry_cmd(
-            self,
-            entry: GithubCacheServiceV1.ArtifactCacheEntry,
-            out_file: str,
-    ) -> ShellCmd:
-        return ShellCmd(' '.join([
-            'aria2c',
-            '-x', '4',
-            '-o', out_file,
-            check.non_empty_str(entry.archive_location),
-        ]))
-
-    def download_get_entry(
-            self,
-            entry: GithubCacheServiceV1.ArtifactCacheEntry,
-            out_file: str,
-    ) -> None:
-        dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
-        dl_cmd.run(subprocesses.check_call)
-
-    #
-
-    def upload_cache_entry(
-            self,
-            key: str,
-            in_file: str,
-    ) -> None:
-        check.state(os.path.isfile(in_file))
-
-        file_size = os.stat(in_file).st_size
-
-        reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
-            key=key,
-            cache_size=file_size,
-        )
-        reserve_cmd = self.build_post_json_curl_cmd(
-            'caches',
-            GithubCacheServiceV1.dataclass_to_json(reserve_req),
-        )
-        reserve_resp_obj: ta.Any = check.not_none(self.run_json_curl_cmd(
-            reserve_cmd,
-            success_status_codes=[201],
-        ))
-        reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
-            GithubCacheServiceV1.ReserveCacheResponse,
-            reserve_resp_obj,
-        )
-
-        raise NotImplementedError
-
-
-##
-
-
-class GithubShellCache(ShellCache):
+class GithubFileCache(FileCache):
     def __init__(
             self,
             dir: str,  # noqa
             *,
-            client: ta.Optional[
+            client: ta.Optional[GithubCacheClient] = None,
+            **kwargs: ta.Any,
     ) -> None:
-        super().__init__()
+        super().__init__(**kwargs)

         self._dir = check.not_none(dir)

         if client is None:
-            client =
-
+            client = GithubCacheServiceV1Client(
+                cache_version=self._version,
+            )
+        self._client: GithubCacheClient = client

-        self._local = DirectoryFileCache(
+        self._local = DirectoryFileCache(
+            self._dir,
+            version=self._version,
+        )

-    def
+    async def get_file(self, key: str) -> ta.Optional[str]:
         local_file = self._local.get_cache_file_path(key)
         if os.path.exists(local_file):
-            return
+            return local_file

-        if (entry := self._client.
+        if (entry := await self._client.get_entry(key)) is None:
             return None

         tmp_file = self._local.format_incomplete_file(local_file)
-
-        self._client.
+        with unlinking_if_exists(tmp_file):
+            await self._client.download_file(entry, tmp_file)

             os.replace(tmp_file, local_file)

-
-            os.unlink(tmp_file)
-
-            raise
-
-        return ShellCmd(f'cat {shlex.quote(local_file)}')
-
-    class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
-        def __init__(
-                self,
-                owner: 'GithubShellCache',
-                key: str,
-                tmp_file: str,
-                local_file: str,
-        ) -> None:
-            super().__init__()
-
-            self._owner = owner
-            self._key = key
-            self._tmp_file = tmp_file
-            self._local_file = local_file
-
-        @property
-        def cmd(self) -> ShellCmd:
-            return ShellCmd(f'cat > {shlex.quote(self._tmp_file)}')
-
-        def _commit(self) -> None:
-            os.replace(self._tmp_file, self._local_file)
+        return local_file

-
-
-        def _abort(self) -> None:
-            os.unlink(self._tmp_file)
-
-    def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
-        local_file = self._local.get_cache_file_path(key, make_dirs=True)
-        return self._PutFileCmdContext(
+    async def put_file(
             self,
+            key: str,
+            file_path: str,
+            *,
+            steal: bool = False,
+    ) -> str:
+        cache_file_path = await self._local.put_file(
             key,
-
-
+            file_path,
+            steal=steal,
+        )
+
+        await self._client.upload_file(key, cache_file_path)
+
+        return cache_file_path
omdev/ci/github/cli.py
CHANGED
@@ -1,5 +1,4 @@
 # ruff: noqa: UP006 UP007
-# @omlish-lite
 """
 See:
  - https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
@@ -11,16 +10,21 @@ from omlish.argparse.cli import argparse_arg
 from omlish.argparse.cli import argparse_cmd
 from omlish.lite.json import json_dumps_pretty

-from .
+from .client import GithubCacheServiceV1Client
+from .env import GITHUB_ENV_VARS


 class GithubCli(ArgparseCli):
+    @argparse_cmd()
+    def list_referenced_env_vars(self) -> None:
+        print('\n'.join(sorted(ev.k for ev in GITHUB_ENV_VARS)))
+
     @argparse_cmd(
         argparse_arg('key'),
     )
-    def get_cache_entry(self) -> None:
-
-        entry =
+    async def get_cache_entry(self) -> None:
+        client = GithubCacheServiceV1Client()
+        entry = await client.get_entry(self.args.key)
         if entry is None:
             return
         print(json_dumps_pretty(dc.asdict(entry)))  # noqa