omdev 0.0.0.dev210__py3-none-any.whl → 0.0.0.dev212__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omdev/.manifests.json +15 -1
- omdev/__about__.py +0 -4
- omdev/amalg/gen.py +2 -3
- omdev/amalg/imports.py +4 -5
- omdev/amalg/manifests.py +7 -10
- omdev/amalg/resources.py +24 -27
- omdev/amalg/srcfiles.py +7 -10
- omdev/amalg/strip.py +4 -5
- omdev/amalg/types.py +1 -1
- omdev/amalg/typing.py +9 -8
- omdev/ci/cache.py +137 -10
- omdev/ci/ci.py +110 -75
- omdev/ci/cli.py +51 -11
- omdev/ci/compose.py +34 -15
- omdev/ci/{dockertars.py → docker.py} +43 -30
- omdev/ci/github/__init__.py +0 -0
- omdev/ci/github/bootstrap.py +11 -0
- omdev/ci/github/cache.py +355 -0
- omdev/ci/github/cacheapi.py +207 -0
- omdev/ci/github/cli.py +39 -0
- omdev/ci/requirements.py +3 -2
- omdev/ci/shell.py +42 -0
- omdev/ci/utils.py +49 -0
- omdev/scripts/ci.py +1734 -473
- omdev/scripts/interp.py +22 -22
- omdev/scripts/pyproject.py +22 -22
- omdev/tokens/__init__.py +0 -0
- omdev/tokens/all.py +35 -0
- omdev/tokens/tokenizert.py +217 -0
- omdev/{tokens.py → tokens/utils.py} +6 -12
- omdev/tools/mkenv.py +131 -0
- omdev/tools/mkrelimp.py +4 -6
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/METADATA +2 -5
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/RECORD +38 -28
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev210.dist-info → omdev-0.0.0.dev212.dist-info}/top_level.txt +0 -0
omdev/ci/github/cache.py
ADDED
@@ -0,0 +1,355 @@
|
|
1
|
+
# ruff: noqa: UP006 UP007
|
2
|
+
# @omlish-lite
|
3
|
+
import dataclasses as dc
|
4
|
+
import json
|
5
|
+
import os
|
6
|
+
import shlex
|
7
|
+
import typing as ta
|
8
|
+
|
9
|
+
from omlish.lite.check import check
|
10
|
+
from omlish.lite.contextmanagers import defer
|
11
|
+
from omlish.lite.json import json_dumps_compact
|
12
|
+
from omlish.subprocesses import subprocesses
|
13
|
+
|
14
|
+
from ..cache import DirectoryFileCache
|
15
|
+
from ..cache import ShellCache
|
16
|
+
from ..shell import ShellCmd
|
17
|
+
from ..utils import make_temp_file
|
18
|
+
from .cacheapi import GithubCacheServiceV1
|
19
|
+
|
20
|
+
|
21
|
+
##
|
22
|
+
|
23
|
+
|
24
|
+
class GithubV1CacheShellClient:
    """
    Client for the GitHub Actions cache service (v1 REST API), driven entirely through shell commands (`curl` for the
    API calls, `aria2c` for archive downloads) rather than an in-process HTTP client.

    Endpoint and credentials default to the standard GitHub Actions runner environment variables when not given
    explicitly.
    """

    # Environment variables provided by the GitHub Actions runner.
    BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
    AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa

    def __init__(
            self,
            *,
            base_url: ta.Optional[str] = None,
            auth_token: ta.Optional[str] = None,
    ) -> None:
        """
        Args:
            base_url: cache service base url; defaults to $ACTIONS_CACHE_URL (KeyError if neither is present).
            auth_token: bearer token; defaults to $ACTIONS_RUNTIME_TOKEN (may legitimately be absent).
        """

        super().__init__()

        if base_url is None:
            base_url = os.environ[self.BASE_URL_ENV_KEY]
        self._base_url = check.non_empty_str(base_url)

        if auth_token is None:
            auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
        self._auth_token = auth_token

        self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)

    #

    # Sentinel distinguishing 'argument not passed' from an explicit None auth token.
    _MISSING = object()

    def build_headers(
            self,
            *,
            auth_token: ta.Any = _MISSING,
            content_type: ta.Optional[str] = None,
    ) -> ta.Dict[str, str]:
        """Build HTTP headers for a cache-service request, always including the api-version Accept header."""

        dct = {
            'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
        }

        if auth_token is self._MISSING:
            auth_token = self._auth_token
        if auth_token:
            dct['Authorization'] = f'Bearer {auth_token}'

        if content_type is not None:
            dct['Content-Type'] = content_type

        return dct

    #

    # The real token is exported under this env var and referenced as '$<var>' in the curl command line, so the
    # secret is expanded by the shell at run time instead of appearing literally in the command string.
    HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN'  # noqa

    def build_curl_cmd(
            self,
            method: str,
            url: str,
            *,
            json_content: bool = False,
            content_type: ta.Optional[str] = None,
    ) -> ShellCmd:
        """
        Build a `curl` ShellCmd for `method` against `url` (relative to the service url).

        NOTE(review): header values are interpolated into double-quoted shell words without escaping — assumes header
        contents contain no quotes/`$`; confirm for any new header sources.
        """

        if content_type is None and json_content:
            content_type = 'application/json'

        env = {}

        header_auth_token: ta.Optional[str]
        if self._auth_token:
            env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
            header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
        else:
            header_auth_token = None

        hdrs = self.build_headers(
            auth_token=header_auth_token,
            content_type=content_type,
        )

        url = f'{self._service_url}/{url}'

        cmd = ' '.join([
            'curl',
            '-s',
            '-X', method,
            url,
            *[f'-H "{k}: {v}"' for k, v in hdrs.items()],
        ])

        return ShellCmd(
            cmd,
            env=env,
        )

    def build_post_json_curl_cmd(
            self,
            url: str,
            obj: ta.Any,
            **kwargs: ta.Any,
    ) -> ShellCmd:
        """Build a POST curl command whose body is `obj` serialized as compact JSON (shell-quoted via -d)."""

        curl_cmd = self.build_curl_cmd(
            'POST',
            url,
            json_content=True,
            **kwargs,
        )

        obj_json = json_dumps_compact(obj)

        return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')

    #

    @dc.dataclass()
    class CurlError(RuntimeError):
        # HTTP status code reported by curl, plus the raw response body (if any).
        status_code: int
        body: ta.Optional[bytes]

        def __str__(self) -> str:
            return repr(self)

    @dc.dataclass(frozen=True)
    class CurlResult:
        # Parsed outcome of one curl invocation: status code from curl's '%{json}' write-out plus the body file.
        status_code: int
        body: ta.Optional[bytes]

        def as_error(self) -> 'GithubV1CacheShellClient.CurlError':
            return GithubV1CacheShellClient.CurlError(
                status_code=self.status_code,
                body=self.body,
            )

    def run_curl_cmd(
            self,
            cmd: ShellCmd,
            *,
            raise_: bool = False,
    ) -> CurlResult:
        """
        Run a curl ShellCmd, capturing the response body to a temp file and the status code via curl's
        `-w '%{json}'` write-out on stdout.

        With raise_=True, raises CurlError only for server errors (5xx); 4xx results are returned for the caller to
        inspect.
        """

        out_file = make_temp_file()
        with defer(lambda: os.unlink(out_file)):
            run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")

            out_json_bytes = run_cmd.run(subprocesses.check_output)

            out_json = json.loads(out_json_bytes.decode())
            status_code = check.isinstance(out_json['response_code'], int)

            with open(out_file, 'rb') as f:
                body = f.read()

            result = self.CurlResult(
                status_code=status_code,
                body=body,
            )

            if raise_ and (500 <= status_code <= 600):
                raise result.as_error()

            return result

    def run_json_curl_cmd(
            self,
            cmd: ShellCmd,
            *,
            success_status_codes: ta.Optional[ta.Container[int]] = None,
    ) -> ta.Optional[ta.Any]:
        """
        Run a curl command and decode its JSON response.

        Returns None on an empty successful body or a 404; raises CurlError for any other non-success status.
        'utf-8-sig' tolerates a BOM in the response body.
        """

        result = self.run_curl_cmd(cmd, raise_=True)

        if success_status_codes is not None:
            is_success = result.status_code in success_status_codes
        else:
            is_success = 200 <= result.status_code < 300

        if is_success:
            if not (body := result.body):
                return None
            return json.loads(body.decode('utf-8-sig'))

        elif result.status_code == 404:
            return None

        else:
            raise result.as_error()

    #

    def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
        # NOTE(review): key is interpolated without URL-encoding — assumes cache keys are URL-safe; confirm.
        return self.build_curl_cmd(
            'GET',
            f'cache?keys={key}',
        )

    def run_get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1.ArtifactCacheEntry]:
        """Look up a cache entry by key, returning None on a miss (204/empty or 404)."""

        curl_cmd = self.build_get_entry_curl_cmd(key)

        obj = self.run_json_curl_cmd(
            curl_cmd,
            success_status_codes=[200, 204],
        )
        if obj is None:
            return None

        return GithubCacheServiceV1.dataclass_from_json(
            GithubCacheServiceV1.ArtifactCacheEntry,
            obj,
        )

    #

    def build_download_get_entry_cmd(
            self,
            entry: GithubCacheServiceV1.ArtifactCacheEntry,
            out_file: str,
    ) -> ShellCmd:
        # Downloads with aria2c using 4 connections ('-x 4'); requires entry.archive_location to be set.
        return ShellCmd(' '.join([
            'aria2c',
            '-x', '4',
            '-o', out_file,
            check.non_empty_str(entry.archive_location),
        ]))

    def download_get_entry(
            self,
            entry: GithubCacheServiceV1.ArtifactCacheEntry,
            out_file: str,
    ) -> None:
        """Download the entry's archive to out_file, raising on a nonzero aria2c exit."""

        dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
        dl_cmd.run(subprocesses.check_call)

    #

    def upload_cache_entry(
            self,
            key: str,
            in_file: str,
    ) -> None:
        """
        Upload in_file as a new cache entry under key.

        Currently only performs the 'reserve' step of the v1 upload protocol; the chunked upload and commit steps are
        not yet implemented, so this always ends in NotImplementedError.
        """

        check.state(os.path.isfile(in_file))

        file_size = os.stat(in_file).st_size

        reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
            key=key,
            cache_size=file_size,
        )
        reserve_cmd = self.build_post_json_curl_cmd(
            'caches',
            GithubCacheServiceV1.dataclass_to_json(reserve_req),
        )
        reserve_resp_obj: ta.Any = check.not_none(self.run_json_curl_cmd(
            reserve_cmd,
            success_status_codes=[201],
        ))
        reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
            GithubCacheServiceV1.ReserveCacheResponse,
            reserve_resp_obj,
        )

        raise NotImplementedError
|
278
|
+
|
279
|
+
|
280
|
+
##
|
281
|
+
|
282
|
+
|
283
|
+
class GithubShellCache(ShellCache):
    """
    ShellCache backed by the GitHub Actions cache service, with a local DirectoryFileCache in front of it.

    Reads check the local directory first and only hit the remote service on a miss; writes land in the local
    directory and are then pushed to the service (remote upload is currently unimplemented in the client).
    """

    def __init__(
            self,
            dir: str,  # noqa
            *,
            client: ta.Optional[GithubV1CacheShellClient] = None,
    ) -> None:
        super().__init__()

        self._dir = check.not_none(dir)

        if client is None:
            client = GithubV1CacheShellClient()
        self._client = client

        self._local = DirectoryFileCache(self._dir)

    def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
        """
        Return a ShellCmd that streams the cached file for key to stdout, or None on a cache miss.

        On a local miss but remote hit, the archive is downloaded to an 'incomplete' temp name and atomically
        os.replace()d into place, so a concurrent reader never sees a partial file.
        """

        local_file = self._local.get_cache_file_path(key)
        if os.path.exists(local_file):
            return ShellCmd(f'cat {shlex.quote(local_file)}')

        if (entry := self._client.run_get_entry(key)) is None:
            return None

        tmp_file = self._local.format_incomplete_file(local_file)
        try:
            self._client.download_get_entry(entry, tmp_file)

            os.replace(tmp_file, local_file)

        except BaseException:  # noqa
            os.unlink(tmp_file)

            raise

        return ShellCmd(f'cat {shlex.quote(local_file)}')

    class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
        """Write-side context: shell writes into a temp file; commit promotes it locally then uploads it."""

        def __init__(
                self,
                owner: 'GithubShellCache',
                key: str,
                tmp_file: str,
                local_file: str,
        ) -> None:
            super().__init__()

            self._owner = owner
            self._key = key
            self._tmp_file = tmp_file
            self._local_file = local_file

        @property
        def cmd(self) -> ShellCmd:
            # The producing shell pipeline writes the payload into the incomplete temp file.
            return ShellCmd(f'cat > {shlex.quote(self._tmp_file)}')

        def _commit(self) -> None:
            # Atomic local promotion first, then remote upload (upload_cache_entry currently raises
            # NotImplementedError, so the local copy is still the effective cache).
            os.replace(self._tmp_file, self._local_file)

            self._owner._client.upload_cache_entry(self._key, self._local_file)  # noqa

        def _abort(self) -> None:
            os.unlink(self._tmp_file)

    def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
        """Open a put-context for key whose .cmd a shell pipeline can write the file contents into."""

        local_file = self._local.get_cache_file_path(key, make_dirs=True)
        return self._PutFileCmdContext(
            self,
            key,
            self._local.format_incomplete_file(local_file),
            local_file,
        )
|
@@ -0,0 +1,207 @@
|
|
1
|
+
# ruff: noqa: UP006 UP007
|
2
|
+
# @omlish-lite
|
3
|
+
"""
|
4
|
+
export FILE_SIZE=$(stat --format="%s" $FILE)
|
5
|
+
|
6
|
+
export CACHE_ID=$(curl -s \
|
7
|
+
-X POST \
|
8
|
+
"${ACTIONS_CACHE_URL}_apis/artifactcache/caches" \
|
9
|
+
-H 'Content-Type: application/json' \
|
10
|
+
-H 'Accept: application/json;api-version=6.0-preview.1' \
|
11
|
+
-H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
|
12
|
+
-d '{"key": "'"$CACHE_KEY"'", "cacheSize": '"$FILE_SIZE"'}' \
|
13
|
+
| jq .cacheId)
|
14
|
+
|
15
|
+
curl -s \
|
16
|
+
-X PATCH \
|
17
|
+
"${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
|
18
|
+
-H 'Content-Type: application/octet-stream' \
|
19
|
+
-H 'Accept: application/json;api-version=6.0-preview.1' \
|
20
|
+
-H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
|
21
|
+
-H "Content-Range: bytes 0-$((FILE_SIZE - 1))/*" \
|
22
|
+
--data-binary @"$FILE"
|
23
|
+
|
24
|
+
curl -s \
|
25
|
+
-X POST \
|
26
|
+
"${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
|
27
|
+
-H 'Content-Type: application/json' \
|
28
|
+
-H 'Accept: application/json;api-version=6.0-preview.1' \
|
29
|
+
-H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
|
30
|
+
-d '{"size": '"$(stat --format="%s" $FILE)"'}'
|
31
|
+
|
32
|
+
curl -s \
|
33
|
+
-X GET \
|
34
|
+
"${ACTIONS_CACHE_URL}_apis/artifactcache/cache?keys=$CACHE_KEY" \
|
35
|
+
-H 'Content-Type: application/json' \
|
36
|
+
-H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
|
37
|
+
| jq .
|
38
|
+
"""
|
39
|
+
import dataclasses as dc
|
40
|
+
import typing as ta
|
41
|
+
|
42
|
+
from omlish.lite.strings import camel_case
|
43
|
+
from omlish.lite.strings import snake_case
|
44
|
+
|
45
|
+
|
46
|
+
T = ta.TypeVar('T')
|
47
|
+
|
48
|
+
|
49
|
+
##
|
50
|
+
|
51
|
+
|
52
|
+
class GithubCacheServiceV1:
    """
    Namespace of constants, JSON (de)serialization helpers, and message dataclasses for the GitHub Actions cache
    service v1 ('artifactcache') REST API.
    """

    API_VERSION = '6.0-preview.1'

    @classmethod
    def get_service_url(cls, base_url: str) -> str:
        """Join base_url (e.g. $ACTIONS_CACHE_URL) with the artifactcache path, normalizing a trailing slash."""
        return f'{base_url.rstrip("/")}/_apis/artifactcache'

    #

    @classmethod
    def dataclass_to_json(cls, obj: ta.Any) -> ta.Any:
        """Convert a message dataclass to a JSON-ready dict: lowerCamelCase keys, None fields dropped."""
        return {
            camel_case(k, lower=True): v
            for k, v in dc.asdict(obj).items()
            if v is not None
        }

    @classmethod
    def dataclass_from_json(cls, dcls: ta.Type[T], obj: ta.Any) -> T:
        """
        Build dataclass dcls from a JSON dict, snake_casing the keys.

        NOTE(review): every response key is passed through — an unknown field from the service would raise TypeError;
        confirm responses are stable before relying on this in long-lived tooling.
        """
        return dcls(**{
            snake_case(k): v
            for k, v in obj.items()
        })

    #

    @dc.dataclass(frozen=True)
    class ArtifactCacheEntry:
        # Response shape of GET /cache?keys=... ; archive_location is the signed download URL.
        cache_key: ta.Optional[str]
        scope: ta.Optional[str]
        cache_version: ta.Optional[str]
        creation_time: ta.Optional[str]
        archive_location: ta.Optional[str]

    @dc.dataclass(frozen=True)
    class ArtifactCacheList:
        total_count: int
        artifact_caches: ta.Optional[ta.Sequence['GithubCacheServiceV1.ArtifactCacheEntry']]

    #

    @dc.dataclass(frozen=True)
    class ReserveCacheRequest:
        # POST /caches body reserving an upload slot for a key.
        key: str
        cache_size: ta.Optional[int]
        version: ta.Optional[str] = None

    @dc.dataclass(frozen=True)
    class ReserveCacheResponse:
        # cache_id is used in subsequent PATCH (upload) / POST (commit) calls.
        cache_id: int

    #

    @dc.dataclass(frozen=True)
    class CommitCacheRequest:
        # Final POST /caches/{id} body; size must match the uploaded byte count.
        size: int

    #

    class CompressionMethod:
        GZIP = 'gzip'
        ZSTD_WITHOUT_LONG = 'zstd-without-long'
        ZSTD = 'zstd'

    @dc.dataclass(frozen=True)
    class InternalCacheOptions:
        compression_method: ta.Optional[str]  # CompressionMethod
        enable_cross_os_archive: ta.Optional[bool]
        cache_size: ta.Optional[int]
|
121
|
+
|
122
|
+
|
123
|
+
class GithubCacheServiceV2:
    """
    Message dataclasses and method descriptors for the GitHub Actions cache service v2.

    NOTE(review): SERVICE_NAME follows the Twirp 'package.Service' convention and the Method triples mirror RPC
    request/response pairs — presumably used to form '<base>/<SERVICE_NAME>/<method>' URLs; confirm against the
    eventual caller.
    """

    SERVICE_NAME = 'github.actions.results.api.v1.CacheService'

    @dc.dataclass(frozen=True)
    class Method:
        # One RPC: its wire name plus request/response dataclass types.
        name: str
        request: type
        response: type

    #

    class CacheScopePermission:
        # Bit flags.
        READ = 1
        WRITE = 2
        ALL = READ | WRITE

    @dc.dataclass(frozen=True)
    class CacheScope:
        scope: str
        permission: int  # CacheScopePermission

    @dc.dataclass(frozen=True)
    class CacheMetadata:
        repository_id: int
        scope: ta.Sequence['GithubCacheServiceV2.CacheScope']

    #

    @dc.dataclass(frozen=True)
    class CreateCacheEntryRequest:
        key: str
        version: str
        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None

    @dc.dataclass(frozen=True)
    class CreateCacheEntryResponse:
        ok: bool
        signed_upload_url: str

    CREATE_CACHE_ENTRY_METHOD = Method(
        'CreateCacheEntry',
        CreateCacheEntryRequest,
        CreateCacheEntryResponse,
    )

    #

    @dc.dataclass(frozen=True)
    class FinalizeCacheEntryUploadRequest:
        key: str
        size_bytes: int
        version: str
        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None

    @dc.dataclass(frozen=True)
    class FinalizeCacheEntryUploadResponse:
        ok: bool
        entry_id: str

    FINALIZE_CACHE_ENTRY_METHOD = Method(
        'FinalizeCacheEntryUpload',
        FinalizeCacheEntryUploadRequest,
        FinalizeCacheEntryUploadResponse,
    )

    #

    @dc.dataclass(frozen=True)
    class GetCacheEntryDownloadUrlRequest:
        key: str
        restore_keys: ta.Sequence[str]
        version: str
        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None

    @dc.dataclass(frozen=True)
    class GetCacheEntryDownloadUrlResponse:
        ok: bool
        signed_download_url: str
        matched_key: str

    GET_CACHE_ENTRY_DOWNLOAD_URL_METHOD = Method(
        'GetCacheEntryDownloadURL',
        GetCacheEntryDownloadUrlRequest,
        GetCacheEntryDownloadUrlResponse,
    )
|
omdev/ci/github/cli.py
ADDED
@@ -0,0 +1,39 @@
|
|
1
|
+
# ruff: noqa: UP006 UP007
|
2
|
+
# @omlish-lite
|
3
|
+
"""
|
4
|
+
See:
|
5
|
+
- https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
|
6
|
+
"""
|
7
|
+
import dataclasses as dc
|
8
|
+
|
9
|
+
from omlish.argparse.cli import ArgparseCli
|
10
|
+
from omlish.argparse.cli import argparse_arg
|
11
|
+
from omlish.argparse.cli import argparse_cmd
|
12
|
+
from omlish.lite.json import json_dumps_pretty
|
13
|
+
|
14
|
+
from .cache import GithubV1CacheShellClient
|
15
|
+
|
16
|
+
|
17
|
+
class GithubCli(ArgparseCli):
    """Small CLI exposing GitHub Actions cache operations for manual inspection and debugging."""

    @argparse_cmd(
        argparse_arg('key'),
    )
    def get_cache_entry(self) -> None:
        # Look up a single cache entry by exact key and pretty-print it as JSON; a miss prints nothing.
        shell_client = GithubV1CacheShellClient()
        entry = shell_client.run_get_entry(self.args.key)
        if entry is None:
            return
        print(json_dumps_pretty(dc.asdict(entry)))  # noqa

    @argparse_cmd(
        argparse_arg('repository-id'),
    )
    def list_cache_entries(self) -> None:
        # Placeholder: listing entries (REST 'actions/cache' API) is not implemented yet.
        raise NotImplementedError


if __name__ == '__main__':
    def _main() -> None:
        GithubCli().cli_run_and_exit()

    _main()
|
omdev/ci/requirements.py
CHANGED
@@ -65,6 +65,7 @@ def download_requirements(
|
|
65
65
|
subprocesses.check_call(
|
66
66
|
'docker',
|
67
67
|
'run',
|
68
|
+
'--rm',
|
68
69
|
'-i',
|
69
70
|
'-v', f'{os.path.abspath(requirements_dir)}:/requirements',
|
70
71
|
'-v', f'{requirements_txt_dir}:/requirements_txt',
|
@@ -72,8 +73,8 @@ def download_requirements(
|
|
72
73
|
'pip',
|
73
74
|
'download',
|
74
75
|
'-d', '/requirements',
|
75
|
-
*itertools.chain.from_iterable(
|
76
|
+
*itertools.chain.from_iterable(
|
76
77
|
['-r', f'/requirements_txt/{os.path.basename(rt)}']
|
77
78
|
for rt in requirements_txts
|
78
|
-
|
79
|
+
),
|
79
80
|
)
|
omdev/ci/shell.py
ADDED
@@ -0,0 +1,42 @@
|
|
1
|
+
# ruff: noqa: UP006 UP007
|
2
|
+
# @omlish-lite
|
3
|
+
import dataclasses as dc
|
4
|
+
import os
|
5
|
+
import typing as ta
|
6
|
+
|
7
|
+
|
8
|
+
T = ta.TypeVar('T')


##


@dc.dataclass(frozen=True)
class ShellCmd:
    """
    An immutable `sh -c` command line, plus optional extra environment variables to inject when it runs.
    """

    # The shell source text, executed via `sh -c`.
    s: str

    # Extra environment entries layered over the base environment; collisions are an error, not an override.
    env: ta.Optional[ta.Mapping[str, str]] = None

    def build_run_kwargs(
            self,
            *,
            env: ta.Optional[ta.Mapping[str, str]] = None,
            **kwargs: ta.Any,
    ) -> ta.Dict[str, ta.Any]:
        """
        Return kwargs (always containing 'env') suitable for a subprocess-style runner.

        The base environment defaults to os.environ. Raises KeyError naming any key present in both the base
        environment and self.env rather than silently overriding it.
        """

        base: ta.Mapping[str, str] = os.environ if env is None else env

        if self.env:
            if (clash := set(base) & set(self.env)):
                raise KeyError(*clash)
            base = {**base, **self.env}

        return dict(env=base, **kwargs)

    def run(self, fn: ta.Callable[..., T], **kwargs) -> T:
        """Invoke `fn('sh', '-c', self.s, env=..., **kwargs)` and return whatever it returns."""

        return fn('sh', '-c', self.s, **self.build_run_kwargs(**kwargs))
|