omdev 0.0.0.dev209__py3-none-any.whl → 0.0.0.dev211__py3-none-any.whl

omdev/ci/github/cache.py ADDED
@@ -0,0 +1,355 @@
+ # ruff: noqa: UP006 UP007
+ # @omlish-lite
+ import dataclasses as dc
+ import json
+ import os
+ import shlex
+ import typing as ta
+
+ from omlish.lite.check import check
+ from omlish.lite.contextmanagers import defer
+ from omlish.lite.json import json_dumps_compact
+ from omlish.subprocesses import subprocesses
+
+ from ..cache import DirectoryFileCache
+ from ..cache import ShellCache
+ from ..shell import ShellCmd
+ from ..utils import make_temp_file
+ from .cacheapi import GithubCacheServiceV1
+
+
+ ##
+
+
+ class GithubV1CacheShellClient:
+     BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
+     AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa
+
+     def __init__(
+             self,
+             *,
+             base_url: ta.Optional[str] = None,
+             auth_token: ta.Optional[str] = None,
+     ) -> None:
+         super().__init__()
+
+         if base_url is None:
+             base_url = os.environ[self.BASE_URL_ENV_KEY]
+         self._base_url = check.non_empty_str(base_url)
+
+         if auth_token is None:
+             auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
+         self._auth_token = auth_token
+
+         self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)
+
+     #
+
+     _MISSING = object()
+
+     def build_headers(
+             self,
+             *,
+             auth_token: ta.Any = _MISSING,
+             content_type: ta.Optional[str] = None,
+     ) -> ta.Dict[str, str]:
+         dct = {
+             'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
+         }
+
+         if auth_token is self._MISSING:
+             auth_token = self._auth_token
+         if auth_token:
+             dct['Authorization'] = f'Bearer {auth_token}'
+
+         if content_type is not None:
+             dct['Content-Type'] = content_type
+
+         return dct
+
+     #
+
+     HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN'  # noqa
+
+     def build_curl_cmd(
+             self,
+             method: str,
+             url: str,
+             *,
+             json_content: bool = False,
+             content_type: ta.Optional[str] = None,
+     ) -> ShellCmd:
+         if content_type is None and json_content:
+             content_type = 'application/json'
+
+         env = {}
+
+         header_auth_token: ta.Optional[str]
+         if self._auth_token:
+             env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
+             header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
+         else:
+             header_auth_token = None
+
+         hdrs = self.build_headers(
+             auth_token=header_auth_token,
+             content_type=content_type,
+         )
+
+         url = f'{self._service_url}/{url}'
+
+         cmd = ' '.join([
+             'curl',
+             '-s',
+             '-X', method,
+             url,
+             *[f'-H "{k}: {v}"' for k, v in hdrs.items()],
+         ])
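+         # For illustration (hypothetical values), the assembled command looks roughly like:
+         #   curl -s -X GET <service-url>/cache?keys=k \
+         #       -H "Accept: application/json;api-version=6.0-preview.1" \
+         #       -H "Authorization: Bearer $_GITHUB_CACHE_AUTH_TOKEN"
+         # The real token travels in the returned ShellCmd's env rather than being inlined in the string.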
+
+         return ShellCmd(
+             cmd,
+             env=env,
+         )
+
+     def build_post_json_curl_cmd(
+             self,
+             url: str,
+             obj: ta.Any,
+             **kwargs: ta.Any,
+     ) -> ShellCmd:
+         curl_cmd = self.build_curl_cmd(
+             'POST',
+             url,
+             json_content=True,
+             **kwargs,
+         )
+
+         obj_json = json_dumps_compact(obj)
+
+         return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')
+
+     #
+
+     @dc.dataclass()
+     class CurlError(RuntimeError):
+         status_code: int
+         body: ta.Optional[bytes]
+
+         def __str__(self) -> str:
+             return repr(self)
+
+     @dc.dataclass(frozen=True)
+     class CurlResult:
+         status_code: int
+         body: ta.Optional[bytes]
+
+         def as_error(self) -> 'GithubV1CacheShellClient.CurlError':
+             return GithubV1CacheShellClient.CurlError(
+                 status_code=self.status_code,
+                 body=self.body,
+             )
+
+     def run_curl_cmd(
+             self,
+             cmd: ShellCmd,
+             *,
+             raise_: bool = False,
+     ) -> CurlResult:
+         out_file = make_temp_file()
+         with defer(lambda: os.unlink(out_file)):
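+             # curl writes the response body to the temp file while emitting its transfer metadata
+             # (including 'response_code') as JSON on stdout via -w '%{json}' (curl 7.70+).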
+             run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
+
+             out_json_bytes = run_cmd.run(subprocesses.check_output)
+
+             out_json = json.loads(out_json_bytes.decode())
+             status_code = check.isinstance(out_json['response_code'], int)
+
+             with open(out_file, 'rb') as f:
+                 body = f.read()
+
+             result = self.CurlResult(
+                 status_code=status_code,
+                 body=body,
+             )
+
+             if raise_ and (500 <= status_code < 600):
+                 raise result.as_error()
+
+             return result
+
+     def run_json_curl_cmd(
+             self,
+             cmd: ShellCmd,
+             *,
+             success_status_codes: ta.Optional[ta.Container[int]] = None,
+     ) -> ta.Optional[ta.Any]:
+         result = self.run_curl_cmd(cmd, raise_=True)
+
+         if success_status_codes is not None:
+             is_success = result.status_code in success_status_codes
+         else:
+             is_success = 200 <= result.status_code < 300
+
+         if is_success:
+             if not (body := result.body):
+                 return None
+             return json.loads(body.decode('utf-8-sig'))
+
+         elif result.status_code == 404:
+             return None
+
+         else:
+             raise result.as_error()
+
+     #
+
+     def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
+         return self.build_curl_cmd(
+             'GET',
+             f'cache?keys={key}',
+         )
+
+     def run_get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1.ArtifactCacheEntry]:
+         curl_cmd = self.build_get_entry_curl_cmd(key)
+
+         obj = self.run_json_curl_cmd(
+             curl_cmd,
+             success_status_codes=[200, 204],
+         )
+         if obj is None:
+             return None
+
+         return GithubCacheServiceV1.dataclass_from_json(
+             GithubCacheServiceV1.ArtifactCacheEntry,
+             obj,
+         )
+
+     #
+
+     def build_download_get_entry_cmd(
+             self,
+             entry: GithubCacheServiceV1.ArtifactCacheEntry,
+             out_file: str,
+     ) -> ShellCmd:
+         return ShellCmd(' '.join([
+             'aria2c',
+             '-x', '4',
+             '-o', out_file,
+             check.non_empty_str(entry.archive_location),
+         ]))
+
+     def download_get_entry(
+             self,
+             entry: GithubCacheServiceV1.ArtifactCacheEntry,
+             out_file: str,
+     ) -> None:
+         dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
+         dl_cmd.run(subprocesses.check_call)
+
+     #
+
+     def upload_cache_entry(
+             self,
+             key: str,
+             in_file: str,
+     ) -> None:
+         check.state(os.path.isfile(in_file))
+
+         file_size = os.stat(in_file).st_size
+
+         reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
+             key=key,
+             cache_size=file_size,
+         )
+         reserve_cmd = self.build_post_json_curl_cmd(
+             'caches',
+             GithubCacheServiceV1.dataclass_to_json(reserve_req),
+         )
+         reserve_resp_obj: ta.Any = check.not_none(self.run_json_curl_cmd(
+             reserve_cmd,
+             success_status_codes=[201],
+         ))
+         reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
+             GithubCacheServiceV1.ReserveCacheResponse,
+             reserve_resp_obj,
+         )
+
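+         # The remaining steps are sketched by the raw-curl flow in cacheapi.py's docstring and are not
+         # yet implemented here: PATCH the reserved cache id with the file bytes ('Content-Type:
+         # application/octet-stream', 'Content-Range: bytes 0-<size-1>/*'), then POST a
+         # CommitCacheRequest with the total size.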
+         raise NotImplementedError
+
+
+ ##
+
+
+ class GithubShellCache(ShellCache):
+     def __init__(
+             self,
+             dir: str,  # noqa
+             *,
+             client: ta.Optional[GithubV1CacheShellClient] = None,
+     ) -> None:
+         super().__init__()
+
+         self._dir = check.not_none(dir)
+
+         if client is None:
+             client = GithubV1CacheShellClient()
+         self._client = client
+
+         self._local = DirectoryFileCache(self._dir)
+
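+     # Reads hit the local directory cache first; on a miss the entry is downloaded from the GitHub
+     # cache service into an incomplete file, atomically renamed into place, then served with cat.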
+     def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+         local_file = self._local.get_cache_file_path(key)
+         if os.path.exists(local_file):
+             return ShellCmd(f'cat {shlex.quote(local_file)}')
+
+         if (entry := self._client.run_get_entry(key)) is None:
+             return None
+
+         tmp_file = self._local.format_incomplete_file(local_file)
+         try:
+             self._client.download_get_entry(entry, tmp_file)
+
+             os.replace(tmp_file, local_file)
+
+         except BaseException:  # noqa
+             os.unlink(tmp_file)
+
+             raise
+
+         return ShellCmd(f'cat {shlex.quote(local_file)}')
+
+     class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
+         def __init__(
+                 self,
+                 owner: 'GithubShellCache',
+                 key: str,
+                 tmp_file: str,
+                 local_file: str,
+         ) -> None:
+             super().__init__()
+
+             self._owner = owner
+             self._key = key
+             self._tmp_file = tmp_file
+             self._local_file = local_file
+
+         @property
+         def cmd(self) -> ShellCmd:
+             return ShellCmd(f'cat > {shlex.quote(self._tmp_file)}')
+
+         def _commit(self) -> None:
+             os.replace(self._tmp_file, self._local_file)
+
+             self._owner._client.upload_cache_entry(self._key, self._local_file)  # noqa
+
+         def _abort(self) -> None:
+             os.unlink(self._tmp_file)
+
+     def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
+         local_file = self._local.get_cache_file_path(key, make_dirs=True)
+         return self._PutFileCmdContext(
+             self,
+             key,
+             self._local.format_incomplete_file(local_file),
+             local_file,
+         )
omdev/ci/github/cacheapi.py ADDED
@@ -0,0 +1,207 @@
+ # ruff: noqa: UP006 UP007
+ # @omlish-lite
+ """
+ export FILE_SIZE=$(stat --format="%s" $FILE)
+
+ export CACHE_ID=$(curl -s \
+   -X POST \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/caches" \
+   -H 'Content-Type: application/json' \
+   -H 'Accept: application/json;api-version=6.0-preview.1' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   -d '{"key": "'"$CACHE_KEY"'", "cacheSize": '"$FILE_SIZE"'}' \
+   | jq .cacheId)
+
+ curl -s \
+   -X PATCH \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
+   -H 'Content-Type: application/octet-stream' \
+   -H 'Accept: application/json;api-version=6.0-preview.1' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   -H "Content-Range: bytes 0-$((FILE_SIZE - 1))/*" \
+   --data-binary @"$FILE"
+
+ curl -s \
+   -X POST \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
+   -H 'Content-Type: application/json' \
+   -H 'Accept: application/json;api-version=6.0-preview.1' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   -d '{"size": '"$(stat --format="%s" $FILE)"'}'
+
+ curl -s \
+   -X GET \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/cache?keys=$CACHE_KEY" \
+   -H 'Content-Type: application/json' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   | jq .
+ """
+ import dataclasses as dc
+ import typing as ta
+
+ from omlish.lite.strings import camel_case
+ from omlish.lite.strings import snake_case
+
+
+ T = ta.TypeVar('T')
+
+
+ ##
+
+
+ class GithubCacheServiceV1:
+     API_VERSION = '6.0-preview.1'
+
+     @classmethod
+     def get_service_url(cls, base_url: str) -> str:
+         return f'{base_url.rstrip("/")}/_apis/artifactcache'
+
+     #
+
+     @classmethod
+     def dataclass_to_json(cls, obj: ta.Any) -> ta.Any:
+         return {
+             camel_case(k, lower=True): v
+             for k, v in dc.asdict(obj).items()
+             if v is not None
+         }
+
+     @classmethod
+     def dataclass_from_json(cls, dcls: ta.Type[T], obj: ta.Any) -> T:
+         return dcls(**{
+             snake_case(k): v
+             for k, v in obj.items()
+         })
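+     # For example (assumed behavior of the string helpers above):
+     #   dataclass_to_json(ReserveCacheRequest(key='k', cache_size=3)) -> {'key': 'k', 'cacheSize': 3}
+     # with None-valued fields dropped; dataclass_from_json snake_cases the keys on the way back in.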
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class ArtifactCacheEntry:
+         cache_key: ta.Optional[str]
+         scope: ta.Optional[str]
+         cache_version: ta.Optional[str]
+         creation_time: ta.Optional[str]
+         archive_location: ta.Optional[str]
+
+     @dc.dataclass(frozen=True)
+     class ArtifactCacheList:
+         total_count: int
+         artifact_caches: ta.Optional[ta.Sequence['GithubCacheServiceV1.ArtifactCacheEntry']]
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class ReserveCacheRequest:
+         key: str
+         cache_size: ta.Optional[int]
+         version: ta.Optional[str] = None
+
+     @dc.dataclass(frozen=True)
+     class ReserveCacheResponse:
+         cache_id: int
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class CommitCacheRequest:
+         size: int
+
+     #
+
+     class CompressionMethod:
+         GZIP = 'gzip'
+         ZSTD_WITHOUT_LONG = 'zstd-without-long'
+         ZSTD = 'zstd'
+
+     @dc.dataclass(frozen=True)
+     class InternalCacheOptions:
+         compression_method: ta.Optional[str]  # CompressionMethod
+         enable_cross_os_archive: ta.Optional[bool]
+         cache_size: ta.Optional[int]
+
+
+ class GithubCacheServiceV2:
+     SERVICE_NAME = 'github.actions.results.api.v1.CacheService'
+
+     @dc.dataclass(frozen=True)
+     class Method:
+         name: str
+         request: type
+         response: type
+
+     #
+
+     class CacheScopePermission:
+         READ = 1
+         WRITE = 2
+         ALL = READ | WRITE
+
+     @dc.dataclass(frozen=True)
+     class CacheScope:
+         scope: str
+         permission: int  # CacheScopePermission
+
+     @dc.dataclass(frozen=True)
+     class CacheMetadata:
+         repository_id: int
+         scope: ta.Sequence['GithubCacheServiceV2.CacheScope']
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class CreateCacheEntryRequest:
+         key: str
+         version: str
+         metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+     @dc.dataclass(frozen=True)
+     class CreateCacheEntryResponse:
+         ok: bool
+         signed_upload_url: str
+
+     CREATE_CACHE_ENTRY_METHOD = Method(
+         'CreateCacheEntry',
+         CreateCacheEntryRequest,
+         CreateCacheEntryResponse,
+     )
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class FinalizeCacheEntryUploadRequest:
+         key: str
+         size_bytes: int
+         version: str
+         metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+     @dc.dataclass(frozen=True)
+     class FinalizeCacheEntryUploadResponse:
+         ok: bool
+         entry_id: str
+
+     FINALIZE_CACHE_ENTRY_METHOD = Method(
+         'FinalizeCacheEntryUpload',
+         FinalizeCacheEntryUploadRequest,
+         FinalizeCacheEntryUploadResponse,
+     )
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class GetCacheEntryDownloadUrlRequest:
+         key: str
+         restore_keys: ta.Sequence[str]
+         version: str
+         metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+     @dc.dataclass(frozen=True)
+     class GetCacheEntryDownloadUrlResponse:
+         ok: bool
+         signed_download_url: str
+         matched_key: str
+
+     GET_CACHE_ENTRY_DOWNLOAD_URL_METHOD = Method(
+         'GetCacheEntryDownloadURL',
+         GetCacheEntryDownloadUrlRequest,
+         GetCacheEntryDownloadUrlResponse,
+     )
omdev/ci/github/cli.py ADDED
@@ -0,0 +1,39 @@
+ # ruff: noqa: UP006 UP007
+ # @omlish-lite
+ """
+ See:
+  - https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28
+ """
+ import dataclasses as dc
+
+ from omlish.argparse.cli import ArgparseCli
+ from omlish.argparse.cli import argparse_arg
+ from omlish.argparse.cli import argparse_cmd
+ from omlish.lite.json import json_dumps_pretty
+
+ from .cache import GithubV1CacheShellClient
+
+
+ class GithubCli(ArgparseCli):
+     @argparse_cmd(
+         argparse_arg('key'),
+     )
+     def get_cache_entry(self) -> None:
+         shell_client = GithubV1CacheShellClient()
+         entry = shell_client.run_get_entry(self.args.key)
+         if entry is None:
+             return
+         print(json_dumps_pretty(dc.asdict(entry)))  # noqa
+
+     @argparse_cmd(
+         argparse_arg('repository-id'),
+     )
+     def list_cache_entries(self) -> None:
+         raise NotImplementedError
+
+
+ if __name__ == '__main__':
+     def _main() -> None:
+         GithubCli().cli_run_and_exit()
+
+     _main()
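+
+ # Usage sketch (hypothetical invocation; the exact command-name mapping is up to ArgparseCli):
+ #   python -m omdev.ci.github.cli get_cache_entry <some-cache-key>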
omdev/ci/requirements.py ADDED
@@ -0,0 +1,80 @@
+ # ruff: noqa: UP006 UP007
+ # @omlish-lite
+ """
+ TODO:
+  - pip compile lol
+   - but still support git+ stuff
+  - req.txt format aware hash
+   - more than just whitespace
+  - pyproject req rewriting
+  - download_requirements bootstrap off prev? not worth the dl?
+   - big deps (torch) change less, probably worth it
+  - follow embedded -r automatically like pyp
+ """
+ import itertools
+ import os.path
+ import shutil
+ import tempfile
+ import typing as ta
+
+ from omlish.lite.check import check
+ from omlish.lite.contextmanagers import defer
+ from omlish.subprocesses import subprocesses
+
+ from .utils import sha256_str
+
+
+ ##
+
+
+ def build_requirements_hash(
+         requirements_txts: ta.Sequence[str],
+ ) -> str:
+     txt_file_contents: ta.Dict[str, str] = {}
+
+     for txt_file in requirements_txts:
+         txt_file_name = os.path.basename(txt_file)
+         check.not_in(txt_file_name, txt_file_contents)
+         with open(txt_file) as f:
+             txt_contents = f.read()
+         txt_file_contents[txt_file_name] = txt_contents
+
+     #
+
+     lines = []
+     for txt_file, txt_contents in sorted(txt_file_contents.items()):
+         txt_hash = sha256_str(txt_contents)
+         lines.append(f'{txt_file}={txt_hash}')
+
+     return sha256_str('\n'.join(lines))
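+
+
+ # For example (hypothetical paths), the hash is order-insensitive since entries are keyed and sorted by
+ # basename, though the basenames must be distinct:
+ #   build_requirements_hash(['a/requirements.txt', 'b/requirements-dev.txt'])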
+
+
+ ##
+
+
+ def download_requirements(
+         image: str,
+         requirements_dir: str,
+         requirements_txts: ta.Sequence[str],
+ ) -> None:
+     requirements_txt_dir = tempfile.mkdtemp()
+     with defer(lambda: shutil.rmtree(requirements_txt_dir)):
+         for rt in requirements_txts:
+             shutil.copyfile(rt, os.path.join(requirements_txt_dir, os.path.basename(rt)))
+
+         subprocesses.check_call(
+             'docker',
+             'run',
+             '--rm',
+             '-i',
+             '-v', f'{os.path.abspath(requirements_dir)}:/requirements',
+             '-v', f'{requirements_txt_dir}:/requirements_txt',
+             image,
+             'pip',
+             'download',
+             '-d', '/requirements',
+             *itertools.chain.from_iterable([
+                 ['-r', f'/requirements_txt/{os.path.basename(rt)}']
+                 for rt in requirements_txts
+             ]),
+         )
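+
+
+ # Usage sketch (hypothetical values): populate ./reqs with wheels/sdists resolved by the target image's
+ # own pip, so the downloads match that image's platform and Python version:
+ #   download_requirements('python:3.12', 'reqs', ['requirements.txt'])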