omdev 0.0.0.dev210__py3-none-any.whl → 0.0.0.dev211__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,8 +6,10 @@ TODO:
   - doesn't change too much though
  """
  import contextlib
+ import dataclasses as dc
  import json
  import os.path
+ import shlex
  import tarfile
  import typing as ta

@@ -15,6 +17,7 @@ from omlish.lite.check import check
  from omlish.lite.contextmanagers import defer
  from omlish.subprocesses import subprocesses

+ from .shell import ShellCmd
  from .utils import make_temp_file
  from .utils import sha256_str

@@ -73,12 +76,8 @@ def is_docker_image_present(image: str) -> bool:
      return True


- ##
-
-
- def pull_docker_tar(
+ def pull_docker_image(
          image: str,
-         tar_file: str,
  ) -> None:
      subprocesses.check_call(
          'docker',
@@ -86,19 +85,11 @@ def pull_docker_tar(
          image,
      )

-     subprocesses.check_call(
-         'docker',
-         'save',
-         image,
-         '-o', tar_file,
-     )
-

- def build_docker_tar(
-         docker_file: str,
-         tar_file: str,
-         *,
-         cwd: ta.Optional[str] = None,
+ def build_docker_image(
+         docker_file: str,
+         *,
+         cwd: ta.Optional[str] = None,
  ) -> str:
      id_file = make_temp_file()
      with defer(lambda: os.unlink(id_file)):
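
The rename splits concerns: `pull_docker_image` and `build_docker_image` now only pull or build, returning an image reference instead of writing a tar, while serialization moves to the new save/load helpers in the next hunk. `build_docker_image` reads the built image's id back from a temp file, evidently docker build's `--iidfile` mechanism given the surrounding context lines. A hedged usage sketch of the reworked API (paths invented, and it assumes these functions are imported from the module being diffed):

    # Hypothetical usage; 'Dockerfile' and 'image.tar' are invented, not from the diff.
    image_id = build_docker_image('Dockerfile', cwd='.')
    save_docker_tar(image_id, 'image.tar')   # runs: docker save <id> | cat > image.tar
    loaded = load_docker_tar('image.tar')    # runs: cat image.tar | docker load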
@@ -115,24 +106,46 @@ def build_docker_tar(
          with open(id_file) as f:
              image_id = check.single(f.read().strip().splitlines()).strip()

-         subprocesses.check_call(
-             'docker',
-             'save',
-             image_id,
-             '-o', tar_file,
-         )
-
-         return image_id
+         return image_id


  ##


- def load_docker_tar(
+ def save_docker_tar_cmd(
+         image: str,
+         output_cmd: ShellCmd,
+ ) -> None:
+     cmd = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
+     cmd.run(subprocesses.check_call)
+
+
+ def save_docker_tar(
+         image: str,
          tar_file: str,
  ) -> None:
-     subprocesses.check_call(
-         'docker',
-         'load',
-         '-i', tar_file,
+     return save_docker_tar_cmd(
+         image,
+         ShellCmd(f'cat > {shlex.quote(tar_file)}'),
      )
+
+
+ #
+
+
+ def load_docker_tar_cmd(
+         input_cmd: ShellCmd,
+ ) -> str:
+     cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')
+
+     out = cmd.run(subprocesses.check_output).decode()
+
+     line = check.single(out.strip().splitlines())
+     loaded = line.partition(':')[2].strip()
+     return loaded
+
+
+ def load_docker_tar(
+         tar_file: str,
+ ) -> str:
+     return load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
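
The pipe composition above leans on `ShellCmd` from the sibling `.shell` module, whose definition is not part of this diff. Judging from its use here, it behaves roughly like a dataclass holding a shell command string `s` (plus an optional `env`), with `dc.replace` used to splice that string into a larger pipeline and `run` handing it to a subprocess function. A minimal stand-in under those assumptions:

    # Hypothetical stand-in for omdev's ShellCmd, inferred from usage in this
    # diff; the real class may differ.
    import dataclasses as dc
    import typing as ta


    @dc.dataclass(frozen=True)
    class ShellCmd:
        s: str  # the shell command line, e.g. 'cat image.tar | docker load'
        env: ta.Optional[ta.Mapping[str, str]] = None

        def run(self, fn: ta.Callable[..., ta.Any]) -> ta.Any:
            # Delegate to a subprocess function such as subprocess.check_output,
            # executing the string through the shell.
            return fn(self.s, shell=True)

`load_docker_tar_cmd` then parses `docker load`'s final output line (of the form `Loaded image: <name>`) by taking everything after the first `:`.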
File without changes
@@ -0,0 +1,11 @@
+ # ruff: noqa: UP006 UP007
+ # @omlish-lite
+ """
+ sudo rm -rf \
+     /usr/local/.ghcup \
+     /opt/hostedtoolcache \
+
+ /usr/local/.ghcup        6.4G, 3391250 files
+ /opt/hostedtoolcache     8.0G, 14843980 files
+ /usr/local/lib/android   6.4G, 17251667 files
+ """
@@ -0,0 +1,355 @@
+ # ruff: noqa: UP006 UP007
+ # @omlish-lite
+ import dataclasses as dc
+ import json
+ import os
+ import shlex
+ import typing as ta
+
+ from omlish.lite.check import check
+ from omlish.lite.contextmanagers import defer
+ from omlish.lite.json import json_dumps_compact
+ from omlish.subprocesses import subprocesses
+
+ from ..cache import DirectoryFileCache
+ from ..cache import ShellCache
+ from ..shell import ShellCmd
+ from ..utils import make_temp_file
+ from .cacheapi import GithubCacheServiceV1
+
+
+ ##
+
+
+ class GithubV1CacheShellClient:
+     BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
+     AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa
+
+     def __init__(
+             self,
+             *,
+             base_url: ta.Optional[str] = None,
+             auth_token: ta.Optional[str] = None,
+     ) -> None:
+         super().__init__()
+
+         if base_url is None:
+             base_url = os.environ[self.BASE_URL_ENV_KEY]
+         self._base_url = check.non_empty_str(base_url)
+
+         if auth_token is None:
+             auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
+         self._auth_token = auth_token
+
+         self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)
+
+     #
+
+     _MISSING = object()
+
+     def build_headers(
+             self,
+             *,
+             auth_token: ta.Any = _MISSING,
+             content_type: ta.Optional[str] = None,
+     ) -> ta.Dict[str, str]:
+         dct = {
+             'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
+         }
+
+         if auth_token is self._MISSING:
+             auth_token = self._auth_token
+         if auth_token:
+             dct['Authorization'] = f'Bearer {auth_token}'
+
+         if content_type is not None:
+             dct['Content-Type'] = content_type
+
+         return dct
+
+     #
+
+     HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN'  # noqa
+
+     def build_curl_cmd(
+             self,
+             method: str,
+             url: str,
+             *,
+             json_content: bool = False,
+             content_type: ta.Optional[str] = None,
+     ) -> ShellCmd:
+         if content_type is None and json_content:
+             content_type = 'application/json'
+
+         env = {}
+
+         header_auth_token: ta.Optional[str]
+         if self._auth_token:
+             env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
+             header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
+         else:
+             header_auth_token = None
+
+         hdrs = self.build_headers(
+             auth_token=header_auth_token,
+             content_type=content_type,
+         )
+
+         url = f'{self._service_url}/{url}'
+
+         cmd = ' '.join([
+             'curl',
+             '-s',
+             '-X', method,
+             url,
+             *[f'-H "{k}: {v}"' for k, v in hdrs.items()],
+         ])
+
+         return ShellCmd(
+             cmd,
+             env=env,
+         )
+
+     def build_post_json_curl_cmd(
+             self,
+             url: str,
+             obj: ta.Any,
+             **kwargs: ta.Any,
+     ) -> ShellCmd:
+         curl_cmd = self.build_curl_cmd(
+             'POST',
+             url,
+             json_content=True,
+             **kwargs,
+         )
+
+         obj_json = json_dumps_compact(obj)
+
+         return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')
+
+     #
+
+     @dc.dataclass()
+     class CurlError(RuntimeError):
+         status_code: int
+         body: ta.Optional[bytes]
+
+         def __str__(self) -> str:
+             return repr(self)
+
+     @dc.dataclass(frozen=True)
+     class CurlResult:
+         status_code: int
+         body: ta.Optional[bytes]
+
+         def as_error(self) -> 'GithubV1CacheShellClient.CurlError':
+             return GithubV1CacheShellClient.CurlError(
+                 status_code=self.status_code,
+                 body=self.body,
+             )
+
+     def run_curl_cmd(
+             self,
+             cmd: ShellCmd,
+             *,
+             raise_: bool = False,
+     ) -> CurlResult:
+         out_file = make_temp_file()
+         with defer(lambda: os.unlink(out_file)):
+             run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
+
+             out_json_bytes = run_cmd.run(subprocesses.check_output)
+
+             out_json = json.loads(out_json_bytes.decode())
+             status_code = check.isinstance(out_json['response_code'], int)
+
+             with open(out_file, 'rb') as f:
+                 body = f.read()
+
+             result = self.CurlResult(
+                 status_code=status_code,
+                 body=body,
+             )
+
+         if raise_ and (500 <= status_code <= 600):
+             raise result.as_error()
+
+         return result
+
+     def run_json_curl_cmd(
+             self,
+             cmd: ShellCmd,
+             *,
+             success_status_codes: ta.Optional[ta.Container[int]] = None,
+     ) -> ta.Optional[ta.Any]:
+         result = self.run_curl_cmd(cmd, raise_=True)
+
+         if success_status_codes is not None:
+             is_success = result.status_code in success_status_codes
+         else:
+             is_success = 200 <= result.status_code < 300
+
+         if is_success:
+             if not (body := result.body):
+                 return None
+             return json.loads(body.decode('utf-8-sig'))
+
+         elif result.status_code == 404:
+             return None
+
+         else:
+             raise result.as_error()
+
+     #
+
+     def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
+         return self.build_curl_cmd(
+             'GET',
+             f'cache?keys={key}',
+         )
+
+     def run_get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1.ArtifactCacheEntry]:
+         curl_cmd = self.build_get_entry_curl_cmd(key)
+
+         obj = self.run_json_curl_cmd(
+             curl_cmd,
+             success_status_codes=[200, 204],
+         )
+         if obj is None:
+             return None
+
+         return GithubCacheServiceV1.dataclass_from_json(
+             GithubCacheServiceV1.ArtifactCacheEntry,
+             obj,
+         )
+
+     #
+
+     def build_download_get_entry_cmd(
+             self,
+             entry: GithubCacheServiceV1.ArtifactCacheEntry,
+             out_file: str,
+     ) -> ShellCmd:
+         return ShellCmd(' '.join([
+             'aria2c',
+             '-x', '4',
+             '-o', out_file,
+             check.non_empty_str(entry.archive_location),
+         ]))
+
+     def download_get_entry(
+             self,
+             entry: GithubCacheServiceV1.ArtifactCacheEntry,
+             out_file: str,
+     ) -> None:
+         dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
+         dl_cmd.run(subprocesses.check_call)
+
+     #
+
+     def upload_cache_entry(
+             self,
+             key: str,
+             in_file: str,
+     ) -> None:
+         check.state(os.path.isfile(in_file))
+
+         file_size = os.stat(in_file).st_size
+
+         reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
+             key=key,
+             cache_size=file_size,
+         )
+         reserve_cmd = self.build_post_json_curl_cmd(
+             'caches',
+             GithubCacheServiceV1.dataclass_to_json(reserve_req),
+         )
+         reserve_resp_obj: ta.Any = check.not_none(self.run_json_curl_cmd(
+             reserve_cmd,
+             success_status_codes=[201],
+         ))
+         reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
+             GithubCacheServiceV1.ReserveCacheResponse,
+             reserve_resp_obj,
+         )
+
+         raise NotImplementedError
+
+
+ ##
+
+
+ class GithubShellCache(ShellCache):
+     def __init__(
+             self,
+             dir: str,  # noqa
+             *,
+             client: ta.Optional[GithubV1CacheShellClient] = None,
+     ) -> None:
+         super().__init__()
+
+         self._dir = check.not_none(dir)
+
+         if client is None:
+             client = GithubV1CacheShellClient()
+         self._client = client
+
+         self._local = DirectoryFileCache(self._dir)
+
+     def get_file_cmd(self, key: str) -> ta.Optional[ShellCmd]:
+         local_file = self._local.get_cache_file_path(key)
+         if os.path.exists(local_file):
+             return ShellCmd(f'cat {shlex.quote(local_file)}')
+
+         if (entry := self._client.run_get_entry(key)) is None:
+             return None
+
+         tmp_file = self._local.format_incomplete_file(local_file)
+         try:
+             self._client.download_get_entry(entry, tmp_file)
+
+             os.replace(tmp_file, local_file)
+
+         except BaseException:  # noqa
+             os.unlink(tmp_file)
+
+             raise
+
+         return ShellCmd(f'cat {shlex.quote(local_file)}')
+
+     class _PutFileCmdContext(ShellCache.PutFileCmdContext):  # noqa
+         def __init__(
+                 self,
+                 owner: 'GithubShellCache',
+                 key: str,
+                 tmp_file: str,
+                 local_file: str,
+         ) -> None:
+             super().__init__()
+
+             self._owner = owner
+             self._key = key
+             self._tmp_file = tmp_file
+             self._local_file = local_file
+
+         @property
+         def cmd(self) -> ShellCmd:
+             return ShellCmd(f'cat > {shlex.quote(self._tmp_file)}')
+
+         def _commit(self) -> None:
+             os.replace(self._tmp_file, self._local_file)
+
+             self._owner._client.upload_cache_entry(self._key, self._local_file)  # noqa
+
+         def _abort(self) -> None:
+             os.unlink(self._tmp_file)
+
+     def put_file_cmd(self, key: str) -> ShellCache.PutFileCmdContext:
+         local_file = self._local.get_cache_file_path(key, make_dirs=True)
+         return self._PutFileCmdContext(
+             self,
+             key,
+             self._local.format_incomplete_file(local_file),
+             local_file,
+         )
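
This client shells out to curl rather than using an HTTP library, which keeps the module dependency-free for omlish-lite use. The trick `run_curl_cmd` relies on is curl's `-w '%{json}'` write-out (available since curl 7.70.0): `-o` diverts the response body to a temp file while stdout carries a JSON object of transfer variables, whose `response_code` field supplies the status. A standalone demo of just that mechanism (URL invented):

    import json
    import subprocess
    import tempfile

    # Run curl so the body lands in a file while transfer metadata, as JSON,
    # lands on stdout.
    with tempfile.NamedTemporaryFile() as out:
        meta_bytes = subprocess.check_output(
            f"curl -s -o {out.name} -w '%{{json}}' https://example.com/",
            shell=True,
        )
        meta = json.loads(meta_bytes.decode())
        print(meta['response_code'])

Note also how the bearer token is handled: it is exported via the `_GITHUB_CACHE_AUTH_TOKEN` environment variable and referenced as `$_GITHUB_CACHE_AUTH_TOKEN` in the header, so the raw secret never appears in the composed command string. `upload_cache_entry` still ends in `raise NotImplementedError`; only reservation of the cache entry is wired up in this release.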
@@ -0,0 +1,207 @@
+ # ruff: noqa: UP006 UP007
+ # @omlish-lite
+ """
+ export FILE_SIZE=$(stat --format="%s" $FILE)
+
+ export CACHE_ID=$(curl -s \
+   -X POST \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/caches" \
+   -H 'Content-Type: application/json' \
+   -H 'Accept: application/json;api-version=6.0-preview.1' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   -d '{"key": "'"$CACHE_KEY"'", "cacheSize": '"$FILE_SIZE"'}' \
+   | jq .cacheId)
+
+ curl -s \
+   -X PATCH \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
+   -H 'Content-Type: application/octet-stream' \
+   -H 'Accept: application/json;api-version=6.0-preview.1' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   -H "Content-Range: bytes 0-$((FILE_SIZE - 1))/*" \
+   --data-binary @"$FILE"
+
+ curl -s \
+   -X POST \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/caches/$CACHE_ID" \
+   -H 'Content-Type: application/json' \
+   -H 'Accept: application/json;api-version=6.0-preview.1' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   -d '{"size": '"$(stat --format="%s" $FILE)"'}'
+
+ curl -s \
+   -X GET \
+   "${ACTIONS_CACHE_URL}_apis/artifactcache/cache?keys=$CACHE_KEY" \
+   -H 'Content-Type: application/json' \
+   -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+   | jq .
+ """
+ import dataclasses as dc
+ import typing as ta
+
+ from omlish.lite.strings import camel_case
+ from omlish.lite.strings import snake_case
+
+
+ T = ta.TypeVar('T')
+
+
+ ##
+
+
+ class GithubCacheServiceV1:
+     API_VERSION = '6.0-preview.1'
+
+     @classmethod
+     def get_service_url(cls, base_url: str) -> str:
+         return f'{base_url.rstrip("/")}/_apis/artifactcache'
+
+     #
+
+     @classmethod
+     def dataclass_to_json(cls, obj: ta.Any) -> ta.Any:
+         return {
+             camel_case(k, lower=True): v
+             for k, v in dc.asdict(obj).items()
+             if v is not None
+         }
+
+     @classmethod
+     def dataclass_from_json(cls, dcls: ta.Type[T], obj: ta.Any) -> T:
+         return dcls(**{
+             snake_case(k): v
+             for k, v in obj.items()
+         })
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class ArtifactCacheEntry:
+         cache_key: ta.Optional[str]
+         scope: ta.Optional[str]
+         cache_version: ta.Optional[str]
+         creation_time: ta.Optional[str]
+         archive_location: ta.Optional[str]
+
+     @dc.dataclass(frozen=True)
+     class ArtifactCacheList:
+         total_count: int
+         artifact_caches: ta.Optional[ta.Sequence['GithubCacheServiceV1.ArtifactCacheEntry']]
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class ReserveCacheRequest:
+         key: str
+         cache_size: ta.Optional[int]
+         version: ta.Optional[str] = None
+
+     @dc.dataclass(frozen=True)
+     class ReserveCacheResponse:
+         cache_id: int
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class CommitCacheRequest:
+         size: int
+
+     #
+
+     class CompressionMethod:
+         GZIP = 'gzip'
+         ZSTD_WITHOUT_LONG = 'zstd-without-long'
+         ZSTD = 'zstd'
+
+     @dc.dataclass(frozen=True)
+     class InternalCacheOptions:
+         compression_method: ta.Optional[str]  # CompressionMethod
+         enable_cross_os_archive: ta.Optional[bool]
+         cache_size: ta.Optional[int]
+
+
+ class GithubCacheServiceV2:
+     SERVICE_NAME = 'github.actions.results.api.v1.CacheService'
+
+     @dc.dataclass(frozen=True)
+     class Method:
+         name: str
+         request: type
+         response: type
+
+     #
+
+     class CacheScopePermission:
+         READ = 1
+         WRITE = 2
+         ALL = READ | WRITE
+
+     @dc.dataclass(frozen=True)
+     class CacheScope:
+         scope: str
+         permission: int  # CacheScopePermission
+
+     @dc.dataclass(frozen=True)
+     class CacheMetadata:
+         repository_id: int
+         scope: ta.Sequence['GithubCacheServiceV2.CacheScope']
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class CreateCacheEntryRequest:
+         key: str
+         version: str
+         metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+     @dc.dataclass(frozen=True)
+     class CreateCacheEntryResponse:
+         ok: bool
+         signed_upload_url: str
+
+     CREATE_CACHE_ENTRY_METHOD = Method(
+         'CreateCacheEntry',
+         CreateCacheEntryRequest,
+         CreateCacheEntryResponse,
+     )
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class FinalizeCacheEntryUploadRequest:
+         key: str
+         size_bytes: int
+         version: str
+         metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+     @dc.dataclass(frozen=True)
+     class FinalizeCacheEntryUploadResponse:
+         ok: bool
+         entry_id: str
+
+     FINALIZE_CACHE_ENTRY_METHOD = Method(
+         'FinalizeCacheEntryUpload',
+         FinalizeCacheEntryUploadRequest,
+         FinalizeCacheEntryUploadResponse,
+     )
+
+     #
+
+     @dc.dataclass(frozen=True)
+     class GetCacheEntryDownloadUrlRequest:
+         key: str
+         restore_keys: ta.Sequence[str]
+         version: str
+         metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+     @dc.dataclass(frozen=True)
+     class GetCacheEntryDownloadUrlResponse:
+         ok: bool
+         signed_download_url: str
+         matched_key: str
+
+     GET_CACHE_ENTRY_DOWNLOAD_URL_METHOD = Method(
+         'GetCacheEntryDownloadURL',
+         GetCacheEntryDownloadUrlRequest,
+         GetCacheEntryDownloadUrlResponse,
+     )
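
These wire types mirror GitHub's Actions cache services: the V1 REST endpoints exercised by the docstring's curl commands, and what appears to be the newer Twirp-style V2 `CacheService`. The to/from-JSON helpers just rename fields between Python snake_case and the API's camelCase, dropping Nones on the way out. A small round-trip sketch (values invented, and assuming the module above is importable and that `camel_case(k, lower=True)` renders 'cache_size' as 'cacheSize'):

    req = GithubCacheServiceV1.ReserveCacheRequest(key='deps', cache_size=123)
    GithubCacheServiceV1.dataclass_to_json(req)
    # -> {'key': 'deps', 'cacheSize': 123}   ('version' omitted: it was None)

    GithubCacheServiceV1.dataclass_from_json(
        GithubCacheServiceV1.ReserveCacheResponse,
        {'cacheId': 42},
    )
    # -> GithubCacheServiceV1.ReserveCacheResponse(cache_id=42)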