omdev 0.0.0.dev212__py3-none-any.whl → 0.0.0.dev213__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
omdev/ci/docker.py CHANGED
@@ -13,9 +13,9 @@ import shlex
 import tarfile
 import typing as ta

+from omlish.asyncs.asyncio.subprocesses import asyncio_subprocesses
 from omlish.lite.check import check
 from omlish.lite.contextmanagers import defer
-from omlish.subprocesses import subprocesses

 from .shell import ShellCmd
 from .utils import make_temp_file
@@ -60,8 +60,8 @@ def read_docker_tar_image_id(tar_file: str) -> str:
 ##


-def is_docker_image_present(image: str) -> bool:
-    out = subprocesses.check_output(
+async def is_docker_image_present(image: str) -> bool:
+    out = await asyncio_subprocesses.check_output(
         'docker',
         'images',
         '--format', 'json',
@@ -76,55 +76,74 @@ def is_docker_image_present(image: str) -> bool:
     return True


-def pull_docker_image(
+async def pull_docker_image(
         image: str,
 ) -> None:
-    subprocesses.check_call(
+    await asyncio_subprocesses.check_call(
         'docker',
         'pull',
         image,
     )


-def build_docker_image(
+async def build_docker_image(
         docker_file: str,
         *,
+        tag: ta.Optional[str] = None,
         cwd: ta.Optional[str] = None,
 ) -> str:
     id_file = make_temp_file()
     with defer(lambda: os.unlink(id_file)):
-        subprocesses.check_call(
+        await asyncio_subprocesses.check_call(
             'docker',
             'build',
             '-f', os.path.abspath(docker_file),
             '--iidfile', id_file,
             '--squash',
+            *(['--tag', tag] if tag is not None else []),
             '.',
             **(dict(cwd=cwd) if cwd is not None else {}),
         )

-    with open(id_file) as f:
+    with open(id_file) as f:  # noqa
         image_id = check.single(f.read().strip().splitlines()).strip()

     return image_id


+async def tag_docker_image(image: str, tag: str) -> None:
+    await asyncio_subprocesses.check_call(
+        'docker',
+        'tag',
+        image,
+        tag,
+    )
+
+
+async def delete_docker_tag(tag: str) -> None:
+    await asyncio_subprocesses.check_call(
+        'docker',
+        'rmi',
+        tag,
+    )
+
+
 ##


-def save_docker_tar_cmd(
+async def save_docker_tar_cmd(
         image: str,
         output_cmd: ShellCmd,
 ) -> None:
     cmd = dc.replace(output_cmd, s=f'docker save {image} | {output_cmd.s}')
-    cmd.run(subprocesses.check_call)
+    await cmd.run(asyncio_subprocesses.check_call)


-def save_docker_tar(
+async def save_docker_tar(
         image: str,
         tar_file: str,
 ) -> None:
-    return save_docker_tar_cmd(
+    return await save_docker_tar_cmd(
         image,
         ShellCmd(f'cat > {shlex.quote(tar_file)}'),
     )
@@ -133,19 +152,19 @@ def save_docker_tar(
 #


-def load_docker_tar_cmd(
+async def load_docker_tar_cmd(
         input_cmd: ShellCmd,
 ) -> str:
     cmd = dc.replace(input_cmd, s=f'{input_cmd.s} | docker load')

-    out = cmd.run(subprocesses.check_output).decode()
+    out = (await cmd.run(asyncio_subprocesses.check_output)).decode()

     line = check.single(out.strip().splitlines())
     loaded = line.partition(':')[2].strip()
     return loaded


-def load_docker_tar(
+async def load_docker_tar(
         tar_file: str,
 ) -> str:
-    return load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
+    return await load_docker_tar_cmd(ShellCmd(f'cat {shlex.quote(tar_file)}'))
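
Every helper in this module is now a coroutine, so callers need an event loop. A minimal sketch of driving the new API, assuming these functions are imported from omdev.ci.docker (the ensure_image wrapper and the image/tag values are illustrative, not part of the package):

    import asyncio

    from omdev.ci.docker import build_docker_image, is_docker_image_present, pull_docker_image

    async def ensure_image(image: str) -> None:
        # Pull only if the image is not already present locally.
        if not await is_docker_image_present(image):
            await pull_docker_image(image)

    asyncio.run(ensure_image('alpine:3.19'))

    # The new optional `tag` parameter is passed through as `docker build --tag`.
    image_id = asyncio.run(build_docker_image('Dockerfile', tag='ci:latest'))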
omdev/ci/github/cache.py CHANGED
@@ -1,14 +1,13 @@
 # ruff: noqa: UP006 UP007
 # @omlish-lite
+import abc
 import dataclasses as dc
-import json
 import os
 import shlex
 import typing as ta
+import urllib.parse

 from omlish.lite.check import check
-from omlish.lite.contextmanagers import defer
-from omlish.lite.json import json_dumps_compact
 from omlish.subprocesses import subprocesses

 from ..cache import DirectoryFileCache
@@ -16,256 +15,168 @@ from ..cache import ShellCache
 from ..shell import ShellCmd
 from ..utils import make_temp_file
 from .cacheapi import GithubCacheServiceV1
+from .curl import GithubServiceCurlClient


 ##


-class GithubV1CacheShellClient:
+class GithubCacheShellClient(abc.ABC):
+    class Entry(abc.ABC):  # noqa
+        pass
+
+    @abc.abstractmethod
+    def run_get_entry(self, key: str) -> ta.Optional[Entry]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def download_get_entry(self, entry: Entry, out_file: str) -> None:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def upload_cache_entry(self, key: str, in_file: str) -> None:
+        raise NotImplementedError
+
+
+#
+
+
+class GithubCacheServiceV1ShellClient(GithubCacheShellClient):
     BASE_URL_ENV_KEY = 'ACTIONS_CACHE_URL'
     AUTH_TOKEN_ENV_KEY = 'ACTIONS_RUNTIME_TOKEN'  # noqa

+    KEY_SUFFIX_ENV_KEY = 'GITHUB_RUN_ID'
+
+    CACHE_VERSION: ta.ClassVar[int] = 1
+
+    #
+
     def __init__(
             self,
             *,
             base_url: ta.Optional[str] = None,
             auth_token: ta.Optional[str] = None,
+
+            key_prefix: ta.Optional[str] = None,
+            key_suffix: ta.Optional[str] = None,
     ) -> None:
         super().__init__()

+        #
+
         if base_url is None:
             base_url = os.environ[self.BASE_URL_ENV_KEY]
-        self._base_url = check.non_empty_str(base_url)
+        service_url = GithubCacheServiceV1.get_service_url(base_url)

         if auth_token is None:
             auth_token = os.environ.get(self.AUTH_TOKEN_ENV_KEY)
-        self._auth_token = auth_token
-
-        self._service_url = GithubCacheServiceV1.get_service_url(self._base_url)
-
-    #
-
-    _MISSING = object()

-    def build_headers(
-            self,
-            *,
-            auth_token: ta.Any = _MISSING,
-            content_type: ta.Optional[str] = None,
-    ) -> ta.Dict[str, str]:
-        dct = {
-            'Accept': f'application/json;api-version={GithubCacheServiceV1.API_VERSION}',
-        }
+        self._curl = GithubServiceCurlClient(
+            service_url,
+            auth_token,
+            api_version=GithubCacheServiceV1.API_VERSION,
+        )

-        if auth_token is self._MISSING:
-            auth_token = self._auth_token
-        if auth_token:
-            dct['Authorization'] = f'Bearer {auth_token}'
+        #

-        if content_type is not None:
-            dct['Content-Type'] = content_type
+        self._key_prefix = key_prefix

-        return dct
+        if key_suffix is None:
+            key_suffix = os.environ[self.KEY_SUFFIX_ENV_KEY]
+        self._key_suffix = check.non_empty_str(key_suffix)

     #

-    HEADER_AUTH_TOKEN_ENV_KEY = '_GITHUB_CACHE_AUTH_TOKEN'  # noqa
-
-    def build_curl_cmd(
-            self,
-            method: str,
-            url: str,
-            *,
-            json_content: bool = False,
-            content_type: ta.Optional[str] = None,
-    ) -> ShellCmd:
-        if content_type is None and json_content:
-            content_type = 'application/json'
-
-        env = {}
-
-        header_auth_token: ta.Optional[str]
-        if self._auth_token:
-            env[self.HEADER_AUTH_TOKEN_ENV_KEY] = self._auth_token
-            header_auth_token = f'${self.HEADER_AUTH_TOKEN_ENV_KEY}'
-        else:
-            header_auth_token = None
-
-        hdrs = self.build_headers(
-            auth_token=header_auth_token,
-            content_type=content_type,
-        )
-
-        url = f'{self._service_url}/{url}'
+    KEY_PART_SEPARATOR = '--'

-        cmd = ' '.join([
-            'curl',
-            '-s',
-            '-X', method,
-            url,
-            *[f'-H "{k}: {v}"' for k, v in hdrs.items()],
+    def fix_key(self, s: str) -> str:
+        return self.KEY_PART_SEPARATOR.join([
+            *([self._key_prefix] if self._key_prefix else []),
+            s,
+            self._key_suffix,
         ])

-        return ShellCmd(
-            cmd,
-            env=env,
-        )
-
-    def build_post_json_curl_cmd(
-            self,
-            url: str,
-            obj: ta.Any,
-            **kwargs: ta.Any,
-    ) -> ShellCmd:
-        curl_cmd = self.build_curl_cmd(
-            'POST',
-            url,
-            json_content=True,
-            **kwargs,
-        )
-
-        obj_json = json_dumps_compact(obj)
-
-        return dc.replace(curl_cmd, s=f'{curl_cmd.s} -d {shlex.quote(obj_json)}')
-
     #

-    @dc.dataclass()
-    class CurlError(RuntimeError):
-        status_code: int
-        body: ta.Optional[bytes]
-
-        def __str__(self) -> str:
-            return repr(self)
-
     @dc.dataclass(frozen=True)
-    class CurlResult:
-        status_code: int
-        body: ta.Optional[bytes]
-
-        def as_error(self) -> 'GithubV1CacheShellClient.CurlError':
-            return GithubV1CacheShellClient.CurlError(
-                status_code=self.status_code,
-                body=self.body,
-            )
-
-    def run_curl_cmd(
-            self,
-            cmd: ShellCmd,
-            *,
-            raise_: bool = False,
-    ) -> CurlResult:
-        out_file = make_temp_file()
-        with defer(lambda: os.unlink(out_file)):
-            run_cmd = dc.replace(cmd, s=f"{cmd.s} -o {out_file} -w '%{{json}}'")
-
-            out_json_bytes = run_cmd.run(subprocesses.check_output)
-
-            out_json = json.loads(out_json_bytes.decode())
-            status_code = check.isinstance(out_json['response_code'], int)
-
-            with open(out_file, 'rb') as f:
-                body = f.read()
-
-            result = self.CurlResult(
-                status_code=status_code,
-                body=body,
-            )
-
-        if raise_ and (500 <= status_code <= 600):
-            raise result.as_error()
-
-        return result
-
-    def run_json_curl_cmd(
-            self,
-            cmd: ShellCmd,
-            *,
-            success_status_codes: ta.Optional[ta.Container[int]] = None,
-    ) -> ta.Optional[ta.Any]:
-        result = self.run_curl_cmd(cmd, raise_=True)
-
-        if success_status_codes is not None:
-            is_success = result.status_code in success_status_codes
-        else:
-            is_success = 200 <= result.status_code < 300
-
-        if is_success:
-            if not (body := result.body):
-                return None
-            return json.loads(body.decode('utf-8-sig'))
-
-        elif result.status_code == 404:
-            return None
-
-        else:
-            raise result.as_error()
+    class Entry(GithubCacheShellClient.Entry):
+        artifact: GithubCacheServiceV1.ArtifactCacheEntry

     #

     def build_get_entry_curl_cmd(self, key: str) -> ShellCmd:
-        return self.build_curl_cmd(
+        fixed_key = self.fix_key(key)
+
+        qp = dict(
+            keys=fixed_key,
+            version=str(self.CACHE_VERSION),
+        )
+
+        return self._curl.build_cmd(
             'GET',
-            f'cache?keys={key}',
+            shlex.quote('?'.join([
+                'cache',
+                '&'.join([
+                    f'{k}={urllib.parse.quote_plus(v)}'
+                    for k, v in qp.items()
+                ]),
+            ])),
         )

-    def run_get_entry(self, key: str) -> ta.Optional[GithubCacheServiceV1.ArtifactCacheEntry]:
-        curl_cmd = self.build_get_entry_curl_cmd(key)
+    def run_get_entry(self, key: str) -> ta.Optional[Entry]:
+        fixed_key = self.fix_key(key)
+        curl_cmd = self.build_get_entry_curl_cmd(fixed_key)

-        obj = self.run_json_curl_cmd(
+        obj = self._curl.run_json_cmd(
             curl_cmd,
             success_status_codes=[200, 204],
         )
         if obj is None:
             return None

-        return GithubCacheServiceV1.dataclass_from_json(
+        return self.Entry(GithubCacheServiceV1.dataclass_from_json(
             GithubCacheServiceV1.ArtifactCacheEntry,
             obj,
-        )
+        ))

     #

-    def build_download_get_entry_cmd(
-            self,
-            entry: GithubCacheServiceV1.ArtifactCacheEntry,
-            out_file: str,
-    ) -> ShellCmd:
+    def build_download_get_entry_cmd(self, entry: Entry, out_file: str) -> ShellCmd:
         return ShellCmd(' '.join([
             'aria2c',
             '-x', '4',
             '-o', out_file,
-            check.non_empty_str(entry.archive_location),
+            check.non_empty_str(entry.artifact.archive_location),
         ]))

-    def download_get_entry(
-            self,
-            entry: GithubCacheServiceV1.ArtifactCacheEntry,
-            out_file: str,
-    ) -> None:
-        dl_cmd = self.build_download_get_entry_cmd(entry, out_file)
+    def download_get_entry(self, entry: GithubCacheShellClient.Entry, out_file: str) -> None:
+        dl_cmd = self.build_download_get_entry_cmd(
+            check.isinstance(entry, GithubCacheServiceV1ShellClient.Entry),
+            out_file,
+        )
         dl_cmd.run(subprocesses.check_call)

     #

-    def upload_cache_entry(
-            self,
-            key: str,
-            in_file: str,
-    ) -> None:
+    def upload_cache_entry(self, key: str, in_file: str) -> None:
+        fixed_key = self.fix_key(key)
+
         check.state(os.path.isfile(in_file))

         file_size = os.stat(in_file).st_size

+        #
+
         reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
-            key=key,
+            key=fixed_key,
             cache_size=file_size,
+            version=str(self.CACHE_VERSION),
         )
-        reserve_cmd = self.build_post_json_curl_cmd(
+        reserve_cmd = self._curl.build_post_json_cmd(
             'caches',
             GithubCacheServiceV1.dataclass_to_json(reserve_req),
         )
-        reserve_resp_obj: ta.Any = check.not_none(self.run_json_curl_cmd(
+        reserve_resp_obj: ta.Any = check.not_none(self._curl.run_json_cmd(
             reserve_cmd,
             success_status_codes=[201],
         ))
@@ -273,8 +184,66 @@ class GithubV1CacheShellClient:
             GithubCacheServiceV1.ReserveCacheResponse,
             reserve_resp_obj,
         )
+        cache_id = check.isinstance(reserve_resp.cache_id, int)
+
+        #
+
+        tmp_file = make_temp_file()
+
+        print(f'{file_size=}')
+        num_written = 0
+        chunk_size = 32 * 1024 * 1024
+        for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
+            ofs = i * chunk_size
+            sz = min(chunk_size, file_size - ofs)
+
+            patch_cmd = self._curl.build_cmd(
+                'PATCH',
+                f'caches/{cache_id}',
+                content_type='application/octet-stream',
+                headers={
+                    'Content-Range': f'bytes {ofs}-{ofs + sz - 1}/*',
+                },
+            )

-        raise NotImplementedError
+            #
+
+            # patch_data_cmd = dc.replace(patch_cmd, s=' | '.join([
+            #     f'dd if={in_file} bs={chunk_size} skip={i} count=1 status=none',
+            #     f'{patch_cmd.s} --data-binary -',
+            # ]))
+            # print(f'{patch_data_cmd.s=}')
+            # patch_result = self._curl.run_cmd(patch_data_cmd, raise_=True)
+
+            #
+
+            with open(in_file, 'rb') as f:
+                f.seek(ofs)
+                buf = f.read(sz)
+            with open(tmp_file, 'wb') as f:
+                f.write(buf)
+            num_written += len(buf)
+            print(f'{num_written=}')
+            patch_data_cmd = dc.replace(patch_cmd, s=f'{patch_cmd.s} --data-binary @{tmp_file}')
+            print(f'{patch_data_cmd.s=}')
+            patch_result = self._curl.run_cmd(patch_data_cmd, raise_=True)
+
+            #
+
+            check.equal(patch_result.status_code, 204)
+            ofs += sz
+
+        #
+
+        commit_req = GithubCacheServiceV1.CommitCacheRequest(
+            size=file_size,
+        )
+        commit_cmd = self._curl.build_post_json_cmd(
+            f'caches/{cache_id}',
+            GithubCacheServiceV1.dataclass_to_json(commit_req),
+        )
+        commit_result = self._curl.run_cmd(commit_cmd, raise_=True)
+        check.equal(commit_result.status_code, 204)


 ##
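
The rewritten upload implements the GitHub Actions cache protocol end to end: reserve an entry, PATCH the archive up in 32 MiB chunks with a Content-Range header, then commit the total size. A worked sketch of the chunking arithmetic used by the loop above, with illustrative sizes:

    file_size = 100 * 1024 * 1024  # a hypothetical 100 MiB archive
    chunk_size = 32 * 1024 * 1024  # 32 MiB, matching the code above

    # Ceiling division: 100 MiB / 32 MiB -> 4 chunks (3 full + one 4 MiB remainder).
    num_chunks = (file_size // chunk_size) + (1 if file_size % chunk_size else 0)

    for i in range(num_chunks):
        ofs = i * chunk_size
        sz = min(chunk_size, file_size - ofs)
        content_range = f'bytes {ofs}-{ofs + sz - 1}/*'
        # Final chunk: 'bytes 100663296-104857599/*'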
@@ -285,15 +254,15 @@ class GithubShellCache(ShellCache):
             self,
             dir: str,  # noqa
             *,
-            client: ta.Optional[GithubV1CacheShellClient] = None,
+            client: ta.Optional[GithubCacheShellClient] = None,
     ) -> None:
         super().__init__()

         self._dir = check.not_none(dir)

         if client is None:
-            client = GithubV1CacheShellClient()
-        self._client = client
+            client = GithubCacheServiceV1ShellClient()
+        self._client: GithubCacheShellClient = client

         self._local = DirectoryFileCache(self._dir)
omdev/ci/github/cacheapi.py CHANGED
@@ -93,7 +93,7 @@ class GithubCacheServiceV1:
     @dc.dataclass(frozen=True)
     class ReserveCacheRequest:
         key: str
-        cache_size: ta.Optional[int]
+        cache_size: ta.Optional[int] = None
         version: ta.Optional[str] = None

     @dc.dataclass(frozen=True)
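
With the new default, a ReserveCacheRequest can now be constructed without an explicit cache_size, matching how version already defaults to None. A hypothetical construction (the key value is illustrative, following the new '--'-separated key scheme):

    req = GithubCacheServiceV1.ReserveCacheRequest(
        key='deps--a1b2c3--1234567890',  # prefix--key--GITHUB_RUN_ID
        version='1',
    )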
omdev/ci/github/cli.py CHANGED
@@ -11,7 +11,7 @@ from omlish.argparse.cli import argparse_arg
 from omlish.argparse.cli import argparse_cmd
 from omlish.lite.json import json_dumps_pretty

-from .cache import GithubV1CacheShellClient
+from .cache import GithubCacheServiceV1ShellClient


 class GithubCli(ArgparseCli):
@@ -19,7 +19,7 @@ class GithubCli(ArgparseCli):
         argparse_arg('key'),
     )
     def get_cache_entry(self) -> None:
-        shell_client = GithubV1CacheShellClient()
+        shell_client = GithubCacheServiceV1ShellClient()
         entry = shell_client.run_get_entry(self.args.key)
         if entry is None:
             return
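
The CLI change is representative of what downstream callers must do: the client class was renamed from GithubV1CacheShellClient to GithubCacheServiceV1ShellClient, keeping the same construction-from-environment behavior (ACTIONS_CACHE_URL, ACTIONS_RUNTIME_TOKEN, and now GITHUB_RUN_ID). A sketch of updated caller code, with an illustrative key and output path:

    from omdev.ci.github.cache import GithubCacheServiceV1ShellClient

    client = GithubCacheServiceV1ShellClient()
    entry = client.run_get_entry('my-cache-key')
    if entry is not None:
        client.download_get_entry(entry, 'out.tar')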