omdev-0.0.0.dev289-py3-none-any.whl → omdev-0.0.0.dev291-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omdev/ci/github/api/__init__.py +0 -0
- omdev/ci/github/{client.py → api/clients.py} +190 -207
- omdev/ci/github/api/v1/__init__.py +0 -0
- omdev/ci/github/{api.py → api/v1/api.py} +5 -87
- omdev/ci/github/api/v1/client.py +171 -0
- omdev/ci/github/api/v2/__init__.py +0 -0
- omdev/ci/github/api/v2/api.py +148 -0
- omdev/ci/github/api/v2/azure.py +185 -0
- omdev/ci/github/api/v2/client.py +201 -0
- omdev/ci/github/cache.py +14 -3
- omdev/ci/github/cli.py +1 -1
- omdev/pyproject/pkg.py +5 -2
- omdev/scripts/ci.py +890 -285
- omdev/scripts/pyproject.py +5 -2
- {omdev-0.0.0.dev289.dist-info → omdev-0.0.0.dev291.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev289.dist-info → omdev-0.0.0.dev291.dist-info}/RECORD +20 -13
- {omdev-0.0.0.dev289.dist-info → omdev-0.0.0.dev291.dist-info}/WHEEL +1 -1
- {omdev-0.0.0.dev289.dist-info → omdev-0.0.0.dev291.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev289.dist-info → omdev-0.0.0.dev291.dist-info}/licenses/LICENSE +0 -0
- {omdev-0.0.0.dev289.dist-info → omdev-0.0.0.dev291.dist-info}/top_level.txt +0 -0
omdev/ci/github/api/v1/client.py (new file)

@@ -0,0 +1,171 @@

```python
# ruff: noqa: TC003 UP006 UP007
import dataclasses as dc
import os
import typing as ta
import urllib.parse
import urllib.request

from omlish.lite.check import check
from omlish.lite.logs import log
from omlish.lite.timing import log_timing_context

from ...env import register_github_env_var
from ..clients import BaseGithubCacheClient
from ..clients import GithubCacheClient
from .api import GithubCacheServiceV1


##


class GithubCacheServiceV1Client(BaseGithubCacheClient):
    BASE_URL_ENV_VAR = register_github_env_var('ACTIONS_CACHE_URL')

    def __init__(
            self,
            *,
            base_url: ta.Optional[str] = None,

            **kwargs: ta.Any,
    ) -> None:
        if base_url is None:
            base_url = check.non_empty_str(self.BASE_URL_ENV_VAR())
        service_url = GithubCacheServiceV1.get_service_url(base_url)

        super().__init__(
            service_url=service_url,
            **kwargs,
        )

    #

    def _build_request_headers(
            self,
            headers: ta.Optional[ta.Mapping[str, str]] = None,
            **kwargs: ta.Any,
    ) -> ta.Dict[str, str]:
        return super()._build_request_headers(
            {
                'Accept': ';'.join([
                    'application/json',
                    f'api-version={GithubCacheServiceV1.API_VERSION}',
                ]),
                **(headers or {}),
            },
            **kwargs,
        )

    #

    @dc.dataclass(frozen=True)
    class Entry(GithubCacheClient.Entry):
        artifact: GithubCacheServiceV1.ArtifactCacheEntry

    def get_entry_url(self, entry: GithubCacheClient.Entry) -> ta.Optional[str]:
        entry1 = check.isinstance(entry, self.Entry)
        return entry1.artifact.archive_location

    #

    def _build_get_entry_url_path(self, *keys: str) -> str:
        qp = dict(
            keys=','.join(urllib.parse.quote_plus(k) for k in keys),
            version=str(self._cache_version),
        )

        return '?'.join([
            'cache',
            '&'.join([
                f'{k}={v}'
                for k, v in qp.items()
            ]),
        ])

    GET_ENTRY_SUCCESS_STATUS_CODES = (200, 204)

    #

    async def get_entry(self, key: str) -> ta.Optional[GithubCacheClient.Entry]:
        obj = await self._send_request(
            path=self._build_get_entry_url_path(self.fix_key(key, partial_suffix=True)),
        )
        if obj is None:
            return None

        return self.Entry(GithubCacheServiceV1.dataclass_from_json(
            GithubCacheServiceV1.ArtifactCacheEntry,
            obj,
        ))

    #

    async def download_file(self, entry: GithubCacheClient.Entry, out_file: str) -> None:
        entry1 = check.isinstance(entry, self.Entry)
        with log_timing_context(
                'Downloading github cache '
                f'key {entry1.artifact.cache_key} '
                f'version {entry1.artifact.cache_version} '
                f'to {out_file}',
        ):
            await self._download_file_chunks(
                key=check.non_empty_str(entry1.artifact.cache_key),
                url=check.non_empty_str(entry1.artifact.archive_location),
                out_file=out_file,
            )

    #

    async def _upload_file(self, key: str, in_file: str) -> None:
        fixed_key = self.fix_key(key)

        check.state(os.path.isfile(in_file))
        file_size = os.stat(in_file).st_size

        #

        reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
            key=fixed_key,
            cache_size=file_size,
            version=str(self._cache_version),
        )
        reserve_resp_obj = await self._send_request(
            path='caches',
            json_content=GithubCacheServiceV1.dataclass_to_json(reserve_req),
            success_status_codes=[201],
        )
        reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
            GithubCacheServiceV1.ReserveCacheResponse,
            reserve_resp_obj,
        )
        cache_id = check.isinstance(reserve_resp.cache_id, int)

        log.debug(f'Github cache file {os.path.basename(in_file)} got id {cache_id}')  # noqa

        #

        url = f'{self._service_url}/caches/{cache_id}'

        await self._upload_file_chunks(
            in_file=in_file,
            url=url,
            key=fixed_key,
            file_size=file_size,
        )

        #

        commit_req = GithubCacheServiceV1.CommitCacheRequest(
            size=file_size,
        )
        await self._send_request(
            path=f'caches/{cache_id}',
            json_content=GithubCacheServiceV1.dataclass_to_json(commit_req),
            success_status_codes=[204],
        )

    async def upload_file(self, key: str, in_file: str) -> None:
        with log_timing_context(
                f'Uploading github cache file {os.path.basename(in_file)} '
                f'key {key}',
        ):
            await self._upload_file(key, in_file)
```
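The v1 upload above is a three-step protocol: reserve an entry (`POST caches`, expecting 201), upload the archive in chunks to `caches/{cache_id}`, then commit (`POST caches/{cache_id}`, expecting 204). Lookup is a single GET whose query string the client builds by hand; a minimal standalone sketch of that construction (the function name and sample key are illustrative, not part of the package):

```python
import urllib.parse


def build_get_entry_url_path(cache_version: int, *keys: str) -> str:
    # Mirrors GithubCacheServiceV1Client._build_get_entry_url_path: URL-quoted,
    # comma-joined keys plus the cache version, as a query string on 'cache'.
    qp = {
        'keys': ','.join(urllib.parse.quote_plus(k) for k in keys),
        'version': str(cache_version),
    }
    return '?'.join(['cache', '&'.join(f'{k}={v}' for k, v in qp.items())])


assert build_get_entry_url_path(1, 'deps-abc123') == 'cache?keys=deps-abc123&version=1'
```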
omdev/ci/github/api/v2/api.py (new file)

@@ -0,0 +1,148 @@

```python
# ruff: noqa: UP006 UP007
"""
https://github.com/tonistiigi/go-actions-cache/blob/3e9a6642607fd6e4d5d4fdab7c91fe8bf4c36a25/cache_v2.go

==

curl -s \
  -X POST \
  "${ACTIONS_RESULTS_URL}twirp/github.actions.results.api.v1.CacheService/CreateCacheEntry" \
  -H 'Content-Type: application/json' \
  -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
  -d '{"key": "foo", "version": "0000000000000000000000000000000000000000000000000000000000000001" }' \
  | jq .

curl -s \
  -X POST \
  "${ACTIONS_RESULTS_URL}twirp/github.actions.results.api.v1.CacheService/GetCacheEntryDownloadURL" \
  -H 'Content-Type: application/json' \
  -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
  -d '{"key": "foo", "restoreKeys": [], "version": "0000000000000000000000000000000000000000000000000000000000000001" }' \
  | jq .

"""  # noqa
import dataclasses as dc
import typing as ta

from omlish.lite.check import check


T = ta.TypeVar('T')

GithubCacheServiceV2RequestT = ta.TypeVar('GithubCacheServiceV2RequestT')
GithubCacheServiceV2ResponseT = ta.TypeVar('GithubCacheServiceV2ResponseT')


##


class GithubCacheServiceV2:
    def __new__(cls, *args, **kwargs):  # noqa
        raise TypeError

    #

    SERVICE_NAME = 'github.actions.results.api.v1.CacheService'

    @classmethod
    def get_service_url(cls, base_url: str) -> str:
        return f'{base_url.rstrip("/")}/twirp/{cls.SERVICE_NAME}'

    #

    @dc.dataclass(frozen=True)
    class Method(ta.Generic[GithubCacheServiceV2RequestT, GithubCacheServiceV2ResponseT]):
        name: str
        request: ta.Type[GithubCacheServiceV2RequestT]
        response: ta.Type[GithubCacheServiceV2ResponseT]

    #

    class CacheScopePermission:
        READ = 1
        WRITE = 2
        ALL = READ | WRITE

    @dc.dataclass(frozen=True)
    class CacheScope:
        scope: str
        permission: int  # CacheScopePermission

    @dc.dataclass(frozen=True)
    class CacheMetadata:
        repository_id: int
        scope: ta.Sequence['GithubCacheServiceV2.CacheScope']

    VERSION_LENGTH: int = 64

    #

    @dc.dataclass(frozen=True)
    class CreateCacheEntryRequest:
        key: str
        version: str
        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None

        def __post_init__(self) -> None:
            check.equal(len(self.version), GithubCacheServiceV2.VERSION_LENGTH)

    @dc.dataclass(frozen=True)
    class CreateCacheEntryResponse:
        ok: bool
        signed_upload_url: str

    CREATE_CACHE_ENTRY_METHOD: Method[
        CreateCacheEntryRequest,
        CreateCacheEntryResponse,
    ] = Method(
        'CreateCacheEntry',
        CreateCacheEntryRequest,
        CreateCacheEntryResponse,
    )

    #

    @dc.dataclass(frozen=True)
    class FinalizeCacheEntryUploadRequest:
        key: str
        size_bytes: int
        version: str
        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None

    @dc.dataclass(frozen=True)
    class FinalizeCacheEntryUploadResponse:
        ok: bool
        entry_id: str

    FINALIZE_CACHE_ENTRY_METHOD: Method[
        FinalizeCacheEntryUploadRequest,
        FinalizeCacheEntryUploadResponse,
    ] = Method(
        'FinalizeCacheEntryUpload',
        FinalizeCacheEntryUploadRequest,
        FinalizeCacheEntryUploadResponse,
    )

    #

    @dc.dataclass(frozen=True)
    class GetCacheEntryDownloadUrlRequest:
        key: str
        restore_keys: ta.Sequence[str]
        version: str
        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None

    @dc.dataclass(frozen=True)
    class GetCacheEntryDownloadUrlResponse:
        ok: bool
        signed_download_url: str
        matched_key: str

    GET_CACHE_ENTRY_DOWNLOAD_URL_METHOD: Method[
        GetCacheEntryDownloadUrlRequest,
        GetCacheEntryDownloadUrlResponse,
    ] = Method(
        'GetCacheEntryDownloadURL',
        GetCacheEntryDownloadUrlRequest,
        GetCacheEntryDownloadUrlResponse,
    )
```
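Every v2 request carries a version string that must be exactly `VERSION_LENGTH` (64) characters, enforced by `CreateCacheEntryRequest.__post_init__`. A minimal sketch of producing a compliant value from an integer cache version, matching the zero-padded strings in the curl examples above (the helper name is illustrative):

```python
def v2_cache_version(cache_version: int, length: int = 64) -> str:
    # Zero-pad the integer cache version to the fixed 64-character width the
    # v2 service requires; anything longer would fail the length check.
    s = str(cache_version).zfill(length)
    assert len(s) == length
    return s


print(v2_cache_version(1))
# 0000000000000000000000000000000000000000000000000000000000000001
```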
omdev/ci/github/api/v2/azure.py (new file)

@@ -0,0 +1,185 @@

```python
# ruff: noqa: UP006 UP007
# @omlish-lite
"""
TODO:
 - ominfra? no, circdep
"""
import base64
import dataclasses as dc
import datetime
import typing as ta
import urllib.parse
import xml.etree.ElementTree as ET

from omlish.asyncs.asyncio.utils import asyncio_wait_concurrent
from omlish.lite.check import check
from omlish.lite.logs import log
from omlish.lite.timing import log_timing_context


##


class AzureBlockBlobUploader:
    """
    https://learn.microsoft.com/en-us/rest/api/storageservices/put-block
    https://learn.microsoft.com/en-us/rest/api/storageservices/put-block-list
    """

    DEFAULT_CONCURRENCY = 4

    @dc.dataclass(frozen=True)
    class Request:
        method: str
        url: str
        headers: ta.Optional[ta.Dict[str, str]] = None
        body: ta.Optional[bytes] = None

    @dc.dataclass(frozen=True)
    class Response:
        status: int
        headers: ta.Optional[ta.Mapping[str, str]] = None
        data: ta.Optional[bytes] = None

        def get_header(self, name: str) -> ta.Optional[str]:
            for k, v in (self.headers or {}).items():
                if k.lower() == name.lower():
                    return v
            return None

    def __init__(
            self,
            blob_url_with_sas: str,
            make_request: ta.Callable[[Request], ta.Awaitable[Response]],
            *,
            api_version: str = '2020-10-02',
            concurrency: int = DEFAULT_CONCURRENCY,
    ) -> None:
        """
        blob_url_with_sas should be of the form:
            https://<account>.blob.core.windows.net/<container>/<blob>?<SAS-token>
        """

        super().__init__()

        self._make_request = make_request
        self._api_version = api_version
        check.arg(concurrency >= 1)
        self._concurrency = concurrency

        parsed = urllib.parse.urlparse(blob_url_with_sas)
        self._base_url = f'{parsed.scheme}://{parsed.netloc}'
        parts = parsed.path.lstrip('/').split('/', 1)
        self._container = parts[0]
        self._blob_name = parts[1]
        self._sas = parsed.query

    def _headers(self) -> ta.Dict[str, str]:
        """Standard headers for Azure Blob REST calls."""

        now = datetime.datetime.now(datetime.UTC).strftime('%a, %d %b %Y %H:%M:%S GMT')
        return {
            'x-ms-date': now,
            'x-ms-version': self._api_version,
        }

    @dc.dataclass(frozen=True)
    class FileChunk:
        in_file: str
        offset: int
        size: int

    async def _upload_file_chunk_(
            self,
            block_id: str,
            chunk: FileChunk,
    ) -> None:
        with open(chunk.in_file, 'rb') as f:  # noqa
            f.seek(chunk.offset)
            data = f.read(chunk.size)

        check.equal(len(data), chunk.size)

        params = {
            'comp': 'block',
            'blockid': block_id,
        }
        query = self._sas + '&' + urllib.parse.urlencode(params)
        url = f'{self._base_url}/{self._container}/{self._blob_name}?{query}'

        log.debug(f'Uploading azure blob chunk {chunk} with block id {block_id}')  # noqa

        resp = await self._make_request(self.Request(
            'PUT',
            url,
            headers=self._headers(),
            body=data,
        ))
        if resp.status not in (201, 202):
            raise RuntimeError(f'Put Block failed: {block_id=} {resp.status=}')

    async def _upload_file_chunk(
            self,
            block_id: str,
            chunk: FileChunk,
    ) -> None:
        with log_timing_context(f'Uploading azure blob chunk {chunk} with block id {block_id}'):
            await self._upload_file_chunk_(
                block_id,
                chunk,
            )

    async def upload_file(
            self,
            chunks: ta.List[FileChunk],
    ) -> ta.Dict[str, ta.Any]:
        block_ids = []

        # 1) Stage each block
        upload_tasks = []
        for idx, chunk in enumerate(chunks):
            # Generate a predictable block ID (must be URL-safe base64)
            raw_id = f'{idx:08d}'.encode()
            block_id = base64.b64encode(raw_id).decode('utf-8')
            block_ids.append(block_id)

            upload_tasks.append(self._upload_file_chunk(
                block_id,
                chunk,
            ))

        await asyncio_wait_concurrent(upload_tasks, self._concurrency)

        # 2) Commit block list
        root = ET.Element('BlockList')
        for bid in block_ids:
            elm = ET.SubElement(root, 'Latest')
            elm.text = bid
        body = ET.tostring(root, encoding='utf-8', method='xml')

        params = {'comp': 'blocklist'}
        query = self._sas + '&' + urllib.parse.urlencode(params)
        url = f'{self._base_url}/{self._container}/{self._blob_name}?{query}'

        log.debug(f'Putting azure blob chunk list block ids {block_ids}')  # noqa

        resp = await self._make_request(self.Request(
            'PUT',
            url,
            headers={
                **self._headers(),
                'Content-Type': 'application/xml',
            },
            body=body,
        ))
        if resp.status not in (200, 201):
            raise RuntimeError(f'Put Block List failed: {resp.status} {resp.data!r}')

        ret = {
            'status_code': resp.status,
            'etag': resp.get_header('ETag'),
        }

        log.debug(f'Uploaded azure blob chunk {ret}')  # noqa

        return ret
```
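The uploader implements Azure's two-phase block-blob protocol: stage each chunk with Put Block under a distinct block ID, then commit the ordered ID list with Put Block List. A standalone sketch of the two building blocks the class relies on, block-ID generation and the commit body (helper names are illustrative):

```python
import base64
import typing as ta
import xml.etree.ElementTree as ET


def make_block_id(idx: int) -> str:
    # Block IDs must be base64-encoded and the same length for every block in
    # a blob; a zero-padded decimal index satisfies both constraints.
    return base64.b64encode(f'{idx:08d}'.encode()).decode('utf-8')


def make_block_list_body(block_ids: ta.List[str]) -> bytes:
    # Put Block List commits the staged blocks in order via a <BlockList> of
    # <Latest> elements, one per block ID.
    root = ET.Element('BlockList')
    for bid in block_ids:
        ET.SubElement(root, 'Latest').text = bid
    return ET.tostring(root, encoding='utf-8', method='xml')


ids = [make_block_id(i) for i in range(2)]
print(ids)                        # ['MDAwMDAwMDA=', 'MDAwMDAwMDE=']
print(make_block_list_body(ids))  # b"<?xml ...?>\n<BlockList><Latest>MDAwMDAwMDA=</Latest>..."
```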
omdev/ci/github/api/v2/client.py (new file)

@@ -0,0 +1,201 @@

```python
# ruff: noqa: UP006 UP007
import dataclasses as dc
import os
import typing as ta
import urllib.request

from omlish.lite.check import check
from omlish.lite.logs import log
from omlish.lite.timing import log_timing_context

from ...env import register_github_env_var
from ..clients import BaseGithubCacheClient
from ..clients import GithubCacheClient
from .api import GithubCacheServiceV2
from .api import GithubCacheServiceV2RequestT
from .api import GithubCacheServiceV2ResponseT
from .azure import AzureBlockBlobUploader


##


class GithubCacheServiceV2Client(BaseGithubCacheClient):
    BASE_URL_ENV_VAR = register_github_env_var('ACTIONS_RESULTS_URL')

    def __init__(
            self,
            *,
            base_url: ta.Optional[str] = None,

            **kwargs: ta.Any,
    ) -> None:
        if base_url is None:
            base_url = check.non_empty_str(self.BASE_URL_ENV_VAR())
        service_url = GithubCacheServiceV2.get_service_url(base_url)

        super().__init__(
            service_url=service_url,
            **kwargs,
        )

    #

    async def _send_method_request(
            self,
            method: GithubCacheServiceV2.Method[
                GithubCacheServiceV2RequestT,
                GithubCacheServiceV2ResponseT,
            ],
            request: GithubCacheServiceV2RequestT,
            **kwargs: ta.Any,
    ) -> ta.Optional[GithubCacheServiceV2ResponseT]:
        obj = await self._send_request(
            path=method.name,
            json_content=dc.asdict(request),  # type: ignore[call-overload]
            **kwargs,
        )

        if obj is None:
            return None
        return method.response(**obj)

    #

    @dc.dataclass(frozen=True)
    class Entry(GithubCacheClient.Entry):
        request: GithubCacheServiceV2.GetCacheEntryDownloadUrlRequest
        response: GithubCacheServiceV2.GetCacheEntryDownloadUrlResponse

        def __post_init__(self) -> None:
            check.state(self.response.ok)
            check.non_empty_str(self.response.signed_download_url)

    def get_entry_url(self, entry: GithubCacheClient.Entry) -> ta.Optional[str]:
        entry2 = check.isinstance(entry, self.Entry)
        return check.non_empty_str(entry2.response.signed_download_url)

    #

    async def get_entry(self, key: str) -> ta.Optional[GithubCacheClient.Entry]:
        version = str(self._cache_version).zfill(GithubCacheServiceV2.VERSION_LENGTH)

        req = GithubCacheServiceV2.GetCacheEntryDownloadUrlRequest(
            key=self.fix_key(key),
            restore_keys=[self.fix_key(key, partial_suffix=True)],
            version=version,
        )

        resp = await self._send_method_request(
            GithubCacheServiceV2.GET_CACHE_ENTRY_DOWNLOAD_URL_METHOD,
            req,
        )
        if resp is None or not resp.ok:
            return None

        return self.Entry(
            request=req,
            response=resp,
        )

    #

    async def download_file(self, entry: GithubCacheClient.Entry, out_file: str) -> None:
        entry2 = check.isinstance(entry, self.Entry)
        with log_timing_context(
                'Downloading github cache '
                f'key {entry2.response.matched_key} '
                f'version {entry2.request.version} '
                f'to {out_file}',
        ):
            await self._download_file_chunks(
                key=check.non_empty_str(entry2.response.matched_key),
                url=check.non_empty_str(entry2.response.signed_download_url),
                out_file=out_file,
            )

    #

    async def _upload_file(self, key: str, in_file: str) -> None:
        fixed_key = self.fix_key(key)

        check.state(os.path.isfile(in_file))
        file_size = os.stat(in_file).st_size

        #

        version = str(self._cache_version).zfill(GithubCacheServiceV2.VERSION_LENGTH)

        reserve_resp = check.not_none(await self._send_method_request(
            GithubCacheServiceV2.CREATE_CACHE_ENTRY_METHOD,  # type: ignore[arg-type]
            GithubCacheServiceV2.CreateCacheEntryRequest(
                key=fixed_key,
                version=version,
            ),
        ))
        check.state(reserve_resp.ok)

        log.debug(f'Github cache file {os.path.basename(in_file)} upload reserved for file size {file_size}')  # noqa

        #

        upload_chunks = self._generate_file_upload_chunks(
            in_file=in_file,
            url=reserve_resp.signed_upload_url,
            key=fixed_key,
            file_size=file_size,
        )

        az_chunks = [
            AzureBlockBlobUploader.FileChunk(
                in_file=in_file,
                offset=c.offset,
                size=c.size,
            )
            for c in upload_chunks
        ]

        async def az_make_request(req: AzureBlockBlobUploader.Request) -> AzureBlockBlobUploader.Response:
            u_req = urllib.request.Request(  # noqa
                req.url,
                method=req.method,
                headers=req.headers or {},
                data=req.body,
            )

            u_resp, u_body = await self._send_urllib_request(u_req)

            return AzureBlockBlobUploader.Response(
                status=u_resp.status,
                headers=dict(u_resp.headers),
                data=u_body,
            )

        az_uploader = AzureBlockBlobUploader(
            reserve_resp.signed_upload_url,
            az_make_request,
            concurrency=self._concurrency,
        )

        await az_uploader.upload_file(az_chunks)

        #

        commit_resp = check.not_none(await self._send_method_request(
            GithubCacheServiceV2.FINALIZE_CACHE_ENTRY_METHOD,  # type: ignore[arg-type]
            GithubCacheServiceV2.FinalizeCacheEntryUploadRequest(
                key=fixed_key,
                size_bytes=file_size,
                version=version,
            ),
        ))
        check.state(commit_resp.ok)

        log.debug(f'Github cache file {os.path.basename(in_file)} upload complete, entry id {commit_resp.entry_id}')  # noqa

    async def upload_file(self, key: str, in_file: str) -> None:
        with log_timing_context(
                f'Uploading github cache file {os.path.basename(in_file)} '
                f'key {key}',
        ):
            await self._upload_file(key, in_file)
```
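A minimal usage sketch of the v2 client, assuming the `ACTIONS_RESULTS_URL` and `ACTIONS_RUNTIME_TOKEN` environment of a GitHub Actions runner; the key and output path are placeholders, and any remaining constructor kwargs are forwarded to `BaseGithubCacheClient`, which lives outside this diff:

```python
import asyncio

from omdev.ci.github.api.v2.client import GithubCacheServiceV2Client


async def main() -> None:
    client = GithubCacheServiceV2Client()  # reads ACTIONS_RESULTS_URL

    entry = await client.get_entry('my-cache-key')  # placeholder key
    if entry is not None:
        await client.download_file(entry, 'cache-archive.tmp')  # placeholder path


asyncio.run(main())
```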