omdev 0.0.0.dev288__py3-none-any.whl → 0.0.0.dev290__py3-none-any.whl
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- omdev/ci/github/api/__init__.py +0 -0
- omdev/ci/github/{client.py → api/clients.py} +172 -209
- omdev/ci/github/api/v1/__init__.py +0 -0
- omdev/ci/github/{api.py → api/v1/api.py} +5 -87
- omdev/ci/github/api/v1/client.py +171 -0
- omdev/ci/github/api/v2/__init__.py +0 -0
- omdev/ci/github/api/v2/api.py +148 -0
- omdev/ci/github/api/v2/client.py +166 -0
- omdev/ci/github/cache.py +14 -3
- omdev/ci/github/cli.py +1 -1
- omdev/py/scripts/importtrace.py +3 -3
- omdev/scripts/ci.py +663 -288
- omdev/tools/json/cli.py +2 -2
- {omdev-0.0.0.dev288.dist-info → omdev-0.0.0.dev290.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev288.dist-info → omdev-0.0.0.dev290.dist-info}/RECORD +19 -13
- {omdev-0.0.0.dev288.dist-info → omdev-0.0.0.dev290.dist-info}/WHEEL +1 -1
- {omdev-0.0.0.dev288.dist-info → omdev-0.0.0.dev290.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev288.dist-info → omdev-0.0.0.dev290.dist-info}/licenses/LICENSE +0 -0
- {omdev-0.0.0.dev288.dist-info → omdev-0.0.0.dev290.dist-info}/top_level.txt +0 -0
omdev/ci/github/api/v1/client.py
ADDED
@@ -0,0 +1,171 @@
+# ruff: noqa: TC003 UP006 UP007
+import dataclasses as dc
+import os
+import typing as ta
+import urllib.parse
+import urllib.request
+
+from omlish.lite.check import check
+from omlish.lite.logs import log
+from omlish.lite.timing import log_timing_context
+
+from ...env import register_github_env_var
+from ..clients import BaseGithubCacheClient
+from ..clients import GithubCacheClient
+from .api import GithubCacheServiceV1
+
+
+##
+
+
+class GithubCacheServiceV1Client(BaseGithubCacheClient):
+    BASE_URL_ENV_VAR = register_github_env_var('ACTIONS_CACHE_URL')
+
+    def __init__(
+            self,
+            *,
+            base_url: ta.Optional[str] = None,
+
+            **kwargs: ta.Any,
+    ) -> None:
+        if base_url is None:
+            base_url = check.non_empty_str(self.BASE_URL_ENV_VAR())
+        service_url = GithubCacheServiceV1.get_service_url(base_url)
+
+        super().__init__(
+            service_url=service_url,
+            **kwargs,
+        )
+
+    #
+
+    def _build_request_headers(
+            self,
+            headers: ta.Optional[ta.Mapping[str, str]] = None,
+            **kwargs: ta.Any,
+    ) -> ta.Dict[str, str]:
+        return super()._build_request_headers(
+            {
+                'Accept': ';'.join([
+                    'application/json',
+                    f'api-version={GithubCacheServiceV1.API_VERSION}',
+                ]),
+                **(headers or {}),
+            },
+            **kwargs,
+        )
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class Entry(GithubCacheClient.Entry):
+        artifact: GithubCacheServiceV1.ArtifactCacheEntry
+
+    def get_entry_url(self, entry: GithubCacheClient.Entry) -> ta.Optional[str]:
+        entry1 = check.isinstance(entry, self.Entry)
+        return entry1.artifact.archive_location
+
+    #
+
+    def _build_get_entry_url_path(self, *keys: str) -> str:
+        qp = dict(
+            keys=','.join(urllib.parse.quote_plus(k) for k in keys),
+            version=str(self._cache_version),
+        )
+
+        return '?'.join([
+            'cache',
+            '&'.join([
+                f'{k}={v}'
+                for k, v in qp.items()
+            ]),
+        ])
+
+    GET_ENTRY_SUCCESS_STATUS_CODES = (200, 204)
+
+    #
+
+    async def get_entry(self, key: str) -> ta.Optional[GithubCacheClient.Entry]:
+        obj = await self._send_request(
+            path=self._build_get_entry_url_path(self.fix_key(key, partial_suffix=True)),
+        )
+        if obj is None:
+            return None
+
+        return self.Entry(GithubCacheServiceV1.dataclass_from_json(
+            GithubCacheServiceV1.ArtifactCacheEntry,
+            obj,
+        ))
+
+    #
+
+    async def download_file(self, entry: GithubCacheClient.Entry, out_file: str) -> None:
+        entry1 = check.isinstance(entry, self.Entry)
+        with log_timing_context(
+                'Downloading github cache '
+                f'key {entry1.artifact.cache_key} '
+                f'version {entry1.artifact.cache_version} '
+                f'to {out_file}',
+        ):
+            await self._download_file_chunks(
+                key=check.non_empty_str(entry1.artifact.cache_key),
+                url=check.non_empty_str(entry1.artifact.archive_location),
+                out_file=out_file,
+            )
+
+    #
+
+    async def _upload_file(self, key: str, in_file: str) -> None:
+        fixed_key = self.fix_key(key)
+
+        check.state(os.path.isfile(in_file))
+        file_size = os.stat(in_file).st_size
+
+        #
+
+        reserve_req = GithubCacheServiceV1.ReserveCacheRequest(
+            key=fixed_key,
+            cache_size=file_size,
+            version=str(self._cache_version),
+        )
+        reserve_resp_obj = await self._send_request(
+            path='caches',
+            json_content=GithubCacheServiceV1.dataclass_to_json(reserve_req),
+            success_status_codes=[201],
+        )
+        reserve_resp = GithubCacheServiceV1.dataclass_from_json(  # noqa
+            GithubCacheServiceV1.ReserveCacheResponse,
+            reserve_resp_obj,
+        )
+        cache_id = check.isinstance(reserve_resp.cache_id, int)
+
+        log.debug(f'Github cache file {os.path.basename(in_file)} got id {cache_id}')  # noqa
+
+        #
+
+        url = f'{self._service_url}/caches/{cache_id}'
+
+        await self._upload_file_chunks(
+            in_file=in_file,
+            url=url,
+            key=fixed_key,
+            file_size=file_size,
+        )
+
+        #
+
+        commit_req = GithubCacheServiceV1.CommitCacheRequest(
+            size=file_size,
+        )
+        await self._send_request(
+            path=f'caches/{cache_id}',
+            json_content=GithubCacheServiceV1.dataclass_to_json(commit_req),
+            success_status_codes=[204],
+        )
+
+    async def upload_file(self, key: str, in_file: str) -> None:
+        with log_timing_context(
+                f'Uploading github cache file {os.path.basename(in_file)} '
+                f'key {key}',
+        ):
+            await self._upload_file(key, in_file)
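For orientation, a minimal usage sketch of the new v1 client; it is not taken from the package. It assumes a GitHub Actions runner where ACTIONS_CACHE_URL and the runner auth token consumed by BaseGithubCacheClient are already in the environment, and the cache key, file path, and cache_version value below are illustrative.

import asyncio


async def _demo_v1() -> None:
    # cache_version is forwarded through **kwargs to BaseGithubCacheClient,
    # the same way GithubCache constructs its client (see cache.py below).
    client = GithubCacheServiceV1Client(cache_version=1)

    entry = await client.get_entry('my-deps')
    if entry is not None:
        # Cache hit: download the archive behind the entry's archive_location.
        await client.download_file(entry, '/tmp/my-deps.tar')
    else:
        # Cache miss: reserve, chunk-upload, and commit the file.
        await client.upload_file('my-deps', '/tmp/my-deps.tar')


# asyncio.run(_demo_v1())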
omdev/ci/github/api/v2/__init__.py
ADDED
File without changes
omdev/ci/github/api/v2/api.py
ADDED
@@ -0,0 +1,148 @@
+# ruff: noqa: UP006 UP007
+"""
+https://github.com/tonistiigi/go-actions-cache/blob/3e9a6642607fd6e4d5d4fdab7c91fe8bf4c36a25/cache_v2.go
+
+==
+
+curl -s \
+  -X POST \
+  "${ACTIONS_RESULTS_URL}twirp/github.actions.results.api.v1.CacheService/CreateCacheEntry" \
+  -H 'Content-Type: application/json' \
+  -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+  -d '{"key": "foo", "version": "0000000000000000000000000000000000000000000000000000000000000001" }' \
+  | jq .
+
+curl -s \
+  -X POST \
+  "${ACTIONS_RESULTS_URL}twirp/github.actions.results.api.v1.CacheService/GetCacheEntryDownloadURL" \
+  -H 'Content-Type: application/json' \
+  -H "Authorization: Bearer $ACTIONS_RUNTIME_TOKEN" \
+  -d '{"key": "foo", "restoreKeys": [], "version": "0000000000000000000000000000000000000000000000000000000000000001" }' \
+  | jq .
+
+"""  # noqa
+import dataclasses as dc
+import typing as ta
+
+from omlish.lite.check import check
+
+
+T = ta.TypeVar('T')
+
+GithubCacheServiceV2RequestT = ta.TypeVar('GithubCacheServiceV2RequestT')
+GithubCacheServiceV2ResponseT = ta.TypeVar('GithubCacheServiceV2ResponseT')
+
+
+##
+
+
+class GithubCacheServiceV2:
+    def __new__(cls, *args, **kwargs):  # noqa
+        raise TypeError
+
+    #
+
+    SERVICE_NAME = 'github.actions.results.api.v1.CacheService'
+
+    @classmethod
+    def get_service_url(cls, base_url: str) -> str:
+        return f'{base_url.rstrip("/")}/twirp/{cls.SERVICE_NAME}'
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class Method(ta.Generic[GithubCacheServiceV2RequestT, GithubCacheServiceV2ResponseT]):
+        name: str
+        request: ta.Type[GithubCacheServiceV2RequestT]
+        response: ta.Type[GithubCacheServiceV2ResponseT]
+
+    #
+
+    class CacheScopePermission:
+        READ = 1
+        WRITE = 2
+        ALL = READ | WRITE
+
+    @dc.dataclass(frozen=True)
+    class CacheScope:
+        scope: str
+        permission: int  # CacheScopePermission
+
+    @dc.dataclass(frozen=True)
+    class CacheMetadata:
+        repository_id: int
+        scope: ta.Sequence['GithubCacheServiceV2.CacheScope']
+
+    VERSION_LENGTH: int = 64
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class CreateCacheEntryRequest:
+        key: str
+        version: str
+        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+        def __post_init__(self) -> None:
+            check.equal(len(self.version), GithubCacheServiceV2.VERSION_LENGTH)
+
+    @dc.dataclass(frozen=True)
+    class CreateCacheEntryResponse:
+        ok: bool
+        signed_upload_url: str
+
+    CREATE_CACHE_ENTRY_METHOD: Method[
+        CreateCacheEntryRequest,
+        CreateCacheEntryResponse,
+    ] = Method(
+        'CreateCacheEntry',
+        CreateCacheEntryRequest,
+        CreateCacheEntryResponse,
+    )
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class FinalizeCacheEntryUploadRequest:
+        key: str
+        size_bytes: int
+        version: str
+        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+    @dc.dataclass(frozen=True)
+    class FinalizeCacheEntryUploadResponse:
+        ok: bool
+        entry_id: str
+
+    FINALIZE_CACHE_ENTRY_METHOD: Method[
+        FinalizeCacheEntryUploadRequest,
+        FinalizeCacheEntryUploadResponse,
+    ] = Method(
+        'FinalizeCacheEntryUpload',
+        FinalizeCacheEntryUploadRequest,
+        FinalizeCacheEntryUploadResponse,
+    )
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class GetCacheEntryDownloadUrlRequest:
+        key: str
+        restore_keys: ta.Sequence[str]
+        version: str
+        metadata: ta.Optional['GithubCacheServiceV2.CacheMetadata'] = None
+
+    @dc.dataclass(frozen=True)
+    class GetCacheEntryDownloadUrlResponse:
+        ok: bool
+        signed_download_url: str
+        matched_key: str
+
+    GET_CACHE_ENTRY_DOWNLOAD_URL_METHOD: Method[
+        GetCacheEntryDownloadUrlRequest,
+        GetCacheEntryDownloadUrlResponse,
+    ] = Method(
+        'GetCacheEntryDownloadURL',
+        GetCacheEntryDownloadUrlRequest,
+        GetCacheEntryDownloadUrlResponse,
+    )
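Each Method record above pairs a Twirp RPC name with its request and response dataclasses. As a hedged, self-contained sketch of what that implies on the wire, mirroring the curl examples in the module docstring rather than any code in the package: the synchronous urllib call, env-var handling, and key/version values are assumptions, and like the package's own v2 client it serializes the dataclass fields as-is with dc.asdict.

import dataclasses as dc
import json
import os
import urllib.request


def _call_v2_method_sketch(method, request):
    # The endpoint is <ACTIONS_RESULTS_URL>/twirp/<SERVICE_NAME>/<method name>,
    # i.e. GithubCacheServiceV2.get_service_url() plus the Method's name.
    base_url = os.environ['ACTIONS_RESULTS_URL']
    url = f'{GithubCacheServiceV2.get_service_url(base_url)}/{method.name}'

    http_req = urllib.request.Request(  # noqa
        url,
        method='POST',
        data=json.dumps(dc.asdict(request)).encode('utf-8'),
        headers={
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {os.environ["ACTIONS_RUNTIME_TOKEN"]}',
        },
    )
    with urllib.request.urlopen(http_req) as resp:  # noqa
        return method.response(**json.loads(resp.read().decode('utf-8')))


# Illustrative call, analogous to the first curl example:
# _call_v2_method_sketch(
#     GithubCacheServiceV2.CREATE_CACHE_ENTRY_METHOD,
#     GithubCacheServiceV2.CreateCacheEntryRequest(key='foo', version='0' * 63 + '1'),
# )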
omdev/ci/github/api/v2/client.py
ADDED
@@ -0,0 +1,166 @@
+# ruff: noqa: UP006 UP007
+import dataclasses as dc
+import os
+import typing as ta
+
+from omlish.lite.check import check
+from omlish.lite.logs import log
+from omlish.lite.timing import log_timing_context
+
+from ...env import register_github_env_var
+from ..clients import BaseGithubCacheClient
+from ..clients import GithubCacheClient
+from .api import GithubCacheServiceV2
+from .api import GithubCacheServiceV2RequestT
+from .api import GithubCacheServiceV2ResponseT
+
+
+##
+
+
+class GithubCacheServiceV2Client(BaseGithubCacheClient):
+    BASE_URL_ENV_VAR = register_github_env_var('ACTIONS_RESULTS_URL')
+
+    def __init__(
+            self,
+            *,
+            base_url: ta.Optional[str] = None,
+
+            **kwargs: ta.Any,
+    ) -> None:
+        if base_url is None:
+            base_url = check.non_empty_str(self.BASE_URL_ENV_VAR())
+        service_url = GithubCacheServiceV2.get_service_url(base_url)
+
+        super().__init__(
+            service_url=service_url,
+            **kwargs,
+        )
+
+    #
+
+    async def _send_method_request(
+            self,
+            method: GithubCacheServiceV2.Method[
+                GithubCacheServiceV2RequestT,
+                GithubCacheServiceV2ResponseT,
+            ],
+            request: GithubCacheServiceV2RequestT,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[GithubCacheServiceV2ResponseT]:
+        obj = await self._send_request(
+            path=method.name,
+            json_content=dc.asdict(request),  # type: ignore[call-overload]
+            **kwargs,
+        )
+
+        if obj is None:
+            return None
+        return method.response(**obj)
+
+    #
+
+    @dc.dataclass(frozen=True)
+    class Entry(GithubCacheClient.Entry):
+        request: GithubCacheServiceV2.GetCacheEntryDownloadUrlRequest
+        response: GithubCacheServiceV2.GetCacheEntryDownloadUrlResponse
+
+        def __post_init__(self) -> None:
+            check.state(self.response.ok)
+            check.non_empty_str(self.response.signed_download_url)
+
+    def get_entry_url(self, entry: GithubCacheClient.Entry) -> ta.Optional[str]:
+        entry2 = check.isinstance(entry, self.Entry)
+        return check.non_empty_str(entry2.response.signed_download_url)
+
+    #
+
+    async def get_entry(self, key: str) -> ta.Optional[GithubCacheClient.Entry]:
+        version = str(self._cache_version).zfill(GithubCacheServiceV2.VERSION_LENGTH)
+
+        req = GithubCacheServiceV2.GetCacheEntryDownloadUrlRequest(
+            key=self.fix_key(key),
+            restore_keys=[self.fix_key(key, partial_suffix=True)],
+            version=version,
+        )
+
+        resp = await self._send_method_request(
+            GithubCacheServiceV2.GET_CACHE_ENTRY_DOWNLOAD_URL_METHOD,
+            req,
+        )
+        if resp is None or not resp.ok:
+            return None
+
+        return self.Entry(
+            request=req,
+            response=resp,
+        )
+
+    #
+
+    async def download_file(self, entry: GithubCacheClient.Entry, out_file: str) -> None:
+        entry2 = check.isinstance(entry, self.Entry)
+        with log_timing_context(
+                'Downloading github cache '
+                f'key {entry2.response.matched_key} '
+                f'version {entry2.request.version} '
+                f'to {out_file}',
+        ):
+            await self._download_file_chunks(
+                key=check.non_empty_str(entry2.response.matched_key),
+                url=check.non_empty_str(entry2.response.signed_download_url),
+                out_file=out_file,
+            )
+
+    #
+
+    async def _upload_file(self, key: str, in_file: str) -> None:
+        fixed_key = self.fix_key(key)
+
+        check.state(os.path.isfile(in_file))
+        file_size = os.stat(in_file).st_size
+
+        #
+
+        version = str(self._cache_version).zfill(GithubCacheServiceV2.VERSION_LENGTH)
+
+        reserve_resp = check.not_none(await self._send_method_request(
+            GithubCacheServiceV2.CREATE_CACHE_ENTRY_METHOD,  # type: ignore[arg-type]
+            GithubCacheServiceV2.CreateCacheEntryRequest(
+                key=fixed_key,
+                version=version,
+            ),
+        ))
+        check.state(reserve_resp.ok)
+
+        log.debug(f'Github cache file {os.path.basename(in_file)} upload reserved for file size {file_size}')  # noqa
+
+        #
+
+        await self._upload_file_chunks(
+            in_file=in_file,
+            url=reserve_resp.signed_upload_url,
+            key=fixed_key,
+            file_size=file_size,
+        )
+
+        #
+
+        commit_resp = check.not_none(await self._send_method_request(
+            GithubCacheServiceV2.FINALIZE_CACHE_ENTRY_METHOD,  # type: ignore[arg-type]
+            GithubCacheServiceV2.FinalizeCacheEntryUploadRequest(
+                key=key,
+                size_bytes=file_size,
+                version=version,
+            ),
+        ))
+        check.state(commit_resp.ok)
+
+        log.debug(f'Github cache file {os.path.basename(in_file)} upload complete, entry id {commit_resp.entry_id}')  # noqa
+
+    async def upload_file(self, key: str, in_file: str) -> None:
+        with log_timing_context(
+                f'Uploading github cache file {os.path.basename(in_file)} '
+                f'key {key}',
+        ):
+            await self._upload_file(key, in_file)
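And a matching, hedged usage sketch for the v2 client, again assuming a GitHub Actions runner environment with ACTIONS_RESULTS_URL and the runtime token set; the key, path, and cache_version value are illustrative. The assertion just restates how the integer cache version is zero-padded to the 64-character string that CreateCacheEntryRequest.__post_init__ validates.

import asyncio


async def _demo_v2() -> None:
    client = GithubCacheServiceV2Client(cache_version=1)

    # The wire 'version' is the integer cache version left-padded with zeros:
    assert str(1).zfill(GithubCacheServiceV2.VERSION_LENGTH) == '0' * 63 + '1'

    entry = await client.get_entry('my-deps')
    if entry is not None:
        await client.download_file(entry, '/tmp/my-deps.tar')
    else:
        await client.upload_file('my-deps', '/tmp/my-deps.tar')


# asyncio.run(_demo_v2())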
omdev/ci/github/cache.py
CHANGED
@@ -11,8 +11,9 @@ from ..cache import DataCache
 from ..cache import DirectoryFileCache
 from ..cache import FileCache
 from ..cache import FileCacheDataCache
-from .
-from .client import GithubCacheServiceV1Client
+from .api.clients import GithubCacheClient
+from .api.v1.client import GithubCacheServiceV1Client
+from .api.v2.client import GithubCacheServiceV2Client
 
 
 ##
@@ -23,11 +24,20 @@ class GithubCache(FileCache, DataCache):
     class Config:
         pass
 
+    DEFAULT_CLIENT_VERSION: ta.ClassVar[int] = 2
+
+    DEFAULT_CLIENTS_BY_VERSION: ta.ClassVar[ta.Mapping[int, ta.Callable[..., GithubCacheClient]]] = {
+        1: GithubCacheServiceV1Client,
+        2: GithubCacheServiceV2Client,
+    }
+
     def __init__(
             self,
             config: Config = Config(),
             *,
             client: ta.Optional[GithubCacheClient] = None,
+            default_client_version: ta.Optional[int] = None,
+
             version: ta.Optional[CacheVersion] = None,
 
             local: DirectoryFileCache,
@@ -39,7 +49,8 @@ class GithubCache(FileCache, DataCache):
         self._config = config
 
         if client is None:
-
+            client_cls = self.DEFAULT_CLIENTS_BY_VERSION[default_client_version or self.DEFAULT_CLIENT_VERSION]
+            client = client_cls(
                 cache_version=self._version,
             )
         self._client: GithubCacheClient = client
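Net effect: when no client instance is passed in, GithubCache now defaults to the v2 client and can be pinned back to v1 via default_client_version. A minimal sketch of the added lookup, using only names introduced in this hunk (the helper function itself is illustrative, not part of the package):

def _pick_client_cls(default_client_version=None):
    # An explicitly requested version wins; otherwise the new v2 default applies.
    return GithubCache.DEFAULT_CLIENTS_BY_VERSION[
        default_client_version or GithubCache.DEFAULT_CLIENT_VERSION
    ]


assert _pick_client_cls() is GithubCacheServiceV2Client
assert _pick_client_cls(1) is GithubCacheServiceV1Client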
omdev/ci/github/cli.py
CHANGED
@@ -10,7 +10,7 @@ from omlish.argparse.cli import argparse_arg
 from omlish.argparse.cli import argparse_cmd
 from omlish.lite.json import json_dumps_pretty
 
-from .client import GithubCacheServiceV1Client
+from .api.v1.client import GithubCacheServiceV1Client
 from .env import GITHUB_ENV_VARS
 
 
omdev/py/scripts/importtrace.py
CHANGED
@@ -488,20 +488,20 @@ def _main() -> None:
     import argparse
 
     parser = argparse.ArgumentParser()
-    parser.add_argument('--
+    parser.add_argument('-x', '--exe')
     parser.add_argument('--sqlite')
     parser.add_argument('--pretty', action='store_true')
     parser.add_argument('mod')
     args = parser.parse_args()
 
-    if args.
+    if args.exe:
         import inspect
         import subprocess
 
         mod_src = inspect.getsource(sys.modules[__name__])
         subprocess.run(
             [
-                args.
+                args.exe,
                 '-',
                 *(['--sqlite', args.sqlite] if args.sqlite else []),
                 *(['--pretty'] if args.pretty else []),