omdev 0.0.0.dev223__py3-none-any.whl → 0.0.0.dev224__py3-none-any.whl
- omdev/ci/cache.py +108 -0
- omdev/ci/ci.py +1 -1
- omdev/ci/docker/cacheserved.py +262 -0
- omdev/ci/docker/dataserver.py +204 -0
- omdev/ci/docker/imagepulling.py +2 -1
- omdev/ci/docker/packing.py +72 -0
- omdev/ci/docker/repositories.py +40 -0
- omdev/ci/github/cache.py +20 -1
- omdev/ci/github/client.py +9 -2
- omdev/ci/github/inject.py +4 -4
- omdev/ci/utils.py +0 -49
- omdev/dataserver/targets.py +32 -0
- omdev/oci/data.py +19 -0
- omdev/oci/dataserver.py +4 -1
- omdev/oci/pack/__init__.py +0 -0
- omdev/oci/pack/packing.py +185 -0
- omdev/oci/pack/repositories.py +162 -0
- omdev/oci/{packing.py → pack/unpacking.py} +0 -177
- omdev/oci/repositories.py +6 -0
- omdev/scripts/ci.py +423 -255
- omdev/scripts/interp.py +19 -0
- omdev/scripts/pyproject.py +19 -0
- {omdev-0.0.0.dev223.dist-info → omdev-0.0.0.dev224.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev223.dist-info → omdev-0.0.0.dev224.dist-info}/RECORD +28 -21
- {omdev-0.0.0.dev223.dist-info → omdev-0.0.0.dev224.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev223.dist-info → omdev-0.0.0.dev224.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev223.dist-info → omdev-0.0.0.dev224.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev223.dist-info → omdev-0.0.0.dev224.dist-info}/top_level.txt +0 -0
omdev/scripts/ci.py
CHANGED
```diff
@@ -159,6 +159,27 @@ class ShellCmd:
         )
 
 
+########################################
+# ../utils.py
+
+
+##
+
+
+def read_yaml_file(yaml_file: str) -> ta.Any:
+    yaml = __import__('yaml')
+
+    with open(yaml_file) as f:
+        return yaml.safe_load(f)
+
+
+##
+
+
+def sha256_str(s: str) -> str:
+    return hashlib.sha256(s.encode('utf-8')).hexdigest()
+
+
 ########################################
 # ../../../omlish/asyncs/asyncio/asyncio.py
 
```
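The hunk above inlines two small helpers from `../utils.py`. A self-contained usage sketch, assuming `pyyaml` is installed (the YAML contents here are made up for illustration):

```python
# Usage sketch for the helpers added above; `pyyaml` is assumed to be installed.
import hashlib
import tempfile


def sha256_str(s: str) -> str:
    # Same shape as the bundled helper: hex digest of the UTF-8 encoding.
    return hashlib.sha256(s.encode('utf-8')).hexdigest()


# Write a throwaway YAML file so the example is self-contained.
with tempfile.NamedTemporaryFile('w', suffix='.yml', delete=False) as f:
    f.write('image: python:3.12\nsteps:\n  - run: pytest\n')
    path = f.name

yaml = __import__('yaml')  # deferred import, as in the bundled read_yaml_file
with open(path) as f:
    cfg = yaml.safe_load(f)

print(cfg['image'])                   # python:3.12
print(sha256_str(cfg['image'])[:12])  # stable fragment usable as a cache key
```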
```diff
@@ -1146,6 +1167,63 @@ class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
         self._underlying.handleError(record)
 
 
+########################################
+# ../../../omlish/logs/timing.py
+
+
+##
+
+
+class LogTimingContext:
+    DEFAULT_LOG: ta.ClassVar[ta.Optional[logging.Logger]] = None
+
+    class _NOT_SPECIFIED:  # noqa
+        def __new__(cls, *args, **kwargs):  # noqa
+            raise TypeError
+
+    def __init__(
+            self,
+            description: str,
+            *,
+            log: ta.Union[logging.Logger, ta.Type[_NOT_SPECIFIED], None] = _NOT_SPECIFIED,  # noqa
+            level: int = logging.DEBUG,
+    ) -> None:
+        super().__init__()
+
+        self._description = description
+        if log is self._NOT_SPECIFIED:
+            log = self.DEFAULT_LOG  # noqa
+        self._log: ta.Optional[logging.Logger] = log  # type: ignore
+        self._level = level
+
+    def set_description(self, description: str) -> 'LogTimingContext':
+        self._description = description
+        return self
+
+    _begin_time: float
+    _end_time: float
+
+    def __enter__(self) -> 'LogTimingContext':
+        self._begin_time = time.time()
+
+        if self._log is not None:
+            self._log.log(self._level, f'Begin : {self._description}')  # noqa
+
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self._end_time = time.time()
+
+        if self._log is not None:
+            self._log.log(
+                self._level,
+                f'End : {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
+            )
+
+
+log_timing_context = LogTimingContext
+
+
 ########################################
 # ../../../omlish/os/files.py
 
```
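A usage sketch of the timing-context pattern vendored above. This is a simplified stand-in rather than the vendored `LogTimingContext` itself; the lazy `DEFAULT_LOG` hookup mirrors the `LogTimingContext.DEFAULT_LOG = log` wiring added further down in this file.

```python
# Simplified stand-in for the timing-context pattern above (not the vendored class):
# log a 'Begin' line on entry and an elapsed-time line on exit, resolving the logger
# lazily so a default can be assigned after the class is defined.
import logging
import time
import typing as ta


class TimingSketch:
    DEFAULT_LOG: ta.ClassVar[ta.Optional[logging.Logger]] = None

    def __init__(self, description: str, *, level: int = logging.DEBUG) -> None:
        self._description = description
        self._level = level

    def __enter__(self) -> 'TimingSketch':
        self._begin_time = time.time()
        if (log := self.DEFAULT_LOG) is not None:
            log.log(self._level, f'Begin : {self._description}')
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        elapsed = time.time() - self._begin_time
        if (log := self.DEFAULT_LOG) is not None:
            log.log(self._level, f'End : {self._description} - {elapsed:0.2f} s elapsed')


logging.basicConfig(level=logging.DEBUG)
TimingSketch.DEFAULT_LOG = logging.getLogger('ci')  # analogous to the later DEFAULT_LOG assignment

with TimingSketch('pull base image'):
    time.sleep(0.1)
```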
```diff
@@ -1183,159 +1261,47 @@ def unlinking_if_exists(path: str) -> ta.Iterator[None]:
 
 
 ########################################
-# ../
-
-
-
+# ../docker/utils.py
+"""
+TODO:
+ - some less stupid Dockerfile hash
+  - doesn't change too much though
+"""
 
 
 ##
 
 
-
-
-
-    def __init__(
-            self,
-            *,
-            version: ta.Optional[CacheVersion] = None,
-    ) -> None:
-        super().__init__()
-
-        if version is None:
-            version = self.DEFAULT_CACHE_VERSION
-        check.isinstance(version, int)
-        check.arg(version >= 0)
-        self._version: CacheVersion = version
-
-    @property
-    def version(self) -> CacheVersion:
-        return self._version
-
-    #
-
-    @abc.abstractmethod
-    def get_file(self, key: str) -> ta.Awaitable[ta.Optional[str]]:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def put_file(
-            self,
-            key: str,
-            file_path: str,
-            *,
-            steal: bool = False,
-    ) -> ta.Awaitable[str]:
-        raise NotImplementedError
-
-
-#
-
-
-class DirectoryFileCache(FileCache):
-    @dc.dataclass(frozen=True)
-    class Config:
-        dir: str
-
-        no_create: bool = False
-        no_purge: bool = False
-
-    def __init__(
-            self,
-            config: Config,
-            *,
-            version: ta.Optional[CacheVersion] = None,
-    ) -> None:  # noqa
-        super().__init__(
-            version=version,
-        )
-
-        self._config = config
-
-    @property
-    def dir(self) -> str:
-        return self._config.dir
-
-    #
-
-    VERSION_FILE_NAME = '.ci-cache-version'
-
-    @cached_nullary
-    def setup_dir(self) -> None:
-        version_file = os.path.join(self.dir, self.VERSION_FILE_NAME)
-
-        if self._config.no_create:
-            check.state(os.path.isdir(self.dir))
-
-        elif not os.path.isdir(self.dir):
-            os.makedirs(self.dir)
-            with open(version_file, 'w') as f:
-                f.write(str(self._version))
-            return
-
-        # NOTE: intentionally raises FileNotFoundError to refuse to use an existing non-cache dir as a cache dir.
-        with open(version_file) as f:
-            dir_version = int(f.read().strip())
-
-        if dir_version == self._version:
-            return
-
-        if self._config.no_purge:
-            raise RuntimeError(f'{dir_version=} != {self._version=}')
-
-        dirs = [n for n in sorted(os.listdir(self.dir)) if os.path.isdir(os.path.join(self.dir, n))]
-        if dirs:
-            raise RuntimeError(
-                f'Refusing to remove stale cache dir {self.dir!r} '
-                f'due to present directories: {", ".join(dirs)}',
-            )
+def build_docker_file_hash(docker_file: str) -> str:
+    with open(docker_file) as f:
+        contents = f.read()
 
-
-            if n.startswith('.'):
-                continue
-            fp = os.path.join(self.dir, n)
-            check.state(os.path.isfile(fp))
-            log.debug('Purging stale cache file: %s', fp)
-            os.unlink(fp)
+    return sha256_str(contents)
 
-        os.unlink(version_file)
 
-
-            f.write(str(self._version))
+##
 
-    #
 
-
-
-
-
-        self.setup_dir()
-        return os.path.join(self.dir, key)
+def read_docker_tar_image_tag(tar_file: str) -> str:
+    with tarfile.open(tar_file) as tf:
+        with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
+            m = mf.read()
 
-
-
+    manifests = json.loads(m.decode('utf-8'))
+    manifest = check.single(manifests)
+    tag = check.non_empty_str(check.single(manifest['RepoTags']))
+    return tag
 
-    #
 
-
-
-
-
-        return cache_file_path
+def read_docker_tar_image_id(tar_file: str) -> str:
+    with tarfile.open(tar_file) as tf:
+        with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
+            i = mf.read()
 
-
-
-
-
-            *,
-            steal: bool = False,
-    ) -> str:
-        cache_file_path = self.get_cache_file_path(key)
-        if steal:
-            shutil.move(file_path, cache_file_path)
-        else:
-            shutil.copyfile(file_path, cache_file_path)
-        return cache_file_path
+    index = json.loads(i.decode('utf-8'))
+    manifest = check.single(index['manifests'])
+    image_id = check.non_empty_str(manifest['digest'])
+    return image_id
 
 
 ########################################
```
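The docker helpers added above parse the `manifest.json` / `index.json` members of a `docker save` tarball. A self-contained sketch of that parse, using a fabricated in-memory tar so no Docker daemon is needed (the tag value is invented):

```python
# Sketch of the manifest parse done by read_docker_tar_image_tag: a `docker save`
# tarball carries a top-level manifest.json listing RepoTags. A fake tar is built
# in memory here; the tag is an invented example.
import io
import json
import tarfile

manifest_payload = json.dumps([{'RepoTags': ['example/app:ci']}]).encode('utf-8')

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode='w') as tf:
    ti = tarfile.TarInfo('manifest.json')
    ti.size = len(manifest_payload)
    tf.addfile(ti, io.BytesIO(manifest_payload))
buf.seek(0)

with tarfile.open(fileobj=buf) as tf:
    with tf.extractfile('manifest.json') as mf:  # type: ignore[union-attr]
        manifests = json.loads(mf.read().decode('utf-8'))

(m,) = manifests        # the bundled helper insists on exactly one manifest entry
(tag,) = m['RepoTags']  # and exactly one tag
print(tag)              # example/app:ci
```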
```diff
@@ -1560,99 +1526,33 @@ def is_in_github_actions() -> bool:
 
 
 ########################################
-#
+# ../../../omlish/argparse/cli.py
+"""
+TODO:
+ - default command
+ - auto match all underscores to hyphens
+ - pre-run, post-run hooks
+ - exitstack?
+"""
 
 
 ##
 
 
-
-
+@dc.dataclass(eq=False)
+class ArgparseArg:
+    args: ta.Sequence[ta.Any]
+    kwargs: ta.Mapping[str, ta.Any]
+    dest: ta.Optional[str] = None
 
-
-
+    def __get__(self, instance, owner=None):
+        if instance is None:
+            return self
+        return getattr(instance.args, self.dest)  # type: ignore
 
 
-
-
-
-def sha256_str(s: str) -> str:
-    return hashlib.sha256(s.encode('utf-8')).hexdigest()
-
-
-##
-
-
-class LogTimingContext:
-    DEFAULT_LOG: ta.ClassVar[logging.Logger] = log
-
-    def __init__(
-            self,
-            description: str,
-            *,
-            log: ta.Optional[logging.Logger] = None,  # noqa
-            level: int = logging.DEBUG,
-    ) -> None:
-        super().__init__()
-
-        self._description = description
-        self._log = log if log is not None else self.DEFAULT_LOG
-        self._level = level
-
-    def set_description(self, description: str) -> 'LogTimingContext':
-        self._description = description
-        return self
-
-    _begin_time: float
-    _end_time: float
-
-    def __enter__(self) -> 'LogTimingContext':
-        self._begin_time = time.time()
-
-        self._log.log(self._level, f'Begin : {self._description}')  # noqa
-
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self._end_time = time.time()
-
-        self._log.log(
-            self._level,
-            f'End : {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
-        )
-
-
-log_timing_context = LogTimingContext
-
-
-########################################
-# ../../../omlish/argparse/cli.py
-"""
-TODO:
- - default command
- - auto match all underscores to hyphens
- - pre-run, post-run hooks
- - exitstack?
-"""
-
-
-##
-
-
-@dc.dataclass(eq=False)
-class ArgparseArg:
-    args: ta.Sequence[ta.Any]
-    kwargs: ta.Mapping[str, ta.Any]
-    dest: ta.Optional[str] = None
-
-    def __get__(self, instance, owner=None):
-        if instance is None:
-            return self
-        return getattr(instance.args, self.dest)  # type: ignore
-
-
-def argparse_arg(*args, **kwargs) -> ArgparseArg:
-    return ArgparseArg(args, kwargs)
+def argparse_arg(*args, **kwargs) -> ArgparseArg:
+    return ArgparseArg(args, kwargs)
 
 
 #
```
```diff
@@ -3115,6 +3015,15 @@ def check_lite_runtime_version() -> None:
         raise OSError(f'Requires python {LITE_REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
 
 
+########################################
+# ../../../omlish/lite/timing.py
+
+
+LogTimingContext.DEFAULT_LOG = log
+
+log_timing_context = log_timing_context  # noqa
+
+
 ########################################
 # ../../../omlish/logs/json.py
 """
```
```diff
@@ -3215,47 +3124,263 @@ def temp_named_file_context(
 
 
 ########################################
-# ../
-
-
-
-  - doesn't change too much though
-"""
+# ../cache.py
+
+
+CacheVersion = ta.NewType('CacheVersion', int)
 
 
 ##
 
 
-
-
-        contents = f.read()
+class FileCache(abc.ABC):
+    DEFAULT_CACHE_VERSION: ta.ClassVar[CacheVersion] = CacheVersion(CI_CACHE_VERSION)
 
-
+    def __init__(
+            self,
+            *,
+            version: ta.Optional[CacheVersion] = None,
+    ) -> None:
+        super().__init__()
+
+        if version is None:
+            version = self.DEFAULT_CACHE_VERSION
+        check.isinstance(version, int)
+        check.arg(version >= 0)
+        self._version: CacheVersion = version
+
+    @property
+    def version(self) -> CacheVersion:
+        return self._version
+
+    #
+
+    @abc.abstractmethod
+    def get_file(self, key: str) -> ta.Awaitable[ta.Optional[str]]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def put_file(
+            self,
+            key: str,
+            file_path: str,
+            *,
+            steal: bool = False,
+    ) -> ta.Awaitable[str]:
+        raise NotImplementedError
+
+
+#
+
+
+class DirectoryFileCache(FileCache):
+    @dc.dataclass(frozen=True)
+    class Config:
+        dir: str
+
+        no_create: bool = False
+        no_purge: bool = False
+
+    def __init__(
+            self,
+            config: Config,
+            *,
+            version: ta.Optional[CacheVersion] = None,
+    ) -> None:  # noqa
+        super().__init__(
+            version=version,
+        )
+
+        self._config = config
+
+    @property
+    def dir(self) -> str:
+        return self._config.dir
+
+    #
+
+    VERSION_FILE_NAME = '.ci-cache-version'
+
+    @cached_nullary
+    def setup_dir(self) -> None:
+        version_file = os.path.join(self.dir, self.VERSION_FILE_NAME)
+
+        if self._config.no_create:
+            check.state(os.path.isdir(self.dir))
+
+        elif not os.path.isdir(self.dir):
+            os.makedirs(self.dir)
+            with open(version_file, 'w') as f:
+                f.write(str(self._version))
+            return
+
+        # NOTE: intentionally raises FileNotFoundError to refuse to use an existing non-cache dir as a cache dir.
+        with open(version_file) as f:
+            dir_version = int(f.read().strip())
+
+        if dir_version == self._version:
+            return
+
+        if self._config.no_purge:
+            raise RuntimeError(f'{dir_version=} != {self._version=}')
+
+        dirs = [n for n in sorted(os.listdir(self.dir)) if os.path.isdir(os.path.join(self.dir, n))]
+        if dirs:
+            raise RuntimeError(
+                f'Refusing to remove stale cache dir {self.dir!r} '
+                f'due to present directories: {", ".join(dirs)}',
+            )
+
+        for n in sorted(os.listdir(self.dir)):
+            if n.startswith('.'):
+                continue
+            fp = os.path.join(self.dir, n)
+            check.state(os.path.isfile(fp))
+            log.debug('Purging stale cache file: %s', fp)
+            os.unlink(fp)
+
+        os.unlink(version_file)
+
+        with open(version_file, 'w') as f:
+            f.write(str(self._version))
+
+    #
+
+    def get_cache_file_path(
+            self,
+            key: str,
+    ) -> str:
+        self.setup_dir()
+        return os.path.join(self.dir, key)
+
+    def format_incomplete_file(self, f: str) -> str:
+        return os.path.join(os.path.dirname(f), f'_{os.path.basename(f)}.incomplete')
+
+    #
+
+    async def get_file(self, key: str) -> ta.Optional[str]:
+        cache_file_path = self.get_cache_file_path(key)
+        if not os.path.exists(cache_file_path):
+            return None
+        return cache_file_path
+
+    async def put_file(
+            self,
+            key: str,
+            file_path: str,
+            *,
+            steal: bool = False,
+    ) -> str:
+        cache_file_path = self.get_cache_file_path(key)
+        if steal:
+            shutil.move(file_path, cache_file_path)
+        else:
+            shutil.copyfile(file_path, cache_file_path)
+        return cache_file_path
 
 
 ##
 
 
-
-
-
-
+class DataCache:
+    @dc.dataclass(frozen=True)
+    class Data(abc.ABC):  # noqa
+        pass
 
-
-
-
-    return tag
+    @dc.dataclass(frozen=True)
+    class BytesData(Data):
+        data: bytes
 
+    @dc.dataclass(frozen=True)
+    class FileData(Data):
+        file_path: str
 
-
-
-
-            i = mf.read()
+    @dc.dataclass(frozen=True)
+    class UrlData(Data):
+        url: str
 
-
-
-
-
+    #
+
+    @abc.abstractmethod
+    def get_data(self, key: str) -> ta.Awaitable[ta.Optional[Data]]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def put_data(self, key: str, data: Data) -> ta.Awaitable[None]:
+        raise NotImplementedError
+
+
+#
+
+
+@functools.singledispatch
+async def read_data_cache_data(data: DataCache.Data) -> bytes:
+    raise TypeError(data)
+
+
+@read_data_cache_data.register
+async def _(data: DataCache.BytesData) -> bytes:
+    return data.data
+
+
+@read_data_cache_data.register
+async def _(data: DataCache.FileData) -> bytes:
+    with open(data.file_path, 'rb') as f:  # noqa
+        return f.read()
+
+
+@read_data_cache_data.register
+async def _(data: DataCache.UrlData) -> bytes:
+    def inner() -> bytes:
+        with urllib.request.urlopen(urllib.request.Request(  # noqa
+                data.url,
+        )) as resp:
+            return resp.read()
+
+    return await asyncio.get_running_loop().run_in_executor(None, inner)
+
+
+#
+
+
+class FileCacheDataCache(DataCache):
+    def __init__(
+            self,
+            file_cache: FileCache,
+    ) -> None:
+        super().__init__()
+
+        self._file_cache = file_cache
+
+    async def get_data(self, key: str) -> ta.Optional[DataCache.Data]:
+        if (file_path := await self._file_cache.get_file(key)) is None:
+            return None
+
+        return DataCache.FileData(file_path)
+
+    async def put_data(self, key: str, data: DataCache.Data) -> None:
+        steal = False
+
+        if isinstance(data, DataCache.BytesData):
+            file_path = make_temp_file()
+            with open(file_path, 'wb') as f:  # noqa
+                f.write(data.data)
+            steal = True
+
+        elif isinstance(data, DataCache.FileData):
+            file_path = data.file_path
+
+        elif isinstance(data, DataCache.UrlData):
+            raise NotImplementedError
+
+        else:
+            raise TypeError(data)
+
+        await self._file_cache.put_file(
+            key,
+            file_path,
+            steal=steal,
+        )
 
 
 ########################################
```
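The new `DataCache.Data` hierarchy above is consumed through a `functools.singledispatch` reader. A compact stand-in showing how that dispatch over the payload shapes works (the names here are illustrative, not the bundled classes):

```python
# Stand-in sketch of singledispatch over DataCache-style payload variants:
# bytes held inline vs. bytes sitting in a file on disk.
import asyncio
import dataclasses as dc
import functools
import tempfile


@dc.dataclass(frozen=True)
class BytesData:
    data: bytes


@dc.dataclass(frozen=True)
class FileData:
    file_path: str


@functools.singledispatch
async def read_data(data) -> bytes:
    # Fallback for unknown payload types, mirroring the TypeError raised above.
    raise TypeError(data)


@read_data.register
async def _(data: BytesData) -> bytes:
    return data.data


@read_data.register
async def _(data: FileData) -> bytes:
    with open(data.file_path, 'rb') as f:
        return f.read()


async def main() -> None:
    print(await read_data(BytesData(b'hello')))

    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b'cached bytes')
    print(await read_data(FileData(f.name)))


asyncio.run(main())
```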
```diff
@@ -3273,6 +3398,9 @@ class GithubCacheClient(abc.ABC):
     def get_entry(self, key: str) -> ta.Awaitable[ta.Optional[Entry]]:
         raise NotImplementedError
 
+    def get_entry_url(self, entry: Entry) -> ta.Optional[str]:
+        return None
+
     @abc.abstractmethod
     def download_file(self, entry: Entry, out_file: str) -> ta.Awaitable[None]:
         raise NotImplementedError
```
```diff
@@ -3339,7 +3467,7 @@ class GithubCacheServiceV1BaseClient(GithubCacheClient, abc.ABC):
     def _get_loop(self) -> asyncio.AbstractEventLoop:
         if (loop := self._given_loop) is not None:
             return loop
-        return asyncio.
+        return asyncio.get_running_loop()
 
     #
 
```
```diff
@@ -3467,6 +3595,10 @@ class GithubCacheServiceV1BaseClient(GithubCacheClient, abc.ABC):
     class Entry(GithubCacheClient.Entry):
         artifact: GithubCacheServiceV1.ArtifactCacheEntry
 
+    def get_entry_url(self, entry: GithubCacheClient.Entry) -> ta.Optional[str]:
+        entry1 = check.isinstance(entry, self.Entry)
+        return entry1.artifact.cache_key
+
     #
 
     def build_get_entry_url_path(self, *keys: str) -> str:
```
```diff
@@ -4107,6 +4239,25 @@ class SubprocessRun:
     capture_output: ta.Optional[bool] = None
     kwargs: ta.Optional[ta.Mapping[str, ta.Any]] = None
 
+    @classmethod
+    def of(
+            cls,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> 'SubprocessRun':
+        return cls(
+            cmd=cmd,
+            input=input,
+            timeout=timeout,
+            check=check,
+            capture_output=capture_output,
+            kwargs=kwargs,
+        )
+
 
 @dc.dataclass(frozen=True)
 class SubprocessRunOutput(ta.Generic[T]):
```
```diff
@@ -4340,7 +4491,7 @@ class AbstractAsyncSubprocesses(BaseSubprocesses):
 ##
 
 
-class
+class GithubCache(FileCache, DataCache):
     @dc.dataclass(frozen=True)
     class Config:
         dir: str
```
```diff
@@ -4371,6 +4522,8 @@ class GithubFileCache(FileCache):
             version=self._version,
         )
 
+    #
+
     async def get_file(self, key: str) -> ta.Optional[str]:
         local_file = self._local.get_cache_file_path(key)
         if os.path.exists(local_file):
```
```diff
@@ -4404,6 +4557,21 @@ class GithubFileCache(FileCache):
 
         return cache_file_path
 
+    #
+
+    async def get_data(self, key: str) -> ta.Optional[DataCache.Data]:
+        local_file = self._local.get_cache_file_path(key)
+        if os.path.exists(local_file):
+            return DataCache.FileData(local_file)
+
+        if (entry := await self._client.get_entry(key)) is None:
+            return None
+
+        return DataCache.UrlData(check.non_empty_str(self._client.get_entry_url(entry)))
+
+    async def put_data(self, key: str, data: DataCache.Data) -> None:
+        await FileCacheDataCache(self).put_data(key, data)
+
 
 ########################################
 # ../github/cli.py
```
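The `get_data` added above answers from the local directory cache first and only then falls back to the remote entry's URL. A stand-in sketch of that lookup order using a fake client (the function, client, and URL here are invented for illustration):

```python
# Sketch of the local-first, remote-fallback lookup above; FakeClient and the URL
# are placeholders, not the real GitHub cache client.
import asyncio
import os
import tempfile
import typing as ta


class FakeClient:
    async def get_entry(self, key: str) -> ta.Optional[str]:
        return f'https://cache.example.invalid/{key}'  # pretend every key exists remotely


async def get_data(cache_dir: str, client: FakeClient, key: str) -> str:
    local_file = os.path.join(cache_dir, key)
    if os.path.exists(local_file):
        return f'file:{local_file}'

    if (url := await client.get_entry(key)) is None:
        raise KeyError(key)
    return f'url:{url}'


async def main() -> None:
    d = tempfile.mkdtemp()
    with open(os.path.join(d, 'hit'), 'w') as f:
        f.write('cached')

    print(await get_data(d, FakeClient(), 'hit'))   # served from the local dir
    print(await get_data(d, FakeClient(), 'miss'))  # falls back to the remote URL


asyncio.run(main())
```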
```diff
@@ -5019,11 +5187,11 @@ def bind_github(
 
     if cache_dir is not None:
         lst.extend([
-            inj.bind(
+            inj.bind(GithubCache.Config(
                 dir=cache_dir,
             )),
-            inj.bind(
-            inj.bind(FileCache, to_key=
+            inj.bind(GithubCache, singleton=True),
+            inj.bind(FileCache, to_key=GithubCache),
         ])
 
     return inj.as_bindings(*lst)
```
|