omdev 0.0.0.dev222__py3-none-any.whl → 0.0.0.dev224__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
- omdev/ci/cache.py +148 -23
- omdev/ci/ci.py +50 -110
- omdev/ci/cli.py +24 -23
- omdev/ci/docker/__init__.py +0 -0
- omdev/ci/docker/buildcaching.py +69 -0
- omdev/ci/docker/cache.py +57 -0
- omdev/ci/docker/cacheserved.py +262 -0
- omdev/ci/{docker.py → docker/cmds.py} +1 -44
- omdev/ci/docker/dataserver.py +204 -0
- omdev/ci/docker/imagepulling.py +65 -0
- omdev/ci/docker/inject.py +37 -0
- omdev/ci/docker/packing.py +72 -0
- omdev/ci/docker/repositories.py +40 -0
- omdev/ci/docker/utils.py +48 -0
- omdev/ci/github/cache.py +35 -6
- omdev/ci/github/client.py +9 -2
- omdev/ci/github/inject.py +30 -0
- omdev/ci/inject.py +61 -0
- omdev/ci/utils.py +0 -49
- omdev/dataserver/__init__.py +1 -0
- omdev/dataserver/handlers.py +198 -0
- omdev/dataserver/http.py +69 -0
- omdev/dataserver/routes.py +49 -0
- omdev/dataserver/server.py +90 -0
- omdev/dataserver/targets.py +121 -0
- omdev/oci/building.py +107 -9
- omdev/oci/compression.py +8 -0
- omdev/oci/data.py +43 -0
- omdev/oci/datarefs.py +90 -50
- omdev/oci/dataserver.py +64 -0
- omdev/oci/loading.py +20 -0
- omdev/oci/media.py +20 -0
- omdev/oci/pack/__init__.py +0 -0
- omdev/oci/pack/packing.py +185 -0
- omdev/oci/pack/repositories.py +162 -0
- omdev/oci/pack/unpacking.py +204 -0
- omdev/oci/repositories.py +84 -2
- omdev/oci/tars.py +144 -0
- omdev/pyproject/resources/python.sh +1 -1
- omdev/scripts/ci.py +2137 -512
- omdev/scripts/interp.py +119 -22
- omdev/scripts/pyproject.py +141 -28
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/RECORD +48 -23
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev222.dist-info → omdev-0.0.0.dev224.dist-info}/top_level.txt +0 -0
omdev/scripts/ci.py
CHANGED
@@ -21,6 +21,7 @@ import asyncio.base_subprocess
 import asyncio.subprocess
 import collections
 import contextlib
+import contextvars
 import dataclasses as dc
 import datetime
 import functools
@@ -44,6 +45,7 @@ import types
 import typing as ta
 import urllib.parse
 import urllib.request
+import weakref
 
 
 ########################################
@@ -80,6 +82,13 @@ ArgparseCmdFn = ta.Callable[[], ta.Optional[int]]  # ta.TypeAlias
 ExitStackedT = ta.TypeVar('ExitStackedT', bound='ExitStacked')
 AsyncExitStackedT = ta.TypeVar('AsyncExitStackedT', bound='AsyncExitStacked')
 
+# ../../omlish/lite/inject.py
+U = ta.TypeVar('U')
+InjectorKeyCls = ta.Union[type, ta.NewType]
+InjectorProviderFn = ta.Callable[['Injector'], ta.Any]
+InjectorProviderFnMap = ta.Mapping['InjectorKey', 'InjectorProviderFn']
+InjectorBindingOrBindings = ta.Union['InjectorBinding', 'InjectorBindings']
+
 # ../../omlish/subprocesses.py
 SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias
 
@@ -150,6 +159,27 @@ class ShellCmd:
         )
 
 
+########################################
+# ../utils.py
+
+
+##
+
+
+def read_yaml_file(yaml_file: str) -> ta.Any:
+    yaml = __import__('yaml')
+
+    with open(yaml_file) as f:
+        return yaml.safe_load(f)
+
+
+##
+
+
+def sha256_str(s: str) -> str:
+    return hashlib.sha256(s.encode('utf-8')).hexdigest()
+
+
 ########################################
 # ../../../omlish/asyncs/asyncio/asyncio.py
 
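For orientation, the two relocated ../utils.py helpers above can be exercised roughly as follows (a minimal sketch, not part of the diff; the ci.yaml path is hypothetical, and PyYAML must be installed since read_yaml_file imports it lazily):

    cfg = read_yaml_file('ci.yaml')        # parsed via yaml.safe_load
    digest = sha256_str('some-cache-key')  # hex sha-256 of the utf-8 encoded string
    print(cfg, digest)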
@@ -797,6 +827,50 @@ json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON
 log = logging.getLogger(__name__)
 
 
+########################################
+# ../../../omlish/lite/maybes.py
+
+
+class Maybe(ta.Generic[T]):
+    @property
+    @abc.abstractmethod
+    def present(self) -> bool:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def must(self) -> T:
+        raise NotImplementedError
+
+    @classmethod
+    def just(cls, v: T) -> 'Maybe[T]':
+        return tuple.__new__(_Maybe, (v,))  # noqa
+
+    _empty: ta.ClassVar['Maybe']
+
+    @classmethod
+    def empty(cls) -> 'Maybe[T]':
+        return Maybe._empty
+
+
+class _Maybe(Maybe[T], tuple):
+    __slots__ = ()
+
+    def __init_subclass__(cls, **kwargs):
+        raise TypeError
+
+    @property
+    def present(self) -> bool:
+        return bool(self)
+
+    def must(self) -> T:
+        if not self:
+            raise ValueError
+        return self[0]
+
+
+Maybe._empty = tuple.__new__(_Maybe, ())  # noqa
+
+
 ########################################
 # ../../../omlish/lite/reflect.py
 
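A minimal sketch of how the Maybe container added above behaves (illustrative only, not part of the diff):

    m = Maybe.just(42)
    assert m.present
    assert m.must() == 42

    e = Maybe.empty()
    assert not e.present   # e.must() would raise ValueError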
@@ -1093,6 +1167,63 @@ class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
         self._underlying.handleError(record)
 
 
+########################################
+# ../../../omlish/logs/timing.py
+
+
+##
+
+
+class LogTimingContext:
+    DEFAULT_LOG: ta.ClassVar[ta.Optional[logging.Logger]] = None
+
+    class _NOT_SPECIFIED:  # noqa
+        def __new__(cls, *args, **kwargs):  # noqa
+            raise TypeError
+
+    def __init__(
+            self,
+            description: str,
+            *,
+            log: ta.Union[logging.Logger, ta.Type[_NOT_SPECIFIED], None] = _NOT_SPECIFIED,  # noqa
+            level: int = logging.DEBUG,
+    ) -> None:
+        super().__init__()
+
+        self._description = description
+        if log is self._NOT_SPECIFIED:
+            log = self.DEFAULT_LOG  # noqa
+        self._log: ta.Optional[logging.Logger] = log  # type: ignore
+        self._level = level
+
+    def set_description(self, description: str) -> 'LogTimingContext':
+        self._description = description
+        return self
+
+    _begin_time: float
+    _end_time: float
+
+    def __enter__(self) -> 'LogTimingContext':
+        self._begin_time = time.time()
+
+        if self._log is not None:
+            self._log.log(self._level, f'Begin : {self._description}')  # noqa
+
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self._end_time = time.time()
+
+        if self._log is not None:
+            self._log.log(
+                self._level,
+                f'End : {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
+            )
+
+
+log_timing_context = LogTimingContext
+
+
 ########################################
 # ../../../omlish/os/files.py
 
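The relocated timing helper is used as a context manager; a minimal sketch follows (illustrative only — later in the script DEFAULT_LOG is wired to the module logger, so the explicit log= argument here is just for a standalone run):

    import logging
    import time

    logging.basicConfig(level=logging.DEBUG)
    with log_timing_context('sleep a moment', log=logging.getLogger(__name__)):
        time.sleep(0.1)
    # logs 'Begin : sleep a moment' and 'End : sleep a moment - 0.10 s elapsed'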
@@ -1130,143 +1261,47 @@ def unlinking_if_exists(path: str) -> ta.Iterator[None]:
 
 
 ########################################
-# ../cache.py
+# ../docker/utils.py
+"""
+TODO:
+ - some less stupid Dockerfile hash
+  - doesn't change too much though
+"""
 
 
 ##
 
 
-
-
-
-            self,
-            *,
-            version: int = CI_CACHE_VERSION,
-    ) -> None:
-        super().__init__()
-
-        check.isinstance(version, int)
-        check.arg(version >= 0)
-        self._version = version
-
-    @property
-    def version(self) -> int:
-        return self._version
-
-    #
-
-    @abc.abstractmethod
-    def get_file(self, key: str) -> ta.Awaitable[ta.Optional[str]]:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def put_file(
-            self,
-            key: str,
-            file_path: str,
-            *,
-            steal: bool = False,
-    ) -> ta.Awaitable[str]:
-        raise NotImplementedError
-
-
-#
-
-
-class DirectoryFileCache(FileCache):
-    def __init__(
-            self,
-            dir: str,  # noqa
-            *,
-            no_create: bool = False,
-            no_purge: bool = False,
-            **kwargs: ta.Any,
-    ) -> None:  # noqa
-        super().__init__(**kwargs)
-
-        self._dir = dir
-        self._no_create = no_create
-        self._no_purge = no_purge
-
-    #
-
-    VERSION_FILE_NAME = '.ci-cache-version'
-
-    @cached_nullary
-    def setup_dir(self) -> None:
-        version_file = os.path.join(self._dir, self.VERSION_FILE_NAME)
-
-        if self._no_create:
-            check.state(os.path.isdir(self._dir))
-
-        elif not os.path.isdir(self._dir):
-            os.makedirs(self._dir)
-            with open(version_file, 'w') as f:
-                f.write(str(self._version))
-            return
-
-        with open(version_file) as f:
-            dir_version = int(f.read().strip())
-
-        if dir_version == self._version:
-            return
-
-        if self._no_purge:
-            raise RuntimeError(f'{dir_version=} != {self._version=}')
-
-        dirs = [n for n in sorted(os.listdir(self._dir)) if os.path.isdir(os.path.join(self._dir, n))]
-        if dirs:
-            raise RuntimeError(
-                f'Refusing to remove stale cache dir {self._dir!r} '
-                f'due to present directories: {", ".join(dirs)}',
-            )
+def build_docker_file_hash(docker_file: str) -> str:
+    with open(docker_file) as f:
+        contents = f.read()
 
-
-            if n.startswith('.'):
-                continue
-            fp = os.path.join(self._dir, n)
-            check.state(os.path.isfile(fp))
-            log.debug('Purging stale cache file: %s', fp)
-            os.unlink(fp)
+    return sha256_str(contents)
 
-        os.unlink(version_file)
 
-
-            f.write(str(self._version))
+##
 
-    #
 
-
-
-
-
-        self.setup_dir()
-        return os.path.join(self._dir, key)
+def read_docker_tar_image_tag(tar_file: str) -> str:
+    with tarfile.open(tar_file) as tf:
+        with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
+            m = mf.read()
 
-
-
+    manifests = json.loads(m.decode('utf-8'))
+    manifest = check.single(manifests)
+    tag = check.non_empty_str(check.single(manifest['RepoTags']))
+    return tag
 
-    #
 
-
-
-
-
-        return cache_file_path
+def read_docker_tar_image_id(tar_file: str) -> str:
+    with tarfile.open(tar_file) as tf:
+        with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
+            i = mf.read()
 
-
-
-
-
-            *,
-            steal: bool = False,
-    ) -> str:
-        cache_file_path = self.get_cache_file_path(key)
-        if steal:
-            shutil.move(file_path, cache_file_path)
-        else:
-            shutil.copyfile(file_path, cache_file_path)
-        return cache_file_path
+    index = json.loads(i.decode('utf-8'))
+    manifest = check.single(index['manifests'])
+    image_id = check.non_empty_str(manifest['digest'])
+    return image_id
 
 
 ########################################
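A rough sketch of how the new docker/utils.py helpers fit together (illustrative only; the Dockerfile and out/image.tar paths are hypothetical):

    print(build_docker_file_hash('Dockerfile'))        # sha-256 of the Dockerfile contents
    print(read_docker_tar_image_tag('out/image.tar'))  # single RepoTags entry from manifest.json
    print(read_docker_tar_image_id('out/image.tar'))   # manifest digest from index.json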
@@ -1491,111 +1526,45 @@ def is_in_github_actions() -> bool:
 
 
 ########################################
-#
+# ../../../omlish/argparse/cli.py
+"""
+TODO:
+ - default command
+ - auto match all underscores to hyphens
+ - pre-run, post-run hooks
+ - exitstack?
+"""
 
 
 ##
 
 
-
-
-
-
-
+@dc.dataclass(eq=False)
+class ArgparseArg:
+    args: ta.Sequence[ta.Any]
+    kwargs: ta.Mapping[str, ta.Any]
+    dest: ta.Optional[str] = None
 
+    def __get__(self, instance, owner=None):
+        if instance is None:
+            return self
+        return getattr(instance.args, self.dest)  # type: ignore
 
-##
 
+def argparse_arg(*args, **kwargs) -> ArgparseArg:
+    return ArgparseArg(args, kwargs)
 
-def sha256_str(s: str) -> str:
-    return hashlib.sha256(s.encode('utf-8')).hexdigest()
 
+#
 
-##
 
+@dc.dataclass(eq=False)
+class ArgparseCmd:
+    name: str
+    fn: ArgparseCmdFn
+    args: ta.Sequence[ArgparseArg] = ()  # noqa
 
-
-    DEFAULT_LOG: ta.ClassVar[logging.Logger] = log
-
-    def __init__(
-            self,
-            description: str,
-            *,
-            log: ta.Optional[logging.Logger] = None,  # noqa
-            level: int = logging.DEBUG,
-    ) -> None:
-        super().__init__()
-
-        self._description = description
-        self._log = log if log is not None else self.DEFAULT_LOG
-        self._level = level
-
-    def set_description(self, description: str) -> 'LogTimingContext':
-        self._description = description
-        return self
-
-    _begin_time: float
-    _end_time: float
-
-    def __enter__(self) -> 'LogTimingContext':
-        self._begin_time = time.time()
-
-        self._log.log(self._level, f'Begin : {self._description}')  # noqa
-
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self._end_time = time.time()
-
-        self._log.log(
-            self._level,
-            f'End : {self._description} - {self._end_time - self._begin_time:0.2f} s elapsed',
-        )
-
-
-log_timing_context = LogTimingContext
-
-
-########################################
-# ../../../omlish/argparse/cli.py
-"""
-TODO:
- - default command
- - auto match all underscores to hyphens
- - pre-run, post-run hooks
- - exitstack?
-"""
-
-
-##
-
-
-@dc.dataclass(eq=False)
-class ArgparseArg:
-    args: ta.Sequence[ta.Any]
-    kwargs: ta.Mapping[str, ta.Any]
-    dest: ta.Optional[str] = None
-
-    def __get__(self, instance, owner=None):
-        if instance is None:
-            return self
-        return getattr(instance.args, self.dest)  # type: ignore
-
-
-def argparse_arg(*args, **kwargs) -> ArgparseArg:
-    return ArgparseArg(args, kwargs)
-
-
-#
-
-
-@dc.dataclass(eq=False)
-class ArgparseCmd:
-    name: str
-    fn: ArgparseCmdFn
-    args: ta.Sequence[ArgparseArg] = ()  # noqa
-
-    # _: dc.KW_ONLY
+    # _: dc.KW_ONLY
 
     aliases: ta.Optional[ta.Sequence[str]] = None
     parent: ta.Optional['ArgparseCmd'] = None
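The relocated argparse helpers are declarative: argparse_arg only packages positional and keyword arguments for later registration with argparse. A hypothetical sketch of the data it produces (not part of the diff; the '--repeat' flag and the 'build' command name are made up):

    a = argparse_arg('--repeat', type=int, default=1)
    assert a.args == ('--repeat',)
    assert a.kwargs == {'type': int, 'default': 1}

    cmd = ArgparseCmd('build', fn=lambda: 0, args=(a,))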
@@ -1902,173 +1871,1516 @@ class AsyncExitStacked:
         return await es.enter_async_context(cm)
 
 
-##
+##
+
+
+@contextlib.contextmanager
+def defer(fn: ta.Callable) -> ta.Generator[ta.Callable, None, None]:
+    try:
+        yield fn
+    finally:
+        fn()
+
+
+@contextlib.asynccontextmanager
+async def adefer(fn: ta.Callable) -> ta.AsyncGenerator[ta.Callable, None]:
+    try:
+        yield fn
+    finally:
+        await fn()
+
+
+##
+
+
+@contextlib.contextmanager
+def attr_setting(obj, attr, val, *, default=None):  # noqa
+    not_set = object()
+    orig = getattr(obj, attr, not_set)
+    try:
+        setattr(obj, attr, val)
+        if orig is not not_set:
+            yield orig
+        else:
+            yield default
+    finally:
+        if orig is not_set:
+            delattr(obj, attr)
+        else:
+            setattr(obj, attr, orig)
+
+
+##
+
+
+class aclosing(contextlib.AbstractAsyncContextManager):  # noqa
+    def __init__(self, thing):
+        self.thing = thing
+
+    async def __aenter__(self):
+        return self.thing
+
+    async def __aexit__(self, *exc_info):
+        await self.thing.aclose()
+
+
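A minimal sketch of the defer helper reconstructed above (illustrative only, not part of the diff):

    with defer(lambda: print('cleaned up')):
        print('doing work')
    # prints 'doing work' then 'cleaned up'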
1927
|
+
########################################
|
1928
|
+
# ../../../omlish/lite/inject.py
|
1929
|
+
|
1930
|
+
|
1931
|
+
###
|
1932
|
+
# types
|
1933
|
+
|
1934
|
+
|
1935
|
+
@dc.dataclass(frozen=True)
|
1936
|
+
class InjectorKey(ta.Generic[T]):
|
1937
|
+
# Before PEP-560 typing.Generic was a metaclass with a __new__ that takes a 'cls' arg, so instantiating a dataclass
|
1938
|
+
# with kwargs (such as through dc.replace) causes `TypeError: __new__() got multiple values for argument 'cls'`.
|
1939
|
+
# See:
|
1940
|
+
# - https://github.com/python/cpython/commit/d911e40e788fb679723d78b6ea11cabf46caed5a
|
1941
|
+
# - https://gist.github.com/wrmsr/4468b86efe9f373b6b114bfe85b98fd3
|
1942
|
+
cls_: InjectorKeyCls
|
1943
|
+
|
1944
|
+
tag: ta.Any = None
|
1945
|
+
array: bool = False
|
1946
|
+
|
1947
|
+
|
1948
|
+
def is_valid_injector_key_cls(cls: ta.Any) -> bool:
|
1949
|
+
return isinstance(cls, type) or is_new_type(cls)
|
1950
|
+
|
1951
|
+
|
1952
|
+
def check_valid_injector_key_cls(cls: T) -> T:
|
1953
|
+
if not is_valid_injector_key_cls(cls):
|
1954
|
+
raise TypeError(cls)
|
1955
|
+
return cls
|
1956
|
+
|
1957
|
+
|
1958
|
+
##
|
1959
|
+
|
1960
|
+
|
1961
|
+
class InjectorProvider(abc.ABC):
|
1962
|
+
@abc.abstractmethod
|
1963
|
+
def provider_fn(self) -> InjectorProviderFn:
|
1964
|
+
raise NotImplementedError
|
1965
|
+
|
1966
|
+
|
1967
|
+
##
|
1968
|
+
|
1969
|
+
|
1970
|
+
@dc.dataclass(frozen=True)
|
1971
|
+
class InjectorBinding:
|
1972
|
+
key: InjectorKey
|
1973
|
+
provider: InjectorProvider
|
1974
|
+
|
1975
|
+
def __post_init__(self) -> None:
|
1976
|
+
check.isinstance(self.key, InjectorKey)
|
1977
|
+
check.isinstance(self.provider, InjectorProvider)
|
1978
|
+
|
1979
|
+
|
1980
|
+
class InjectorBindings(abc.ABC):
|
1981
|
+
@abc.abstractmethod
|
1982
|
+
def bindings(self) -> ta.Iterator[InjectorBinding]:
|
1983
|
+
raise NotImplementedError
|
1984
|
+
|
1985
|
+
##
|
1986
|
+
|
1987
|
+
|
1988
|
+
class Injector(abc.ABC):
|
1989
|
+
@abc.abstractmethod
|
1990
|
+
def try_provide(self, key: ta.Any) -> Maybe[ta.Any]:
|
1991
|
+
raise NotImplementedError
|
1992
|
+
|
1993
|
+
@abc.abstractmethod
|
1994
|
+
def provide(self, key: ta.Any) -> ta.Any:
|
1995
|
+
raise NotImplementedError
|
1996
|
+
|
1997
|
+
@abc.abstractmethod
|
1998
|
+
def provide_kwargs(
|
1999
|
+
self,
|
2000
|
+
obj: ta.Any,
|
2001
|
+
*,
|
2002
|
+
skip_args: int = 0,
|
2003
|
+
skip_kwargs: ta.Optional[ta.Iterable[ta.Any]] = None,
|
2004
|
+
) -> ta.Mapping[str, ta.Any]:
|
2005
|
+
raise NotImplementedError
|
2006
|
+
|
2007
|
+
@abc.abstractmethod
|
2008
|
+
def inject(
|
2009
|
+
self,
|
2010
|
+
obj: ta.Any,
|
2011
|
+
*,
|
2012
|
+
args: ta.Optional[ta.Sequence[ta.Any]] = None,
|
2013
|
+
kwargs: ta.Optional[ta.Mapping[str, ta.Any]] = None,
|
2014
|
+
) -> ta.Any:
|
2015
|
+
raise NotImplementedError
|
2016
|
+
|
2017
|
+
def __getitem__(
|
2018
|
+
self,
|
2019
|
+
target: ta.Union[InjectorKey[T], ta.Type[T]],
|
2020
|
+
) -> T:
|
2021
|
+
return self.provide(target)
|
2022
|
+
|
2023
|
+
|
2024
|
+
###
|
2025
|
+
# exceptions
|
2026
|
+
|
2027
|
+
|
2028
|
+
class InjectorError(Exception):
|
2029
|
+
pass
|
2030
|
+
|
2031
|
+
|
2032
|
+
@dc.dataclass()
|
2033
|
+
class InjectorKeyError(InjectorError):
|
2034
|
+
key: InjectorKey
|
2035
|
+
|
2036
|
+
source: ta.Any = None
|
2037
|
+
name: ta.Optional[str] = None
|
2038
|
+
|
2039
|
+
|
2040
|
+
class UnboundInjectorKeyError(InjectorKeyError):
|
2041
|
+
pass
|
2042
|
+
|
2043
|
+
|
2044
|
+
class DuplicateInjectorKeyError(InjectorKeyError):
|
2045
|
+
pass
|
2046
|
+
|
2047
|
+
|
2048
|
+
class CyclicDependencyInjectorKeyError(InjectorKeyError):
|
2049
|
+
pass
|
2050
|
+
|
2051
|
+
|
2052
|
+
###
|
2053
|
+
# keys
|
2054
|
+
|
2055
|
+
|
2056
|
+
def as_injector_key(o: ta.Any) -> InjectorKey:
|
2057
|
+
if o is inspect.Parameter.empty:
|
2058
|
+
raise TypeError(o)
|
2059
|
+
if isinstance(o, InjectorKey):
|
2060
|
+
return o
|
2061
|
+
if is_valid_injector_key_cls(o):
|
2062
|
+
return InjectorKey(o)
|
2063
|
+
raise TypeError(o)
|
2064
|
+
|
2065
|
+
|
2066
|
+
###
|
2067
|
+
# providers
|
2068
|
+
|
2069
|
+
|
2070
|
+
@dc.dataclass(frozen=True)
|
2071
|
+
class FnInjectorProvider(InjectorProvider):
|
2072
|
+
fn: ta.Any
|
2073
|
+
|
2074
|
+
def __post_init__(self) -> None:
|
2075
|
+
check.not_isinstance(self.fn, type)
|
2076
|
+
|
2077
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2078
|
+
def pfn(i: Injector) -> ta.Any:
|
2079
|
+
return i.inject(self.fn)
|
2080
|
+
|
2081
|
+
return pfn
|
2082
|
+
|
2083
|
+
|
2084
|
+
@dc.dataclass(frozen=True)
|
2085
|
+
class CtorInjectorProvider(InjectorProvider):
|
2086
|
+
cls_: type
|
2087
|
+
|
2088
|
+
def __post_init__(self) -> None:
|
2089
|
+
check.isinstance(self.cls_, type)
|
2090
|
+
|
2091
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2092
|
+
def pfn(i: Injector) -> ta.Any:
|
2093
|
+
return i.inject(self.cls_)
|
2094
|
+
|
2095
|
+
return pfn
|
2096
|
+
|
2097
|
+
|
2098
|
+
@dc.dataclass(frozen=True)
|
2099
|
+
class ConstInjectorProvider(InjectorProvider):
|
2100
|
+
v: ta.Any
|
2101
|
+
|
2102
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2103
|
+
return lambda _: self.v
|
2104
|
+
|
2105
|
+
|
2106
|
+
@dc.dataclass(frozen=True)
|
2107
|
+
class SingletonInjectorProvider(InjectorProvider):
|
2108
|
+
p: InjectorProvider
|
2109
|
+
|
2110
|
+
def __post_init__(self) -> None:
|
2111
|
+
check.isinstance(self.p, InjectorProvider)
|
2112
|
+
|
2113
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2114
|
+
v = not_set = object()
|
2115
|
+
|
2116
|
+
def pfn(i: Injector) -> ta.Any:
|
2117
|
+
nonlocal v
|
2118
|
+
if v is not_set:
|
2119
|
+
v = ufn(i)
|
2120
|
+
return v
|
2121
|
+
|
2122
|
+
ufn = self.p.provider_fn()
|
2123
|
+
return pfn
|
2124
|
+
|
2125
|
+
|
2126
|
+
@dc.dataclass(frozen=True)
|
2127
|
+
class LinkInjectorProvider(InjectorProvider):
|
2128
|
+
k: InjectorKey
|
2129
|
+
|
2130
|
+
def __post_init__(self) -> None:
|
2131
|
+
check.isinstance(self.k, InjectorKey)
|
2132
|
+
|
2133
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2134
|
+
def pfn(i: Injector) -> ta.Any:
|
2135
|
+
return i.provide(self.k)
|
2136
|
+
|
2137
|
+
return pfn
|
2138
|
+
|
2139
|
+
|
2140
|
+
@dc.dataclass(frozen=True)
|
2141
|
+
class ArrayInjectorProvider(InjectorProvider):
|
2142
|
+
ps: ta.Sequence[InjectorProvider]
|
2143
|
+
|
2144
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2145
|
+
ps = [p.provider_fn() for p in self.ps]
|
2146
|
+
|
2147
|
+
def pfn(i: Injector) -> ta.Any:
|
2148
|
+
rv = []
|
2149
|
+
for ep in ps:
|
2150
|
+
o = ep(i)
|
2151
|
+
rv.append(o)
|
2152
|
+
return rv
|
2153
|
+
|
2154
|
+
return pfn
|
2155
|
+
|
2156
|
+
|
2157
|
+
###
|
2158
|
+
# bindings
|
2159
|
+
|
2160
|
+
|
2161
|
+
@dc.dataclass(frozen=True)
|
2162
|
+
class _InjectorBindings(InjectorBindings):
|
2163
|
+
bs: ta.Optional[ta.Sequence[InjectorBinding]] = None
|
2164
|
+
ps: ta.Optional[ta.Sequence[InjectorBindings]] = None
|
2165
|
+
|
2166
|
+
def bindings(self) -> ta.Iterator[InjectorBinding]:
|
2167
|
+
if self.bs is not None:
|
2168
|
+
yield from self.bs
|
2169
|
+
if self.ps is not None:
|
2170
|
+
for p in self.ps:
|
2171
|
+
yield from p.bindings()
|
2172
|
+
|
2173
|
+
|
2174
|
+
def as_injector_bindings(*args: InjectorBindingOrBindings) -> InjectorBindings:
|
2175
|
+
bs: ta.List[InjectorBinding] = []
|
2176
|
+
ps: ta.List[InjectorBindings] = []
|
2177
|
+
|
2178
|
+
for a in args:
|
2179
|
+
if isinstance(a, InjectorBindings):
|
2180
|
+
ps.append(a)
|
2181
|
+
elif isinstance(a, InjectorBinding):
|
2182
|
+
bs.append(a)
|
2183
|
+
else:
|
2184
|
+
raise TypeError(a)
|
2185
|
+
|
2186
|
+
return _InjectorBindings(
|
2187
|
+
bs or None,
|
2188
|
+
ps or None,
|
2189
|
+
)
|
2190
|
+
|
2191
|
+
|
2192
|
+
##
|
2193
|
+
|
2194
|
+
|
2195
|
+
def build_injector_provider_map(bs: InjectorBindings) -> ta.Mapping[InjectorKey, InjectorProvider]:
|
2196
|
+
pm: ta.Dict[InjectorKey, InjectorProvider] = {}
|
2197
|
+
am: ta.Dict[InjectorKey, ta.List[InjectorProvider]] = {}
|
2198
|
+
|
2199
|
+
for b in bs.bindings():
|
2200
|
+
if b.key.array:
|
2201
|
+
al = am.setdefault(b.key, [])
|
2202
|
+
if isinstance(b.provider, ArrayInjectorProvider):
|
2203
|
+
al.extend(b.provider.ps)
|
2204
|
+
else:
|
2205
|
+
al.append(b.provider)
|
2206
|
+
else:
|
2207
|
+
if b.key in pm:
|
2208
|
+
raise KeyError(b.key)
|
2209
|
+
pm[b.key] = b.provider
|
2210
|
+
|
2211
|
+
if am:
|
2212
|
+
for k, aps in am.items():
|
2213
|
+
pm[k] = ArrayInjectorProvider(aps)
|
2214
|
+
|
2215
|
+
return pm
|
2216
|
+
|
2217
|
+
|
2218
|
+
###
|
2219
|
+
# overrides
|
2220
|
+
|
2221
|
+
|
2222
|
+
@dc.dataclass(frozen=True)
|
2223
|
+
class OverridesInjectorBindings(InjectorBindings):
|
2224
|
+
p: InjectorBindings
|
2225
|
+
m: ta.Mapping[InjectorKey, InjectorBinding]
|
2226
|
+
|
2227
|
+
def bindings(self) -> ta.Iterator[InjectorBinding]:
|
2228
|
+
for b in self.p.bindings():
|
2229
|
+
yield self.m.get(b.key, b)
|
2230
|
+
|
2231
|
+
|
2232
|
+
def injector_override(p: InjectorBindings, *args: InjectorBindingOrBindings) -> InjectorBindings:
|
2233
|
+
m: ta.Dict[InjectorKey, InjectorBinding] = {}
|
2234
|
+
|
2235
|
+
for b in as_injector_bindings(*args).bindings():
|
2236
|
+
if b.key in m:
|
2237
|
+
raise DuplicateInjectorKeyError(b.key)
|
2238
|
+
m[b.key] = b
|
2239
|
+
|
2240
|
+
return OverridesInjectorBindings(p, m)
|
2241
|
+
|
2242
|
+
|
2243
|
+
###
|
2244
|
+
# scopes
|
2245
|
+
|
2246
|
+
|
2247
|
+
class InjectorScope(abc.ABC): # noqa
|
2248
|
+
def __init__(
|
2249
|
+
self,
|
2250
|
+
*,
|
2251
|
+
_i: Injector,
|
2252
|
+
) -> None:
|
2253
|
+
check.not_in(abc.ABC, type(self).__bases__)
|
2254
|
+
|
2255
|
+
super().__init__()
|
2256
|
+
|
2257
|
+
self._i = _i
|
2258
|
+
|
2259
|
+
all_seeds: ta.Iterable[_InjectorScopeSeed] = self._i.provide(InjectorKey(_InjectorScopeSeed, array=True))
|
2260
|
+
self._sks = {s.k for s in all_seeds if s.sc is type(self)}
|
2261
|
+
|
2262
|
+
#
|
2263
|
+
|
2264
|
+
@dc.dataclass(frozen=True)
|
2265
|
+
class State:
|
2266
|
+
seeds: ta.Dict[InjectorKey, ta.Any]
|
2267
|
+
provisions: ta.Dict[InjectorKey, ta.Any] = dc.field(default_factory=dict)
|
2268
|
+
|
2269
|
+
def new_state(self, vs: ta.Mapping[InjectorKey, ta.Any]) -> State:
|
2270
|
+
vs = dict(vs)
|
2271
|
+
check.equal(set(vs.keys()), self._sks)
|
2272
|
+
return InjectorScope.State(vs)
|
2273
|
+
|
2274
|
+
#
|
2275
|
+
|
2276
|
+
@abc.abstractmethod
|
2277
|
+
def state(self) -> State:
|
2278
|
+
raise NotImplementedError
|
2279
|
+
|
2280
|
+
@abc.abstractmethod
|
2281
|
+
def enter(self, vs: ta.Mapping[InjectorKey, ta.Any]) -> ta.ContextManager[None]:
|
2282
|
+
raise NotImplementedError
|
2283
|
+
|
2284
|
+
|
2285
|
+
class ExclusiveInjectorScope(InjectorScope, abc.ABC):
|
2286
|
+
_st: ta.Optional[InjectorScope.State] = None
|
2287
|
+
|
2288
|
+
def state(self) -> InjectorScope.State:
|
2289
|
+
return check.not_none(self._st)
|
2290
|
+
|
2291
|
+
@contextlib.contextmanager
|
2292
|
+
def enter(self, vs: ta.Mapping[InjectorKey, ta.Any]) -> ta.Iterator[None]:
|
2293
|
+
check.none(self._st)
|
2294
|
+
self._st = self.new_state(vs)
|
2295
|
+
try:
|
2296
|
+
yield
|
2297
|
+
finally:
|
2298
|
+
self._st = None
|
2299
|
+
|
2300
|
+
|
2301
|
+
class ContextvarInjectorScope(InjectorScope, abc.ABC):
|
2302
|
+
_cv: contextvars.ContextVar
|
2303
|
+
|
2304
|
+
def __init_subclass__(cls, **kwargs: ta.Any) -> None:
|
2305
|
+
super().__init_subclass__(**kwargs)
|
2306
|
+
check.not_in(abc.ABC, cls.__bases__)
|
2307
|
+
check.state(not hasattr(cls, '_cv'))
|
2308
|
+
cls._cv = contextvars.ContextVar(f'{cls.__name__}_cv')
|
2309
|
+
|
2310
|
+
def state(self) -> InjectorScope.State:
|
2311
|
+
return self._cv.get()
|
2312
|
+
|
2313
|
+
@contextlib.contextmanager
|
2314
|
+
def enter(self, vs: ta.Mapping[InjectorKey, ta.Any]) -> ta.Iterator[None]:
|
2315
|
+
try:
|
2316
|
+
self._cv.get()
|
2317
|
+
except LookupError:
|
2318
|
+
pass
|
2319
|
+
else:
|
2320
|
+
raise RuntimeError(f'Scope already entered: {self}')
|
2321
|
+
st = self.new_state(vs)
|
2322
|
+
tok = self._cv.set(st)
|
2323
|
+
try:
|
2324
|
+
yield
|
2325
|
+
finally:
|
2326
|
+
self._cv.reset(tok)
|
2327
|
+
|
2328
|
+
|
2329
|
+
#
|
2330
|
+
|
2331
|
+
|
2332
|
+
@dc.dataclass(frozen=True)
|
2333
|
+
class ScopedInjectorProvider(InjectorProvider):
|
2334
|
+
p: InjectorProvider
|
2335
|
+
k: InjectorKey
|
2336
|
+
sc: ta.Type[InjectorScope]
|
2337
|
+
|
2338
|
+
def __post_init__(self) -> None:
|
2339
|
+
check.isinstance(self.p, InjectorProvider)
|
2340
|
+
check.isinstance(self.k, InjectorKey)
|
2341
|
+
check.issubclass(self.sc, InjectorScope)
|
2342
|
+
|
2343
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2344
|
+
def pfn(i: Injector) -> ta.Any:
|
2345
|
+
st = i[self.sc].state()
|
2346
|
+
try:
|
2347
|
+
return st.provisions[self.k]
|
2348
|
+
except KeyError:
|
2349
|
+
pass
|
2350
|
+
v = ufn(i)
|
2351
|
+
st.provisions[self.k] = v
|
2352
|
+
return v
|
2353
|
+
|
2354
|
+
ufn = self.p.provider_fn()
|
2355
|
+
return pfn
|
2356
|
+
|
2357
|
+
|
2358
|
+
@dc.dataclass(frozen=True)
|
2359
|
+
class _ScopeSeedInjectorProvider(InjectorProvider):
|
2360
|
+
k: InjectorKey
|
2361
|
+
sc: ta.Type[InjectorScope]
|
2362
|
+
|
2363
|
+
def __post_init__(self) -> None:
|
2364
|
+
check.isinstance(self.k, InjectorKey)
|
2365
|
+
check.issubclass(self.sc, InjectorScope)
|
2366
|
+
|
2367
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2368
|
+
def pfn(i: Injector) -> ta.Any:
|
2369
|
+
st = i[self.sc].state()
|
2370
|
+
return st.seeds[self.k]
|
2371
|
+
return pfn
|
2372
|
+
|
2373
|
+
|
2374
|
+
def bind_injector_scope(sc: ta.Type[InjectorScope]) -> InjectorBindingOrBindings:
|
2375
|
+
return InjectorBinder.bind(sc, singleton=True)
|
2376
|
+
|
2377
|
+
|
2378
|
+
#
|
2379
|
+
|
2380
|
+
|
2381
|
+
@dc.dataclass(frozen=True)
|
2382
|
+
class _InjectorScopeSeed:
|
2383
|
+
sc: ta.Type['InjectorScope']
|
2384
|
+
k: InjectorKey
|
2385
|
+
|
2386
|
+
def __post_init__(self) -> None:
|
2387
|
+
check.issubclass(self.sc, InjectorScope)
|
2388
|
+
check.isinstance(self.k, InjectorKey)
|
2389
|
+
|
2390
|
+
|
2391
|
+
def bind_injector_scope_seed(k: ta.Any, sc: ta.Type[InjectorScope]) -> InjectorBindingOrBindings:
|
2392
|
+
kk = as_injector_key(k)
|
2393
|
+
return as_injector_bindings(
|
2394
|
+
InjectorBinding(kk, _ScopeSeedInjectorProvider(kk, sc)),
|
2395
|
+
InjectorBinder.bind(_InjectorScopeSeed(sc, kk), array=True),
|
2396
|
+
)
|
2397
|
+
|
2398
|
+
|
2399
|
+
###
|
2400
|
+
# inspection
|
2401
|
+
|
2402
|
+
|
2403
|
+
class _InjectionInspection(ta.NamedTuple):
|
2404
|
+
signature: inspect.Signature
|
2405
|
+
type_hints: ta.Mapping[str, ta.Any]
|
2406
|
+
args_offset: int
|
2407
|
+
|
2408
|
+
|
2409
|
+
_INJECTION_INSPECTION_CACHE: ta.MutableMapping[ta.Any, _InjectionInspection] = weakref.WeakKeyDictionary()
|
2410
|
+
|
2411
|
+
|
2412
|
+
def _do_injection_inspect(obj: ta.Any) -> _InjectionInspection:
|
2413
|
+
tgt = obj
|
2414
|
+
if isinstance(tgt, type) and tgt.__init__ is not object.__init__: # type: ignore[misc]
|
2415
|
+
# Python 3.8's inspect.signature can't handle subclasses overriding __new__, always generating *args/**kwargs.
|
2416
|
+
# - https://bugs.python.org/issue40897
|
2417
|
+
# - https://github.com/python/cpython/commit/df7c62980d15acd3125dfbd81546dad359f7add7
|
2418
|
+
tgt = tgt.__init__ # type: ignore[misc]
|
2419
|
+
has_generic_base = True
|
2420
|
+
else:
|
2421
|
+
has_generic_base = False
|
2422
|
+
|
2423
|
+
# inspect.signature(eval_str=True) was added in 3.10 and we have to support 3.8, so we have to get_type_hints to
|
2424
|
+
# eval str annotations *in addition to* getting the signature for parameter information.
|
2425
|
+
uw = tgt
|
2426
|
+
has_partial = False
|
2427
|
+
while True:
|
2428
|
+
if isinstance(uw, functools.partial):
|
2429
|
+
has_partial = True
|
2430
|
+
uw = uw.func
|
2431
|
+
else:
|
2432
|
+
if (uw2 := inspect.unwrap(uw)) is uw:
|
2433
|
+
break
|
2434
|
+
uw = uw2
|
2435
|
+
|
2436
|
+
if has_generic_base and has_partial:
|
2437
|
+
raise InjectorError(
|
2438
|
+
'Injector inspection does not currently support both a typing.Generic base and a functools.partial: '
|
2439
|
+
f'{obj}',
|
2440
|
+
)
|
2441
|
+
|
2442
|
+
return _InjectionInspection(
|
2443
|
+
inspect.signature(tgt),
|
2444
|
+
ta.get_type_hints(uw),
|
2445
|
+
1 if has_generic_base else 0,
|
2446
|
+
)
|
2447
|
+
|
2448
|
+
|
2449
|
+
def _injection_inspect(obj: ta.Any) -> _InjectionInspection:
|
2450
|
+
try:
|
2451
|
+
return _INJECTION_INSPECTION_CACHE[obj]
|
2452
|
+
except TypeError:
|
2453
|
+
return _do_injection_inspect(obj)
|
2454
|
+
except KeyError:
|
2455
|
+
pass
|
2456
|
+
insp = _do_injection_inspect(obj)
|
2457
|
+
_INJECTION_INSPECTION_CACHE[obj] = insp
|
2458
|
+
return insp
|
2459
|
+
|
2460
|
+
|
2461
|
+
class InjectionKwarg(ta.NamedTuple):
|
2462
|
+
name: str
|
2463
|
+
key: InjectorKey
|
2464
|
+
has_default: bool
|
2465
|
+
|
2466
|
+
|
2467
|
+
class InjectionKwargsTarget(ta.NamedTuple):
|
2468
|
+
obj: ta.Any
|
2469
|
+
kwargs: ta.Sequence[InjectionKwarg]
|
2470
|
+
|
2471
|
+
|
2472
|
+
def build_injection_kwargs_target(
|
2473
|
+
obj: ta.Any,
|
2474
|
+
*,
|
2475
|
+
skip_args: int = 0,
|
2476
|
+
skip_kwargs: ta.Optional[ta.Iterable[str]] = None,
|
2477
|
+
raw_optional: bool = False,
|
2478
|
+
) -> InjectionKwargsTarget:
|
2479
|
+
insp = _injection_inspect(obj)
|
2480
|
+
|
2481
|
+
params = list(insp.signature.parameters.values())
|
2482
|
+
|
2483
|
+
skip_names: ta.Set[str] = set()
|
2484
|
+
if skip_kwargs is not None:
|
2485
|
+
skip_names.update(check.not_isinstance(skip_kwargs, str))
|
2486
|
+
|
2487
|
+
seen: ta.Set[InjectorKey] = set()
|
2488
|
+
kws: ta.List[InjectionKwarg] = []
|
2489
|
+
for p in params[insp.args_offset + skip_args:]:
|
2490
|
+
if p.name in skip_names:
|
2491
|
+
continue
|
2492
|
+
|
2493
|
+
if p.annotation is inspect.Signature.empty:
|
2494
|
+
if p.default is not inspect.Parameter.empty:
|
2495
|
+
raise KeyError(f'{obj}, {p.name}')
|
2496
|
+
continue
|
2497
|
+
|
2498
|
+
if p.kind not in (inspect.Parameter.POSITIONAL_OR_KEYWORD, inspect.Parameter.KEYWORD_ONLY):
|
2499
|
+
raise TypeError(insp)
|
2500
|
+
|
2501
|
+
# 3.8 inspect.signature doesn't eval_str but typing.get_type_hints does, so prefer that.
|
2502
|
+
ann = insp.type_hints.get(p.name, p.annotation)
|
2503
|
+
if (
|
2504
|
+
not raw_optional and
|
2505
|
+
is_optional_alias(ann)
|
2506
|
+
):
|
2507
|
+
ann = get_optional_alias_arg(ann)
|
2508
|
+
|
2509
|
+
k = as_injector_key(ann)
|
2510
|
+
|
2511
|
+
if k in seen:
|
2512
|
+
raise DuplicateInjectorKeyError(k)
|
2513
|
+
seen.add(k)
|
2514
|
+
|
2515
|
+
kws.append(InjectionKwarg(
|
2516
|
+
p.name,
|
2517
|
+
k,
|
2518
|
+
p.default is not inspect.Parameter.empty,
|
2519
|
+
))
|
2520
|
+
|
2521
|
+
return InjectionKwargsTarget(
|
2522
|
+
obj,
|
2523
|
+
kws,
|
2524
|
+
)
|
2525
|
+
|
2526
|
+
|
2527
|
+
###
|
2528
|
+
# injector
|
2529
|
+
|
2530
|
+
|
2531
|
+
_INJECTOR_INJECTOR_KEY: InjectorKey[Injector] = InjectorKey(Injector)
|
2532
|
+
|
2533
|
+
|
2534
|
+
@dc.dataclass(frozen=True)
|
2535
|
+
class _InjectorEager:
|
2536
|
+
key: InjectorKey
|
2537
|
+
|
2538
|
+
|
2539
|
+
_INJECTOR_EAGER_ARRAY_KEY: InjectorKey[_InjectorEager] = InjectorKey(_InjectorEager, array=True)
|
2540
|
+
|
2541
|
+
|
2542
|
+
class _Injector(Injector):
|
2543
|
+
_DEFAULT_BINDINGS: ta.ClassVar[ta.List[InjectorBinding]] = []
|
2544
|
+
|
2545
|
+
def __init__(self, bs: InjectorBindings, p: ta.Optional[Injector] = None) -> None:
|
2546
|
+
super().__init__()
|
2547
|
+
|
2548
|
+
self._bs = check.isinstance(bs, InjectorBindings)
|
2549
|
+
self._p: ta.Optional[Injector] = check.isinstance(p, (Injector, type(None)))
|
2550
|
+
|
2551
|
+
self._pfm = {
|
2552
|
+
k: v.provider_fn()
|
2553
|
+
for k, v in build_injector_provider_map(as_injector_bindings(
|
2554
|
+
*self._DEFAULT_BINDINGS,
|
2555
|
+
bs,
|
2556
|
+
)).items()
|
2557
|
+
}
|
2558
|
+
|
2559
|
+
if _INJECTOR_INJECTOR_KEY in self._pfm:
|
2560
|
+
raise DuplicateInjectorKeyError(_INJECTOR_INJECTOR_KEY)
|
2561
|
+
|
2562
|
+
self.__cur_req: ta.Optional[_Injector._Request] = None
|
2563
|
+
|
2564
|
+
if _INJECTOR_EAGER_ARRAY_KEY in self._pfm:
|
2565
|
+
for e in self.provide(_INJECTOR_EAGER_ARRAY_KEY):
|
2566
|
+
self.provide(e.key)
|
2567
|
+
|
2568
|
+
class _Request:
|
2569
|
+
def __init__(self, injector: '_Injector') -> None:
|
2570
|
+
super().__init__()
|
2571
|
+
self._injector = injector
|
2572
|
+
self._provisions: ta.Dict[InjectorKey, Maybe] = {}
|
2573
|
+
self._seen_keys: ta.Set[InjectorKey] = set()
|
2574
|
+
|
2575
|
+
def handle_key(self, key: InjectorKey) -> Maybe[Maybe]:
|
2576
|
+
try:
|
2577
|
+
return Maybe.just(self._provisions[key])
|
2578
|
+
except KeyError:
|
2579
|
+
pass
|
2580
|
+
if key in self._seen_keys:
|
2581
|
+
raise CyclicDependencyInjectorKeyError(key)
|
2582
|
+
self._seen_keys.add(key)
|
2583
|
+
return Maybe.empty()
|
2584
|
+
|
2585
|
+
def handle_provision(self, key: InjectorKey, mv: Maybe) -> Maybe:
|
2586
|
+
check.in_(key, self._seen_keys)
|
2587
|
+
check.not_in(key, self._provisions)
|
2588
|
+
self._provisions[key] = mv
|
2589
|
+
return mv
|
2590
|
+
|
2591
|
+
@contextlib.contextmanager
|
2592
|
+
def _current_request(self) -> ta.Generator[_Request, None, None]:
|
2593
|
+
if (cr := self.__cur_req) is not None:
|
2594
|
+
yield cr
|
2595
|
+
return
|
2596
|
+
|
2597
|
+
cr = self._Request(self)
|
2598
|
+
try:
|
2599
|
+
self.__cur_req = cr
|
2600
|
+
yield cr
|
2601
|
+
finally:
|
2602
|
+
self.__cur_req = None
|
2603
|
+
|
2604
|
+
def try_provide(self, key: ta.Any) -> Maybe[ta.Any]:
|
2605
|
+
key = as_injector_key(key)
|
2606
|
+
|
2607
|
+
cr: _Injector._Request
|
2608
|
+
with self._current_request() as cr:
|
2609
|
+
if (rv := cr.handle_key(key)).present:
|
2610
|
+
return rv.must()
|
2611
|
+
|
2612
|
+
if key == _INJECTOR_INJECTOR_KEY:
|
2613
|
+
return cr.handle_provision(key, Maybe.just(self))
|
2614
|
+
|
2615
|
+
fn = self._pfm.get(key)
|
2616
|
+
if fn is not None:
|
2617
|
+
return cr.handle_provision(key, Maybe.just(fn(self)))
|
2618
|
+
|
2619
|
+
if self._p is not None:
|
2620
|
+
pv = self._p.try_provide(key)
|
2621
|
+
if pv is not None:
|
2622
|
+
return cr.handle_provision(key, Maybe.empty())
|
2623
|
+
|
2624
|
+
return cr.handle_provision(key, Maybe.empty())
|
2625
|
+
|
2626
|
+
def provide(self, key: ta.Any) -> ta.Any:
|
2627
|
+
v = self.try_provide(key)
|
2628
|
+
if v.present:
|
2629
|
+
return v.must()
|
2630
|
+
raise UnboundInjectorKeyError(key)
|
2631
|
+
|
2632
|
+
def provide_kwargs(
|
2633
|
+
self,
|
2634
|
+
obj: ta.Any,
|
2635
|
+
*,
|
2636
|
+
skip_args: int = 0,
|
2637
|
+
skip_kwargs: ta.Optional[ta.Iterable[ta.Any]] = None,
|
2638
|
+
) -> ta.Mapping[str, ta.Any]:
|
2639
|
+
kt = build_injection_kwargs_target(
|
2640
|
+
obj,
|
2641
|
+
skip_args=skip_args,
|
2642
|
+
skip_kwargs=skip_kwargs,
|
2643
|
+
)
|
2644
|
+
|
2645
|
+
ret: ta.Dict[str, ta.Any] = {}
|
2646
|
+
for kw in kt.kwargs:
|
2647
|
+
if kw.has_default:
|
2648
|
+
if not (mv := self.try_provide(kw.key)).present:
|
2649
|
+
continue
|
2650
|
+
v = mv.must()
|
2651
|
+
else:
|
2652
|
+
v = self.provide(kw.key)
|
2653
|
+
ret[kw.name] = v
|
2654
|
+
return ret
|
2655
|
+
|
2656
|
+
def inject(
|
2657
|
+
self,
|
2658
|
+
obj: ta.Any,
|
2659
|
+
*,
|
2660
|
+
args: ta.Optional[ta.Sequence[ta.Any]] = None,
|
2661
|
+
kwargs: ta.Optional[ta.Mapping[str, ta.Any]] = None,
|
2662
|
+
) -> ta.Any:
|
2663
|
+
provided = self.provide_kwargs(
|
2664
|
+
obj,
|
2665
|
+
skip_args=len(args) if args is not None else 0,
|
2666
|
+
skip_kwargs=kwargs if kwargs is not None else None,
|
2667
|
+
)
|
2668
|
+
|
2669
|
+
return obj(
|
2670
|
+
*(args if args is not None else ()),
|
2671
|
+
**(kwargs if kwargs is not None else {}),
|
2672
|
+
**provided,
|
2673
|
+
)
|
2674
|
+
|
2675
|
+
|
2676
|
+
###
|
2677
|
+
# binder
|
2678
|
+
|
2679
|
+
|
2680
|
+
class InjectorBinder:
|
2681
|
+
def __new__(cls, *args, **kwargs): # noqa
|
2682
|
+
raise TypeError
|
2683
|
+
|
2684
|
+
_FN_TYPES: ta.ClassVar[ta.Tuple[type, ...]] = (
|
2685
|
+
types.FunctionType,
|
2686
|
+
types.MethodType,
|
2687
|
+
|
2688
|
+
classmethod,
|
2689
|
+
staticmethod,
|
2690
|
+
|
2691
|
+
functools.partial,
|
2692
|
+
functools.partialmethod,
|
2693
|
+
)
|
2694
|
+
|
2695
|
+
@classmethod
|
2696
|
+
def _is_fn(cls, obj: ta.Any) -> bool:
|
2697
|
+
return isinstance(obj, cls._FN_TYPES)
|
2698
|
+
|
2699
|
+
@classmethod
|
2700
|
+
def bind_as_fn(cls, icls: ta.Type[T]) -> ta.Type[T]:
|
2701
|
+
check.isinstance(icls, type)
|
2702
|
+
if icls not in cls._FN_TYPES:
|
2703
|
+
cls._FN_TYPES = (*cls._FN_TYPES, icls)
|
2704
|
+
return icls
|
2705
|
+
|
2706
|
+
_BANNED_BIND_TYPES: ta.ClassVar[ta.Tuple[type, ...]] = (
|
2707
|
+
InjectorProvider,
|
2708
|
+
)
|
2709
|
+
|
2710
|
+
@classmethod
|
2711
|
+
def bind(
|
2712
|
+
cls,
|
2713
|
+
obj: ta.Any,
|
2714
|
+
*,
|
2715
|
+
key: ta.Any = None,
|
2716
|
+
tag: ta.Any = None,
|
2717
|
+
array: ta.Optional[bool] = None, # noqa
|
2718
|
+
|
2719
|
+
to_fn: ta.Any = None,
|
2720
|
+
to_ctor: ta.Any = None,
|
2721
|
+
to_const: ta.Any = None,
|
2722
|
+
to_key: ta.Any = None,
|
2723
|
+
|
2724
|
+
in_: ta.Optional[ta.Type[InjectorScope]] = None,
|
2725
|
+
singleton: bool = False,
|
2726
|
+
|
2727
|
+
eager: bool = False,
|
2728
|
+
) -> InjectorBindingOrBindings:
|
2729
|
+
if obj is None or obj is inspect.Parameter.empty:
|
2730
|
+
raise TypeError(obj)
|
2731
|
+
if isinstance(obj, cls._BANNED_BIND_TYPES):
|
2732
|
+
raise TypeError(obj)
|
2733
|
+
|
2734
|
+
#
|
2735
|
+
|
2736
|
+
if key is not None:
|
2737
|
+
key = as_injector_key(key)
|
2738
|
+
|
2739
|
+
#
|
2740
|
+
|
2741
|
+
has_to = (
|
2742
|
+
to_fn is not None or
|
2743
|
+
to_ctor is not None or
|
2744
|
+
to_const is not None or
|
2745
|
+
to_key is not None
|
2746
|
+
)
|
2747
|
+
if isinstance(obj, InjectorKey):
|
2748
|
+
if key is None:
|
2749
|
+
key = obj
|
2750
|
+
elif isinstance(obj, type):
|
2751
|
+
if not has_to:
|
2752
|
+
to_ctor = obj
|
2753
|
+
if key is None:
|
2754
|
+
key = InjectorKey(obj)
|
2755
|
+
elif cls._is_fn(obj) and not has_to:
|
2756
|
+
to_fn = obj
|
2757
|
+
if key is None:
|
2758
|
+
insp = _injection_inspect(obj)
|
2759
|
+
key_cls: ta.Any = check_valid_injector_key_cls(check.not_none(insp.type_hints.get('return')))
|
2760
|
+
key = InjectorKey(key_cls)
|
2761
|
+
else:
|
2762
|
+
if to_const is not None:
|
2763
|
+
raise TypeError('Cannot bind instance with to_const')
|
2764
|
+
to_const = obj
|
2765
|
+
if key is None:
|
2766
|
+
key = InjectorKey(type(obj))
|
2767
|
+
del has_to
|
2768
|
+
|
2769
|
+
#
|
2770
|
+
|
2771
|
+
if tag is not None:
|
2772
|
+
if key.tag is not None:
|
2773
|
+
raise TypeError('Tag already set')
|
2774
|
+
key = dc.replace(key, tag=tag)
|
2775
|
+
|
2776
|
+
if array is not None:
|
2777
|
+
key = dc.replace(key, array=array)
|
2778
|
+
|
2779
|
+
#
|
2780
|
+
|
2781
|
+
providers: ta.List[InjectorProvider] = []
|
2782
|
+
if to_fn is not None:
|
2783
|
+
providers.append(FnInjectorProvider(to_fn))
|
2784
|
+
if to_ctor is not None:
|
2785
|
+
providers.append(CtorInjectorProvider(to_ctor))
|
2786
|
+
if to_const is not None:
|
2787
|
+
providers.append(ConstInjectorProvider(to_const))
|
2788
|
+
if to_key is not None:
|
2789
|
+
providers.append(LinkInjectorProvider(as_injector_key(to_key)))
|
2790
|
+
if not providers:
|
2791
|
+
raise TypeError('Must specify provider')
|
2792
|
+
if len(providers) > 1:
|
2793
|
+
raise TypeError('May not specify multiple providers')
|
2794
|
+
provider = check.single(providers)
|
2795
|
+
|
2796
|
+
#
|
2797
|
+
|
2798
|
+
pws: ta.List[ta.Any] = []
|
2799
|
+
if in_ is not None:
|
2800
|
+
check.issubclass(in_, InjectorScope)
|
2801
|
+
check.not_in(abc.ABC, in_.__bases__)
|
2802
|
+
pws.append(functools.partial(ScopedInjectorProvider, k=key, sc=in_))
|
2803
|
+
if singleton:
|
2804
|
+
pws.append(SingletonInjectorProvider)
|
2805
|
+
if len(pws) > 1:
|
2806
|
+
raise TypeError('May not specify multiple provider wrappers')
|
2807
|
+
elif pws:
|
2808
|
+
provider = check.single(pws)(provider)
|
2809
|
+
|
2810
|
+
#
|
2811
|
+
|
2812
|
+
binding = InjectorBinding(key, provider)
|
2813
|
+
|
2814
|
+
#
|
2815
|
+
|
2816
|
+
extras: ta.List[InjectorBinding] = []
|
2817
|
+
|
2818
|
+
if eager:
|
2819
|
+
extras.append(bind_injector_eager_key(key))
|
2820
|
+
|
2821
|
+
#
|
2822
|
+
|
2823
|
+
if extras:
|
2824
|
+
return as_injector_bindings(binding, *extras)
|
2825
|
+
else:
|
2826
|
+
return binding
|
2827
|
+
|
2828
|
+
|
2829
|
+
###
|
2830
|
+
# injection helpers
|
2831
|
+
|
2832
|
+
|
2833
|
+
def make_injector_factory(
|
2834
|
+
fn: ta.Callable[..., T],
|
2835
|
+
cls: U,
|
2836
|
+
ann: ta.Any = None,
|
2837
|
+
) -> ta.Callable[..., U]:
|
2838
|
+
if ann is None:
|
2839
|
+
ann = cls
|
2840
|
+
|
2841
|
+
def outer(injector: Injector) -> ann:
|
2842
|
+
def inner(*args, **kwargs):
|
2843
|
+
return injector.inject(fn, args=args, kwargs=kwargs)
|
2844
|
+
return cls(inner) # type: ignore
|
2845
|
+
|
2846
|
+
return outer
|
2847
|
+
|
2848
|
+
|
2849
|
+
def bind_injector_array(
|
2850
|
+
obj: ta.Any = None,
|
2851
|
+
*,
|
2852
|
+
tag: ta.Any = None,
|
2853
|
+
) -> InjectorBindingOrBindings:
|
2854
|
+
key = as_injector_key(obj)
|
2855
|
+
if tag is not None:
|
2856
|
+
if key.tag is not None:
|
2857
|
+
raise ValueError('Must not specify multiple tags')
|
2858
|
+
key = dc.replace(key, tag=tag)
|
2859
|
+
|
2860
|
+
if key.array:
|
2861
|
+
raise ValueError('Key must not be array')
|
2862
|
+
|
2863
|
+
return InjectorBinding(
|
2864
|
+
dc.replace(key, array=True),
|
2865
|
+
ArrayInjectorProvider([]),
|
2866
|
+
)
|
2867
|
+
|
2868
|
+
|
2869
|
+
def make_injector_array_type(
|
2870
|
+
ele: ta.Union[InjectorKey, InjectorKeyCls],
|
2871
|
+
cls: U,
|
2872
|
+
ann: ta.Any = None,
|
2873
|
+
) -> ta.Callable[..., U]:
|
2874
|
+
if isinstance(ele, InjectorKey):
|
2875
|
+
if not ele.array:
|
2876
|
+
raise InjectorError('Provided key must be array', ele)
|
2877
|
+
key = ele
|
2878
|
+
else:
|
2879
|
+
key = dc.replace(as_injector_key(ele), array=True)
|
2880
|
+
|
2881
|
+
if ann is None:
|
2882
|
+
ann = cls
|
2883
|
+
|
2884
|
+
def inner(injector: Injector) -> ann:
|
2885
|
+
return cls(injector.provide(key)) # type: ignore[operator]
|
2886
|
+
|
2887
|
+
return inner
|
2888
|
+
|
2889
|
+
|
2890
|
+
def bind_injector_eager_key(key: ta.Any) -> InjectorBinding:
|
2891
|
+
return InjectorBinding(_INJECTOR_EAGER_ARRAY_KEY, ConstInjectorProvider(_InjectorEager(as_injector_key(key))))
|
2892
|
+
|
2893
|
+
|
2894
|
+
###
|
2895
|
+
# api
|
2896
|
+
|
2897
|
+
|
2898
|
+
class InjectionApi:
|
2899
|
+
# keys
|
2900
|
+
|
2901
|
+
def as_key(self, o: ta.Any) -> InjectorKey:
|
2902
|
+
return as_injector_key(o)
|
2903
|
+
|
2904
|
+
def array(self, o: ta.Any) -> InjectorKey:
|
2905
|
+
return dc.replace(as_injector_key(o), array=True)
|
2906
|
+
|
2907
|
+
def tag(self, o: ta.Any, t: ta.Any) -> InjectorKey:
|
2908
|
+
return dc.replace(as_injector_key(o), tag=t)
|
2909
|
+
|
2910
|
+
# bindings
|
2911
|
+
|
2912
|
+
def as_bindings(self, *args: InjectorBindingOrBindings) -> InjectorBindings:
|
2913
|
+
return as_injector_bindings(*args)
|
2914
|
+
|
2915
|
+
# overrides
|
2916
|
+
|
2917
|
+
def override(self, p: InjectorBindings, *args: InjectorBindingOrBindings) -> InjectorBindings:
|
2918
|
+
return injector_override(p, *args)
|
2919
|
+
|
2920
|
+
# scopes
|
2921
|
+
|
2922
|
+
def bind_scope(self, sc: ta.Type[InjectorScope]) -> InjectorBindingOrBindings:
|
2923
|
+
return bind_injector_scope(sc)
|
2924
|
+
|
2925
|
+
def bind_scope_seed(self, k: ta.Any, sc: ta.Type[InjectorScope]) -> InjectorBindingOrBindings:
|
2926
|
+
return bind_injector_scope_seed(k, sc)
|
2927
|
+
|
2928
|
+
# injector
|
2929
|
+
|
2930
|
+
def create_injector(self, *args: InjectorBindingOrBindings, parent: ta.Optional[Injector] = None) -> Injector:
|
2931
|
+
return _Injector(as_injector_bindings(*args), parent)
|
2932
|
+
|
2933
|
+
# binder
|
2934
|
+
|
2935
|
+
def bind(
|
2936
|
+
self,
|
2937
|
+
obj: ta.Any,
|
2938
|
+
*,
|
2939
|
+
key: ta.Any = None,
|
2940
|
+
tag: ta.Any = None,
|
2941
|
+
array: ta.Optional[bool] = None, # noqa
|
2942
|
+
|
2943
|
+
to_fn: ta.Any = None,
|
2944
|
+
to_ctor: ta.Any = None,
|
2945
|
+
to_const: ta.Any = None,
|
2946
|
+
to_key: ta.Any = None,
|
2947
|
+
|
2948
|
+
in_: ta.Optional[ta.Type[InjectorScope]] = None,
|
2949
|
+
singleton: bool = False,
|
2950
|
+
|
2951
|
+
eager: bool = False,
|
2952
|
+
) -> InjectorBindingOrBindings:
|
2953
|
+
return InjectorBinder.bind(
|
2954
|
+
obj,
|
2955
|
+
|
2956
|
+
key=key,
|
2957
|
+
tag=tag,
|
2958
|
+
array=array,
|
2959
|
+
|
2960
|
+
to_fn=to_fn,
|
2961
|
+
to_ctor=to_ctor,
|
2962
|
+
to_const=to_const,
|
2963
|
+
to_key=to_key,
|
2964
|
+
|
2965
|
+
in_=in_,
|
2966
|
+
singleton=singleton,
|
2967
|
+
|
2968
|
+
eager=eager,
|
2969
|
+
)
|
2970
|
+
|
2971
|
+
# helpers
|
2972
|
+
|
2973
|
+
def bind_factory(
|
2974
|
+
self,
|
2975
|
+
fn: ta.Callable[..., T],
|
2976
|
+
cls_: U,
|
2977
|
+
ann: ta.Any = None,
|
2978
|
+
) -> InjectorBindingOrBindings:
|
2979
|
+
return self.bind(make_injector_factory(fn, cls_, ann))
|
2980
|
+
|
2981
|
+
def bind_array(
|
2982
|
+
self,
|
2983
|
+
obj: ta.Any = None,
|
2984
|
+
*,
|
2985
|
+
tag: ta.Any = None,
|
2986
|
+
) -> InjectorBindingOrBindings:
|
2987
|
+
return bind_injector_array(obj, tag=tag)
|
2988
|
+
|
2989
|
+
def bind_array_type(
|
2990
|
+
self,
|
2991
|
+
ele: ta.Union[InjectorKey, InjectorKeyCls],
|
2992
|
+
cls_: U,
|
2993
|
+
ann: ta.Any = None,
|
2994
|
+
) -> InjectorBindingOrBindings:
|
2995
|
+
return self.bind(make_injector_array_type(ele, cls_, ann))
|
2996
|
+
|
2997
|
+
|
2998
|
+
inj = InjectionApi()
|
2999
|
+
|
3000
|
+
|
+########################################
+# ../../../omlish/lite/runtime.py
+
+
+@cached_nullary
+def is_debugger_attached() -> bool:
+    return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
+
+
+LITE_REQUIRED_PYTHON_VERSION = (3, 8)
+
+
+def check_lite_runtime_version() -> None:
+    if sys.version_info < LITE_REQUIRED_PYTHON_VERSION:
+        raise OSError(f'Requires python {LITE_REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
+
+
+########################################
+# ../../../omlish/lite/timing.py
+
+
+LogTimingContext.DEFAULT_LOG = log
+
+log_timing_context = log_timing_context  # noqa
+
+
+########################################
+# ../../../omlish/logs/json.py
+"""
+TODO:
+ - translate json keys
+"""
+
+
+class JsonLogFormatter(logging.Formatter):
+    KEYS: ta.Mapping[str, bool] = {
+        'name': False,
+        'msg': False,
+        'args': False,
+        'levelname': False,
+        'levelno': False,
+        'pathname': False,
+        'filename': False,
+        'module': False,
+        'exc_info': True,
+        'exc_text': True,
+        'stack_info': True,
+        'lineno': False,
+        'funcName': False,
+        'created': False,
+        'msecs': False,
+        'relativeCreated': False,
+        'thread': False,
+        'threadName': False,
+        'processName': False,
+        'process': False,
+    }
+
+    def __init__(
+            self,
+            *args: ta.Any,
+            json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
+            **kwargs: ta.Any,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+
+        if json_dumps is None:
+            json_dumps = json_dumps_compact
+        self._json_dumps = json_dumps
+
+    def format(self, record: logging.LogRecord) -> str:
+        dct = {
+            k: v
+            for k, o in self.KEYS.items()
+            for v in [getattr(record, k)]
+            if not (o and v is None)
+        }
+        return self._json_dumps(dct)
+
+
+########################################
+# ../../../omlish/os/temp.py
+
+
+def make_temp_file(**kwargs: ta.Any) -> str:
+    file_fd, file = tempfile.mkstemp(**kwargs)
+    os.close(file_fd)
+    return file
+
+
+@contextlib.contextmanager
+def temp_file_context(**kwargs: ta.Any) -> ta.Iterator[str]:
+    path = make_temp_file(**kwargs)
+    try:
+        yield path
+    finally:
+        unlink_if_exists(path)
+
+
+@contextlib.contextmanager
+def temp_dir_context(
+        root_dir: ta.Optional[str] = None,
+        **kwargs: ta.Any,
+) -> ta.Iterator[str]:
+    path = tempfile.mkdtemp(dir=root_dir, **kwargs)
+    try:
+        yield path
+    finally:
+        shutil.rmtree(path, ignore_errors=True)
+
+
+@contextlib.contextmanager
+def temp_named_file_context(
+        root_dir: ta.Optional[str] = None,
+        cleanup: bool = True,
+        **kwargs: ta.Any,
+) -> ta.Iterator[tempfile._TemporaryFileWrapper]:  # noqa
+    with tempfile.NamedTemporaryFile(dir=root_dir, delete=False, **kwargs) as f:
+        try:
+            yield f
+        finally:
+            if cleanup:
+                shutil.rmtree(f.name, ignore_errors=True)
+
+
+########################################
+# ../cache.py
+
+
+CacheVersion = ta.NewType('CacheVersion', int)
+
+
+##
+
+
+class FileCache(abc.ABC):
+    DEFAULT_CACHE_VERSION: ta.ClassVar[CacheVersion] = CacheVersion(CI_CACHE_VERSION)
+
+    def __init__(
+            self,
+            *,
+            version: ta.Optional[CacheVersion] = None,
+    ) -> None:
+        super().__init__()
+
+        if version is None:
+            version = self.DEFAULT_CACHE_VERSION
+        check.isinstance(version, int)
+        check.arg(version >= 0)
+        self._version: CacheVersion = version
+
+    @property
+    def version(self) -> CacheVersion:
+        return self._version
+
+    #
+
+    @abc.abstractmethod
+    def get_file(self, key: str) -> ta.Awaitable[ta.Optional[str]]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def put_file(
+            self,
+            key: str,
+            file_path: str,
+            *,
+            steal: bool = False,
+    ) -> ta.Awaitable[str]:
+        raise NotImplementedError
+
+
+#
+
+
+class DirectoryFileCache(FileCache):
+    @dc.dataclass(frozen=True)
+    class Config:
+        dir: str
+
+        no_create: bool = False
+        no_purge: bool = False
+
+    def __init__(
+            self,
+            config: Config,
+            *,
+            version: ta.Optional[CacheVersion] = None,
+    ) -> None:  # noqa
+        super().__init__(
+            version=version,
+        )
+
+        self._config = config
+
+    @property
+    def dir(self) -> str:
+        return self._config.dir
+
+    #
+
+    VERSION_FILE_NAME = '.ci-cache-version'
+
+    @cached_nullary
+    def setup_dir(self) -> None:
+        version_file = os.path.join(self.dir, self.VERSION_FILE_NAME)
+
+        if self._config.no_create:
+            check.state(os.path.isdir(self.dir))
+
+        elif not os.path.isdir(self.dir):
+            os.makedirs(self.dir)
+            with open(version_file, 'w') as f:
+                f.write(str(self._version))
+            return
+
+        # NOTE: intentionally raises FileNotFoundError to refuse to use an existing non-cache dir as a cache dir.
+        with open(version_file) as f:
+            dir_version = int(f.read().strip())
+
+        if dir_version == self._version:
+            return
+
+        if self._config.no_purge:
+            raise RuntimeError(f'{dir_version=} != {self._version=}')
+
+        dirs = [n for n in sorted(os.listdir(self.dir)) if os.path.isdir(os.path.join(self.dir, n))]
+        if dirs:
+            raise RuntimeError(
+                f'Refusing to remove stale cache dir {self.dir!r} '
+                f'due to present directories: {", ".join(dirs)}',
+            )
+
+        for n in sorted(os.listdir(self.dir)):
+            if n.startswith('.'):
+                continue
+            fp = os.path.join(self.dir, n)
+            check.state(os.path.isfile(fp))
+            log.debug('Purging stale cache file: %s', fp)
+            os.unlink(fp)
+
+        os.unlink(version_file)
+
+        with open(version_file, 'w') as f:
+            f.write(str(self._version))
+
+    #
+
+    def get_cache_file_path(
+            self,
+            key: str,
+    ) -> str:
+        self.setup_dir()
+        return os.path.join(self.dir, key)
+
+    def format_incomplete_file(self, f: str) -> str:
+        return os.path.join(os.path.dirname(f), f'_{os.path.basename(f)}.incomplete')
+
+    #
 
+    async def get_file(self, key: str) -> ta.Optional[str]:
+        cache_file_path = self.get_cache_file_path(key)
+        if not os.path.exists(cache_file_path):
+            return None
+        return cache_file_path
 
-
-
-
-
-
-
+    async def put_file(
+            self,
+            key: str,
|
+
file_path: str,
|
3271
|
+
*,
|
3272
|
+
steal: bool = False,
|
3273
|
+
) -> str:
|
3274
|
+
cache_file_path = self.get_cache_file_path(key)
|
3275
|
+
if steal:
|
3276
|
+
shutil.move(file_path, cache_file_path)
|
3277
|
+
else:
|
3278
|
+
shutil.copyfile(file_path, cache_file_path)
|
3279
|
+
return cache_file_path
|
1914
3280
|
|
1915
3281
|
|
1916
|
-
|
1917
|
-
async def adefer(fn: ta.Callable) -> ta.AsyncGenerator[ta.Callable, None]:
|
1918
|
-
try:
|
1919
|
-
yield fn
|
1920
|
-
finally:
|
1921
|
-
await fn()
|
3282
|
+
##
|
1922
3283
|
|
1923
3284
|
|
1924
|
-
|
3285
|
+
class DataCache:
|
3286
|
+
@dc.dataclass(frozen=True)
|
3287
|
+
class Data(abc.ABC): # noqa
|
3288
|
+
pass
|
1925
3289
|
|
3290
|
+
@dc.dataclass(frozen=True)
|
3291
|
+
class BytesData(Data):
|
3292
|
+
data: bytes
|
1926
3293
|
|
1927
|
-
@
|
1928
|
-
|
1929
|
-
|
1930
|
-
orig = getattr(obj, attr, not_set)
|
1931
|
-
try:
|
1932
|
-
setattr(obj, attr, val)
|
1933
|
-
if orig is not not_set:
|
1934
|
-
yield orig
|
1935
|
-
else:
|
1936
|
-
yield default
|
1937
|
-
finally:
|
1938
|
-
if orig is not_set:
|
1939
|
-
delattr(obj, attr)
|
1940
|
-
else:
|
1941
|
-
setattr(obj, attr, orig)
|
3294
|
+
@dc.dataclass(frozen=True)
|
3295
|
+
class FileData(Data):
|
3296
|
+
file_path: str
|
1942
3297
|
|
3298
|
+
@dc.dataclass(frozen=True)
|
3299
|
+
class UrlData(Data):
|
3300
|
+
url: str
|
1943
3301
|
|
1944
|
-
|
3302
|
+
#
|
1945
3303
|
|
3304
|
+
@abc.abstractmethod
|
3305
|
+
def get_data(self, key: str) -> ta.Awaitable[ta.Optional[Data]]:
|
3306
|
+
raise NotImplementedError
|
1946
3307
|
|
1947
|
-
|
1948
|
-
def
|
1949
|
-
|
3308
|
+
@abc.abstractmethod
|
3309
|
+
def put_data(self, key: str, data: Data) -> ta.Awaitable[None]:
|
3310
|
+
raise NotImplementedError
|
1950
3311
|
|
1951
|
-
async def __aenter__(self):
|
1952
|
-
return self.thing
|
1953
3312
|
|
1954
|
-
|
1955
|
-
await self.thing.aclose()
|
3313
|
+
#
|
1956
3314
|
|
1957
3315
|
|
1958
|
-
|
1959
|
-
|
3316
|
+
@functools.singledispatch
|
3317
|
+
async def read_data_cache_data(data: DataCache.Data) -> bytes:
|
3318
|
+
raise TypeError(data)
|
1960
3319
|
|
1961
3320
|
|
1962
|
-
@
|
1963
|
-
def
|
1964
|
-
return
|
3321
|
+
@read_data_cache_data.register
|
3322
|
+
async def _(data: DataCache.BytesData) -> bytes:
|
3323
|
+
return data.data
|
1965
3324
|
|
1966
3325
|
|
1967
|
-
|
3326
|
+
@read_data_cache_data.register
|
3327
|
+
async def _(data: DataCache.FileData) -> bytes:
|
3328
|
+
with open(data.file_path, 'rb') as f: # noqa
|
3329
|
+
return f.read()
|
1968
3330
|
|
1969
3331
|
|
1970
|
-
|
1971
|
-
|
1972
|
-
|
3332
|
+
@read_data_cache_data.register
|
3333
|
+
async def _(data: DataCache.UrlData) -> bytes:
|
3334
|
+
def inner() -> bytes:
|
3335
|
+
with urllib.request.urlopen(urllib.request.Request( # noqa
|
3336
|
+
data.url,
|
3337
|
+
)) as resp:
|
3338
|
+
return resp.read()
|
1973
3339
|
|
3340
|
+
return await asyncio.get_running_loop().run_in_executor(None, inner)
|
1974
3341
|
|
1975
|
-
########################################
|
1976
|
-
# ../../../omlish/logs/json.py
|
1977
|
-
"""
|
1978
|
-
TODO:
|
1979
|
-
- translate json keys
|
1980
|
-
"""
|
1981
3342
|
|
3343
|
+
#
|
1982
3344
|
|
1983
|
-
class JsonLogFormatter(logging.Formatter):
|
1984
|
-
KEYS: ta.Mapping[str, bool] = {
|
1985
|
-
'name': False,
|
1986
|
-
'msg': False,
|
1987
|
-
'args': False,
|
1988
|
-
'levelname': False,
|
1989
|
-
'levelno': False,
|
1990
|
-
'pathname': False,
|
1991
|
-
'filename': False,
|
1992
|
-
'module': False,
|
1993
|
-
'exc_info': True,
|
1994
|
-
'exc_text': True,
|
1995
|
-
'stack_info': True,
|
1996
|
-
'lineno': False,
|
1997
|
-
'funcName': False,
|
1998
|
-
'created': False,
|
1999
|
-
'msecs': False,
|
2000
|
-
'relativeCreated': False,
|
2001
|
-
'thread': False,
|
2002
|
-
'threadName': False,
|
2003
|
-
'processName': False,
|
2004
|
-
'process': False,
|
2005
|
-
}
|
2006
3345
|
|
3346
|
+
class FileCacheDataCache(DataCache):
|
2007
3347
|
def __init__(
|
2008
3348
|
self,
|
2009
|
-
|
2010
|
-
json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
|
2011
|
-
**kwargs: ta.Any,
|
3349
|
+
file_cache: FileCache,
|
2012
3350
|
) -> None:
|
2013
|
-
super().__init__(
|
2014
|
-
|
2015
|
-
if json_dumps is None:
|
2016
|
-
json_dumps = json_dumps_compact
|
2017
|
-
self._json_dumps = json_dumps
|
2018
|
-
|
2019
|
-
def format(self, record: logging.LogRecord) -> str:
|
2020
|
-
dct = {
|
2021
|
-
k: v
|
2022
|
-
for k, o in self.KEYS.items()
|
2023
|
-
for v in [getattr(record, k)]
|
2024
|
-
if not (o and v is None)
|
2025
|
-
}
|
2026
|
-
return self._json_dumps(dct)
|
2027
|
-
|
3351
|
+
super().__init__()
|
2028
3352
|
|
2029
|
-
|
2030
|
-
# ../../../omlish/os/temp.py
|
3353
|
+
self._file_cache = file_cache
|
2031
3354
|
|
3355
|
+
async def get_data(self, key: str) -> ta.Optional[DataCache.Data]:
|
3356
|
+
if (file_path := await self._file_cache.get_file(key)) is None:
|
3357
|
+
return None
|
2032
3358
|
|
2033
|
-
|
2034
|
-
file_fd, file = tempfile.mkstemp(**kwargs)
|
2035
|
-
os.close(file_fd)
|
2036
|
-
return file
|
3359
|
+
return DataCache.FileData(file_path)
|
2037
3360
|
|
3361
|
+
async def put_data(self, key: str, data: DataCache.Data) -> None:
|
3362
|
+
steal = False
|
2038
3363
|
|
2039
|
-
|
2040
|
-
|
2041
|
-
|
2042
|
-
|
2043
|
-
|
2044
|
-
finally:
|
2045
|
-
unlink_if_exists(path)
|
3364
|
+
if isinstance(data, DataCache.BytesData):
|
3365
|
+
file_path = make_temp_file()
|
3366
|
+
with open(file_path, 'wb') as f: # noqa
|
3367
|
+
f.write(data.data)
|
3368
|
+
steal = True
|
2046
3369
|
|
3370
|
+
elif isinstance(data, DataCache.FileData):
|
3371
|
+
file_path = data.file_path
|
2047
3372
|
|
2048
|
-
|
2049
|
-
|
2050
|
-
root_dir: ta.Optional[str] = None,
|
2051
|
-
**kwargs: ta.Any,
|
2052
|
-
) -> ta.Iterator[str]:
|
2053
|
-
path = tempfile.mkdtemp(dir=root_dir, **kwargs)
|
2054
|
-
try:
|
2055
|
-
yield path
|
2056
|
-
finally:
|
2057
|
-
shutil.rmtree(path, ignore_errors=True)
|
3373
|
+
elif isinstance(data, DataCache.UrlData):
|
3374
|
+
raise NotImplementedError
|
2058
3375
|
|
3376
|
+
else:
|
3377
|
+
raise TypeError(data)
|
2059
3378
|
|
2060
|
-
|
2061
|
-
|
2062
|
-
|
2063
|
-
|
2064
|
-
|
2065
|
-
) -> ta.Iterator[tempfile._TemporaryFileWrapper]: # noqa
|
2066
|
-
with tempfile.NamedTemporaryFile(dir=root_dir, delete=False, **kwargs) as f:
|
2067
|
-
try:
|
2068
|
-
yield f
|
2069
|
-
finally:
|
2070
|
-
if cleanup:
|
2071
|
-
shutil.rmtree(f.name, ignore_errors=True)
|
3379
|
+
await self._file_cache.put_file(
|
3380
|
+
key,
|
3381
|
+
file_path,
|
3382
|
+
steal=steal,
|
3383
|
+
)
|
2072
3384
|
|
2073
3385
|
|
2074
3386
|
########################################
|
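The DataCache layer added above is a thin veneer over FileCache: FileCacheDataCache spills BytesData to a temp file and steals it into the backing cache, and hands back FileData on reads. A minimal usage sketch, assuming the names defined in this amalgamated script (DirectoryFileCache, FileCacheDataCache, DataCache, read_data_cache_data) are in scope; the cache path is invented for illustration:

    import asyncio
    import os
    import tempfile

    async def _demo() -> None:
        # DirectoryFileCache refuses to adopt an existing non-cache dir, so point it at a path it can create itself.
        cache_dir = os.path.join(tempfile.mkdtemp(), 'ci-cache')
        data_cache = FileCacheDataCache(DirectoryFileCache(DirectoryFileCache.Config(dir=cache_dir)))

        await data_cache.put_data('example-key', DataCache.BytesData(b'hello'))

        hit = await data_cache.get_data('example-key')  # DataCache.FileData pointing into cache_dir
        if hit is not None:
            print(await read_data_cache_data(hit))  # b'hello'

    asyncio.run(_demo())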
@@ -2086,6 +3398,9 @@ class GithubCacheClient(abc.ABC):
    def get_entry(self, key: str) -> ta.Awaitable[ta.Optional[Entry]]:
        raise NotImplementedError

+    def get_entry_url(self, entry: Entry) -> ta.Optional[str]:
+        return None
+
    @abc.abstractmethod
    def download_file(self, entry: Entry, out_file: str) -> ta.Awaitable[None]:
        raise NotImplementedError
@@ -2152,7 +3467,7 @@ class GithubCacheServiceV1BaseClient(GithubCacheClient, abc.ABC):
    def _get_loop(self) -> asyncio.AbstractEventLoop:
        if (loop := self._given_loop) is not None:
            return loop
-        return asyncio.
+        return asyncio.get_running_loop()

    #

@@ -2280,6 +3595,10 @@ class GithubCacheServiceV1BaseClient(GithubCacheClient, abc.ABC):
    class Entry(GithubCacheClient.Entry):
        artifact: GithubCacheServiceV1.ArtifactCacheEntry

+    def get_entry_url(self, entry: GithubCacheClient.Entry) -> ta.Optional[str]:
+        entry1 = check.isinstance(entry, self.Entry)
+        return entry1.artifact.cache_key
+
    #

    def build_get_entry_url_path(self, *keys: str) -> str:
@@ -2911,7 +4230,70 @@ class BaseSubprocesses(abc.ABC):  # noqa
##


+@dc.dataclass(frozen=True)
+class SubprocessRun:
+    cmd: ta.Sequence[str]
+    input: ta.Any = None
+    timeout: ta.Optional[float] = None
+    check: bool = False
+    capture_output: ta.Optional[bool] = None
+    kwargs: ta.Optional[ta.Mapping[str, ta.Any]] = None
+
+    @classmethod
+    def of(
+            cls,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> 'SubprocessRun':
+        return cls(
+            cmd=cmd,
+            input=input,
+            timeout=timeout,
+            check=check,
+            capture_output=capture_output,
+            kwargs=kwargs,
+        )
+
+
+@dc.dataclass(frozen=True)
+class SubprocessRunOutput(ta.Generic[T]):
+    proc: T
+
+    returncode: int  # noqa
+
+    stdout: ta.Optional[bytes] = None
+    stderr: ta.Optional[bytes] = None
+
+
class AbstractSubprocesses(BaseSubprocesses, abc.ABC):
+    @abc.abstractmethod
+    def run_(self, run: SubprocessRun) -> SubprocessRunOutput:
+        raise NotImplementedError
+
+    def run(
+            self,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> SubprocessRunOutput:
+        return self.run_(SubprocessRun(
+            cmd=cmd,
+            input=input,
+            timeout=timeout,
+            check=check,
+            capture_output=capture_output,
+            kwargs=kwargs,
+        ))
+
+    #
+
    @abc.abstractmethod
    def check_call(
            self,
@@ -2975,6 +4357,25 @@ class AbstractSubprocesses(BaseSubprocesses, abc.ABC):


class Subprocesses(AbstractSubprocesses):
+    def run_(self, run: SubprocessRun) -> SubprocessRunOutput[subprocess.CompletedProcess]:
+        proc = subprocess.run(
+            run.cmd,
+            input=run.input,
+            timeout=run.timeout,
+            check=run.check,
+            capture_output=run.capture_output or False,
+            **(run.kwargs or {}),
+        )
+
+        return SubprocessRunOutput(
+            proc=proc,
+
+            returncode=proc.returncode,
+
+            stdout=proc.stdout,  # noqa
+            stderr=proc.stderr,  # noqa
+        )
+
    def check_call(
            self,
            *cmd: str,
@@ -3000,6 +4401,30 @@ subprocesses = Subprocesses()


class AbstractAsyncSubprocesses(BaseSubprocesses):
+    @abc.abstractmethod
+    async def run_(self, run: SubprocessRun) -> SubprocessRunOutput:
+        raise NotImplementedError
+
+    def run(
+            self,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> ta.Awaitable[SubprocessRunOutput]:
+        return self.run_(SubprocessRun(
+            cmd=cmd,
+            input=input,
+            timeout=timeout,
+            check=check,
+            capture_output=capture_output,
+            kwargs=kwargs,
+        ))
+
+    #
+
    @abc.abstractmethod
    async def check_call(
            self,
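SubprocessRun and SubprocessRunOutput give the sync and async wrappers a single request/response shape, and the module-level `subprocesses` instance named in the hunk header exposes it directly. A rough sketch of the synchronous path (the echoed command is arbitrary):

    # Build the request explicitly...
    req = SubprocessRun.of('echo', 'hello', capture_output=True)
    out = subprocesses.run_(req)
    print(out.returncode, out.stdout)

    # ...or let the convenience wrapper construct the SubprocessRun for you.
    print(subprocesses.run('echo', 'hello', capture_output=True).stdout)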
@@ -3066,17 +4491,23 @@ class AbstractAsyncSubprocesses(BaseSubprocesses):
##


-class
+class GithubCache(FileCache, DataCache):
+    @dc.dataclass(frozen=True)
+    class Config:
+        dir: str
+
    def __init__(
            self,
-
+            config: Config,
            *,
            client: ta.Optional[GithubCacheClient] = None,
-
+            version: ta.Optional[CacheVersion] = None,
    ) -> None:
-        super().__init__(
+        super().__init__(
+            version=version,
+        )

-        self.
+        self._config = config

        if client is None:
            client = GithubCacheServiceV1Client(
@@ -3085,10 +4516,14 @@ class GithubFileCache(FileCache):
        self._client: GithubCacheClient = client

        self._local = DirectoryFileCache(
-
+            DirectoryFileCache.Config(
+                dir=check.non_empty_str(config.dir),
+            ),
            version=self._version,
        )

+    #
+
    async def get_file(self, key: str) -> ta.Optional[str]:
        local_file = self._local.get_cache_file_path(key)
        if os.path.exists(local_file):
@@ -3122,6 +4557,21 @@ class GithubFileCache(FileCache):

        return cache_file_path

+    #
+
+    async def get_data(self, key: str) -> ta.Optional[DataCache.Data]:
+        local_file = self._local.get_cache_file_path(key)
+        if os.path.exists(local_file):
+            return DataCache.FileData(local_file)
+
+        if (entry := await self._client.get_entry(key)) is None:
+            return None
+
+        return DataCache.UrlData(check.non_empty_str(self._client.get_entry_url(entry)))
+
+    async def put_data(self, key: str, data: DataCache.Data) -> None:
+        await FileCacheDataCache(self).put_data(key, data)
+

########################################
# ../github/cli.py
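Because the client now exposes get_entry_url, GithubCache.get_data can answer with a UrlData pointing at the cache service rather than forcing a download, and read_data_cache_data fetches it lazily. A hedged sketch of that read path (the key is made up, and a configured GitHub Actions cache environment is assumed):

    async def fetch_cached(cache: GithubCache, key: str) -> ta.Optional[bytes]:
        data = await cache.get_data(key)  # FileData if already local, UrlData if only remote
        if data is None:
            return None
        return await read_data_cache_data(data)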
@@ -3388,41 +4838,32 @@ class AsyncioSubprocesses(AbstractAsyncSubprocesses):

    #

-
-
-        proc: asyncio.subprocess.Process
-        stdout: ta.Optional[bytes]
-        stderr: ta.Optional[bytes]
+    async def run_(self, run: SubprocessRun) -> SubprocessRunOutput[asyncio.subprocess.Process]:
+        kwargs = dict(run.kwargs or {})

-
-            self,
-            *cmd: str,
-            input: ta.Any = None,  # noqa
-            timeout: ta.Optional[float] = None,
-            check: bool = False,  # noqa
-            capture_output: ta.Optional[bool] = None,
-            **kwargs: ta.Any,
-    ) -> RunOutput:
-        if capture_output:
+        if run.capture_output:
            kwargs.setdefault('stdout', subprocess.PIPE)
            kwargs.setdefault('stderr', subprocess.PIPE)

        proc: asyncio.subprocess.Process
-        async with self.popen(*cmd, **kwargs) as proc:
-            stdout, stderr = await self.communicate(proc, input, timeout)
+        async with self.popen(*run.cmd, **kwargs) as proc:
+            stdout, stderr = await self.communicate(proc, run.input, run.timeout)

        if check and proc.returncode:
            raise subprocess.CalledProcessError(
                proc.returncode,
-                cmd,
+                run.cmd,
                output=stdout,
                stderr=stderr,
            )

-        return
-            proc,
-
-
+        return SubprocessRunOutput(
+            proc=proc,
+
+            returncode=check.isinstance(proc.returncode, int),
+
+            stdout=stdout,
+            stderr=stderr,
        )

    #
@@ -3615,47 +5056,7 @@ class DockerComposeRun(AsyncExitStacked):


########################################
-# ../docker.py
-"""
-TODO:
- - some less stupid Dockerfile hash
- - doesn't change too much though
-"""
-
-
-##
-
-
-def build_docker_file_hash(docker_file: str) -> str:
-    with open(docker_file) as f:
-        contents = f.read()
-
-    return sha256_str(contents)
-
-
-##
-
-
-def read_docker_tar_image_tag(tar_file: str) -> str:
-    with tarfile.open(tar_file) as tf:
-        with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
-            m = mf.read()
-
-    manifests = json.loads(m.decode('utf-8'))
-    manifest = check.single(manifests)
-    tag = check.non_empty_str(check.single(manifest['RepoTags']))
-    return tag
-
-
-def read_docker_tar_image_id(tar_file: str) -> str:
-    with tarfile.open(tar_file) as tf:
-        with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
-            i = mf.read()
-
-    index = json.loads(i.decode('utf-8'))
-    manifest = check.single(index['manifests'])
-    image_id = check.non_empty_str(manifest['digest'])
-    return image_id
+# ../docker/cmds.py


##
@@ -3772,75 +5173,58 @@ async def load_docker_tar(


########################################
-# ../
+# ../github/inject.py


-
-    KEY_HASH_LEN = 16
+##

-    @dc.dataclass(frozen=True)
-    class Config:
-        project_dir: str

-
+def bind_github(
+        *,
+        cache_dir: ta.Optional[str] = None,
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = []
+
+    if cache_dir is not None:
+        lst.extend([
+            inj.bind(GithubCache.Config(
+                dir=cache_dir,
+            )),
+            inj.bind(GithubCache, singleton=True),
+            inj.bind(FileCache, to_key=GithubCache),
+        ])

-
-        service: str
+    return inj.as_bindings(*lst)

-        cmd: ShellCmd

-
+########################################
+# ../docker/cache.py

-        requirements_txts: ta.Optional[ta.Sequence[str]] = None

-
-        always_build: bool = False
+##

-        no_dependencies: bool = False

-
+class DockerCache(abc.ABC):
+    @abc.abstractmethod
+    def load_cache_docker_image(self, key: str) -> ta.Awaitable[ta.Optional[str]]:
+        raise NotImplementedError

-
+    @abc.abstractmethod
+    def save_cache_docker_image(self, key: str, image: str) -> ta.Awaitable[None]:
+        raise NotImplementedError

-        def __post_init__(self) -> None:
-            check.not_isinstance(self.requirements_txts, str)

+class DockerCacheImpl(DockerCache):
    def __init__(
            self,
-            cfg: Config,
            *,
            file_cache: ta.Optional[FileCache] = None,
    ) -> None:
        super().__init__()

-        self._cfg = cfg
        self._file_cache = file_cache

-
-
-    async def _load_docker_image(self, image: str) -> None:
-        if not self._cfg.always_pull and (await is_docker_image_present(image)):
-            return
-
-        dep_suffix = image
-        for c in '/:.-_':
-            dep_suffix = dep_suffix.replace(c, '-')
-
-        cache_key = f'docker-{dep_suffix}'
-        if (await self._load_cache_docker_image(cache_key)) is not None:
-            return
-
-        await pull_docker_image(image)
-
-        await self._save_cache_docker_image(cache_key, image)
-
-    async def load_docker_image(self, image: str) -> None:
-        with log_timing_context(f'Load docker image: {image}'):
-            await self._load_docker_image(image)
-
-    #
-
-    async def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
+    async def load_cache_docker_image(self, key: str) -> ta.Optional[str]:
        if self._file_cache is None:
            return None

@@ -3852,7 +5236,7 @@ class Ci(AsyncExitStacked):

        return await load_docker_tar_cmd(get_cache_cmd)

-    async def
+    async def save_cache_docker_image(self, key: str, image: str) -> None:
        if self._file_cache is None:
            return

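DockerCache is now an abstract seam, so the build and pull components no longer care where cached image tarballs live. A toy stand-in (purely hypothetical, e.g. for tests) that satisfies the interface by remembering image ids in memory instead of tarballs in a FileCache:

    class InMemoryDockerCache(DockerCache):
        # Hypothetical test double: keeps image ids in a dict rather than saving/loading docker tars.
        def __init__(self) -> None:
            super().__init__()
            self._dct: ta.Dict[str, str] = {}

        async def load_cache_docker_image(self, key: str) -> ta.Optional[str]:
            return self._dct.get(key)

        async def save_cache_docker_image(self, key: str, image: str) -> None:
            self._dct[key] = image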
@@ -3863,19 +5247,58 @@ class Ci(AsyncExitStacked):

        await self._file_cache.put_file(key, tmp_file, steal=True)

-    #

-
+########################################
+# ../docker/buildcaching.py
+
+
+##
+
+
+class DockerBuildCaching(abc.ABC):
+    @abc.abstractmethod
+    def cached_build_docker_image(
+            self,
+            cache_key: str,
+            build_and_tag: ta.Callable[[str], ta.Awaitable[str]],  # image_tag -> image_id
+    ) -> ta.Awaitable[str]:
+        raise NotImplementedError
+
+
+class DockerBuildCachingImpl(DockerBuildCaching):
+    @dc.dataclass(frozen=True)
+    class Config:
+        service: str
+
+        always_build: bool = False
+
+    def __init__(
+            self,
+            *,
+            config: Config,
+
+            docker_cache: ta.Optional[DockerCache] = None,
+    ) -> None:
+        super().__init__()
+
+        self._config = config
+
+        self._docker_cache = docker_cache
+
+    async def cached_build_docker_image(
            self,
            cache_key: str,
            build_and_tag: ta.Callable[[str], ta.Awaitable[str]],
    ) -> str:
-        image_tag = f'{self.
+        image_tag = f'{self._config.service}:{cache_key}'

-        if not self.
+        if not self._config.always_build and (await is_docker_image_present(image_tag)):
            return image_tag

-        if (
+        if (
+            self._docker_cache is not None and
+            (cache_image_id := await self._docker_cache.load_cache_docker_image(cache_key)) is not None
+        ):
            await tag_docker_image(
                cache_image_id,
                image_tag,
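DockerBuildCachingImpl wraps an arbitrary build callback and only invokes it when neither the daemon nor the DockerCache can supply the image. A sketch of the calling convention with a stand-in callback; the service name and image id are invented, and actually awaiting the call requires a running docker daemon since it probes is_docker_image_present:

    async def build_and_tag(image_tag: str) -> str:
        # Stand-in: the real callback in Ci shells out to a docker build and returns the image id.
        return 'sha256:0000000000000000'

    caching = DockerBuildCachingImpl(
        config=DockerBuildCachingImpl.Config(service='my-service'),
        docker_cache=None,  # no persistent cache wired in for this sketch
    )

    # image_tag = await caching.cached_build_docker_image('ci-base-0123abcd', build_and_tag)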
@@ -3884,27 +5307,143 @@ class Ci(AsyncExitStacked):

        image_id = await build_and_tag(image_tag)

-
+        if self._docker_cache is not None:
+            await self._docker_cache.save_cache_docker_image(cache_key, image_id)

        return image_tag

+
+########################################
+# ../docker/imagepulling.py
+
+
+##
+
+
+class DockerImagePulling(abc.ABC):
+    @abc.abstractmethod
+    def pull_docker_image(self, image: str) -> ta.Awaitable[None]:
+        raise NotImplementedError
+
+
+class DockerImagePullingImpl(DockerImagePulling):
+    @dc.dataclass(frozen=True)
+    class Config:
+        always_pull: bool = False
+
+    def __init__(
+            self,
+            *,
+            config: Config = Config(),
+
+            file_cache: ta.Optional[FileCache] = None,
+            docker_cache: ta.Optional[DockerCache] = None,
+    ) -> None:
+        super().__init__()
+
+        self._config = config
+
+        self._file_cache = file_cache
+        self._docker_cache = docker_cache
+
+    async def _pull_docker_image(self, image: str) -> None:
+        if not self._config.always_pull and (await is_docker_image_present(image)):
+            return
+
+        dep_suffix = image
+        for c in '/:.-_':
+            dep_suffix = dep_suffix.replace(c, '-')
+
+        cache_key = f'docker-{dep_suffix}'
+        if (
+            self._docker_cache is not None and
+            (await self._docker_cache.load_cache_docker_image(cache_key)) is not None
+        ):
+            return
+
+        await pull_docker_image(image)
+
+        if self._docker_cache is not None:
+            await self._docker_cache.save_cache_docker_image(cache_key, image)
+
+    async def pull_docker_image(self, image: str) -> None:
+        with log_timing_context(f'Load docker image: {image}'):
+            await self._pull_docker_image(image)
+
+
+########################################
+# ../ci.py
+
+
+##
+
+
+class Ci(AsyncExitStacked):
+    KEY_HASH_LEN = 16
+
+    @dc.dataclass(frozen=True)
+    class Config:
+        project_dir: str
+
+        docker_file: str
+
+        compose_file: str
+        service: str
+
+        cmd: ShellCmd
+
+        #
+
+        requirements_txts: ta.Optional[ta.Sequence[str]] = None
+
+        always_pull: bool = False
+        always_build: bool = False
+
+        no_dependencies: bool = False
+
+        run_options: ta.Optional[ta.Sequence[str]] = None
+
+        #
+
+        def __post_init__(self) -> None:
+            check.not_isinstance(self.requirements_txts, str)
+
+    def __init__(
+            self,
+            config: Config,
+            *,
+            docker_build_caching: DockerBuildCaching,
+            docker_image_pulling: DockerImagePulling,
+    ) -> None:
+        super().__init__()
+
+        self._config = config
+
+        self._docker_build_caching = docker_build_caching
+        self._docker_image_pulling = docker_image_pulling
+
    #

    @cached_nullary
    def docker_file_hash(self) -> str:
-        return build_docker_file_hash(self.
+        return build_docker_file_hash(self._config.docker_file)[:self.KEY_HASH_LEN]
+
+    @cached_nullary
+    def ci_base_image_cache_key(self) -> str:
+        return f'ci-base-{self.docker_file_hash()}'

    async def _resolve_ci_base_image(self) -> str:
        async def build_and_tag(image_tag: str) -> str:
            return await build_docker_image(
-                self.
+                self._config.docker_file,
                tag=image_tag,
-                cwd=self.
+                cwd=self._config.project_dir,
            )

-
-
-
+        return await self._docker_build_caching.cached_build_docker_image(
+            self.ci_base_image_cache_key(),
+            build_and_tag,
+        )

    @async_cached_nullary
    async def resolve_ci_base_image(self) -> str:
@@ -3918,14 +5457,18 @@ class Ci(AsyncExitStacked):
    @cached_nullary
    def requirements_txts(self) -> ta.Sequence[str]:
        return [
-            os.path.join(self.
-            for rf in check.not_none(self.
+            os.path.join(self._config.project_dir, rf)
+            for rf in check.not_none(self._config.requirements_txts)
        ]

    @cached_nullary
    def requirements_hash(self) -> str:
        return build_requirements_hash(self.requirements_txts())[:self.KEY_HASH_LEN]

+    @cached_nullary
+    def ci_image_cache_key(self) -> str:
+        return f'ci-{self.docker_file_hash()}-{self.requirements_hash()}'
+
    async def _resolve_ci_image(self) -> str:
        async def build_and_tag(image_tag: str) -> str:
            base_image = await self.resolve_ci_base_image()
@@ -3942,7 +5485,7 @@ class Ci(AsyncExitStacked):
                    '--no-cache',
                    '--index-strategy unsafe-best-match',
                    '--system',
-                    *[f'-r /project/{rf}' for rf in self.
+                    *[f'-r /project/{rf}' for rf in self._config.requirements_txts or []],
                ]),
            ]
            setup_cmd = ' && '.join(setup_cmds)
@@ -3950,7 +5493,7 @@ class Ci(AsyncExitStacked):
            docker_file_lines = [
                f'FROM {base_image}',
                'RUN mkdir /project',
-                *[f'COPY {rf} /project/{rf}' for rf in self.
+                *[f'COPY {rf} /project/{rf}' for rf in self._config.requirements_txts or []],
                f'RUN {setup_cmd}',
                'RUN rm /project/*',
                'WORKDIR /project',
@@ -3963,12 +5506,13 @@ class Ci(AsyncExitStacked):
            return await build_docker_image(
                docker_file,
                tag=image_tag,
-                cwd=self.
+                cwd=self._config.project_dir,
            )

-
-
-
+        return await self._docker_build_caching.cached_build_docker_image(
+            self.ci_image_cache_key(),
+            build_and_tag,
+        )

    @async_cached_nullary
    async def resolve_ci_image(self) -> str:
@@ -3980,34 +5524,34 @@ class Ci(AsyncExitStacked):
    #

    @async_cached_nullary
-    async def
+    async def pull_dependencies(self) -> None:
        deps = get_compose_service_dependencies(
-            self.
-            self.
+            self._config.compose_file,
+            self._config.service,
        )

        for dep_image in deps.values():
-            await self.
+            await self._docker_image_pulling.pull_docker_image(dep_image)

    #

    async def _run_compose_(self) -> None:
        async with DockerComposeRun(DockerComposeRun.Config(
-            compose_file=self.
-            service=self.
+            compose_file=self._config.compose_file,
+            service=self._config.service,

            image=await self.resolve_ci_image(),

-            cmd=self.
+            cmd=self._config.cmd,

            run_options=[
-                '-v', f'{os.path.abspath(self.
-                *(self.
+                '-v', f'{os.path.abspath(self._config.project_dir)}:/project',
+                *(self._config.run_options or []),
            ],

-            cwd=self.
+            cwd=self._config.project_dir,

-            no_dependencies=self.
+            no_dependencies=self._config.no_dependencies,
        )) as ci_compose_run:
            await ci_compose_run.run()

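Ci itself now only orchestrates; its collaborators are injected. Outside the injector wiring shown in bind_ci below, it could be assembled by hand roughly like this, where every path and name is a placeholder:

    ci = Ci(
        Ci.Config(
            project_dir='.',
            docker_file='docker/Dockerfile',
            compose_file='docker/compose.yml',
            service='my-service',
            cmd=ShellCmd('echo hello'),
        ),
        docker_build_caching=DockerBuildCachingImpl(
            config=DockerBuildCachingImpl.Config(service='my-service'),
        ),
        docker_image_pulling=DockerImagePullingImpl(),
    )

    # async with ci:
    #     await ci.run()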
@@ -4020,11 +5564,90 @@ class Ci(AsyncExitStacked):
    async def run(self) -> None:
        await self.resolve_ci_image()

-        await self.
+        await self.pull_dependencies()

        await self._run_compose()


+########################################
+# ../docker/inject.py
+
+
+##
+
+
+def bind_docker(
+        *,
+        build_caching_config: DockerBuildCachingImpl.Config,
+        image_pulling_config: DockerImagePullingImpl.Config = DockerImagePullingImpl.Config(),
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = [
+        inj.bind(build_caching_config),
+        inj.bind(DockerBuildCachingImpl, singleton=True),
+        inj.bind(DockerBuildCaching, to_key=DockerBuildCachingImpl),
+
+        inj.bind(DockerCacheImpl, singleton=True),
+        inj.bind(DockerCache, to_key=DockerCacheImpl),
+
+        inj.bind(image_pulling_config),
+        inj.bind(DockerImagePullingImpl, singleton=True),
+        inj.bind(DockerImagePulling, to_key=DockerImagePullingImpl),
+    ]
+
+    return inj.as_bindings(*lst)
+
+
+########################################
+# ../inject.py
+
+
+##
+
+
+def bind_ci(
+        *,
+        config: Ci.Config,
+
+        github: bool = False,
+
+        cache_dir: ta.Optional[str] = None,
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = [  # noqa
+        inj.bind(config),
+        inj.bind(Ci, singleton=True),
+    ]
+
+    lst.append(bind_docker(
+        build_caching_config=DockerBuildCachingImpl.Config(
+            service=config.service,
+
+            always_build=config.always_build,
+        ),
+
+        image_pulling_config=DockerImagePullingImpl.Config(
+            always_pull=config.always_pull,
+        ),
+    ))
+
+    if cache_dir is not None:
+        if github:
+            lst.append(bind_github(
+                cache_dir=cache_dir,
+            ))
+
+        else:
+            lst.extend([
+                inj.bind(DirectoryFileCache.Config(
+                    dir=cache_dir,
+                )),
+                inj.bind(DirectoryFileCache, singleton=True),
+                inj.bind(FileCache, to_key=DirectoryFileCache),
+
+            ])
+
+    return inj.as_bindings(*lst)
+
+
########################################
# cli.py

@@ -4159,14 +5782,9 @@ class CiCli(ArgparseCli):

        #

-        file_cache: ta.Optional[FileCache] = None
        if cache_dir is not None:
            cache_dir = os.path.abspath(cache_dir)
            log.debug('Using cache dir %s', cache_dir)
-            if github:
-                file_cache = GithubFileCache(cache_dir)
-            else:
-                file_cache = DirectoryFileCache(cache_dir)

        #

@@ -4182,28 +5800,35 @@ class CiCli(ArgparseCli):

        #

-
-
-            project_dir=project_dir,
+        config = Ci.Config(
+            project_dir=project_dir,

-
+            docker_file=docker_file,

-
-
+            compose_file=compose_file,
+            service=self.args.service,

-
+            requirements_txts=requirements_txts,

-
+            cmd=ShellCmd(cmd),

-
-
+            always_pull=self.args.always_pull,
+            always_build=self.args.always_build,

-
+            no_dependencies=self.args.no_dependencies,

-
-
-
-
+            run_options=run_options,
+        )
+
+        injector = inj.create_injector(bind_ci(
+            config=config,
+
+            github=github,
+
+            cache_dir=cache_dir,
+        ))
+
+        async with injector[Ci] as ci:
            await ci.run()
