omdev-0.0.0.dev221-py3-none-any.whl → omdev-0.0.0.dev223-py3-none-any.whl
- omdev/ci/cache.py +40 -23
- omdev/ci/ci.py +49 -109
- omdev/ci/cli.py +24 -23
- omdev/ci/docker/__init__.py +0 -0
- omdev/ci/docker/buildcaching.py +69 -0
- omdev/ci/docker/cache.py +57 -0
- omdev/ci/{docker.py → docker/cmds.py} +1 -44
- omdev/ci/docker/imagepulling.py +64 -0
- omdev/ci/docker/inject.py +37 -0
- omdev/ci/docker/utils.py +48 -0
- omdev/ci/github/cache.py +15 -5
- omdev/ci/github/inject.py +30 -0
- omdev/ci/inject.py +61 -0
- omdev/dataserver/__init__.py +1 -0
- omdev/dataserver/handlers.py +198 -0
- omdev/dataserver/http.py +69 -0
- omdev/dataserver/routes.py +49 -0
- omdev/dataserver/server.py +90 -0
- omdev/dataserver/targets.py +89 -0
- omdev/oci/__init__.py +0 -0
- omdev/oci/building.py +221 -0
- omdev/oci/compression.py +8 -0
- omdev/oci/data.py +151 -0
- omdev/oci/datarefs.py +138 -0
- omdev/oci/dataserver.py +61 -0
- omdev/oci/loading.py +142 -0
- omdev/oci/media.py +179 -0
- omdev/oci/packing.py +381 -0
- omdev/oci/repositories.py +159 -0
- omdev/oci/tars.py +144 -0
- omdev/pyproject/resources/python.sh +1 -1
- omdev/scripts/ci.py +1841 -384
- omdev/scripts/interp.py +100 -22
- omdev/scripts/pyproject.py +122 -28
- {omdev-0.0.0.dev221.dist-info → omdev-0.0.0.dev223.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev221.dist-info → omdev-0.0.0.dev223.dist-info}/RECORD +40 -15
- {omdev-0.0.0.dev221.dist-info → omdev-0.0.0.dev223.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev221.dist-info → omdev-0.0.0.dev223.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev221.dist-info → omdev-0.0.0.dev223.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev221.dist-info → omdev-0.0.0.dev223.dist-info}/top_level.txt +0 -0
omdev/scripts/ci.py
CHANGED
@@ -21,6 +21,7 @@ import asyncio.base_subprocess
 import asyncio.subprocess
 import collections
 import contextlib
+import contextvars
 import dataclasses as dc
 import datetime
 import functools
@@ -44,6 +45,7 @@ import types
 import typing as ta
 import urllib.parse
 import urllib.request
+import weakref


 ########################################
@@ -80,6 +82,13 @@ ArgparseCmdFn = ta.Callable[[], ta.Optional[int]] # ta.TypeAlias
 ExitStackedT = ta.TypeVar('ExitStackedT', bound='ExitStacked')
 AsyncExitStackedT = ta.TypeVar('AsyncExitStackedT', bound='AsyncExitStacked')

+# ../../omlish/lite/inject.py
+U = ta.TypeVar('U')
+InjectorKeyCls = ta.Union[type, ta.NewType]
+InjectorProviderFn = ta.Callable[['Injector'], ta.Any]
+InjectorProviderFnMap = ta.Mapping['InjectorKey', 'InjectorProviderFn']
+InjectorBindingOrBindings = ta.Union['InjectorBinding', 'InjectorBindings']
+
 # ../../omlish/subprocesses.py
 SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull'] # ta.TypeAlias

@@ -797,6 +806,50 @@ json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON
 log = logging.getLogger(__name__)


+########################################
+# ../../../omlish/lite/maybes.py
+
+
+class Maybe(ta.Generic[T]):
+    @property
+    @abc.abstractmethod
+    def present(self) -> bool:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def must(self) -> T:
+        raise NotImplementedError
+
+    @classmethod
+    def just(cls, v: T) -> 'Maybe[T]':
+        return tuple.__new__(_Maybe, (v,))  # noqa
+
+    _empty: ta.ClassVar['Maybe']
+
+    @classmethod
+    def empty(cls) -> 'Maybe[T]':
+        return Maybe._empty
+
+
+class _Maybe(Maybe[T], tuple):
+    __slots__ = ()
+
+    def __init_subclass__(cls, **kwargs):
+        raise TypeError
+
+    @property
+    def present(self) -> bool:
+        return bool(self)
+
+    def must(self) -> T:
+        if not self:
+            raise ValueError
+        return self[0]
+
+
+Maybe._empty = tuple.__new__(_Maybe, ())  # noqa
+
+
 ########################################
 # ../../../omlish/lite/reflect.py

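Usage sketch (illustrative only, not part of the diff) for the vendored Maybe type added in the hunk above; the values are made up:

m = Maybe.just(42)   # a present value
assert m.present
assert m.must() == 42

e = Maybe.empty()    # the shared empty instance
assert not e.present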
@@ -1133,24 +1186,30 @@ def unlinking_if_exists(path: str) -> ta.Iterator[None]:
 # ../cache.py


+CacheVersion = ta.NewType('CacheVersion', int)
+
+
 ##


-@abc.abstractmethod
 class FileCache(abc.ABC):
+    DEFAULT_CACHE_VERSION: ta.ClassVar[CacheVersion] = CacheVersion(CI_CACHE_VERSION)
+
     def __init__(
             self,
             *,
-            version:
+            version: ta.Optional[CacheVersion] = None,
     ) -> None:
         super().__init__()

+        if version is None:
+            version = self.DEFAULT_CACHE_VERSION
         check.isinstance(version, int)
         check.arg(version >= 0)
-        self._version = version
+        self._version: CacheVersion = version

     @property
-    def version(self) ->
+    def version(self) -> CacheVersion:
         return self._version

     #
@@ -1174,19 +1233,28 @@ class FileCache(abc.ABC):


 class DirectoryFileCache(FileCache):
+    @dc.dataclass(frozen=True)
+    class Config:
+        dir: str
+
+        no_create: bool = False
+        no_purge: bool = False
+
     def __init__(
             self,
-
+            config: Config,
             *,
-
-            no_purge: bool = False,
-            **kwargs: ta.Any,
+            version: ta.Optional[CacheVersion] = None,
     ) -> None: # noqa
-        super().__init__(
+        super().__init__(
+            version=version,
+        )
+
+        self._config = config

-
-
-        self.
+    @property
+    def dir(self) -> str:
+        return self._config.dir

     #

@@ -1194,37 +1262,38 @@ class DirectoryFileCache(FileCache):

     @cached_nullary
     def setup_dir(self) -> None:
-        version_file = os.path.join(self.
+        version_file = os.path.join(self.dir, self.VERSION_FILE_NAME)

-        if self.
-            check.state(os.path.isdir(self.
+        if self._config.no_create:
+            check.state(os.path.isdir(self.dir))

-        elif not os.path.isdir(self.
-            os.makedirs(self.
+        elif not os.path.isdir(self.dir):
+            os.makedirs(self.dir)
             with open(version_file, 'w') as f:
                 f.write(str(self._version))
             return

+        # NOTE: intentionally raises FileNotFoundError to refuse to use an existing non-cache dir as a cache dir.
         with open(version_file) as f:
             dir_version = int(f.read().strip())

         if dir_version == self._version:
             return

-        if self.
+        if self._config.no_purge:
             raise RuntimeError(f'{dir_version=} != {self._version=}')

-        dirs = [n for n in sorted(os.listdir(self.
+        dirs = [n for n in sorted(os.listdir(self.dir)) if os.path.isdir(os.path.join(self.dir, n))]
         if dirs:
             raise RuntimeError(
-                f'Refusing to remove stale cache dir {self.
+                f'Refusing to remove stale cache dir {self.dir!r} '
                 f'due to present directories: {", ".join(dirs)}',
             )

-        for n in sorted(os.listdir(self.
+        for n in sorted(os.listdir(self.dir)):
             if n.startswith('.'):
                 continue
-            fp = os.path.join(self.
+            fp = os.path.join(self.dir, n)
             check.state(os.path.isfile(fp))
             log.debug('Purging stale cache file: %s', fp)
             os.unlink(fp)
@@ -1241,7 +1310,7 @@ class DirectoryFileCache(FileCache):
             key: str,
     ) -> str:
         self.setup_dir()
-        return os.path.join(self.
+        return os.path.join(self.dir, key)

     def format_incomplete_file(self, f: str) -> str:
         return os.path.join(os.path.dirname(f), f'_{os.path.basename(f)}.incomplete')
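Usage sketch (illustrative only, not part of the diff) for the Config-based DirectoryFileCache constructor shown in the cache hunks above; the directory path is hypothetical:

cache = DirectoryFileCache(
    DirectoryFileCache.Config(
        dir='/tmp/ci-cache',  # hypothetical path
        no_purge=True,
    ),
    version=CacheVersion(1),  # defaults to FileCache.DEFAULT_CACHE_VERSION when omitted
)
print(cache.dir)  # '/tmp/ci-cache'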
@@ -1956,270 +2025,1388 @@ class aclosing(contextlib.AbstractAsyncContextManager): # noqa
|
|
1956
2025
|
|
1957
2026
|
|
1958
2027
|
########################################
|
1959
|
-
# ../../../omlish/lite/
|
1960
|
-
|
1961
|
-
|
1962
|
-
@cached_nullary
|
1963
|
-
def is_debugger_attached() -> bool:
|
1964
|
-
return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
|
1965
|
-
|
1966
|
-
|
1967
|
-
LITE_REQUIRED_PYTHON_VERSION = (3, 8)
|
2028
|
+
# ../../../omlish/lite/inject.py
|
1968
2029
|
|
1969
2030
|
|
1970
|
-
|
1971
|
-
|
1972
|
-
raise OSError(f'Requires python {LITE_REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}') # noqa
|
1973
|
-
|
1974
|
-
|
1975
|
-
########################################
|
1976
|
-
# ../../../omlish/logs/json.py
|
1977
|
-
"""
|
1978
|
-
TODO:
|
1979
|
-
- translate json keys
|
1980
|
-
"""
|
2031
|
+
###
|
2032
|
+
# types
|
1981
2033
|
|
1982
2034
|
|
1983
|
-
|
1984
|
-
|
1985
|
-
|
1986
|
-
|
1987
|
-
|
1988
|
-
|
1989
|
-
|
1990
|
-
|
1991
|
-
'filename': False,
|
1992
|
-
'module': False,
|
1993
|
-
'exc_info': True,
|
1994
|
-
'exc_text': True,
|
1995
|
-
'stack_info': True,
|
1996
|
-
'lineno': False,
|
1997
|
-
'funcName': False,
|
1998
|
-
'created': False,
|
1999
|
-
'msecs': False,
|
2000
|
-
'relativeCreated': False,
|
2001
|
-
'thread': False,
|
2002
|
-
'threadName': False,
|
2003
|
-
'processName': False,
|
2004
|
-
'process': False,
|
2005
|
-
}
|
2035
|
+
@dc.dataclass(frozen=True)
|
2036
|
+
class InjectorKey(ta.Generic[T]):
|
2037
|
+
# Before PEP-560 typing.Generic was a metaclass with a __new__ that takes a 'cls' arg, so instantiating a dataclass
|
2038
|
+
# with kwargs (such as through dc.replace) causes `TypeError: __new__() got multiple values for argument 'cls'`.
|
2039
|
+
# See:
|
2040
|
+
# - https://github.com/python/cpython/commit/d911e40e788fb679723d78b6ea11cabf46caed5a
|
2041
|
+
# - https://gist.github.com/wrmsr/4468b86efe9f373b6b114bfe85b98fd3
|
2042
|
+
cls_: InjectorKeyCls
|
2006
2043
|
|
2007
|
-
|
2008
|
-
|
2009
|
-
*args: ta.Any,
|
2010
|
-
json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
|
2011
|
-
**kwargs: ta.Any,
|
2012
|
-
) -> None:
|
2013
|
-
super().__init__(*args, **kwargs)
|
2044
|
+
tag: ta.Any = None
|
2045
|
+
array: bool = False
|
2014
2046
|
|
2015
|
-
if json_dumps is None:
|
2016
|
-
json_dumps = json_dumps_compact
|
2017
|
-
self._json_dumps = json_dumps
|
2018
2047
|
|
2019
|
-
|
2020
|
-
|
2021
|
-
k: v
|
2022
|
-
for k, o in self.KEYS.items()
|
2023
|
-
for v in [getattr(record, k)]
|
2024
|
-
if not (o and v is None)
|
2025
|
-
}
|
2026
|
-
return self._json_dumps(dct)
|
2048
|
+
def is_valid_injector_key_cls(cls: ta.Any) -> bool:
|
2049
|
+
return isinstance(cls, type) or is_new_type(cls)
|
2027
2050
|
|
2028
2051
|
|
2029
|
-
|
2030
|
-
|
2052
|
+
def check_valid_injector_key_cls(cls: T) -> T:
|
2053
|
+
if not is_valid_injector_key_cls(cls):
|
2054
|
+
raise TypeError(cls)
|
2055
|
+
return cls
|
2031
2056
|
|
2032
2057
|
|
2033
|
-
|
2034
|
-
file_fd, file = tempfile.mkstemp(**kwargs)
|
2035
|
-
os.close(file_fd)
|
2036
|
-
return file
|
2058
|
+
##
|
2037
2059
|
|
2038
2060
|
|
2039
|
-
|
2040
|
-
|
2041
|
-
|
2042
|
-
|
2043
|
-
yield path
|
2044
|
-
finally:
|
2045
|
-
unlink_if_exists(path)
|
2061
|
+
class InjectorProvider(abc.ABC):
|
2062
|
+
@abc.abstractmethod
|
2063
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2064
|
+
raise NotImplementedError
|
2046
2065
|
|
2047
2066
|
|
2048
|
-
|
2049
|
-
def temp_dir_context(
|
2050
|
-
root_dir: ta.Optional[str] = None,
|
2051
|
-
**kwargs: ta.Any,
|
2052
|
-
) -> ta.Iterator[str]:
|
2053
|
-
path = tempfile.mkdtemp(dir=root_dir, **kwargs)
|
2054
|
-
try:
|
2055
|
-
yield path
|
2056
|
-
finally:
|
2057
|
-
shutil.rmtree(path, ignore_errors=True)
|
2067
|
+
##
|
2058
2068
|
|
2059
2069
|
|
2060
|
-
@
|
2061
|
-
|
2062
|
-
|
2063
|
-
|
2064
|
-
**kwargs: ta.Any,
|
2065
|
-
) -> ta.Iterator[tempfile._TemporaryFileWrapper]: # noqa
|
2066
|
-
with tempfile.NamedTemporaryFile(dir=root_dir, delete=False, **kwargs) as f:
|
2067
|
-
try:
|
2068
|
-
yield f
|
2069
|
-
finally:
|
2070
|
-
if cleanup:
|
2071
|
-
shutil.rmtree(f.name, ignore_errors=True)
|
2070
|
+
@dc.dataclass(frozen=True)
|
2071
|
+
class InjectorBinding:
|
2072
|
+
key: InjectorKey
|
2073
|
+
provider: InjectorProvider
|
2072
2074
|
|
2075
|
+
def __post_init__(self) -> None:
|
2076
|
+
check.isinstance(self.key, InjectorKey)
|
2077
|
+
check.isinstance(self.provider, InjectorProvider)
|
2073
2078
|
|
2074
|
-
########################################
|
2075
|
-
# ../github/client.py
|
2076
2079
|
|
2080
|
+
class InjectorBindings(abc.ABC):
|
2081
|
+
@abc.abstractmethod
|
2082
|
+
def bindings(self) -> ta.Iterator[InjectorBinding]:
|
2083
|
+
raise NotImplementedError
|
2077
2084
|
|
2078
2085
|
##
|
2079
2086
|
|
2080
2087
|
|
2081
|
-
class
|
2082
|
-
|
2083
|
-
|
2088
|
+
class Injector(abc.ABC):
|
2089
|
+
@abc.abstractmethod
|
2090
|
+
def try_provide(self, key: ta.Any) -> Maybe[ta.Any]:
|
2091
|
+
raise NotImplementedError
|
2084
2092
|
|
2085
2093
|
@abc.abstractmethod
|
2086
|
-
def
|
2094
|
+
def provide(self, key: ta.Any) -> ta.Any:
|
2087
2095
|
raise NotImplementedError
|
2088
2096
|
|
2089
2097
|
@abc.abstractmethod
|
2090
|
-
def
|
2098
|
+
def provide_kwargs(
|
2099
|
+
self,
|
2100
|
+
obj: ta.Any,
|
2101
|
+
*,
|
2102
|
+
skip_args: int = 0,
|
2103
|
+
skip_kwargs: ta.Optional[ta.Iterable[ta.Any]] = None,
|
2104
|
+
) -> ta.Mapping[str, ta.Any]:
|
2091
2105
|
raise NotImplementedError
|
2092
2106
|
|
2093
2107
|
@abc.abstractmethod
|
2094
|
-
def
|
2108
|
+
def inject(
|
2109
|
+
self,
|
2110
|
+
obj: ta.Any,
|
2111
|
+
*,
|
2112
|
+
args: ta.Optional[ta.Sequence[ta.Any]] = None,
|
2113
|
+
kwargs: ta.Optional[ta.Mapping[str, ta.Any]] = None,
|
2114
|
+
) -> ta.Any:
|
2095
2115
|
raise NotImplementedError
|
2096
2116
|
|
2117
|
+
def __getitem__(
|
2118
|
+
self,
|
2119
|
+
target: ta.Union[InjectorKey[T], ta.Type[T]],
|
2120
|
+
) -> T:
|
2121
|
+
return self.provide(target)
|
2097
2122
|
|
2098
|
-
##
|
2099
2123
|
|
2124
|
+
###
|
2125
|
+
# exceptions
|
2100
2126
|
|
2101
|
-
class GithubCacheServiceV1BaseClient(GithubCacheClient, abc.ABC):
|
2102
|
-
BASE_URL_ENV_VAR = register_github_env_var('ACTIONS_CACHE_URL')
|
2103
|
-
AUTH_TOKEN_ENV_VAR = register_github_env_var('ACTIONS_RUNTIME_TOKEN') # noqa
|
2104
2127
|
|
2105
|
-
|
2128
|
+
class InjectorError(Exception):
|
2129
|
+
pass
|
2106
2130
|
|
2107
|
-
#
|
2108
2131
|
|
2109
|
-
|
2110
|
-
|
2111
|
-
|
2112
|
-
base_url: ta.Optional[str] = None,
|
2113
|
-
auth_token: ta.Optional[str] = None,
|
2132
|
+
@dc.dataclass()
|
2133
|
+
class InjectorKeyError(InjectorError):
|
2134
|
+
key: InjectorKey
|
2114
2135
|
|
2115
|
-
|
2116
|
-
|
2136
|
+
source: ta.Any = None
|
2137
|
+
name: ta.Optional[str] = None
|
2117
2138
|
|
2118
|
-
cache_version: int = CI_CACHE_VERSION,
|
2119
2139
|
|
2120
|
-
|
2121
|
-
|
2122
|
-
super().__init__()
|
2140
|
+
class UnboundInjectorKeyError(InjectorKeyError):
|
2141
|
+
pass
|
2123
2142
|
|
2124
|
-
#
|
2125
2143
|
|
2126
|
-
|
2127
|
-
|
2128
|
-
self._service_url = GithubCacheServiceV1.get_service_url(base_url)
|
2144
|
+
class DuplicateInjectorKeyError(InjectorKeyError):
|
2145
|
+
pass
|
2129
2146
|
|
2130
|
-
if auth_token is None:
|
2131
|
-
auth_token = self.AUTH_TOKEN_ENV_VAR()
|
2132
|
-
self._auth_token = auth_token
|
2133
2147
|
|
2134
|
-
|
2148
|
+
class CyclicDependencyInjectorKeyError(InjectorKeyError):
|
2149
|
+
pass
|
2135
2150
|
|
2136
|
-
self._key_prefix = key_prefix
|
2137
2151
|
|
2138
|
-
|
2139
|
-
|
2140
|
-
self._key_suffix = check.non_empty_str(key_suffix)
|
2152
|
+
###
|
2153
|
+
# keys
|
2141
2154
|
|
2142
|
-
#
|
2143
2155
|
|
2144
|
-
|
2156
|
+
def as_injector_key(o: ta.Any) -> InjectorKey:
|
2157
|
+
if o is inspect.Parameter.empty:
|
2158
|
+
raise TypeError(o)
|
2159
|
+
if isinstance(o, InjectorKey):
|
2160
|
+
return o
|
2161
|
+
if is_valid_injector_key_cls(o):
|
2162
|
+
return InjectorKey(o)
|
2163
|
+
raise TypeError(o)
|
2145
2164
|
|
2146
|
-
#
|
2147
2165
|
|
2148
|
-
|
2166
|
+
###
|
2167
|
+
# providers
|
2149
2168
|
|
2150
|
-
#
|
2151
2169
|
|
2152
|
-
|
2153
|
-
|
2154
|
-
|
2155
|
-
return asyncio.get_event_loop()
|
2170
|
+
@dc.dataclass(frozen=True)
|
2171
|
+
class FnInjectorProvider(InjectorProvider):
|
2172
|
+
fn: ta.Any
|
2156
2173
|
|
2157
|
-
|
2174
|
+
def __post_init__(self) -> None:
|
2175
|
+
check.not_isinstance(self.fn, type)
|
2158
2176
|
|
2159
|
-
def
|
2160
|
-
|
2161
|
-
|
2162
|
-
*,
|
2163
|
-
content_type: ta.Optional[str] = None,
|
2164
|
-
json_content: bool = False,
|
2165
|
-
) -> ta.Dict[str, str]:
|
2166
|
-
dct = {
|
2167
|
-
'Accept': ';'.join([
|
2168
|
-
'application/json',
|
2169
|
-
f'api-version={GithubCacheServiceV1.API_VERSION}',
|
2170
|
-
]),
|
2171
|
-
}
|
2177
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2178
|
+
def pfn(i: Injector) -> ta.Any:
|
2179
|
+
return i.inject(self.fn)
|
2172
2180
|
|
2173
|
-
|
2174
|
-
dct['Authorization'] = f'Bearer {auth_token}'
|
2181
|
+
return pfn
|
2175
2182
|
|
2176
|
-
if content_type is None and json_content:
|
2177
|
-
content_type = 'application/json'
|
2178
|
-
if content_type is not None:
|
2179
|
-
dct['Content-Type'] = content_type
|
2180
2183
|
|
2181
|
-
|
2182
|
-
|
2184
|
+
@dc.dataclass(frozen=True)
|
2185
|
+
class CtorInjectorProvider(InjectorProvider):
|
2186
|
+
cls_: type
|
2183
2187
|
|
2184
|
-
|
2188
|
+
def __post_init__(self) -> None:
|
2189
|
+
check.isinstance(self.cls_, type)
|
2185
2190
|
|
2186
|
-
|
2191
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2192
|
+
def pfn(i: Injector) -> ta.Any:
|
2193
|
+
return i.inject(self.cls_)
|
2187
2194
|
|
2188
|
-
|
2189
|
-
if not b:
|
2190
|
-
return None
|
2191
|
-
return json.loads(b.decode('utf-8-sig'))
|
2195
|
+
return pfn
|
2192
2196
|
|
2193
|
-
#
|
2194
2197
|
|
2195
|
-
|
2196
|
-
|
2197
|
-
|
2198
|
-
) -> ta.Tuple[http.client.HTTPResponse, ta.Optional[bytes]]:
|
2199
|
-
def run_sync():
|
2200
|
-
with urllib.request.urlopen(req) as resp: # noqa
|
2201
|
-
body = resp.read()
|
2202
|
-
return (resp, body)
|
2198
|
+
@dc.dataclass(frozen=True)
|
2199
|
+
class ConstInjectorProvider(InjectorProvider):
|
2200
|
+
v: ta.Any
|
2203
2201
|
|
2204
|
-
|
2202
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2203
|
+
return lambda _: self.v
|
2205
2204
|
|
2206
|
-
#
|
2207
2205
|
|
2208
|
-
|
2209
|
-
|
2210
|
-
|
2211
|
-
body: ta.Optional[bytes]
|
2206
|
+
@dc.dataclass(frozen=True)
|
2207
|
+
class SingletonInjectorProvider(InjectorProvider):
|
2208
|
+
p: InjectorProvider
|
2212
2209
|
|
2213
|
-
|
2214
|
-
|
2210
|
+
def __post_init__(self) -> None:
|
2211
|
+
check.isinstance(self.p, InjectorProvider)
|
2215
2212
|
|
2216
|
-
|
2217
|
-
|
2218
|
-
|
2219
|
-
|
2220
|
-
|
2221
|
-
|
2222
|
-
|
2213
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2214
|
+
v = not_set = object()
|
2215
|
+
|
2216
|
+
def pfn(i: Injector) -> ta.Any:
|
2217
|
+
nonlocal v
|
2218
|
+
if v is not_set:
|
2219
|
+
v = ufn(i)
|
2220
|
+
return v
|
2221
|
+
|
2222
|
+
ufn = self.p.provider_fn()
|
2223
|
+
return pfn
|
2224
|
+
|
2225
|
+
|
2226
|
+
@dc.dataclass(frozen=True)
|
2227
|
+
class LinkInjectorProvider(InjectorProvider):
|
2228
|
+
k: InjectorKey
|
2229
|
+
|
2230
|
+
def __post_init__(self) -> None:
|
2231
|
+
check.isinstance(self.k, InjectorKey)
|
2232
|
+
|
2233
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2234
|
+
def pfn(i: Injector) -> ta.Any:
|
2235
|
+
return i.provide(self.k)
|
2236
|
+
|
2237
|
+
return pfn
|
2238
|
+
|
2239
|
+
|
2240
|
+
@dc.dataclass(frozen=True)
|
2241
|
+
class ArrayInjectorProvider(InjectorProvider):
|
2242
|
+
ps: ta.Sequence[InjectorProvider]
|
2243
|
+
|
2244
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2245
|
+
ps = [p.provider_fn() for p in self.ps]
|
2246
|
+
|
2247
|
+
def pfn(i: Injector) -> ta.Any:
|
2248
|
+
rv = []
|
2249
|
+
for ep in ps:
|
2250
|
+
o = ep(i)
|
2251
|
+
rv.append(o)
|
2252
|
+
return rv
|
2253
|
+
|
2254
|
+
return pfn
|
2255
|
+
|
2256
|
+
|
2257
|
+
###
|
2258
|
+
# bindings
|
2259
|
+
|
2260
|
+
|
2261
|
+
@dc.dataclass(frozen=True)
|
2262
|
+
class _InjectorBindings(InjectorBindings):
|
2263
|
+
bs: ta.Optional[ta.Sequence[InjectorBinding]] = None
|
2264
|
+
ps: ta.Optional[ta.Sequence[InjectorBindings]] = None
|
2265
|
+
|
2266
|
+
def bindings(self) -> ta.Iterator[InjectorBinding]:
|
2267
|
+
if self.bs is not None:
|
2268
|
+
yield from self.bs
|
2269
|
+
if self.ps is not None:
|
2270
|
+
for p in self.ps:
|
2271
|
+
yield from p.bindings()
|
2272
|
+
|
2273
|
+
|
2274
|
+
def as_injector_bindings(*args: InjectorBindingOrBindings) -> InjectorBindings:
|
2275
|
+
bs: ta.List[InjectorBinding] = []
|
2276
|
+
ps: ta.List[InjectorBindings] = []
|
2277
|
+
|
2278
|
+
for a in args:
|
2279
|
+
if isinstance(a, InjectorBindings):
|
2280
|
+
ps.append(a)
|
2281
|
+
elif isinstance(a, InjectorBinding):
|
2282
|
+
bs.append(a)
|
2283
|
+
else:
|
2284
|
+
raise TypeError(a)
|
2285
|
+
|
2286
|
+
return _InjectorBindings(
|
2287
|
+
bs or None,
|
2288
|
+
ps or None,
|
2289
|
+
)
|
2290
|
+
|
2291
|
+
|
2292
|
+
##
|
2293
|
+
|
2294
|
+
|
2295
|
+
def build_injector_provider_map(bs: InjectorBindings) -> ta.Mapping[InjectorKey, InjectorProvider]:
|
2296
|
+
pm: ta.Dict[InjectorKey, InjectorProvider] = {}
|
2297
|
+
am: ta.Dict[InjectorKey, ta.List[InjectorProvider]] = {}
|
2298
|
+
|
2299
|
+
for b in bs.bindings():
|
2300
|
+
if b.key.array:
|
2301
|
+
al = am.setdefault(b.key, [])
|
2302
|
+
if isinstance(b.provider, ArrayInjectorProvider):
|
2303
|
+
al.extend(b.provider.ps)
|
2304
|
+
else:
|
2305
|
+
al.append(b.provider)
|
2306
|
+
else:
|
2307
|
+
if b.key in pm:
|
2308
|
+
raise KeyError(b.key)
|
2309
|
+
pm[b.key] = b.provider
|
2310
|
+
|
2311
|
+
if am:
|
2312
|
+
for k, aps in am.items():
|
2313
|
+
pm[k] = ArrayInjectorProvider(aps)
|
2314
|
+
|
2315
|
+
return pm
|
2316
|
+
|
2317
|
+
|
2318
|
+
###
|
2319
|
+
# overrides
|
2320
|
+
|
2321
|
+
|
2322
|
+
@dc.dataclass(frozen=True)
|
2323
|
+
class OverridesInjectorBindings(InjectorBindings):
|
2324
|
+
p: InjectorBindings
|
2325
|
+
m: ta.Mapping[InjectorKey, InjectorBinding]
|
2326
|
+
|
2327
|
+
def bindings(self) -> ta.Iterator[InjectorBinding]:
|
2328
|
+
for b in self.p.bindings():
|
2329
|
+
yield self.m.get(b.key, b)
|
2330
|
+
|
2331
|
+
|
2332
|
+
def injector_override(p: InjectorBindings, *args: InjectorBindingOrBindings) -> InjectorBindings:
|
2333
|
+
m: ta.Dict[InjectorKey, InjectorBinding] = {}
|
2334
|
+
|
2335
|
+
for b in as_injector_bindings(*args).bindings():
|
2336
|
+
if b.key in m:
|
2337
|
+
raise DuplicateInjectorKeyError(b.key)
|
2338
|
+
m[b.key] = b
|
2339
|
+
|
2340
|
+
return OverridesInjectorBindings(p, m)
|
2341
|
+
|
2342
|
+
|
2343
|
+
###
|
2344
|
+
# scopes
|
2345
|
+
|
2346
|
+
|
2347
|
+
class InjectorScope(abc.ABC): # noqa
|
2348
|
+
def __init__(
|
2349
|
+
self,
|
2350
|
+
*,
|
2351
|
+
_i: Injector,
|
2352
|
+
) -> None:
|
2353
|
+
check.not_in(abc.ABC, type(self).__bases__)
|
2354
|
+
|
2355
|
+
super().__init__()
|
2356
|
+
|
2357
|
+
self._i = _i
|
2358
|
+
|
2359
|
+
all_seeds: ta.Iterable[_InjectorScopeSeed] = self._i.provide(InjectorKey(_InjectorScopeSeed, array=True))
|
2360
|
+
self._sks = {s.k for s in all_seeds if s.sc is type(self)}
|
2361
|
+
|
2362
|
+
#
|
2363
|
+
|
2364
|
+
@dc.dataclass(frozen=True)
|
2365
|
+
class State:
|
2366
|
+
seeds: ta.Dict[InjectorKey, ta.Any]
|
2367
|
+
provisions: ta.Dict[InjectorKey, ta.Any] = dc.field(default_factory=dict)
|
2368
|
+
|
2369
|
+
def new_state(self, vs: ta.Mapping[InjectorKey, ta.Any]) -> State:
|
2370
|
+
vs = dict(vs)
|
2371
|
+
check.equal(set(vs.keys()), self._sks)
|
2372
|
+
return InjectorScope.State(vs)
|
2373
|
+
|
2374
|
+
#
|
2375
|
+
|
2376
|
+
@abc.abstractmethod
|
2377
|
+
def state(self) -> State:
|
2378
|
+
raise NotImplementedError
|
2379
|
+
|
2380
|
+
@abc.abstractmethod
|
2381
|
+
def enter(self, vs: ta.Mapping[InjectorKey, ta.Any]) -> ta.ContextManager[None]:
|
2382
|
+
raise NotImplementedError
|
2383
|
+
|
2384
|
+
|
2385
|
+
class ExclusiveInjectorScope(InjectorScope, abc.ABC):
|
2386
|
+
_st: ta.Optional[InjectorScope.State] = None
|
2387
|
+
|
2388
|
+
def state(self) -> InjectorScope.State:
|
2389
|
+
return check.not_none(self._st)
|
2390
|
+
|
2391
|
+
@contextlib.contextmanager
|
2392
|
+
def enter(self, vs: ta.Mapping[InjectorKey, ta.Any]) -> ta.Iterator[None]:
|
2393
|
+
check.none(self._st)
|
2394
|
+
self._st = self.new_state(vs)
|
2395
|
+
try:
|
2396
|
+
yield
|
2397
|
+
finally:
|
2398
|
+
self._st = None
|
2399
|
+
|
2400
|
+
|
2401
|
+
class ContextvarInjectorScope(InjectorScope, abc.ABC):
|
2402
|
+
_cv: contextvars.ContextVar
|
2403
|
+
|
2404
|
+
def __init_subclass__(cls, **kwargs: ta.Any) -> None:
|
2405
|
+
super().__init_subclass__(**kwargs)
|
2406
|
+
check.not_in(abc.ABC, cls.__bases__)
|
2407
|
+
check.state(not hasattr(cls, '_cv'))
|
2408
|
+
cls._cv = contextvars.ContextVar(f'{cls.__name__}_cv')
|
2409
|
+
|
2410
|
+
def state(self) -> InjectorScope.State:
|
2411
|
+
return self._cv.get()
|
2412
|
+
|
2413
|
+
@contextlib.contextmanager
|
2414
|
+
def enter(self, vs: ta.Mapping[InjectorKey, ta.Any]) -> ta.Iterator[None]:
|
2415
|
+
try:
|
2416
|
+
self._cv.get()
|
2417
|
+
except LookupError:
|
2418
|
+
pass
|
2419
|
+
else:
|
2420
|
+
raise RuntimeError(f'Scope already entered: {self}')
|
2421
|
+
st = self.new_state(vs)
|
2422
|
+
tok = self._cv.set(st)
|
2423
|
+
try:
|
2424
|
+
yield
|
2425
|
+
finally:
|
2426
|
+
self._cv.reset(tok)
|
2427
|
+
|
2428
|
+
|
2429
|
+
#
|
2430
|
+
|
2431
|
+
|
2432
|
+
@dc.dataclass(frozen=True)
|
2433
|
+
class ScopedInjectorProvider(InjectorProvider):
|
2434
|
+
p: InjectorProvider
|
2435
|
+
k: InjectorKey
|
2436
|
+
sc: ta.Type[InjectorScope]
|
2437
|
+
|
2438
|
+
def __post_init__(self) -> None:
|
2439
|
+
check.isinstance(self.p, InjectorProvider)
|
2440
|
+
check.isinstance(self.k, InjectorKey)
|
2441
|
+
check.issubclass(self.sc, InjectorScope)
|
2442
|
+
|
2443
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2444
|
+
def pfn(i: Injector) -> ta.Any:
|
2445
|
+
st = i[self.sc].state()
|
2446
|
+
try:
|
2447
|
+
return st.provisions[self.k]
|
2448
|
+
except KeyError:
|
2449
|
+
pass
|
2450
|
+
v = ufn(i)
|
2451
|
+
st.provisions[self.k] = v
|
2452
|
+
return v
|
2453
|
+
|
2454
|
+
ufn = self.p.provider_fn()
|
2455
|
+
return pfn
|
2456
|
+
|
2457
|
+
|
2458
|
+
@dc.dataclass(frozen=True)
|
2459
|
+
class _ScopeSeedInjectorProvider(InjectorProvider):
|
2460
|
+
k: InjectorKey
|
2461
|
+
sc: ta.Type[InjectorScope]
|
2462
|
+
|
2463
|
+
def __post_init__(self) -> None:
|
2464
|
+
check.isinstance(self.k, InjectorKey)
|
2465
|
+
check.issubclass(self.sc, InjectorScope)
|
2466
|
+
|
2467
|
+
def provider_fn(self) -> InjectorProviderFn:
|
2468
|
+
def pfn(i: Injector) -> ta.Any:
|
2469
|
+
st = i[self.sc].state()
|
2470
|
+
return st.seeds[self.k]
|
2471
|
+
return pfn
|
2472
|
+
|
2473
|
+
|
2474
|
+
def bind_injector_scope(sc: ta.Type[InjectorScope]) -> InjectorBindingOrBindings:
|
2475
|
+
return InjectorBinder.bind(sc, singleton=True)
|
2476
|
+
|
2477
|
+
|
2478
|
+
#
|
2479
|
+
|
2480
|
+
|
2481
|
+
@dc.dataclass(frozen=True)
|
2482
|
+
class _InjectorScopeSeed:
|
2483
|
+
sc: ta.Type['InjectorScope']
|
2484
|
+
k: InjectorKey
|
2485
|
+
|
2486
|
+
def __post_init__(self) -> None:
|
2487
|
+
check.issubclass(self.sc, InjectorScope)
|
2488
|
+
check.isinstance(self.k, InjectorKey)
|
2489
|
+
|
2490
|
+
|
2491
|
+
def bind_injector_scope_seed(k: ta.Any, sc: ta.Type[InjectorScope]) -> InjectorBindingOrBindings:
|
2492
|
+
kk = as_injector_key(k)
|
2493
|
+
return as_injector_bindings(
|
2494
|
+
InjectorBinding(kk, _ScopeSeedInjectorProvider(kk, sc)),
|
2495
|
+
InjectorBinder.bind(_InjectorScopeSeed(sc, kk), array=True),
|
2496
|
+
)
|
2497
|
+
|
2498
|
+
|
2499
|
+
###
|
2500
|
+
# inspection
|
2501
|
+
|
2502
|
+
|
2503
|
+
class _InjectionInspection(ta.NamedTuple):
|
2504
|
+
signature: inspect.Signature
|
2505
|
+
type_hints: ta.Mapping[str, ta.Any]
|
2506
|
+
args_offset: int
|
2507
|
+
|
2508
|
+
|
2509
|
+
_INJECTION_INSPECTION_CACHE: ta.MutableMapping[ta.Any, _InjectionInspection] = weakref.WeakKeyDictionary()
|
2510
|
+
|
2511
|
+
|
2512
|
+
def _do_injection_inspect(obj: ta.Any) -> _InjectionInspection:
|
2513
|
+
tgt = obj
|
2514
|
+
if isinstance(tgt, type) and tgt.__init__ is not object.__init__: # type: ignore[misc]
|
2515
|
+
# Python 3.8's inspect.signature can't handle subclasses overriding __new__, always generating *args/**kwargs.
|
2516
|
+
# - https://bugs.python.org/issue40897
|
2517
|
+
# - https://github.com/python/cpython/commit/df7c62980d15acd3125dfbd81546dad359f7add7
|
2518
|
+
tgt = tgt.__init__ # type: ignore[misc]
|
2519
|
+
has_generic_base = True
|
2520
|
+
else:
|
2521
|
+
has_generic_base = False
|
2522
|
+
|
2523
|
+
# inspect.signature(eval_str=True) was added in 3.10 and we have to support 3.8, so we have to get_type_hints to
|
2524
|
+
# eval str annotations *in addition to* getting the signature for parameter information.
|
2525
|
+
uw = tgt
|
2526
|
+
has_partial = False
|
2527
|
+
while True:
|
2528
|
+
if isinstance(uw, functools.partial):
|
2529
|
+
has_partial = True
|
2530
|
+
uw = uw.func
|
2531
|
+
else:
|
2532
|
+
if (uw2 := inspect.unwrap(uw)) is uw:
|
2533
|
+
break
|
2534
|
+
uw = uw2
|
2535
|
+
|
2536
|
+
if has_generic_base and has_partial:
|
2537
|
+
raise InjectorError(
|
2538
|
+
'Injector inspection does not currently support both a typing.Generic base and a functools.partial: '
|
2539
|
+
f'{obj}',
|
2540
|
+
)
|
2541
|
+
|
2542
|
+
return _InjectionInspection(
|
2543
|
+
inspect.signature(tgt),
|
2544
|
+
ta.get_type_hints(uw),
|
2545
|
+
1 if has_generic_base else 0,
|
2546
|
+
)
|
2547
|
+
|
2548
|
+
|
2549
|
+
def _injection_inspect(obj: ta.Any) -> _InjectionInspection:
|
2550
|
+
try:
|
2551
|
+
return _INJECTION_INSPECTION_CACHE[obj]
|
2552
|
+
except TypeError:
|
2553
|
+
return _do_injection_inspect(obj)
|
2554
|
+
except KeyError:
|
2555
|
+
pass
|
2556
|
+
insp = _do_injection_inspect(obj)
|
2557
|
+
_INJECTION_INSPECTION_CACHE[obj] = insp
|
2558
|
+
return insp
|
2559
|
+
|
2560
|
+
|
2561
|
+
class InjectionKwarg(ta.NamedTuple):
|
2562
|
+
name: str
|
2563
|
+
key: InjectorKey
|
2564
|
+
has_default: bool
|
2565
|
+
|
2566
|
+
|
2567
|
+
class InjectionKwargsTarget(ta.NamedTuple):
|
2568
|
+
obj: ta.Any
|
2569
|
+
kwargs: ta.Sequence[InjectionKwarg]
|
2570
|
+
|
2571
|
+
|
2572
|
+
def build_injection_kwargs_target(
|
2573
|
+
obj: ta.Any,
|
2574
|
+
*,
|
2575
|
+
skip_args: int = 0,
|
2576
|
+
skip_kwargs: ta.Optional[ta.Iterable[str]] = None,
|
2577
|
+
raw_optional: bool = False,
|
2578
|
+
) -> InjectionKwargsTarget:
|
2579
|
+
insp = _injection_inspect(obj)
|
2580
|
+
|
2581
|
+
params = list(insp.signature.parameters.values())
|
2582
|
+
|
2583
|
+
skip_names: ta.Set[str] = set()
|
2584
|
+
if skip_kwargs is not None:
|
2585
|
+
skip_names.update(check.not_isinstance(skip_kwargs, str))
|
2586
|
+
|
2587
|
+
seen: ta.Set[InjectorKey] = set()
|
2588
|
+
kws: ta.List[InjectionKwarg] = []
|
2589
|
+
for p in params[insp.args_offset + skip_args:]:
|
2590
|
+
if p.name in skip_names:
|
2591
|
+
continue
|
2592
|
+
|
2593
|
+
if p.annotation is inspect.Signature.empty:
|
2594
|
+
if p.default is not inspect.Parameter.empty:
|
2595
|
+
raise KeyError(f'{obj}, {p.name}')
|
2596
|
+
continue
|
2597
|
+
|
2598
|
+
if p.kind not in (inspect.Parameter.POSITIONAL_OR_KEYWORD, inspect.Parameter.KEYWORD_ONLY):
|
2599
|
+
raise TypeError(insp)
|
2600
|
+
|
2601
|
+
# 3.8 inspect.signature doesn't eval_str but typing.get_type_hints does, so prefer that.
|
2602
|
+
ann = insp.type_hints.get(p.name, p.annotation)
|
2603
|
+
if (
|
2604
|
+
not raw_optional and
|
2605
|
+
is_optional_alias(ann)
|
2606
|
+
):
|
2607
|
+
ann = get_optional_alias_arg(ann)
|
2608
|
+
|
2609
|
+
k = as_injector_key(ann)
|
2610
|
+
|
2611
|
+
if k in seen:
|
2612
|
+
raise DuplicateInjectorKeyError(k)
|
2613
|
+
seen.add(k)
|
2614
|
+
|
2615
|
+
kws.append(InjectionKwarg(
|
2616
|
+
p.name,
|
2617
|
+
k,
|
2618
|
+
p.default is not inspect.Parameter.empty,
|
2619
|
+
))
|
2620
|
+
|
2621
|
+
return InjectionKwargsTarget(
|
2622
|
+
obj,
|
2623
|
+
kws,
|
2624
|
+
)
|
2625
|
+
|
2626
|
+
|
2627
|
+
###
|
2628
|
+
# injector
|
2629
|
+
|
2630
|
+
|
2631
|
+
_INJECTOR_INJECTOR_KEY: InjectorKey[Injector] = InjectorKey(Injector)
|
2632
|
+
|
2633
|
+
|
2634
|
+
@dc.dataclass(frozen=True)
|
2635
|
+
class _InjectorEager:
|
2636
|
+
key: InjectorKey
|
2637
|
+
|
2638
|
+
|
2639
|
+
_INJECTOR_EAGER_ARRAY_KEY: InjectorKey[_InjectorEager] = InjectorKey(_InjectorEager, array=True)
|
2640
|
+
|
2641
|
+
|
2642
|
+
class _Injector(Injector):
|
2643
|
+
_DEFAULT_BINDINGS: ta.ClassVar[ta.List[InjectorBinding]] = []
|
2644
|
+
|
2645
|
+
def __init__(self, bs: InjectorBindings, p: ta.Optional[Injector] = None) -> None:
|
2646
|
+
super().__init__()
|
2647
|
+
|
2648
|
+
self._bs = check.isinstance(bs, InjectorBindings)
|
2649
|
+
self._p: ta.Optional[Injector] = check.isinstance(p, (Injector, type(None)))
|
2650
|
+
|
2651
|
+
self._pfm = {
|
2652
|
+
k: v.provider_fn()
|
2653
|
+
for k, v in build_injector_provider_map(as_injector_bindings(
|
2654
|
+
*self._DEFAULT_BINDINGS,
|
2655
|
+
bs,
|
2656
|
+
)).items()
|
2657
|
+
}
|
2658
|
+
|
2659
|
+
if _INJECTOR_INJECTOR_KEY in self._pfm:
|
2660
|
+
raise DuplicateInjectorKeyError(_INJECTOR_INJECTOR_KEY)
|
2661
|
+
|
2662
|
+
self.__cur_req: ta.Optional[_Injector._Request] = None
|
2663
|
+
|
2664
|
+
if _INJECTOR_EAGER_ARRAY_KEY in self._pfm:
|
2665
|
+
for e in self.provide(_INJECTOR_EAGER_ARRAY_KEY):
|
2666
|
+
self.provide(e.key)
|
2667
|
+
|
2668
|
+
class _Request:
|
2669
|
+
def __init__(self, injector: '_Injector') -> None:
|
2670
|
+
super().__init__()
|
2671
|
+
self._injector = injector
|
2672
|
+
self._provisions: ta.Dict[InjectorKey, Maybe] = {}
|
2673
|
+
self._seen_keys: ta.Set[InjectorKey] = set()
|
2674
|
+
|
2675
|
+
def handle_key(self, key: InjectorKey) -> Maybe[Maybe]:
|
2676
|
+
try:
|
2677
|
+
return Maybe.just(self._provisions[key])
|
2678
|
+
except KeyError:
|
2679
|
+
pass
|
2680
|
+
if key in self._seen_keys:
|
2681
|
+
raise CyclicDependencyInjectorKeyError(key)
|
2682
|
+
self._seen_keys.add(key)
|
2683
|
+
return Maybe.empty()
|
2684
|
+
|
2685
|
+
def handle_provision(self, key: InjectorKey, mv: Maybe) -> Maybe:
|
2686
|
+
check.in_(key, self._seen_keys)
|
2687
|
+
check.not_in(key, self._provisions)
|
2688
|
+
self._provisions[key] = mv
|
2689
|
+
return mv
|
2690
|
+
|
2691
|
+
@contextlib.contextmanager
|
2692
|
+
def _current_request(self) -> ta.Generator[_Request, None, None]:
|
2693
|
+
if (cr := self.__cur_req) is not None:
|
2694
|
+
yield cr
|
2695
|
+
return
|
2696
|
+
|
2697
|
+
cr = self._Request(self)
|
2698
|
+
try:
|
2699
|
+
self.__cur_req = cr
|
2700
|
+
yield cr
|
2701
|
+
finally:
|
2702
|
+
self.__cur_req = None
|
2703
|
+
|
2704
|
+
def try_provide(self, key: ta.Any) -> Maybe[ta.Any]:
|
2705
|
+
key = as_injector_key(key)
|
2706
|
+
|
2707
|
+
cr: _Injector._Request
|
2708
|
+
with self._current_request() as cr:
|
2709
|
+
if (rv := cr.handle_key(key)).present:
|
2710
|
+
return rv.must()
|
2711
|
+
|
2712
|
+
if key == _INJECTOR_INJECTOR_KEY:
|
2713
|
+
return cr.handle_provision(key, Maybe.just(self))
|
2714
|
+
|
2715
|
+
fn = self._pfm.get(key)
|
2716
|
+
if fn is not None:
|
2717
|
+
return cr.handle_provision(key, Maybe.just(fn(self)))
|
2718
|
+
|
2719
|
+
if self._p is not None:
|
2720
|
+
pv = self._p.try_provide(key)
|
2721
|
+
if pv is not None:
|
2722
|
+
return cr.handle_provision(key, Maybe.empty())
|
2723
|
+
|
2724
|
+
return cr.handle_provision(key, Maybe.empty())
|
2725
|
+
|
2726
|
+
def provide(self, key: ta.Any) -> ta.Any:
|
2727
|
+
v = self.try_provide(key)
|
2728
|
+
if v.present:
|
2729
|
+
return v.must()
|
2730
|
+
raise UnboundInjectorKeyError(key)
|
2731
|
+
|
2732
|
+
def provide_kwargs(
|
2733
|
+
self,
|
2734
|
+
obj: ta.Any,
|
2735
|
+
*,
|
2736
|
+
skip_args: int = 0,
|
2737
|
+
skip_kwargs: ta.Optional[ta.Iterable[ta.Any]] = None,
|
2738
|
+
) -> ta.Mapping[str, ta.Any]:
|
2739
|
+
kt = build_injection_kwargs_target(
|
2740
|
+
obj,
|
2741
|
+
skip_args=skip_args,
|
2742
|
+
skip_kwargs=skip_kwargs,
|
2743
|
+
)
|
2744
|
+
|
2745
|
+
ret: ta.Dict[str, ta.Any] = {}
|
2746
|
+
for kw in kt.kwargs:
|
2747
|
+
if kw.has_default:
|
2748
|
+
if not (mv := self.try_provide(kw.key)).present:
|
2749
|
+
continue
|
2750
|
+
v = mv.must()
|
2751
|
+
else:
|
2752
|
+
v = self.provide(kw.key)
|
2753
|
+
ret[kw.name] = v
|
2754
|
+
return ret
|
2755
|
+
|
2756
|
+
def inject(
|
2757
|
+
self,
|
2758
|
+
obj: ta.Any,
|
2759
|
+
*,
|
2760
|
+
args: ta.Optional[ta.Sequence[ta.Any]] = None,
|
2761
|
+
kwargs: ta.Optional[ta.Mapping[str, ta.Any]] = None,
|
2762
|
+
) -> ta.Any:
|
2763
|
+
provided = self.provide_kwargs(
|
2764
|
+
obj,
|
2765
|
+
skip_args=len(args) if args is not None else 0,
|
2766
|
+
skip_kwargs=kwargs if kwargs is not None else None,
|
2767
|
+
)
|
2768
|
+
|
2769
|
+
return obj(
|
2770
|
+
*(args if args is not None else ()),
|
2771
|
+
**(kwargs if kwargs is not None else {}),
|
2772
|
+
**provided,
|
2773
|
+
)
|
2774
|
+
|
2775
|
+
|
2776
|
+
###
|
2777
|
+
# binder
|
2778
|
+
|
2779
|
+
|
2780
|
+
class InjectorBinder:
|
2781
|
+
def __new__(cls, *args, **kwargs): # noqa
|
2782
|
+
raise TypeError
|
2783
|
+
|
2784
|
+
_FN_TYPES: ta.ClassVar[ta.Tuple[type, ...]] = (
|
2785
|
+
types.FunctionType,
|
2786
|
+
types.MethodType,
|
2787
|
+
|
2788
|
+
classmethod,
|
2789
|
+
staticmethod,
|
2790
|
+
|
2791
|
+
functools.partial,
|
2792
|
+
functools.partialmethod,
|
2793
|
+
)
|
2794
|
+
|
2795
|
+
@classmethod
|
2796
|
+
def _is_fn(cls, obj: ta.Any) -> bool:
|
2797
|
+
return isinstance(obj, cls._FN_TYPES)
|
2798
|
+
|
2799
|
+
@classmethod
|
2800
|
+
def bind_as_fn(cls, icls: ta.Type[T]) -> ta.Type[T]:
|
2801
|
+
check.isinstance(icls, type)
|
2802
|
+
if icls not in cls._FN_TYPES:
|
2803
|
+
cls._FN_TYPES = (*cls._FN_TYPES, icls)
|
2804
|
+
return icls
|
2805
|
+
|
2806
|
+
_BANNED_BIND_TYPES: ta.ClassVar[ta.Tuple[type, ...]] = (
|
2807
|
+
InjectorProvider,
|
2808
|
+
)
|
2809
|
+
|
2810
|
+
@classmethod
|
2811
|
+
def bind(
|
2812
|
+
cls,
|
2813
|
+
obj: ta.Any,
|
2814
|
+
*,
|
2815
|
+
key: ta.Any = None,
|
2816
|
+
tag: ta.Any = None,
|
2817
|
+
array: ta.Optional[bool] = None, # noqa
|
2818
|
+
|
2819
|
+
to_fn: ta.Any = None,
|
2820
|
+
to_ctor: ta.Any = None,
|
2821
|
+
to_const: ta.Any = None,
|
2822
|
+
to_key: ta.Any = None,
|
2823
|
+
|
2824
|
+
in_: ta.Optional[ta.Type[InjectorScope]] = None,
|
2825
|
+
singleton: bool = False,
|
2826
|
+
|
2827
|
+
eager: bool = False,
|
2828
|
+
) -> InjectorBindingOrBindings:
|
2829
|
+
if obj is None or obj is inspect.Parameter.empty:
|
2830
|
+
raise TypeError(obj)
|
2831
|
+
if isinstance(obj, cls._BANNED_BIND_TYPES):
|
2832
|
+
raise TypeError(obj)
|
2833
|
+
|
2834
|
+
#
|
2835
|
+
|
2836
|
+
if key is not None:
|
2837
|
+
key = as_injector_key(key)
|
2838
|
+
|
2839
|
+
#
|
2840
|
+
|
2841
|
+
has_to = (
|
2842
|
+
to_fn is not None or
|
2843
|
+
to_ctor is not None or
|
2844
|
+
to_const is not None or
|
2845
|
+
to_key is not None
|
2846
|
+
)
|
2847
|
+
if isinstance(obj, InjectorKey):
|
2848
|
+
if key is None:
|
2849
|
+
key = obj
|
2850
|
+
elif isinstance(obj, type):
|
2851
|
+
if not has_to:
|
2852
|
+
to_ctor = obj
|
2853
|
+
if key is None:
|
2854
|
+
key = InjectorKey(obj)
|
2855
|
+
elif cls._is_fn(obj) and not has_to:
|
2856
|
+
to_fn = obj
|
2857
|
+
if key is None:
|
2858
|
+
insp = _injection_inspect(obj)
|
2859
|
+
key_cls: ta.Any = check_valid_injector_key_cls(check.not_none(insp.type_hints.get('return')))
|
2860
|
+
key = InjectorKey(key_cls)
|
2861
|
+
else:
|
2862
|
+
if to_const is not None:
|
2863
|
+
raise TypeError('Cannot bind instance with to_const')
|
2864
|
+
to_const = obj
|
2865
|
+
if key is None:
|
2866
|
+
key = InjectorKey(type(obj))
|
2867
|
+
del has_to
|
2868
|
+
|
2869
|
+
#
|
2870
|
+
|
2871
|
+
if tag is not None:
|
2872
|
+
if key.tag is not None:
|
2873
|
+
raise TypeError('Tag already set')
|
2874
|
+
key = dc.replace(key, tag=tag)
|
2875
|
+
|
2876
|
+
if array is not None:
|
2877
|
+
key = dc.replace(key, array=array)
|
2878
|
+
|
2879
|
+
#
|
2880
|
+
|
2881
|
+
providers: ta.List[InjectorProvider] = []
|
2882
|
+
if to_fn is not None:
|
2883
|
+
providers.append(FnInjectorProvider(to_fn))
|
2884
|
+
if to_ctor is not None:
|
2885
|
+
providers.append(CtorInjectorProvider(to_ctor))
|
2886
|
+
if to_const is not None:
|
2887
|
+
providers.append(ConstInjectorProvider(to_const))
|
2888
|
+
if to_key is not None:
|
2889
|
+
providers.append(LinkInjectorProvider(as_injector_key(to_key)))
|
2890
|
+
if not providers:
|
2891
|
+
raise TypeError('Must specify provider')
|
2892
|
+
if len(providers) > 1:
|
2893
|
+
raise TypeError('May not specify multiple providers')
|
2894
|
+
provider = check.single(providers)
|
2895
|
+
|
2896
|
+
#
|
2897
|
+
|
2898
|
+
pws: ta.List[ta.Any] = []
|
2899
|
+
if in_ is not None:
|
2900
|
+
check.issubclass(in_, InjectorScope)
|
2901
|
+
check.not_in(abc.ABC, in_.__bases__)
|
2902
|
+
pws.append(functools.partial(ScopedInjectorProvider, k=key, sc=in_))
|
2903
|
+
if singleton:
|
2904
|
+
pws.append(SingletonInjectorProvider)
|
2905
|
+
if len(pws) > 1:
|
2906
|
+
raise TypeError('May not specify multiple provider wrappers')
|
2907
|
+
elif pws:
|
2908
|
+
provider = check.single(pws)(provider)
|
2909
|
+
|
2910
|
+
#
|
2911
|
+
|
2912
|
+
binding = InjectorBinding(key, provider)
|
2913
|
+
|
2914
|
+
#
|
2915
|
+
|
2916
|
+
extras: ta.List[InjectorBinding] = []
|
2917
|
+
|
2918
|
+
if eager:
|
2919
|
+
extras.append(bind_injector_eager_key(key))
|
2920
|
+
|
2921
|
+
#
|
2922
|
+
|
2923
|
+
if extras:
|
2924
|
+
return as_injector_bindings(binding, *extras)
|
2925
|
+
else:
|
2926
|
+
return binding
|
2927
|
+
|
2928
|
+
|
2929
|
+
###
|
2930
|
+
# injection helpers
|
2931
|
+
|
2932
|
+
|
2933
|
+
def make_injector_factory(
|
2934
|
+
fn: ta.Callable[..., T],
|
2935
|
+
cls: U,
|
2936
|
+
ann: ta.Any = None,
|
2937
|
+
) -> ta.Callable[..., U]:
|
2938
|
+
if ann is None:
|
2939
|
+
ann = cls
|
2940
|
+
|
2941
|
+
def outer(injector: Injector) -> ann:
|
2942
|
+
def inner(*args, **kwargs):
|
2943
|
+
return injector.inject(fn, args=args, kwargs=kwargs)
|
2944
|
+
return cls(inner) # type: ignore
|
2945
|
+
|
2946
|
+
return outer
|
2947
|
+
|
2948
|
+
|
2949
|
+
def bind_injector_array(
|
2950
|
+
obj: ta.Any = None,
|
2951
|
+
*,
|
2952
|
+
tag: ta.Any = None,
|
2953
|
+
) -> InjectorBindingOrBindings:
|
2954
|
+
key = as_injector_key(obj)
|
2955
|
+
if tag is not None:
|
2956
|
+
if key.tag is not None:
|
2957
|
+
raise ValueError('Must not specify multiple tags')
|
2958
|
+
key = dc.replace(key, tag=tag)
|
2959
|
+
|
2960
|
+
if key.array:
|
2961
|
+
raise ValueError('Key must not be array')
|
2962
|
+
|
2963
|
+
return InjectorBinding(
|
2964
|
+
dc.replace(key, array=True),
|
2965
|
+
ArrayInjectorProvider([]),
|
2966
|
+
)
|
2967
|
+
|
2968
|
+
|
2969
|
+
def make_injector_array_type(
|
2970
|
+
ele: ta.Union[InjectorKey, InjectorKeyCls],
|
2971
|
+
cls: U,
|
2972
|
+
ann: ta.Any = None,
|
2973
|
+
) -> ta.Callable[..., U]:
|
2974
|
+
if isinstance(ele, InjectorKey):
|
2975
|
+
if not ele.array:
|
2976
|
+
raise InjectorError('Provided key must be array', ele)
|
2977
|
+
key = ele
|
2978
|
+
else:
|
2979
|
+
key = dc.replace(as_injector_key(ele), array=True)
|
2980
|
+
|
2981
|
+
if ann is None:
|
2982
|
+
ann = cls
|
2983
|
+
|
2984
|
+
def inner(injector: Injector) -> ann:
|
2985
|
+
return cls(injector.provide(key)) # type: ignore[operator]
|
2986
|
+
|
2987
|
+
return inner
|
2988
|
+
|
2989
|
+
|
2990
|
+
def bind_injector_eager_key(key: ta.Any) -> InjectorBinding:
|
2991
|
+
return InjectorBinding(_INJECTOR_EAGER_ARRAY_KEY, ConstInjectorProvider(_InjectorEager(as_injector_key(key))))
|
2992
|
+
|
2993
|
+
|
2994
|
+
###
|
2995
|
+
# api
|
2996
|
+
|
2997
|
+
|
2998
|
+
class InjectionApi:
|
2999
|
+
# keys
|
3000
|
+
|
3001
|
+
def as_key(self, o: ta.Any) -> InjectorKey:
|
3002
|
+
return as_injector_key(o)
|
3003
|
+
|
3004
|
+
def array(self, o: ta.Any) -> InjectorKey:
|
3005
|
+
return dc.replace(as_injector_key(o), array=True)
|
3006
|
+
|
3007
|
+
def tag(self, o: ta.Any, t: ta.Any) -> InjectorKey:
|
3008
|
+
return dc.replace(as_injector_key(o), tag=t)
|
3009
|
+
|
3010
|
+
# bindings
|
3011
|
+
|
3012
|
+
def as_bindings(self, *args: InjectorBindingOrBindings) -> InjectorBindings:
|
3013
|
+
return as_injector_bindings(*args)
|
3014
|
+
|
3015
|
+
# overrides
|
3016
|
+
|
3017
|
+
def override(self, p: InjectorBindings, *args: InjectorBindingOrBindings) -> InjectorBindings:
|
3018
|
+
return injector_override(p, *args)
|
3019
|
+
|
3020
|
+
# scopes
|
3021
|
+
|
3022
|
+
def bind_scope(self, sc: ta.Type[InjectorScope]) -> InjectorBindingOrBindings:
|
3023
|
+
return bind_injector_scope(sc)
|
3024
|
+
|
3025
|
+
def bind_scope_seed(self, k: ta.Any, sc: ta.Type[InjectorScope]) -> InjectorBindingOrBindings:
|
3026
|
+
return bind_injector_scope_seed(k, sc)
|
3027
|
+
|
3028
|
+
# injector
|
3029
|
+
|
3030
|
+
def create_injector(self, *args: InjectorBindingOrBindings, parent: ta.Optional[Injector] = None) -> Injector:
|
3031
|
+
return _Injector(as_injector_bindings(*args), parent)
|
3032
|
+
|
3033
|
+
# binder
|
3034
|
+
|
3035
|
+
def bind(
|
3036
|
+
self,
|
3037
|
+
obj: ta.Any,
|
3038
|
+
*,
|
3039
|
+
key: ta.Any = None,
|
3040
|
+
tag: ta.Any = None,
|
3041
|
+
array: ta.Optional[bool] = None, # noqa
|
3042
|
+
|
3043
|
+
to_fn: ta.Any = None,
|
3044
|
+
to_ctor: ta.Any = None,
|
3045
|
+
to_const: ta.Any = None,
|
3046
|
+
to_key: ta.Any = None,
|
3047
|
+
|
3048
|
+
in_: ta.Optional[ta.Type[InjectorScope]] = None,
|
3049
|
+
singleton: bool = False,
|
3050
|
+
|
3051
|
+
eager: bool = False,
|
3052
|
+
) -> InjectorBindingOrBindings:
|
3053
|
+
return InjectorBinder.bind(
|
3054
|
+
obj,
|
3055
|
+
|
3056
|
+
key=key,
|
3057
|
+
tag=tag,
|
3058
|
+
array=array,
|
3059
|
+
|
3060
|
+
to_fn=to_fn,
|
3061
|
+
to_ctor=to_ctor,
|
3062
|
+
to_const=to_const,
|
3063
|
+
to_key=to_key,
|
3064
|
+
|
3065
|
+
in_=in_,
|
3066
|
+
singleton=singleton,
|
3067
|
+
|
3068
|
+
eager=eager,
|
3069
|
+
)
|
3070
|
+
|
3071
|
+
# helpers
|
3072
|
+
|
3073
|
+
def bind_factory(
|
3074
|
+
self,
|
3075
|
+
fn: ta.Callable[..., T],
|
3076
|
+
cls_: U,
|
3077
|
+
ann: ta.Any = None,
|
3078
|
+
) -> InjectorBindingOrBindings:
|
3079
|
+
return self.bind(make_injector_factory(fn, cls_, ann))
|
3080
|
+
|
3081
|
+
def bind_array(
|
3082
|
+
self,
|
3083
|
+
obj: ta.Any = None,
|
3084
|
+
*,
|
3085
|
+
tag: ta.Any = None,
|
3086
|
+
) -> InjectorBindingOrBindings:
|
3087
|
+
return bind_injector_array(obj, tag=tag)
|
3088
|
+
|
3089
|
+
def bind_array_type(
|
3090
|
+
self,
|
3091
|
+
ele: ta.Union[InjectorKey, InjectorKeyCls],
|
3092
|
+
cls_: U,
|
3093
|
+
ann: ta.Any = None,
|
3094
|
+
) -> InjectorBindingOrBindings:
|
3095
|
+
return self.bind(make_injector_array_type(ele, cls_, ann))
|
3096
|
+
|
3097
|
+
|
3098
|
+
inj = InjectionApi()
|
3099
|
+
|
3100
|
+
|
3101
|
+
########################################
|
3102
|
+
# ../../../omlish/lite/runtime.py
|
3103
|
+
|
3104
|
+
|
3105
|
+
@cached_nullary
|
3106
|
+
def is_debugger_attached() -> bool:
|
3107
|
+
return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
|
3108
|
+
|
3109
|
+
|
3110
|
+
LITE_REQUIRED_PYTHON_VERSION = (3, 8)
|
3111
|
+
|
3112
|
+
|
3113
|
+
def check_lite_runtime_version() -> None:
|
3114
|
+
if sys.version_info < LITE_REQUIRED_PYTHON_VERSION:
|
3115
|
+
raise OSError(f'Requires python {LITE_REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}') # noqa
|
3116
|
+
|
3117
|
+
|
3118
|
+
########################################
|
3119
|
+
# ../../../omlish/logs/json.py
|
3120
|
+
"""
|
3121
|
+
TODO:
|
3122
|
+
- translate json keys
|
3123
|
+
"""
|
3124
|
+
|
3125
|
+
|
3126
|
+
class JsonLogFormatter(logging.Formatter):
|
3127
|
+
KEYS: ta.Mapping[str, bool] = {
|
3128
|
+
'name': False,
|
3129
|
+
'msg': False,
|
3130
|
+
'args': False,
|
3131
|
+
'levelname': False,
|
3132
|
+
'levelno': False,
|
3133
|
+
'pathname': False,
|
3134
|
+
'filename': False,
|
3135
|
+
'module': False,
|
3136
|
+
'exc_info': True,
|
3137
|
+
'exc_text': True,
|
3138
|
+
'stack_info': True,
|
3139
|
+
'lineno': False,
|
3140
|
+
'funcName': False,
|
3141
|
+
'created': False,
|
3142
|
+
'msecs': False,
|
3143
|
+
'relativeCreated': False,
|
3144
|
+
'thread': False,
|
3145
|
+
'threadName': False,
|
3146
|
+
'processName': False,
|
3147
|
+
'process': False,
|
3148
|
+
}
|
3149
|
+
|
3150
|
+
def __init__(
|
3151
|
+
self,
|
3152
|
+
*args: ta.Any,
|
3153
|
+
json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
|
3154
|
+
**kwargs: ta.Any,
|
3155
|
+
) -> None:
|
3156
|
+
super().__init__(*args, **kwargs)
|
3157
|
+
|
3158
|
+
if json_dumps is None:
|
3159
|
+
json_dumps = json_dumps_compact
|
3160
|
+
self._json_dumps = json_dumps
|
3161
|
+
|
3162
|
+
def format(self, record: logging.LogRecord) -> str:
|
3163
|
+
dct = {
|
3164
|
+
k: v
|
3165
|
+
for k, o in self.KEYS.items()
|
3166
|
+
for v in [getattr(record, k)]
|
3167
|
+
if not (o and v is None)
|
3168
|
+
}
|
3169
|
+
return self._json_dumps(dct)
|
3170
|
+
|
3171
|
+
|
3172
|
+
########################################
|
3173
|
+
# ../../../omlish/os/temp.py
|
3174
|
+
|
3175
|
+
|
3176
|
+
def make_temp_file(**kwargs: ta.Any) -> str:
|
3177
|
+
file_fd, file = tempfile.mkstemp(**kwargs)
|
3178
|
+
os.close(file_fd)
|
3179
|
+
return file
|
3180
|
+
|
3181
|
+
|
3182
|
+
@contextlib.contextmanager
|
3183
|
+
def temp_file_context(**kwargs: ta.Any) -> ta.Iterator[str]:
|
3184
|
+
path = make_temp_file(**kwargs)
|
3185
|
+
try:
|
3186
|
+
yield path
|
3187
|
+
finally:
|
3188
|
+
unlink_if_exists(path)
|
3189
|
+
|
3190
|
+
|
3191
|
+
@contextlib.contextmanager
|
3192
|
+
def temp_dir_context(
|
3193
|
+
root_dir: ta.Optional[str] = None,
|
3194
|
+
**kwargs: ta.Any,
|
3195
|
+
) -> ta.Iterator[str]:
|
3196
|
+
path = tempfile.mkdtemp(dir=root_dir, **kwargs)
|
3197
|
+
try:
|
3198
|
+
yield path
|
3199
|
+
finally:
|
3200
|
+
shutil.rmtree(path, ignore_errors=True)
|
3201
|
+
|
3202
|
+
|
3203
|
+
@contextlib.contextmanager
|
3204
|
+
def temp_named_file_context(
|
3205
|
+
root_dir: ta.Optional[str] = None,
|
3206
|
+
cleanup: bool = True,
|
3207
|
+
**kwargs: ta.Any,
|
3208
|
+
) -> ta.Iterator[tempfile._TemporaryFileWrapper]: # noqa
|
3209
|
+
with tempfile.NamedTemporaryFile(dir=root_dir, delete=False, **kwargs) as f:
|
3210
|
+
try:
|
3211
|
+
yield f
|
3212
|
+
finally:
|
3213
|
+
if cleanup:
|
3214
|
+
shutil.rmtree(f.name, ignore_errors=True)
|
3215
|
+
|
3216
|
+
|
3217
|
+
########################################
|
3218
|
+
# ../docker/utils.py
|
3219
|
+
"""
|
3220
|
+
TODO:
|
3221
|
+
- some less stupid Dockerfile hash
|
3222
|
+
- doesn't change too much though
|
3223
|
+
"""
|
3224
|
+
|
3225
|
+
|
3226
|
+
##
|
3227
|
+
|
3228
|
+
|
3229
|
+
def build_docker_file_hash(docker_file: str) -> str:
|
3230
|
+
with open(docker_file) as f:
|
3231
|
+
contents = f.read()
|
3232
|
+
|
3233
|
+
return sha256_str(contents)
|
3234
|
+
|
3235
|
+
|
3236
|
+
##
|
3237
|
+
|
3238
|
+
|
3239
|
+
def read_docker_tar_image_tag(tar_file: str) -> str:
|
3240
|
+
with tarfile.open(tar_file) as tf:
|
3241
|
+
with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
|
3242
|
+
m = mf.read()
|
3243
|
+
|
3244
|
+
manifests = json.loads(m.decode('utf-8'))
|
3245
|
+
manifest = check.single(manifests)
|
3246
|
+
tag = check.non_empty_str(check.single(manifest['RepoTags']))
|
3247
|
+
return tag
|
3248
|
+
|
3249
|
+
|
3250
|
+
def read_docker_tar_image_id(tar_file: str) -> str:
|
3251
|
+
with tarfile.open(tar_file) as tf:
|
3252
|
+
with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
|
3253
|
+
i = mf.read()
|
3254
|
+
|
3255
|
+
index = json.loads(i.decode('utf-8'))
|
3256
|
+
manifest = check.single(index['manifests'])
|
3257
|
+
image_id = check.non_empty_str(manifest['digest'])
|
3258
|
+
return image_id
|
3259
|
+
|
3260
|
+
|
3261
|
+
########################################
|
3262
|
+
# ../github/client.py
|
3263
|
+
|
3264
|
+
|
3265
|
+
##
+
+
+class GithubCacheClient(abc.ABC):
+    class Entry(abc.ABC):  # noqa
+        pass
+
+    @abc.abstractmethod
+    def get_entry(self, key: str) -> ta.Awaitable[ta.Optional[Entry]]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def download_file(self, entry: Entry, out_file: str) -> ta.Awaitable[None]:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def upload_file(self, key: str, in_file: str) -> ta.Awaitable[None]:
+        raise NotImplementedError
+
+
+##
+
+
+class GithubCacheServiceV1BaseClient(GithubCacheClient, abc.ABC):
+    BASE_URL_ENV_VAR = register_github_env_var('ACTIONS_CACHE_URL')
+    AUTH_TOKEN_ENV_VAR = register_github_env_var('ACTIONS_RUNTIME_TOKEN')  # noqa
+
+    KEY_SUFFIX_ENV_VAR = register_github_env_var('GITHUB_RUN_ID')
+
+    #
+
+    def __init__(
+            self,
+            *,
+            base_url: ta.Optional[str] = None,
+            auth_token: ta.Optional[str] = None,
+
+            key_prefix: ta.Optional[str] = None,
+            key_suffix: ta.Optional[str] = None,
+
+            cache_version: int = CI_CACHE_VERSION,
+
+            loop: ta.Optional[asyncio.AbstractEventLoop] = None,
+    ) -> None:
+        super().__init__()
+
+        #
+
+        if base_url is None:
+            base_url = check.non_empty_str(self.BASE_URL_ENV_VAR())
+        self._service_url = GithubCacheServiceV1.get_service_url(base_url)
+
+        if auth_token is None:
+            auth_token = self.AUTH_TOKEN_ENV_VAR()
+        self._auth_token = auth_token
+
+        #
+
+        self._key_prefix = key_prefix
+
+        if key_suffix is None:
+            key_suffix = self.KEY_SUFFIX_ENV_VAR()
+        self._key_suffix = check.non_empty_str(key_suffix)
+
+        #
+
+        self._cache_version = check.isinstance(cache_version, int)
+
+        #
+
+        self._given_loop = loop
+
+    #
+
+    def _get_loop(self) -> asyncio.AbstractEventLoop:
+        if (loop := self._given_loop) is not None:
+            return loop
+        return asyncio.get_event_loop()
+
+    #
+
+    def build_request_headers(
+            self,
+            headers: ta.Optional[ta.Mapping[str, str]] = None,
+            *,
+            content_type: ta.Optional[str] = None,
+            json_content: bool = False,
+    ) -> ta.Dict[str, str]:
+        dct = {
+            'Accept': ';'.join([
+                'application/json',
+                f'api-version={GithubCacheServiceV1.API_VERSION}',
+            ]),
+        }
+
+        if (auth_token := self._auth_token):
+            dct['Authorization'] = f'Bearer {auth_token}'
+
+        if content_type is None and json_content:
+            content_type = 'application/json'
+        if content_type is not None:
+            dct['Content-Type'] = content_type
+
+        if headers:
+            dct.update(headers)
+
+        return dct
+
+    #
+
+    def load_json_bytes(self, b: ta.Optional[bytes]) -> ta.Optional[ta.Any]:
+        if not b:
+            return None
+        return json.loads(b.decode('utf-8-sig'))
+
+    #
+
+    async def send_url_request(
+            self,
+            req: urllib.request.Request,
+    ) -> ta.Tuple[http.client.HTTPResponse, ta.Optional[bytes]]:
+        def run_sync():
+            with urllib.request.urlopen(req) as resp:  # noqa
+                body = resp.read()
+            return (resp, body)
+
+        return await self._get_loop().run_in_executor(None, run_sync)  # noqa
+
+    #
+
+    @dc.dataclass()
+    class ServiceRequestError(RuntimeError):
+        status_code: int
+        body: ta.Optional[bytes]
+
+        def __str__(self) -> str:
+            return repr(self)
+
+    async def send_service_request(
+            self,
+            path: str,
+            *,
+            method: ta.Optional[str] = None,
+            headers: ta.Optional[ta.Mapping[str, str]] = None,
+            content_type: ta.Optional[str] = None,
             content: ta.Optional[bytes] = None,
             json_content: ta.Optional[ta.Any] = None,
             success_status_codes: ta.Optional[ta.Container[int]] = None,
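A minimal usage sketch of the abstract `GithubCacheClient` interface added above (not part of the diff; the helper name and cache key are illustrative):

# Illustrative only: restore a cached file through the abstract client API.
async def restore_from_cache(client: GithubCacheClient, key: str, out_file: str) -> bool:
    entry = await client.get_entry(key)  # None signals a cache miss
    if entry is None:
        return False
    await client.download_file(entry, out_file)
    return True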
@@ -2911,7 +4098,51 @@ class BaseSubprocesses(abc.ABC): # noqa
 ##


+@dc.dataclass(frozen=True)
+class SubprocessRun:
+    cmd: ta.Sequence[str]
+    input: ta.Any = None
+    timeout: ta.Optional[float] = None
+    check: bool = False
+    capture_output: ta.Optional[bool] = None
+    kwargs: ta.Optional[ta.Mapping[str, ta.Any]] = None
+
+
+@dc.dataclass(frozen=True)
+class SubprocessRunOutput(ta.Generic[T]):
+    proc: T
+
+    returncode: int  # noqa
+
+    stdout: ta.Optional[bytes] = None
+    stderr: ta.Optional[bytes] = None
+
+
 class AbstractSubprocesses(BaseSubprocesses, abc.ABC):
+    @abc.abstractmethod
+    def run_(self, run: SubprocessRun) -> SubprocessRunOutput:
+        raise NotImplementedError
+
+    def run(
+            self,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> SubprocessRunOutput:
+        return self.run_(SubprocessRun(
+            cmd=cmd,
+            input=input,
+            timeout=timeout,
+            check=check,
+            capture_output=capture_output,
+            kwargs=kwargs,
+        ))
+
+    #
+
     @abc.abstractmethod
     def check_call(
             self,
@@ -2975,6 +4206,25 @@ class AbstractSubprocesses(BaseSubprocesses, abc.ABC):


 class Subprocesses(AbstractSubprocesses):
+    def run_(self, run: SubprocessRun) -> SubprocessRunOutput[subprocess.CompletedProcess]:
+        proc = subprocess.run(
+            run.cmd,
+            input=run.input,
+            timeout=run.timeout,
+            check=run.check,
+            capture_output=run.capture_output or False,
+            **(run.kwargs or {}),
+        )
+
+        return SubprocessRunOutput(
+            proc=proc,
+
+            returncode=proc.returncode,
+
+            stdout=proc.stdout,  # noqa
+            stderr=proc.stderr,  # noqa
+        )
+
     def check_call(
             self,
             *cmd: str,
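A short sketch of how the new `SubprocessRun`/`SubprocessRunOutput` pair is meant to be used through the module-level `subprocesses` singleton; the command shown is illustrative, and both calls are equivalent:

# Illustrative only.
out = subprocesses.run('git', 'rev-parse', 'HEAD', capture_output=True, check=True)
print(out.returncode, out.stdout)

out = subprocesses.run_(SubprocessRun(
    cmd=('git', 'rev-parse', 'HEAD'),
    capture_output=True,
    check=True,
))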
@@ -3000,6 +4250,30 @@ subprocesses = Subprocesses()


 class AbstractAsyncSubprocesses(BaseSubprocesses):
+    @abc.abstractmethod
+    async def run_(self, run: SubprocessRun) -> SubprocessRunOutput:
+        raise NotImplementedError
+
+    def run(
+            self,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> ta.Awaitable[SubprocessRunOutput]:
+        return self.run_(SubprocessRun(
+            cmd=cmd,
+            input=input,
+            timeout=timeout,
+            check=check,
+            capture_output=capture_output,
+            kwargs=kwargs,
+        ))
+
+    #
+
     @abc.abstractmethod
     async def check_call(
             self,
@@ -3067,16 +4341,22 @@ class AbstractAsyncSubprocesses(BaseSubprocesses):


 class GithubFileCache(FileCache):
+    @dc.dataclass(frozen=True)
+    class Config:
+        dir: str
+
     def __init__(
             self,
-
+            config: Config,
             *,
             client: ta.Optional[GithubCacheClient] = None,
-
+            version: ta.Optional[CacheVersion] = None,
     ) -> None:
-        super().__init__(
+        super().__init__(
+            version=version,
+        )

-        self.
+        self._config = config

         if client is None:
             client = GithubCacheServiceV1Client(
@@ -3085,7 +4365,9 @@ class GithubFileCache(FileCache):
         self._client: GithubCacheClient = client

         self._local = DirectoryFileCache(
-
+            DirectoryFileCache.Config(
+                dir=check.non_empty_str(config.dir),
+            ),
             version=self._version,
         )

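A construction sketch for the reworked `GithubFileCache`: the cache directory now arrives via the frozen `Config` dataclass, while `client` and `version` keep their keyword defaults (the path is illustrative):

# Illustrative only.
cache = GithubFileCache(
    GithubFileCache.Config(
        dir='/tmp/omdev-ci-cache',  # illustrative path
    ),
)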
@@ -3388,41 +4670,32 @@ class AsyncioSubprocesses(AbstractAsyncSubprocesses):

     #

-
-
-        proc: asyncio.subprocess.Process
-        stdout: ta.Optional[bytes]
-        stderr: ta.Optional[bytes]
+    async def run_(self, run: SubprocessRun) -> SubprocessRunOutput[asyncio.subprocess.Process]:
+        kwargs = dict(run.kwargs or {})

-
-            self,
-            *cmd: str,
-            input: ta.Any = None,  # noqa
-            timeout: ta.Optional[float] = None,
-            check: bool = False,  # noqa
-            capture_output: ta.Optional[bool] = None,
-            **kwargs: ta.Any,
-    ) -> RunOutput:
-        if capture_output:
+        if run.capture_output:
             kwargs.setdefault('stdout', subprocess.PIPE)
             kwargs.setdefault('stderr', subprocess.PIPE)

         proc: asyncio.subprocess.Process
-        async with self.popen(*cmd, **kwargs) as proc:
-            stdout, stderr = await self.communicate(proc, input, timeout)
+        async with self.popen(*run.cmd, **kwargs) as proc:
+            stdout, stderr = await self.communicate(proc, run.input, run.timeout)

         if check and proc.returncode:
             raise subprocess.CalledProcessError(
                 proc.returncode,
-                cmd,
+                run.cmd,
                 output=stdout,
                 stderr=stderr,
             )

-        return
-            proc,
-
-
+        return SubprocessRunOutput(
+            proc=proc,
+
+            returncode=check.isinstance(proc.returncode, int),
+
+            stdout=stdout,
+            stderr=stderr,
         )

     #
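The asyncio-backed implementation returns the same `SubprocessRunOutput` shape. A sketch, assuming a module-level `asyncio_subprocesses = AsyncioSubprocesses()` singleton analogous to `subprocesses` (that singleton name is an assumption, not shown in this hunk):

# Illustrative only.
async def git_head() -> str:
    out = await asyncio_subprocesses.run(
        'git', 'rev-parse', 'HEAD',
        capture_output=True,
        check=True,
    )
    return check.not_none(out.stdout).decode().strip()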
@@ -3615,47 +4888,7 @@ class DockerComposeRun(AsyncExitStacked):


 ########################################
-# ../docker.py
-"""
-TODO:
- - some less stupid Dockerfile hash
-  - doesn't change too much though
-"""
-
-
-##
-
-
-def build_docker_file_hash(docker_file: str) -> str:
-    with open(docker_file) as f:
-        contents = f.read()
-
-    return sha256_str(contents)
-
-
-##
-
-
-def read_docker_tar_image_tag(tar_file: str) -> str:
-    with tarfile.open(tar_file) as tf:
-        with contextlib.closing(check.not_none(tf.extractfile('manifest.json'))) as mf:
-            m = mf.read()
-
-    manifests = json.loads(m.decode('utf-8'))
-    manifest = check.single(manifests)
-    tag = check.non_empty_str(check.single(manifest['RepoTags']))
-    return tag
-
-
-def read_docker_tar_image_id(tar_file: str) -> str:
-    with tarfile.open(tar_file) as tf:
-        with contextlib.closing(check.not_none(tf.extractfile('index.json'))) as mf:
-            i = mf.read()
-
-    index = json.loads(i.decode('utf-8'))
-    manifest = check.single(index['manifests'])
-    image_id = check.non_empty_str(manifest['digest'])
-    return image_id
+# ../docker/cmds.py


 ##
@@ -3772,75 +5005,58 @@ async def load_docker_tar(


 ########################################
-# ../
+# ../github/inject.py


-
-    KEY_HASH_LEN = 16
+##

-    @dc.dataclass(frozen=True)
-    class Config:
-        project_dir: str

-
+def bind_github(
+        *,
+        cache_dir: ta.Optional[str] = None,
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = []
+
+    if cache_dir is not None:
+        lst.extend([
+            inj.bind(GithubFileCache.Config(
+                dir=cache_dir,
+            )),
+            inj.bind(GithubFileCache, singleton=True),
+            inj.bind(FileCache, to_key=GithubFileCache),
+        ])

-
-        service: str
+    return inj.as_bindings(*lst)

-        cmd: ShellCmd

-
+########################################
+# ../docker/cache.py

-        requirements_txts: ta.Optional[ta.Sequence[str]] = None

-
-        always_build: bool = False
+##

-        no_dependencies: bool = False

-
+class DockerCache(abc.ABC):
+    @abc.abstractmethod
+    def load_cache_docker_image(self, key: str) -> ta.Awaitable[ta.Optional[str]]:
+        raise NotImplementedError

-
+    @abc.abstractmethod
+    def save_cache_docker_image(self, key: str, image: str) -> ta.Awaitable[None]:
+        raise NotImplementedError

-        def __post_init__(self) -> None:
-            check.not_isinstance(self.requirements_txts, str)

+class DockerCacheImpl(DockerCache):
     def __init__(
             self,
-            cfg: Config,
             *,
             file_cache: ta.Optional[FileCache] = None,
     ) -> None:
         super().__init__()

-        self._cfg = cfg
         self._file_cache = file_cache

-
-
-    async def _load_docker_image(self, image: str) -> None:
-        if not self._cfg.always_pull and (await is_docker_image_present(image)):
-            return
-
-        dep_suffix = image
-        for c in '/:.-_':
-            dep_suffix = dep_suffix.replace(c, '-')
-
-        cache_key = f'docker-{dep_suffix}'
-        if (await self._load_cache_docker_image(cache_key)) is not None:
-            return
-
-        await pull_docker_image(image)
-
-        await self._save_cache_docker_image(cache_key, image)
-
-    async def load_docker_image(self, image: str) -> None:
-        with log_timing_context(f'Load docker image: {image}'):
-            await self._load_docker_image(image)
-
-    #
-
-    async def _load_cache_docker_image(self, key: str) -> ta.Optional[str]:
+    async def load_cache_docker_image(self, key: str) -> ta.Optional[str]:
         if self._file_cache is None:
             return None

@@ -3852,7 +5068,7 @@ class Ci(AsyncExitStacked):

         return await load_docker_tar_cmd(get_cache_cmd)

-    async def
+    async def save_cache_docker_image(self, key: str, image: str) -> None:
         if self._file_cache is None:
             return

@@ -3863,19 +5079,58 @@ class Ci(AsyncExitStacked):

         await self._file_cache.put_file(key, tmp_file, steal=True)

-    #

-
+########################################
+# ../docker/buildcaching.py
+
+
+##
+
+
+class DockerBuildCaching(abc.ABC):
+    @abc.abstractmethod
+    def cached_build_docker_image(
+            self,
+            cache_key: str,
+            build_and_tag: ta.Callable[[str], ta.Awaitable[str]],  # image_tag -> image_id
+    ) -> ta.Awaitable[str]:
+        raise NotImplementedError
+
+
+class DockerBuildCachingImpl(DockerBuildCaching):
+    @dc.dataclass(frozen=True)
+    class Config:
+        service: str
+
+        always_build: bool = False
+
+    def __init__(
+            self,
+            *,
+            config: Config,
+
+            docker_cache: ta.Optional[DockerCache] = None,
+    ) -> None:
+        super().__init__()
+
+        self._config = config
+
+        self._docker_cache = docker_cache
+
+    async def cached_build_docker_image(
             self,
             cache_key: str,
             build_and_tag: ta.Callable[[str], ta.Awaitable[str]],
     ) -> str:
-        image_tag = f'{self.
+        image_tag = f'{self._config.service}:{cache_key}'

-        if not self.
+        if not self._config.always_build and (await is_docker_image_present(image_tag)):
             return image_tag

-        if (
+        if (
+                self._docker_cache is not None and
+                (cache_image_id := await self._docker_cache.load_cache_docker_image(cache_key)) is not None
+        ):
             await tag_docker_image(
                 cache_image_id,
                 image_tag,
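How the pieces introduced above are meant to compose, sketched without the injector; the service name, cache key, and Dockerfile path are illustrative, and the helper simply delegates to the existing `build_docker_image` function from this script:

# Illustrative only.
async def build_base_image(file_cache: FileCache) -> str:
    build_caching = DockerBuildCachingImpl(
        config=DockerBuildCachingImpl.Config(
            service='my-service',  # illustrative service name
        ),
        docker_cache=DockerCacheImpl(file_cache=file_cache),
    )

    async def build_and_tag(image_tag: str) -> str:
        # Build the image via the existing helper and return its id.
        return await build_docker_image('Dockerfile', tag=image_tag, cwd='.')

    return await build_caching.cached_build_docker_image('ci-base-0123456789abcdef', build_and_tag)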
@@ -3884,27 +5139,143 @@ class Ci(AsyncExitStacked):

         image_id = await build_and_tag(image_tag)

-
+        if self._docker_cache is not None:
+            await self._docker_cache.save_cache_docker_image(cache_key, image_id)

         return image_tag

+
+########################################
+# ../docker/imagepulling.py
+
+
+##
+
+
+class DockerImagePulling(abc.ABC):
+    @abc.abstractmethod
+    def pull_docker_image(self, image: str) -> ta.Awaitable[None]:
+        raise NotImplementedError
+
+
+class DockerImagePullingImpl(DockerImagePulling):
+    @dc.dataclass(frozen=True)
+    class Config:
+        always_pull: bool = False
+
+    def __init__(
+            self,
+            *,
+            config: Config = Config(),
+
+            file_cache: ta.Optional[FileCache] = None,
+            docker_cache: ta.Optional[DockerCache] = None,
+    ) -> None:
+        super().__init__()
+
+        self._config = config
+
+        self._file_cache = file_cache
+        self._docker_cache = docker_cache
+
+    async def _pull_docker_image(self, image: str) -> None:
+        if not self._config.always_pull and (await is_docker_image_present(image)):
+            return
+
+        dep_suffix = image
+        for c in '/:.-_':
+            dep_suffix = dep_suffix.replace(c, '-')
+
+        cache_key = f'docker-{dep_suffix}'
+        if (
+                self._docker_cache is not None and
+                (await self._docker_cache.load_cache_docker_image(cache_key)) is not None
+        ):
+            return
+
+        await pull_docker_image(image)
+
+        if self._docker_cache is not None:
+            await self._docker_cache.save_cache_docker_image(cache_key, image)
+
+    async def pull_docker_image(self, image: str) -> None:
+        with log_timing_context(f'Load docker image: {image}'):
+            await self._pull_docker_image(image)
+
+
+########################################
+# ../ci.py
+
+
+##
+
+
+class Ci(AsyncExitStacked):
+    KEY_HASH_LEN = 16
+
+    @dc.dataclass(frozen=True)
+    class Config:
+        project_dir: str
+
+        docker_file: str
+
+        compose_file: str
+        service: str
+
+        cmd: ShellCmd
+
+        #
+
+        requirements_txts: ta.Optional[ta.Sequence[str]] = None
+
+        always_pull: bool = False
+        always_build: bool = False
+
+        no_dependencies: bool = False
+
+        run_options: ta.Optional[ta.Sequence[str]] = None
+
+        #
+
+        def __post_init__(self) -> None:
+            check.not_isinstance(self.requirements_txts, str)
+
+    def __init__(
+            self,
+            config: Config,
+            *,
+            docker_build_caching: DockerBuildCaching,
+            docker_image_pulling: DockerImagePulling,
+    ) -> None:
+        super().__init__()
+
+        self._config = config
+
+        self._docker_build_caching = docker_build_caching
+        self._docker_image_pulling = docker_image_pulling
+
     #

     @cached_nullary
     def docker_file_hash(self) -> str:
-        return build_docker_file_hash(self.
+        return build_docker_file_hash(self._config.docker_file)[:self.KEY_HASH_LEN]
+
+    @cached_nullary
+    def ci_base_image_cache_key(self) -> str:
+        return f'ci-base-{self.docker_file_hash()}'

     async def _resolve_ci_base_image(self) -> str:
         async def build_and_tag(image_tag: str) -> str:
             return await build_docker_image(
-                self.
+                self._config.docker_file,
                 tag=image_tag,
-                cwd=self.
+                cwd=self._config.project_dir,
             )

-
-
-
+        return await self._docker_build_caching.cached_build_docker_image(
+            self.ci_base_image_cache_key(),
+            build_and_tag,
+        )

     @async_cached_nullary
     async def resolve_ci_base_image(self) -> str:
@@ -3918,14 +5289,18 @@ class Ci(AsyncExitStacked):
     @cached_nullary
     def requirements_txts(self) -> ta.Sequence[str]:
         return [
-            os.path.join(self.
-            for rf in check.not_none(self.
+            os.path.join(self._config.project_dir, rf)
+            for rf in check.not_none(self._config.requirements_txts)
         ]

     @cached_nullary
     def requirements_hash(self) -> str:
         return build_requirements_hash(self.requirements_txts())[:self.KEY_HASH_LEN]

+    @cached_nullary
+    def ci_image_cache_key(self) -> str:
+        return f'ci-{self.docker_file_hash()}-{self.requirements_hash()}'
+
     async def _resolve_ci_image(self) -> str:
         async def build_and_tag(image_tag: str) -> str:
             base_image = await self.resolve_ci_base_image()
@@ -3942,7 +5317,7 @@ class Ci(AsyncExitStacked):
                     '--no-cache',
                     '--index-strategy unsafe-best-match',
                     '--system',
-                    *[f'-r /project/{rf}' for rf in self.
+                    *[f'-r /project/{rf}' for rf in self._config.requirements_txts or []],
                 ]),
             ]
             setup_cmd = ' && '.join(setup_cmds)
@@ -3950,7 +5325,7 @@ class Ci(AsyncExitStacked):
             docker_file_lines = [
                 f'FROM {base_image}',
                 'RUN mkdir /project',
-                *[f'COPY {rf} /project/{rf}' for rf in self.
+                *[f'COPY {rf} /project/{rf}' for rf in self._config.requirements_txts or []],
                 f'RUN {setup_cmd}',
                 'RUN rm /project/*',
                 'WORKDIR /project',
@@ -3963,12 +5338,13 @@ class Ci(AsyncExitStacked):
             return await build_docker_image(
                 docker_file,
                 tag=image_tag,
-                cwd=self.
+                cwd=self._config.project_dir,
             )

-
-
-
+        return await self._docker_build_caching.cached_build_docker_image(
+            self.ci_image_cache_key(),
+            build_and_tag,
+        )

     @async_cached_nullary
     async def resolve_ci_image(self) -> str:
@@ -3980,34 +5356,34 @@ class Ci(AsyncExitStacked):
     #

     @async_cached_nullary
-    async def
+    async def pull_dependencies(self) -> None:
         deps = get_compose_service_dependencies(
-            self.
-            self.
+            self._config.compose_file,
+            self._config.service,
         )

         for dep_image in deps.values():
-            await self.
+            await self._docker_image_pulling.pull_docker_image(dep_image)

     #

     async def _run_compose_(self) -> None:
         async with DockerComposeRun(DockerComposeRun.Config(
-            compose_file=self.
-            service=self.
+            compose_file=self._config.compose_file,
+            service=self._config.service,

             image=await self.resolve_ci_image(),

-            cmd=self.
+            cmd=self._config.cmd,

             run_options=[
-                '-v', f'{os.path.abspath(self.
-                *(self.
+                '-v', f'{os.path.abspath(self._config.project_dir)}:/project',
+                *(self._config.run_options or []),
             ],

-            cwd=self.
+            cwd=self._config.project_dir,

-            no_dependencies=self.
+            no_dependencies=self._config.no_dependencies,
         )) as ci_compose_run:
             await ci_compose_run.run()

@@ -4020,11 +5396,90 @@ class Ci(AsyncExitStacked):
     async def run(self) -> None:
         await self.resolve_ci_image()

-        await self.
+        await self.pull_dependencies()

         await self._run_compose()


+########################################
+# ../docker/inject.py
+
+
+##
+
+
+def bind_docker(
+        *,
+        build_caching_config: DockerBuildCachingImpl.Config,
+        image_pulling_config: DockerImagePullingImpl.Config = DockerImagePullingImpl.Config(),
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = [
+        inj.bind(build_caching_config),
+        inj.bind(DockerBuildCachingImpl, singleton=True),
+        inj.bind(DockerBuildCaching, to_key=DockerBuildCachingImpl),
+
+        inj.bind(DockerCacheImpl, singleton=True),
+        inj.bind(DockerCache, to_key=DockerCacheImpl),
+
+        inj.bind(image_pulling_config),
+        inj.bind(DockerImagePullingImpl, singleton=True),
+        inj.bind(DockerImagePulling, to_key=DockerImagePullingImpl),
+    ]
+
+    return inj.as_bindings(*lst)
+
+
+########################################
+# ../inject.py
+
+
+##
+
+
+def bind_ci(
+        *,
+        config: Ci.Config,
+
+        github: bool = False,
+
+        cache_dir: ta.Optional[str] = None,
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = [  # noqa
+        inj.bind(config),
+        inj.bind(Ci, singleton=True),
+    ]
+
+    lst.append(bind_docker(
+        build_caching_config=DockerBuildCachingImpl.Config(
+            service=config.service,
+
+            always_build=config.always_build,
+        ),
+
+        image_pulling_config=DockerImagePullingImpl.Config(
+            always_pull=config.always_pull,
+        ),
+    ))
+
+    if cache_dir is not None:
+        if github:
+            lst.append(bind_github(
+                cache_dir=cache_dir,
+            ))
+
+        else:
+            lst.extend([
+                inj.bind(DirectoryFileCache.Config(
+                    dir=cache_dir,
+                )),
+                inj.bind(DirectoryFileCache, singleton=True),
+                inj.bind(FileCache, to_key=DirectoryFileCache),
+
+            ])
+
+    return inj.as_bindings(*lst)
+
+
 ########################################
 # cli.py

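An end-to-end sketch of the new injector wiring, mirroring what the CLI hunk below now does; all config values are illustrative:

# Illustrative only.
async def run_ci() -> None:
    injector = inj.create_injector(bind_ci(
        config=Ci.Config(
            project_dir='.',
            docker_file='Dockerfile',
            compose_file='docker-compose.yml',
            service='app',
            cmd=ShellCmd('make test'),
        ),
        github=False,
        cache_dir=None,
    ))

    async with injector[Ci] as ci:
        await ci.run()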
@@ -4159,14 +5614,9 @@ class CiCli(ArgparseCli):

         #

-        file_cache: ta.Optional[FileCache] = None
         if cache_dir is not None:
             cache_dir = os.path.abspath(cache_dir)
             log.debug('Using cache dir %s', cache_dir)
-            if github:
-                file_cache = GithubFileCache(cache_dir)
-            else:
-                file_cache = DirectoryFileCache(cache_dir)

         #

@@ -4182,28 +5632,35 @@ class CiCli(ArgparseCli):

         #

-
-
-                project_dir=project_dir,
+        config = Ci.Config(
+            project_dir=project_dir,

-
+            docker_file=docker_file,

-
-
+            compose_file=compose_file,
+            service=self.args.service,

-
+            requirements_txts=requirements_txts,

-
+            cmd=ShellCmd(cmd),

-
-
+            always_pull=self.args.always_pull,
+            always_build=self.args.always_build,

-
+            no_dependencies=self.args.no_dependencies,

-
-
-
-
+            run_options=run_options,
+        )
+
+        injector = inj.create_injector(bind_ci(
+            config=config,
+
+            github=github,
+
+            cache_dir=cache_dir,
+        ))
+
+        async with injector[Ci] as ci:
             await ci.run()

