ominfra 0.0.0.dev154__py3-none-any.whl → 0.0.0.dev156__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ominfra/manage/bootstrap.py +4 -0
- ominfra/manage/bootstrap_.py +5 -0
- ominfra/manage/commands/inject.py +8 -11
- ominfra/manage/commands/{execution.py → local.py} +1 -5
- ominfra/manage/commands/ping.py +23 -0
- ominfra/manage/commands/subprocess.py +3 -4
- ominfra/manage/commands/types.py +8 -0
- ominfra/manage/deploy/apps.py +72 -0
- ominfra/manage/deploy/config.py +8 -0
- ominfra/manage/deploy/git.py +136 -0
- ominfra/manage/deploy/inject.py +21 -0
- ominfra/manage/deploy/paths.py +81 -28
- ominfra/manage/deploy/types.py +13 -0
- ominfra/manage/deploy/venvs.py +66 -0
- ominfra/manage/inject.py +20 -4
- ominfra/manage/main.py +15 -27
- ominfra/manage/remote/_main.py +1 -1
- ominfra/manage/remote/config.py +0 -2
- ominfra/manage/remote/connection.py +7 -24
- ominfra/manage/remote/execution.py +1 -1
- ominfra/manage/remote/inject.py +3 -14
- ominfra/manage/remote/spawning.py +2 -2
- ominfra/manage/system/commands.py +22 -2
- ominfra/manage/system/config.py +3 -1
- ominfra/manage/system/inject.py +16 -6
- ominfra/manage/system/packages.py +38 -14
- ominfra/manage/system/platforms.py +72 -0
- ominfra/manage/targets/__init__.py +0 -0
- ominfra/manage/targets/connection.py +150 -0
- ominfra/manage/targets/inject.py +42 -0
- ominfra/manage/targets/targets.py +87 -0
- ominfra/scripts/journald2aws.py +205 -134
- ominfra/scripts/manage.py +2192 -734
- ominfra/scripts/supervisor.py +187 -25
- ominfra/supervisor/configs.py +163 -18
- {ominfra-0.0.0.dev154.dist-info → ominfra-0.0.0.dev156.dist-info}/METADATA +3 -3
- {ominfra-0.0.0.dev154.dist-info → ominfra-0.0.0.dev156.dist-info}/RECORD +42 -31
- ominfra/manage/system/types.py +0 -5
- /ominfra/manage/{commands → deploy}/interp.py +0 -0
- {ominfra-0.0.0.dev154.dist-info → ominfra-0.0.0.dev156.dist-info}/LICENSE +0 -0
- {ominfra-0.0.0.dev154.dist-info → ominfra-0.0.0.dev156.dist-info}/WHEEL +0 -0
- {ominfra-0.0.0.dev154.dist-info → ominfra-0.0.0.dev156.dist-info}/entry_points.txt +0 -0
- {ominfra-0.0.0.dev154.dist-info → ominfra-0.0.0.dev156.dist-info}/top_level.txt +0 -0
ominfra/scripts/manage.py
CHANGED
@@ -92,9 +92,16 @@ CallableVersionOperator = ta.Callable[['Version', str], bool]
|
|
92
92
|
CommandT = ta.TypeVar('CommandT', bound='Command')
|
93
93
|
CommandOutputT = ta.TypeVar('CommandOutputT', bound='Command.Output')
|
94
94
|
|
95
|
+
# deploy/paths.py
|
96
|
+
DeployPathKind = ta.Literal['dir', 'file'] # ta.TypeAlias
|
97
|
+
DeployPathSpec = ta.Literal['app', 'tag'] # ta.TypeAlias
|
98
|
+
|
95
99
|
# ../../omlish/argparse/cli.py
|
96
100
|
ArgparseCommandFn = ta.Callable[[], ta.Optional[int]] # ta.TypeAlias
|
97
101
|
|
102
|
+
# ../../omlish/lite/contextmanagers.py
|
103
|
+
ExitStackedT = ta.TypeVar('ExitStackedT', bound='ExitStacked')
|
104
|
+
|
98
105
|
# ../../omlish/lite/inject.py
|
99
106
|
U = ta.TypeVar('U')
|
100
107
|
InjectorKeyCls = ta.Union[type, ta.NewType]
|
@@ -528,19 +535,28 @@ class MainConfig:
|
|
528
535
|
|
529
536
|
|
530
537
|
########################################
|
531
|
-
# ../
|
538
|
+
# ../deploy/config.py
|
532
539
|
|
533
540
|
|
534
541
|
@dc.dataclass(frozen=True)
|
535
|
-
class
|
536
|
-
|
542
|
+
class DeployConfig:
|
543
|
+
deploy_home: ta.Optional[str] = None
|
537
544
|
|
538
545
|
|
539
546
|
########################################
|
540
|
-
# ../
|
547
|
+
# ../deploy/types.py
|
541
548
|
|
542
549
|
|
543
|
-
|
550
|
+
DeployHome = ta.NewType('DeployHome', str)
|
551
|
+
|
552
|
+
DeployApp = ta.NewType('DeployApp', str)
|
553
|
+
DeployTag = ta.NewType('DeployTag', str)
|
554
|
+
DeployRev = ta.NewType('DeployRev', str)
|
555
|
+
|
556
|
+
|
557
|
+
class DeployAppTag(ta.NamedTuple):
|
558
|
+
app: DeployApp
|
559
|
+
tag: DeployTag
|
544
560
|
|
545
561
|
|
546
562
|
########################################
|
@@ -1221,8 +1237,6 @@ def async_cached_nullary(fn): # ta.Callable[..., T]) -> ta.Callable[..., T]:
|
|
1221
1237
|
"""
|
1222
1238
|
TODO:
|
1223
1239
|
- def maybe(v: lang.Maybe[T])
|
1224
|
-
- patch / override lite.check ?
|
1225
|
-
- checker interface?
|
1226
1240
|
"""
|
1227
1241
|
|
1228
1242
|
|
@@ -1937,6 +1951,489 @@ def set_process_deathsig(sig: int) -> bool:
|
|
1937
1951
|
return False
|
1938
1952
|
|
1939
1953
|
|
1954
|
+
########################################
|
1955
|
+
# ../../../omlish/os/linux.py
|
1956
|
+
"""
|
1957
|
+
➜ ~ cat /etc/os-release
|
1958
|
+
NAME="Amazon Linux"
|
1959
|
+
VERSION="2"
|
1960
|
+
ID="amzn"
|
1961
|
+
ID_LIKE="centos rhel fedora"
|
1962
|
+
VERSION_ID="2"
|
1963
|
+
PRETTY_NAME="Amazon Linux 2"
|
1964
|
+
|
1965
|
+
➜ ~ cat /etc/os-release
|
1966
|
+
PRETTY_NAME="Ubuntu 22.04.5 LTS"
|
1967
|
+
NAME="Ubuntu"
|
1968
|
+
VERSION_ID="22.04"
|
1969
|
+
VERSION="22.04.5 LTS (Jammy Jellyfish)"
|
1970
|
+
VERSION_CODENAME=jammy
|
1971
|
+
ID=ubuntu
|
1972
|
+
ID_LIKE=debian
|
1973
|
+
UBUNTU_CODENAME=jammy
|
1974
|
+
|
1975
|
+
➜ omlish git:(master) docker run -i python:3.12 cat /etc/os-release
|
1976
|
+
PRETTY_NAME="Debian GNU/Linux 12 (bookworm)"
|
1977
|
+
NAME="Debian GNU/Linux"
|
1978
|
+
VERSION_ID="12"
|
1979
|
+
VERSION="12 (bookworm)"
|
1980
|
+
VERSION_CODENAME=bookworm
|
1981
|
+
ID=debian
|
1982
|
+
"""
|
1983
|
+
|
1984
|
+
|
1985
|
+
@dc.dataclass(frozen=True)
|
1986
|
+
class LinuxOsRelease:
|
1987
|
+
"""
|
1988
|
+
https://man7.org/linux/man-pages/man5/os-release.5.html
|
1989
|
+
"""
|
1990
|
+
|
1991
|
+
raw: ta.Mapping[str, str]
|
1992
|
+
|
1993
|
+
# General information identifying the operating system
|
1994
|
+
|
1995
|
+
@property
|
1996
|
+
def name(self) -> str:
|
1997
|
+
"""
|
1998
|
+
A string identifying the operating system, without a version component, and suitable for presentation to the
|
1999
|
+
user. If not set, a default of "NAME=Linux" may be used.
|
2000
|
+
|
2001
|
+
Examples: "NAME=Fedora", "NAME="Debian GNU/Linux"".
|
2002
|
+
"""
|
2003
|
+
|
2004
|
+
return self.raw['NAME']
|
2005
|
+
|
2006
|
+
@property
|
2007
|
+
def id(self) -> str:
|
2008
|
+
"""
|
2009
|
+
A lower-case string (no spaces or other characters outside of 0-9, a-z, ".", "_" and "-") identifying the
|
2010
|
+
operating system, excluding any version information and suitable for processing by scripts or usage in generated
|
2011
|
+
filenames. If not set, a default of "ID=linux" may be used. Note that even though this string may not include
|
2012
|
+
characters that require shell quoting, quoting may nevertheless be used.
|
2013
|
+
|
2014
|
+
Examples: "ID=fedora", "ID=debian".
|
2015
|
+
"""
|
2016
|
+
|
2017
|
+
return self.raw['ID']
|
2018
|
+
|
2019
|
+
@property
|
2020
|
+
def id_like(self) -> str:
|
2021
|
+
"""
|
2022
|
+
A space-separated list of operating system identifiers in the same syntax as the ID= setting. It should list
|
2023
|
+
identifiers of operating systems that are closely related to the local operating system in regards to packaging
|
2024
|
+
and programming interfaces, for example listing one or more OS identifiers the local OS is a derivative from. An
|
2025
|
+
OS should generally only list other OS identifiers it itself is a derivative of, and not any OSes that are
|
2026
|
+
derived from it, though symmetric relationships are possible. Build scripts and similar should check this
|
2027
|
+
variable if they need to identify the local operating system and the value of ID= is not recognized. Operating
|
2028
|
+
systems should be listed in order of how closely the local operating system relates to the listed ones, starting
|
2029
|
+
with the closest. This field is optional.
|
2030
|
+
|
2031
|
+
Examples: for an operating system with "ID=centos", an assignment of "ID_LIKE="rhel fedora"" would be
|
2032
|
+
appropriate. For an operating system with "ID=ubuntu", an assignment of "ID_LIKE=debian" is appropriate.
|
2033
|
+
"""
|
2034
|
+
|
2035
|
+
return self.raw['ID_LIKE']
|
2036
|
+
|
2037
|
+
@property
|
2038
|
+
def pretty_name(self) -> str:
|
2039
|
+
"""
|
2040
|
+
A pretty operating system name in a format suitable for presentation to the user. May or may not contain a
|
2041
|
+
release code name or OS version of some kind, as suitable. If not set, a default of "PRETTY_NAME="Linux"" may be
|
2042
|
+
used
|
2043
|
+
|
2044
|
+
Example: "PRETTY_NAME="Fedora 17 (Beefy Miracle)"".
|
2045
|
+
"""
|
2046
|
+
|
2047
|
+
return self.raw['PRETTY_NAME']
|
2048
|
+
|
2049
|
+
@property
|
2050
|
+
def cpe_name(self) -> str:
|
2051
|
+
"""
|
2052
|
+
A CPE name for the operating system, in URI binding syntax, following the Common Platform Enumeration
|
2053
|
+
Specification[4] as proposed by the NIST. This field is optional.
|
2054
|
+
|
2055
|
+
Example: "CPE_NAME="cpe:/o:fedoraproject:fedora:17""
|
2056
|
+
"""
|
2057
|
+
|
2058
|
+
return self.raw['CPE_NAME']
|
2059
|
+
|
2060
|
+
@property
|
2061
|
+
def variant(self) -> str:
|
2062
|
+
"""
|
2063
|
+
A string identifying a specific variant or edition of the operating system suitable for presentation to the
|
2064
|
+
user. This field may be used to inform the user that the configuration of this system is subject to a specific
|
2065
|
+
divergent set of rules or default configuration settings. This field is optional and may not be implemented on
|
2066
|
+
all systems.
|
2067
|
+
|
2068
|
+
Examples: "VARIANT="Server Edition"", "VARIANT="Smart Refrigerator Edition"".
|
2069
|
+
|
2070
|
+
Note: this field is for display purposes only. The VARIANT_ID field should be used for making programmatic
|
2071
|
+
decisions.
|
2072
|
+
|
2073
|
+
Added in version 220.
|
2074
|
+
"""
|
2075
|
+
|
2076
|
+
return self.raw['VARIANT']
|
2077
|
+
|
2078
|
+
@property
|
2079
|
+
def variant_id(self) -> str:
|
2080
|
+
"""
|
2081
|
+
A lower-case string (no spaces or other characters outside of 0-9, a-z, ".", "_" and "-"), identifying a
|
2082
|
+
specific variant or edition of the operating system. This may be interpreted by other packages in order to
|
2083
|
+
determine a divergent default configuration. This field is optional and may not be implemented on all systems.
|
2084
|
+
|
2085
|
+
Examples: "VARIANT_ID=server", "VARIANT_ID=embedded".
|
2086
|
+
|
2087
|
+
Added in version 220.
|
2088
|
+
"""
|
2089
|
+
|
2090
|
+
return self.raw['variant_id']
|
2091
|
+
|
2092
|
+
# Information about the version of the operating system
|
2093
|
+
|
2094
|
+
@property
|
2095
|
+
def version(self) -> str:
|
2096
|
+
"""
|
2097
|
+
A string identifying the operating system version, excluding any OS name information, possibly including a
|
2098
|
+
release code name, and suitable for presentation to the user. This field is optional.
|
2099
|
+
|
2100
|
+
Examples: "VERSION=17", "VERSION="17 (Beefy Miracle)"".
|
2101
|
+
"""
|
2102
|
+
|
2103
|
+
return self.raw['VERSION']
|
2104
|
+
|
2105
|
+
@property
|
2106
|
+
def version_id(self) -> str:
|
2107
|
+
"""
|
2108
|
+
A lower-case string (mostly numeric, no spaces or other characters outside of 0-9, a-z, ".", "_" and "-")
|
2109
|
+
identifying the operating system version, excluding any OS name information or release code name, and suitable
|
2110
|
+
for processing by scripts or usage in generated filenames. This field is optional.
|
2111
|
+
|
2112
|
+
Examples: "VERSION_ID=17", "VERSION_ID=11.04".
|
2113
|
+
"""
|
2114
|
+
|
2115
|
+
return self.raw['VERSION_ID']
|
2116
|
+
|
2117
|
+
@property
|
2118
|
+
def version_codename(self) -> str:
|
2119
|
+
"""
|
2120
|
+
A lower-case string (no spaces or other characters outside of 0-9, a-z, ".", "_" and "-") identifying the
|
2121
|
+
operating system release code name, excluding any OS name information or release version, and suitable for
|
2122
|
+
processing by scripts or usage in generated filenames. This field is optional and may not be implemented on all
|
2123
|
+
systems.
|
2124
|
+
|
2125
|
+
Examples: "VERSION_CODENAME=buster", "VERSION_CODENAME=xenial".
|
2126
|
+
|
2127
|
+
Added in version 231.
|
2128
|
+
"""
|
2129
|
+
|
2130
|
+
return self.raw['VERSION_CODENAME']
|
2131
|
+
|
2132
|
+
@property
|
2133
|
+
def build_id(self) -> str:
|
2134
|
+
"""
|
2135
|
+
A string uniquely identifying the system image originally used as the installation base. In most cases,
|
2136
|
+
VERSION_ID or IMAGE_ID+IMAGE_VERSION are updated when the entire system image is replaced during an update.
|
2137
|
+
BUILD_ID may be used in distributions where the original installation image version is important: VERSION_ID
|
2138
|
+
would change during incremental system updates, but BUILD_ID would not. This field is optional.
|
2139
|
+
|
2140
|
+
Examples: "BUILD_ID="2013-03-20.3"", "BUILD_ID=201303203".
|
2141
|
+
|
2142
|
+
Added in version 200.
|
2143
|
+
"""
|
2144
|
+
|
2145
|
+
return self.raw['BUILD_ID']
|
2146
|
+
|
2147
|
+
@property
|
2148
|
+
def image_id(self) -> str:
|
2149
|
+
"""
|
2150
|
+
A lower-case string (no spaces or other characters outside of 0-9, a-z, ".", "_" and "-"), identifying a
|
2151
|
+
specific image of the operating system. This is supposed to be used for environments where OS images are
|
2152
|
+
prepared, built, shipped and updated as comprehensive, consistent OS images. This field is optional and may not
|
2153
|
+
be implemented on all systems, in particularly not on those that are not managed via images but put together and
|
2154
|
+
updated from individual packages and on the local system.
|
2155
|
+
|
2156
|
+
Examples: "IMAGE_ID=vendorx-cashier-system", "IMAGE_ID=netbook-image".
|
2157
|
+
|
2158
|
+
Added in version 249.
|
2159
|
+
"""
|
2160
|
+
|
2161
|
+
return self.raw['IMAGE_ID']
|
2162
|
+
|
2163
|
+
@property
|
2164
|
+
def image_version(self) -> str:
|
2165
|
+
"""
|
2166
|
+
A lower-case string (mostly numeric, no spaces or other characters outside of 0-9, a-z, ".", "_" and "-")
|
2167
|
+
identifying the OS image version. This is supposed to be used together with IMAGE_ID described above, to discern
|
2168
|
+
different versions of the same image.
|
2169
|
+
|
2170
|
+
Examples: "IMAGE_VERSION=33", "IMAGE_VERSION=47.1rc1".
|
2171
|
+
|
2172
|
+
Added in version 249.
|
2173
|
+
"""
|
2174
|
+
|
2175
|
+
return self.raw['IMAGE_VERSION']
|
2176
|
+
|
2177
|
+
# To summarize: if the image updates are built and shipped as comprehensive units, IMAGE_ID+IMAGE_VERSION is the
|
2178
|
+
# best fit. Otherwise, if updates eventually completely replace previously installed contents, as in a typical
|
2179
|
+
# binary distribution, VERSION_ID should be used to identify major releases of the operating system. BUILD_ID may
|
2180
|
+
# be used instead or in addition to VERSION_ID when the original system image version is important.
|
2181
|
+
|
2182
|
+
#
|
2183
|
+
|
2184
|
+
# Presentation information and links
|
2185
|
+
|
2186
|
+
# Links to resources on the Internet related to the operating system. HOME_URL= should refer to the homepage of the
|
2187
|
+
# operating system, or alternatively some homepage of the specific version of the operating system.
|
2188
|
+
# DOCUMENTATION_URL= should refer to the main documentation page for this operating system. SUPPORT_URL= should
|
2189
|
+
# refer to the main support page for the operating system, if there is any. This is primarily intended for operating
|
2190
|
+
# systems which vendors provide support for. BUG_REPORT_URL= should refer to the main bug reporting page for the
|
2191
|
+
# operating system, if there is any. This is primarily intended for operating systems that rely on community QA.
|
2192
|
+
# PRIVACY_POLICY_URL= should refer to the main privacy policy page for the operating system, if there is any. These
|
2193
|
+
# settings are optional, and providing only some of these settings is common. These URLs are intended to be exposed
|
2194
|
+
# in "About this system" UIs behind links with captions such as "About this Operating System", "Obtain Support",
|
2195
|
+
# "Report a Bug", or "Privacy Policy". The values should be in RFC3986 format[5], and should be "http:" or "https:"
|
2196
|
+
# URLs, and possibly "mailto:" or "tel:". Only one URL shall be listed in each setting. If multiple resources need
|
2197
|
+
# to be referenced, it is recommended to provide an online landing page linking all available resources.
|
2198
|
+
|
2199
|
+
# Examples: "HOME_URL="https://fedoraproject.org/"", "BUG_REPORT_URL="https://bugzilla.redhat.com/"".
|
2200
|
+
|
2201
|
+
@property
|
2202
|
+
def home_url(self) -> str:
|
2203
|
+
return self.raw['HOME_URL']
|
2204
|
+
|
2205
|
+
@property
|
2206
|
+
def documentation_url(self) -> str:
|
2207
|
+
return self.raw['DOCUMENTATION_URL']
|
2208
|
+
|
2209
|
+
@property
|
2210
|
+
def support_url(self) -> str:
|
2211
|
+
return self.raw['SUPPORT_URL']
|
2212
|
+
|
2213
|
+
@property
|
2214
|
+
def bug_report_url(self) -> str:
|
2215
|
+
return self.raw['BUG_REPORT_URL']
|
2216
|
+
|
2217
|
+
@property
|
2218
|
+
def privacy_policy_url(self) -> str:
|
2219
|
+
return self.raw['PRIVACY_POLICY_URL']
|
2220
|
+
|
2221
|
+
@property
|
2222
|
+
def support_end(self) -> str:
|
2223
|
+
"""
|
2224
|
+
The date at which support for this version of the OS ends. (What exactly "lack of support" means varies between
|
2225
|
+
vendors, but generally users should assume that updates, including security fixes, will not be provided.) The
|
2226
|
+
value is a date in the ISO 8601 format "YYYY-MM-DD", and specifies the first day on which support is not
|
2227
|
+
provided.
|
2228
|
+
|
2229
|
+
For example, "SUPPORT_END=2001-01-01" means that the system was supported until the end of the last day of the
|
2230
|
+
previous millennium.
|
2231
|
+
|
2232
|
+
Added in version 252.
|
2233
|
+
"""
|
2234
|
+
|
2235
|
+
return self.raw['SUPPORT_END']
|
2236
|
+
|
2237
|
+
@property
|
2238
|
+
def logo(self) -> str:
|
2239
|
+
"""
|
2240
|
+
A string, specifying the name of an icon as defined by freedesktop.org Icon Theme Specification[6]. This can be
|
2241
|
+
used by graphical applications to display an operating system's or distributor's logo. This field is optional
|
2242
|
+
and may not necessarily be implemented on all systems.
|
2243
|
+
|
2244
|
+
Examples: "LOGO=fedora-logo", "LOGO=distributor-logo-opensuse"
|
2245
|
+
|
2246
|
+
Added in version 240.
|
2247
|
+
"""
|
2248
|
+
|
2249
|
+
return self.raw['LOGO']
|
2250
|
+
|
2251
|
+
@property
|
2252
|
+
def ansi_color(self) -> str:
|
2253
|
+
"""
|
2254
|
+
A suggested presentation color when showing the OS name on the console. This should be specified as string
|
2255
|
+
suitable for inclusion in the ESC [ m ANSI/ECMA-48 escape code for setting graphical rendition. This field is
|
2256
|
+
optional.
|
2257
|
+
|
2258
|
+
Examples: "ANSI_COLOR="0;31"" for red, "ANSI_COLOR="1;34"" for light blue, or "ANSI_COLOR="0;38;2;60;110;180""
|
2259
|
+
for Fedora blue.
|
2260
|
+
"""
|
2261
|
+
|
2262
|
+
return self.raw['ANSI_COLOR']
|
2263
|
+
|
2264
|
+
@property
|
2265
|
+
def vendor_name(self) -> str:
|
2266
|
+
"""
|
2267
|
+
The name of the OS vendor. This is the name of the organization or company which produces the OS. This field is
|
2268
|
+
optional.
|
2269
|
+
|
2270
|
+
This name is intended to be exposed in "About this system" UIs or software update UIs when needed to distinguish
|
2271
|
+
the OS vendor from the OS itself. It is intended to be human readable.
|
2272
|
+
|
2273
|
+
Examples: "VENDOR_NAME="Fedora Project"" for Fedora Linux, "VENDOR_NAME="Canonical"" for Ubuntu.
|
2274
|
+
|
2275
|
+
Added in version 254.
|
2276
|
+
"""
|
2277
|
+
|
2278
|
+
return self.raw['VENDOR_NAME']
|
2279
|
+
|
2280
|
+
@property
|
2281
|
+
def vendor_url(self) -> str:
|
2282
|
+
"""
|
2283
|
+
The homepage of the OS vendor. This field is optional. The VENDOR_NAME= field should be set if this one is,
|
2284
|
+
although clients must be robust against either field not being set.
|
2285
|
+
|
2286
|
+
The value should be in RFC3986 format[5], and should be "http:" or "https:" URLs. Only one URL shall be listed
|
2287
|
+
in the setting.
|
2288
|
+
|
2289
|
+
Examples: "VENDOR_URL="https://fedoraproject.org/"", "VENDOR_URL="https://canonical.com/"".
|
2290
|
+
|
2291
|
+
Added in version 254.
|
2292
|
+
"""
|
2293
|
+
|
2294
|
+
return self.raw['VENDOR_URL']
|
2295
|
+
|
2296
|
+
# Distribution-level defaults and metadata
|
2297
|
+
|
2298
|
+
@property
|
2299
|
+
def default_hostname(self) -> str:
|
2300
|
+
"""
|
2301
|
+
A string specifying the hostname if hostname(5) is not present and no other configuration source specifies the
|
2302
|
+
hostname. Must be either a single DNS label (a string composed of 7-bit ASCII lower-case characters and no
|
2303
|
+
spaces or dots, limited to the format allowed for DNS domain name labels), or a sequence of such labels
|
2304
|
+
separated by single dots that forms a valid DNS FQDN. The hostname must be at most 64 characters, which is a
|
2305
|
+
Linux limitation (DNS allows longer names).
|
2306
|
+
|
2307
|
+
See org.freedesktop.hostname1(5) for a description of how systemd-hostnamed.service(8) determines the fallback
|
2308
|
+
hostname.
|
2309
|
+
|
2310
|
+
Added in version 248.
|
2311
|
+
"""
|
2312
|
+
|
2313
|
+
return self.raw['DEFAULT_HOSTNAME']
|
2314
|
+
|
2315
|
+
@property
|
2316
|
+
def architecture(self) -> str:
|
2317
|
+
"""
|
2318
|
+
A string that specifies which CPU architecture the userspace binaries require. The architecture identifiers are
|
2319
|
+
the same as for ConditionArchitecture= described in systemd.unit(5). The field is optional and should only be
|
2320
|
+
used when just single architecture is supported. It may provide redundant information when used in a GPT
|
2321
|
+
partition with a GUID type that already encodes the architecture. If this is not the case, the architecture
|
2322
|
+
should be specified in e.g., an extension image, to prevent an incompatible host from loading it.
|
2323
|
+
|
2324
|
+
Added in version 252.
|
2325
|
+
"""
|
2326
|
+
|
2327
|
+
return self.raw['ARCHITECTURE']
|
2328
|
+
|
2329
|
+
@property
|
2330
|
+
def sysext_level(self) -> str:
|
2331
|
+
"""
|
2332
|
+
A lower-case string (mostly numeric, no spaces or other characters outside of 0-9, a-z, ".", "_" and "-")
|
2333
|
+
identifying the operating system extensions support level, to indicate which extension images are supported. See
|
2334
|
+
/usr/lib/extension-release.d/extension-release.IMAGE, initrd[2] and systemd-sysext(8)) for more information.
|
2335
|
+
|
2336
|
+
Examples: "SYSEXT_LEVEL=2", "SYSEXT_LEVEL=15.14".
|
2337
|
+
|
2338
|
+
Added in version 248.
|
2339
|
+
"""
|
2340
|
+
|
2341
|
+
return self.raw['SYSEXT_LEVEL']
|
2342
|
+
|
2343
|
+
@property
|
2344
|
+
def confext_level(self) -> str:
|
2345
|
+
"""
|
2346
|
+
Semantically the same as SYSEXT_LEVEL= but for confext images. See
|
2347
|
+
/etc/extension-release.d/extension-release.IMAGE for more information.
|
2348
|
+
|
2349
|
+
Examples: "CONFEXT_LEVEL=2", "CONFEXT_LEVEL=15.14".
|
2350
|
+
|
2351
|
+
Added in version 254.
|
2352
|
+
"""
|
2353
|
+
|
2354
|
+
return self.raw['CONFEXT_LEVEL']
|
2355
|
+
|
2356
|
+
@property
|
2357
|
+
def sysext_scope(self) -> str:
|
2358
|
+
"""
|
2359
|
+
Takes a space-separated list of one or more of the strings "system", "initrd" and "portable". This field is only
|
2360
|
+
supported in extension-release.d/ files and indicates what environments the system extension is applicable to:
|
2361
|
+
i.e. to regular systems, to initrds, or to portable service images. If unspecified, "SYSEXT_SCOPE=system
|
2362
|
+
portable" is implied, i.e. any system extension without this field is applicable to regular systems and to
|
2363
|
+
portable service environments, but not to initrd environments.
|
2364
|
+
|
2365
|
+
Added in version 250.
|
2366
|
+
"""
|
2367
|
+
|
2368
|
+
return self.raw['SYSEXT_SCOPE']
|
2369
|
+
|
2370
|
+
@property
|
2371
|
+
def confext_scope(self) -> str:
|
2372
|
+
"""
|
2373
|
+
Semantically the same as SYSEXT_SCOPE= but for confext images.
|
2374
|
+
|
2375
|
+
Added in version 254.
|
2376
|
+
"""
|
2377
|
+
|
2378
|
+
return self.raw['CONFEXT_SCOPE']
|
2379
|
+
|
2380
|
+
@property
|
2381
|
+
def portable_prefixes(self) -> str:
|
2382
|
+
"""
|
2383
|
+
Takes a space-separated list of one or more valid prefix match strings for the Portable Services[3] logic. This
|
2384
|
+
field serves two purposes: it is informational, identifying portable service images as such (and thus allowing
|
2385
|
+
them to be distinguished from other OS images, such as bootable system images). It is also used when a portable
|
2386
|
+
service image is attached: the specified or implied portable service prefix is checked against the list
|
2387
|
+
specified here, to enforce restrictions how images may be attached to a system.
|
2388
|
+
|
2389
|
+
Added in version 250.
|
2390
|
+
"""
|
2391
|
+
|
2392
|
+
return self.raw['PORTABLE_PREFIXES']
|
2393
|
+
|
2394
|
+
#
|
2395
|
+
|
2396
|
+
DEFAULT_PATHS: ta.ClassVar[ta.Sequence[str]] = [
|
2397
|
+
'/etc/os-release',
|
2398
|
+
'/usr/lib/os-release',
|
2399
|
+
]
|
2400
|
+
|
2401
|
+
@classmethod
|
2402
|
+
def read(cls, *paths: str) -> ta.Optional['LinuxOsRelease']:
|
2403
|
+
for fp in (paths or cls.DEFAULT_PATHS):
|
2404
|
+
if not os.path.isfile(fp):
|
2405
|
+
continue
|
2406
|
+
with open(fp) as f:
|
2407
|
+
src = f.read()
|
2408
|
+
break
|
2409
|
+
else:
|
2410
|
+
return None
|
2411
|
+
|
2412
|
+
raw = cls._parse_os_release(src)
|
2413
|
+
|
2414
|
+
return cls(raw)
|
2415
|
+
|
2416
|
+
@classmethod
|
2417
|
+
def _parse_os_release(cls, src: str) -> ta.Mapping[str, str]:
|
2418
|
+
dct: ta.Dict[str, str] = {}
|
2419
|
+
|
2420
|
+
for l in src.splitlines():
|
2421
|
+
if not (l := l.strip()):
|
2422
|
+
continue
|
2423
|
+
if l.startswith('#') or '=' not in l:
|
2424
|
+
continue
|
2425
|
+
|
2426
|
+
k, _, v = l.partition('=')
|
2427
|
+
if k.startswith('"'):
|
2428
|
+
k = k[1:-1]
|
2429
|
+
if v.startswith('"'):
|
2430
|
+
v = v[1:-1]
|
2431
|
+
|
2432
|
+
dct[k] = v
|
2433
|
+
|
2434
|
+
return dct
|
2435
|
+
|
2436
|
+
|
1940
2437
|
########################################
|
1941
2438
|
# ../../../omdev/packaging/specifiers.py
|
1942
2439
|
# Copyright (c) Donald Stufft and individual contributors.
|
@@ -2610,43 +3107,264 @@ def build_command_name_map(crs: CommandRegistrations) -> CommandNameMap:
|
|
2610
3107
|
|
2611
3108
|
|
2612
3109
|
########################################
|
2613
|
-
# ../
|
3110
|
+
# ../deploy/paths.py
|
3111
|
+
"""
|
3112
|
+
~deploy
|
3113
|
+
deploy.pid (flock)
|
3114
|
+
/app
|
3115
|
+
/<appspec> - shallow clone
|
3116
|
+
/conf
|
3117
|
+
/env
|
3118
|
+
<appspec>.env
|
3119
|
+
/nginx
|
3120
|
+
<appspec>.conf
|
3121
|
+
/supervisor
|
3122
|
+
<appspec>.conf
|
3123
|
+
/venv
|
3124
|
+
/<appspec>
|
3125
|
+
|
3126
|
+
?
|
3127
|
+
/logs
|
3128
|
+
/wrmsr--omlish--<spec>
|
3129
|
+
|
3130
|
+
spec = <name>--<rev>--<when>
|
3131
|
+
|
3132
|
+
==
|
3133
|
+
|
3134
|
+
for dn in [
|
3135
|
+
'app',
|
3136
|
+
'conf',
|
3137
|
+
'conf/env',
|
3138
|
+
'conf/nginx',
|
3139
|
+
'conf/supervisor',
|
3140
|
+
'venv',
|
3141
|
+
]:
|
3142
|
+
|
3143
|
+
==
|
2614
3144
|
|
3145
|
+
"""
|
2615
3146
|
|
2616
|
-
@dc.dataclass(frozen=True)
|
2617
|
-
class RemoteConfig:
|
2618
|
-
payload_file: ta.Optional[str] = None
|
2619
3147
|
|
2620
|
-
|
3148
|
+
##
|
2621
3149
|
|
2622
|
-
deathsig: ta.Optional[str] = 'KILL'
|
2623
3150
|
|
2624
|
-
|
3151
|
+
DEPLOY_PATH_SPEC_PLACEHOLDER = '@'
|
3152
|
+
DEPLOY_PATH_SPEC_SEPARATORS = '-.'
|
2625
3153
|
|
2626
|
-
|
3154
|
+
DEPLOY_PATH_SPECS: ta.FrozenSet[str] = frozenset([
|
3155
|
+
'app',
|
3156
|
+
'tag', # <rev>-<dt>
|
3157
|
+
])
|
2627
3158
|
|
2628
|
-
timebomb_delay_s: ta.Optional[float] = 60 * 60.
|
2629
3159
|
|
2630
|
-
|
3160
|
+
class DeployPathError(Exception):
|
3161
|
+
pass
|
2631
3162
|
|
2632
|
-
use_in_process_remote_executor: bool = False
|
2633
3163
|
|
3164
|
+
@dc.dataclass(frozen=True)
|
3165
|
+
class DeployPathPart(abc.ABC): # noqa
|
3166
|
+
@property
|
3167
|
+
@abc.abstractmethod
|
3168
|
+
def kind(self) -> DeployPathKind:
|
3169
|
+
raise NotImplementedError
|
2634
3170
|
|
2635
|
-
|
2636
|
-
|
3171
|
+
@abc.abstractmethod
|
3172
|
+
def render(self, specs: ta.Optional[ta.Mapping[DeployPathSpec, str]] = None) -> str:
|
3173
|
+
raise NotImplementedError
|
2637
3174
|
|
2638
3175
|
|
2639
|
-
|
3176
|
+
#
|
2640
3177
|
|
2641
3178
|
|
2642
|
-
|
2643
|
-
|
2644
|
-
|
3179
|
+
class DirDeployPathPart(DeployPathPart, abc.ABC):
|
3180
|
+
@property
|
3181
|
+
def kind(self) -> DeployPathKind:
|
3182
|
+
return 'dir'
|
2645
3183
|
|
3184
|
+
@classmethod
|
3185
|
+
def parse(cls, s: str) -> 'DirDeployPathPart':
|
3186
|
+
if DEPLOY_PATH_SPEC_PLACEHOLDER in s:
|
3187
|
+
check.equal(s[0], DEPLOY_PATH_SPEC_PLACEHOLDER)
|
3188
|
+
return SpecDirDeployPathPart(s[1:])
|
3189
|
+
else:
|
3190
|
+
return ConstDirDeployPathPart(s)
|
2646
3191
|
|
2647
|
-
|
2648
|
-
|
2649
|
-
|
3192
|
+
|
3193
|
+
class FileDeployPathPart(DeployPathPart, abc.ABC):
|
3194
|
+
@property
|
3195
|
+
def kind(self) -> DeployPathKind:
|
3196
|
+
return 'file'
|
3197
|
+
|
3198
|
+
@classmethod
|
3199
|
+
def parse(cls, s: str) -> 'FileDeployPathPart':
|
3200
|
+
if DEPLOY_PATH_SPEC_PLACEHOLDER in s:
|
3201
|
+
check.equal(s[0], DEPLOY_PATH_SPEC_PLACEHOLDER)
|
3202
|
+
if not any(c in s for c in DEPLOY_PATH_SPEC_SEPARATORS):
|
3203
|
+
return SpecFileDeployPathPart(s[1:], '')
|
3204
|
+
else:
|
3205
|
+
p = min(f for c in DEPLOY_PATH_SPEC_SEPARATORS if (f := s.find(c)) > 0)
|
3206
|
+
return SpecFileDeployPathPart(s[1:p], s[p:])
|
3207
|
+
else:
|
3208
|
+
return ConstFileDeployPathPart(s)
|
3209
|
+
|
3210
|
+
|
3211
|
+
#
|
3212
|
+
|
3213
|
+
|
3214
|
+
@dc.dataclass(frozen=True)
|
3215
|
+
class ConstDeployPathPart(DeployPathPart, abc.ABC):
|
3216
|
+
name: str
|
3217
|
+
|
3218
|
+
def __post_init__(self) -> None:
|
3219
|
+
check.non_empty_str(self.name)
|
3220
|
+
check.not_in('/', self.name)
|
3221
|
+
check.not_in(DEPLOY_PATH_SPEC_PLACEHOLDER, self.name)
|
3222
|
+
|
3223
|
+
def render(self, specs: ta.Optional[ta.Mapping[DeployPathSpec, str]] = None) -> str:
|
3224
|
+
return self.name
|
3225
|
+
|
3226
|
+
|
3227
|
+
class ConstDirDeployPathPart(ConstDeployPathPart, DirDeployPathPart):
|
3228
|
+
pass
|
3229
|
+
|
3230
|
+
|
3231
|
+
class ConstFileDeployPathPart(ConstDeployPathPart, FileDeployPathPart):
|
3232
|
+
pass
|
3233
|
+
|
3234
|
+
|
3235
|
+
#
|
3236
|
+
|
3237
|
+
|
3238
|
+
@dc.dataclass(frozen=True)
|
3239
|
+
class SpecDeployPathPart(DeployPathPart, abc.ABC):
|
3240
|
+
spec: str # DeployPathSpec
|
3241
|
+
|
3242
|
+
def __post_init__(self) -> None:
|
3243
|
+
check.non_empty_str(self.spec)
|
3244
|
+
for c in [*DEPLOY_PATH_SPEC_SEPARATORS, DEPLOY_PATH_SPEC_PLACEHOLDER, '/']:
|
3245
|
+
check.not_in(c, self.spec)
|
3246
|
+
check.in_(self.spec, DEPLOY_PATH_SPECS)
|
3247
|
+
|
3248
|
+
def _render_spec(self, specs: ta.Optional[ta.Mapping[DeployPathSpec, str]] = None) -> str:
|
3249
|
+
if specs is not None:
|
3250
|
+
return specs[self.spec] # type: ignore
|
3251
|
+
else:
|
3252
|
+
return DEPLOY_PATH_SPEC_PLACEHOLDER + self.spec
|
3253
|
+
|
3254
|
+
|
3255
|
+
@dc.dataclass(frozen=True)
|
3256
|
+
class SpecDirDeployPathPart(SpecDeployPathPart, DirDeployPathPart):
|
3257
|
+
def render(self, specs: ta.Optional[ta.Mapping[DeployPathSpec, str]] = None) -> str:
|
3258
|
+
return self._render_spec(specs)
|
3259
|
+
|
3260
|
+
|
3261
|
+
@dc.dataclass(frozen=True)
|
3262
|
+
class SpecFileDeployPathPart(SpecDeployPathPart, FileDeployPathPart):
|
3263
|
+
suffix: str
|
3264
|
+
|
3265
|
+
def __post_init__(self) -> None:
|
3266
|
+
super().__post_init__()
|
3267
|
+
if self.suffix:
|
3268
|
+
for c in [DEPLOY_PATH_SPEC_PLACEHOLDER, '/']:
|
3269
|
+
check.not_in(c, self.suffix)
|
3270
|
+
|
3271
|
+
def render(self, specs: ta.Optional[ta.Mapping[DeployPathSpec, str]] = None) -> str:
|
3272
|
+
return self._render_spec(specs) + self.suffix
|
3273
|
+
|
3274
|
+
|
3275
|
+
##
|
3276
|
+
|
3277
|
+
|
3278
|
+
@dc.dataclass(frozen=True)
|
3279
|
+
class DeployPath:
|
3280
|
+
parts: ta.Sequence[DeployPathPart]
|
3281
|
+
|
3282
|
+
def __post_init__(self) -> None:
|
3283
|
+
check.not_empty(self.parts)
|
3284
|
+
for p in self.parts[:-1]:
|
3285
|
+
check.equal(p.kind, 'dir')
|
3286
|
+
|
3287
|
+
pd = {}
|
3288
|
+
for i, p in enumerate(self.parts):
|
3289
|
+
if isinstance(p, SpecDeployPathPart):
|
3290
|
+
if p.spec in pd:
|
3291
|
+
raise DeployPathError('Duplicate specs in path', self)
|
3292
|
+
pd[p.spec] = i
|
3293
|
+
|
3294
|
+
if 'tag' in pd:
|
3295
|
+
if 'app' not in pd or pd['app'] >= pd['tag']:
|
3296
|
+
raise DeployPathError('Tag spec in path without preceding app', self)
|
3297
|
+
|
3298
|
+
@property
|
3299
|
+
def kind(self) -> ta.Literal['file', 'dir']:
|
3300
|
+
return self.parts[-1].kind
|
3301
|
+
|
3302
|
+
def render(self, specs: ta.Optional[ta.Mapping[DeployPathSpec, str]] = None) -> str:
|
3303
|
+
return os.path.join( # noqa
|
3304
|
+
*[p.render(specs) for p in self.parts],
|
3305
|
+
*([''] if self.kind == 'dir' else []),
|
3306
|
+
)
|
3307
|
+
|
3308
|
+
@classmethod
|
3309
|
+
def parse(cls, s: str) -> 'DeployPath':
|
3310
|
+
tail_parse: ta.Callable[[str], DeployPathPart]
|
3311
|
+
if s.endswith('/'):
|
3312
|
+
tail_parse = DirDeployPathPart.parse
|
3313
|
+
s = s[:-1]
|
3314
|
+
else:
|
3315
|
+
tail_parse = FileDeployPathPart.parse
|
3316
|
+
ps = check.non_empty_str(s).split('/')
|
3317
|
+
return cls([
|
3318
|
+
*([DirDeployPathPart.parse(p) for p in ps[:-1]] if len(ps) > 1 else []),
|
3319
|
+
tail_parse(ps[-1]),
|
3320
|
+
])
|
3321
|
+
|
3322
|
+
|
3323
|
+
##
|
3324
|
+
|
3325
|
+
|
3326
|
+
class DeployPathOwner(abc.ABC):
|
3327
|
+
@abc.abstractmethod
|
3328
|
+
def get_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
|
3329
|
+
raise NotImplementedError
|
3330
|
+
|
3331
|
+
|
3332
|
+
########################################
|
3333
|
+
# ../remote/config.py
|
3334
|
+
|
3335
|
+
|
3336
|
+
@dc.dataclass(frozen=True)
|
3337
|
+
class RemoteConfig:
|
3338
|
+
payload_file: ta.Optional[str] = None
|
3339
|
+
|
3340
|
+
set_pgid: bool = True
|
3341
|
+
|
3342
|
+
deathsig: ta.Optional[str] = 'KILL'
|
3343
|
+
|
3344
|
+
pycharm_remote_debug: ta.Optional[PycharmRemoteDebug] = None
|
3345
|
+
|
3346
|
+
forward_logging: bool = True
|
3347
|
+
|
3348
|
+
timebomb_delay_s: ta.Optional[float] = 60 * 60.
|
3349
|
+
|
3350
|
+
heartbeat_interval_s: float = 3.
|
3351
|
+
|
3352
|
+
|
3353
|
+
########################################
|
3354
|
+
# ../remote/payload.py
|
3355
|
+
|
3356
|
+
|
3357
|
+
RemoteExecutionPayloadFile = ta.NewType('RemoteExecutionPayloadFile', str)
|
3358
|
+
|
3359
|
+
|
3360
|
+
@cached_nullary
|
3361
|
+
def _get_self_src() -> str:
|
3362
|
+
return inspect.getsource(sys.modules[__name__])
|
3363
|
+
|
3364
|
+
|
3365
|
+
def _is_src_amalg(src: str) -> bool:
|
3366
|
+
for l in src.splitlines(): # noqa
|
3367
|
+
if l.startswith('# @omlish-amalg-output '):
|
2650
3368
|
return True
|
2651
3369
|
return False
|
2652
3370
|
|
@@ -2671,6 +3389,90 @@ def get_remote_payload_src(
|
|
2671
3389
|
return importlib.resources.files(__package__.split('.')[0] + '.scripts').joinpath('manage.py').read_text()
|
2672
3390
|
|
2673
3391
|
|
3392
|
+
########################################
|
3393
|
+
# ../targets/targets.py
|
3394
|
+
"""
|
3395
|
+
It's desugaring. Subprocess and locals are only leafs. Retain an origin?
|
3396
|
+
** TWO LAYERS ** - ManageTarget is user-facing, ConnectorTarget is bound, internal
|
3397
|
+
"""
|
3398
|
+
|
3399
|
+
|
3400
|
+
##
|
3401
|
+
|
3402
|
+
|
3403
|
+
class ManageTarget(abc.ABC): # noqa
|
3404
|
+
def __init_subclass__(cls, **kwargs: ta.Any) -> None:
|
3405
|
+
super().__init_subclass__(**kwargs)
|
3406
|
+
|
3407
|
+
check.state(cls.__name__.endswith('ManageTarget'))
|
3408
|
+
|
3409
|
+
|
3410
|
+
#
|
3411
|
+
|
3412
|
+
|
3413
|
+
@dc.dataclass(frozen=True)
|
3414
|
+
class PythonRemoteManageTarget:
|
3415
|
+
DEFAULT_PYTHON: ta.ClassVar[str] = 'python3'
|
3416
|
+
python: str = DEFAULT_PYTHON
|
3417
|
+
|
3418
|
+
|
3419
|
+
#
|
3420
|
+
|
3421
|
+
|
3422
|
+
class RemoteManageTarget(ManageTarget, abc.ABC):
|
3423
|
+
pass
|
3424
|
+
|
3425
|
+
|
3426
|
+
class PhysicallyRemoteManageTarget(RemoteManageTarget, abc.ABC):
|
3427
|
+
pass
|
3428
|
+
|
3429
|
+
|
3430
|
+
class LocalManageTarget(ManageTarget, abc.ABC):
|
3431
|
+
pass
|
3432
|
+
|
3433
|
+
|
3434
|
+
##
|
3435
|
+
|
3436
|
+
|
3437
|
+
@dc.dataclass(frozen=True)
|
3438
|
+
class SshManageTarget(PhysicallyRemoteManageTarget, PythonRemoteManageTarget):
|
3439
|
+
host: ta.Optional[str] = None
|
3440
|
+
username: ta.Optional[str] = None
|
3441
|
+
key_file: ta.Optional[str] = None
|
3442
|
+
|
3443
|
+
def __post_init__(self) -> None:
|
3444
|
+
check.non_empty_str(self.host)
|
3445
|
+
|
3446
|
+
|
3447
|
+
##
|
3448
|
+
|
3449
|
+
|
3450
|
+
@dc.dataclass(frozen=True)
|
3451
|
+
class DockerManageTarget(RemoteManageTarget, PythonRemoteManageTarget): # noqa
|
3452
|
+
image: ta.Optional[str] = None
|
3453
|
+
container_id: ta.Optional[str] = None
|
3454
|
+
|
3455
|
+
def __post_init__(self) -> None:
|
3456
|
+
check.arg(bool(self.image) ^ bool(self.container_id))
|
3457
|
+
|
3458
|
+
|
3459
|
+
##
|
3460
|
+
|
3461
|
+
|
3462
|
+
@dc.dataclass(frozen=True)
|
3463
|
+
class InProcessManageTarget(LocalManageTarget):
|
3464
|
+
class Mode(enum.Enum):
|
3465
|
+
DIRECT = enum.auto()
|
3466
|
+
FAKE_REMOTE = enum.auto()
|
3467
|
+
|
3468
|
+
mode: Mode = Mode.DIRECT
|
3469
|
+
|
3470
|
+
|
3471
|
+
@dc.dataclass(frozen=True)
|
3472
|
+
class SubprocessManageTarget(LocalManageTarget, PythonRemoteManageTarget):
|
3473
|
+
pass
|
3474
|
+
|
3475
|
+
|
2674
3476
|
########################################
|
2675
3477
|
# ../../../omlish/argparse/cli.py
|
2676
3478
|
"""
|
@@ -2943,6 +3745,78 @@ class ArgparseCli:
|
|
2943
3745
|
return await fn()
|
2944
3746
|
|
2945
3747
|
|
3748
|
+
########################################
|
3749
|
+
# ../../../omlish/lite/contextmanagers.py
|
3750
|
+
|
3751
|
+
|
3752
|
+
##
|
3753
|
+
|
3754
|
+
|
3755
|
+
class ExitStacked:
|
3756
|
+
_exit_stack: ta.Optional[contextlib.ExitStack] = None
|
3757
|
+
|
3758
|
+
def __enter__(self: ExitStackedT) -> ExitStackedT:
|
3759
|
+
check.state(self._exit_stack is None)
|
3760
|
+
es = self._exit_stack = contextlib.ExitStack()
|
3761
|
+
es.__enter__()
|
3762
|
+
return self
|
3763
|
+
|
3764
|
+
def __exit__(self, exc_type, exc_val, exc_tb):
|
3765
|
+
if (es := self._exit_stack) is None:
|
3766
|
+
return None
|
3767
|
+
self._exit_contexts()
|
3768
|
+
return es.__exit__(exc_type, exc_val, exc_tb)
|
3769
|
+
|
3770
|
+
def _exit_contexts(self) -> None:
|
3771
|
+
pass
|
3772
|
+
|
3773
|
+
def _enter_context(self, cm: ta.ContextManager[T]) -> T:
|
3774
|
+
es = check.not_none(self._exit_stack)
|
3775
|
+
return es.enter_context(cm)
|
3776
|
+
|
3777
|
+
|
3778
|
+
##
|
3779
|
+
|
3780
|
+
|
3781
|
+
@contextlib.contextmanager
|
3782
|
+
def defer(fn: ta.Callable) -> ta.Generator[ta.Callable, None, None]:
|
3783
|
+
try:
|
3784
|
+
yield fn
|
3785
|
+
finally:
|
3786
|
+
fn()
|
3787
|
+
|
3788
|
+
|
3789
|
+
@contextlib.contextmanager
|
3790
|
+
def attr_setting(obj, attr, val, *, default=None): # noqa
|
3791
|
+
not_set = object()
|
3792
|
+
orig = getattr(obj, attr, not_set)
|
3793
|
+
try:
|
3794
|
+
setattr(obj, attr, val)
|
3795
|
+
if orig is not not_set:
|
3796
|
+
yield orig
|
3797
|
+
else:
|
3798
|
+
yield default
|
3799
|
+
finally:
|
3800
|
+
if orig is not_set:
|
3801
|
+
delattr(obj, attr)
|
3802
|
+
else:
|
3803
|
+
setattr(obj, attr, orig)
|
3804
|
+
|
3805
|
+
|
3806
|
+
##
|
3807
|
+
|
3808
|
+
|
3809
|
+
class aclosing(contextlib.AbstractAsyncContextManager): # noqa
|
3810
|
+
def __init__(self, thing):
|
3811
|
+
self.thing = thing
|
3812
|
+
|
3813
|
+
async def __aenter__(self):
|
3814
|
+
return self.thing
|
3815
|
+
|
3816
|
+
async def __aexit__(self, *exc_info):
|
3817
|
+
await self.thing.aclose()
|
3818
|
+
|
3819
|
+
|
2946
3820
|
########################################
|
2947
3821
|
# ../../../omlish/lite/inject.py
|
2948
3822
|
|
@@ -4111,7 +4985,8 @@ def configure_standard_logging(
|
|
4111
4985
|
"""
|
4112
4986
|
TODO:
|
4113
4987
|
- pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
|
4114
|
-
-
|
4988
|
+
- namedtuple
|
4989
|
+
- literals
|
4115
4990
|
"""
|
4116
4991
|
|
4117
4992
|
|
@@ -4401,14 +5276,18 @@ class ObjMarshalerManager:
|
|
4401
5276
|
) -> ObjMarshaler:
|
4402
5277
|
if isinstance(ty, type):
|
4403
5278
|
if abc.ABC in ty.__bases__:
|
4404
|
-
|
5279
|
+
impls = [ity for ity in deep_subclasses(ty) if abc.ABC not in ity.__bases__] # type: ignore
|
5280
|
+
if all(ity.__qualname__.endswith(ty.__name__) for ity in impls):
|
5281
|
+
ins = {ity: snake_case(ity.__qualname__[:-len(ty.__name__)]) for ity in impls}
|
5282
|
+
else:
|
5283
|
+
ins = {ity: ity.__qualname__ for ity in impls}
|
5284
|
+
return PolymorphicObjMarshaler.of([
|
4405
5285
|
PolymorphicObjMarshaler.Impl(
|
4406
5286
|
ity,
|
4407
|
-
|
5287
|
+
itn,
|
4408
5288
|
rec(ity),
|
4409
5289
|
)
|
4410
|
-
for ity in
|
4411
|
-
if abc.ABC not in ity.__bases__
|
5290
|
+
for ity, itn in ins.items()
|
4412
5291
|
])
|
4413
5292
|
|
4414
5293
|
if issubclass(ty, enum.Enum):
|
@@ -4655,41 +5534,6 @@ class Interp:
|
|
4655
5534
|
version: InterpVersion
|
4656
5535
|
|
4657
5536
|
|
4658
|
-
########################################
|
4659
|
-
# ../bootstrap.py
|
4660
|
-
|
4661
|
-
|
4662
|
-
@dc.dataclass(frozen=True)
|
4663
|
-
class MainBootstrap:
|
4664
|
-
main_config: MainConfig = MainConfig()
|
4665
|
-
|
4666
|
-
remote_config: RemoteConfig = RemoteConfig()
|
4667
|
-
|
4668
|
-
system_config: SystemConfig = SystemConfig()
|
4669
|
-
|
4670
|
-
|
4671
|
-
########################################
|
4672
|
-
# ../commands/execution.py
|
4673
|
-
|
4674
|
-
|
4675
|
-
CommandExecutorMap = ta.NewType('CommandExecutorMap', ta.Mapping[ta.Type[Command], CommandExecutor])
|
4676
|
-
|
4677
|
-
|
4678
|
-
class LocalCommandExecutor(CommandExecutor):
|
4679
|
-
def __init__(
|
4680
|
-
self,
|
4681
|
-
*,
|
4682
|
-
command_executors: CommandExecutorMap,
|
4683
|
-
) -> None:
|
4684
|
-
super().__init__()
|
4685
|
-
|
4686
|
-
self._command_executors = command_executors
|
4687
|
-
|
4688
|
-
async def execute(self, cmd: Command) -> Command.Output:
|
4689
|
-
ce: CommandExecutor = self._command_executors[type(cmd)]
|
4690
|
-
return await ce.execute(cmd)
|
4691
|
-
|
4692
|
-
|
4693
5537
|
########################################
|
4694
5538
|
# ../commands/marshal.py
|
4695
5539
|
|
@@ -4715,6 +5559,34 @@ def install_command_marshaling(
|
|
4715
5559
|
)
|
4716
5560
|
|
4717
5561
|
|
5562
|
+
########################################
|
5563
|
+
# ../commands/ping.py
|
5564
|
+
|
5565
|
+
|
5566
|
+
##
|
5567
|
+
|
5568
|
+
|
5569
|
+
@dc.dataclass(frozen=True)
|
5570
|
+
class PingCommand(Command['PingCommand.Output']):
|
5571
|
+
time: float = dc.field(default_factory=time.time)
|
5572
|
+
|
5573
|
+
@dc.dataclass(frozen=True)
|
5574
|
+
class Output(Command.Output):
|
5575
|
+
time: float
|
5576
|
+
|
5577
|
+
|
5578
|
+
class PingCommandExecutor(CommandExecutor[PingCommand, PingCommand.Output]):
|
5579
|
+
async def execute(self, cmd: PingCommand) -> PingCommand.Output:
|
5580
|
+
return PingCommand.Output(cmd.time)
|
5581
|
+
|
5582
|
+
|
5583
|
+
########################################
|
5584
|
+
# ../commands/types.py
|
5585
|
+
|
5586
|
+
|
5587
|
+
CommandExecutorMap = ta.NewType('CommandExecutorMap', ta.Mapping[ta.Type[Command], CommandExecutor])
|
5588
|
+
|
5589
|
+
|
4718
5590
|
########################################
|
4719
5591
|
# ../deploy/commands.py
|
4720
5592
|
|
@@ -4828,24 +5700,72 @@ class RemoteChannelImpl(RemoteChannel):
|
|
4828
5700
|
|
4829
5701
|
|
4830
5702
|
########################################
|
4831
|
-
# ../system/
|
5703
|
+
# ../system/platforms.py
|
4832
5704
|
|
4833
5705
|
|
4834
5706
|
##
|
4835
5707
|
|
4836
5708
|
|
4837
5709
|
@dc.dataclass(frozen=True)
|
4838
|
-
class
|
4839
|
-
|
4840
|
-
class Output(Command.Output):
|
4841
|
-
pass
|
5710
|
+
class Platform(abc.ABC): # noqa
|
5711
|
+
pass
|
4842
5712
|
|
4843
5713
|
|
4844
|
-
class
|
4845
|
-
|
4846
|
-
|
5714
|
+
class LinuxPlatform(Platform, abc.ABC):
|
5715
|
+
pass
|
5716
|
+
|
5717
|
+
|
5718
|
+
class UbuntuPlatform(LinuxPlatform):
|
5719
|
+
pass
|
5720
|
+
|
5721
|
+
|
5722
|
+
class AmazonLinuxPlatform(LinuxPlatform):
|
5723
|
+
pass
|
5724
|
+
|
5725
|
+
|
5726
|
+
class GenericLinuxPlatform(LinuxPlatform):
|
5727
|
+
pass
|
5728
|
+
|
5729
|
+
|
5730
|
+
class DarwinPlatform(Platform):
|
5731
|
+
pass
|
5732
|
+
|
5733
|
+
|
5734
|
+
class UnknownPlatform(Platform):
|
5735
|
+
pass
|
5736
|
+
|
5737
|
+
|
5738
|
+
##
|
5739
|
+
|
5740
|
+
|
5741
|
+
def _detect_system_platform() -> Platform:
|
5742
|
+
plat = sys.platform
|
5743
|
+
|
5744
|
+
if plat == 'linux':
|
5745
|
+
if (osr := LinuxOsRelease.read()) is None:
|
5746
|
+
return GenericLinuxPlatform()
|
5747
|
+
|
5748
|
+
if osr.id == 'amzn':
|
5749
|
+
return AmazonLinuxPlatform()
|
5750
|
+
|
5751
|
+
elif osr.id == 'ubuntu':
|
5752
|
+
return UbuntuPlatform()
|
5753
|
+
|
5754
|
+
else:
|
5755
|
+
return GenericLinuxPlatform()
|
4847
5756
|
|
4848
|
-
|
5757
|
+
elif plat == 'darwin':
|
5758
|
+
return DarwinPlatform()
|
5759
|
+
|
5760
|
+
else:
|
5761
|
+
return UnknownPlatform()
|
5762
|
+
|
5763
|
+
|
5764
|
+
@cached_nullary
|
5765
|
+
def detect_system_platform() -> Platform:
|
5766
|
+
platform = _detect_system_platform()
|
5767
|
+
log.info('Detected platform: %r', platform)
|
5768
|
+
return platform
|
4849
5769
|
|
4850
5770
|
|
4851
5771
|
########################################
|
@@ -4868,168 +5788,241 @@ SUBPROCESS_CHANNEL_OPTION_VALUES: ta.Mapping[SubprocessChannelOption, int] = {
|
|
4868
5788
|
_SUBPROCESS_SHELL_WRAP_EXECS = False
|
4869
5789
|
|
4870
5790
|
|
4871
|
-
def subprocess_shell_wrap_exec(*
|
4872
|
-
return ('sh', '-c', ' '.join(map(shlex.quote,
|
5791
|
+
def subprocess_shell_wrap_exec(*cmd: str) -> ta.Tuple[str, ...]:
|
5792
|
+
return ('sh', '-c', ' '.join(map(shlex.quote, cmd)))
|
4873
5793
|
|
4874
5794
|
|
4875
|
-
def subprocess_maybe_shell_wrap_exec(*
|
5795
|
+
def subprocess_maybe_shell_wrap_exec(*cmd: str) -> ta.Tuple[str, ...]:
|
4876
5796
|
if _SUBPROCESS_SHELL_WRAP_EXECS or is_debugger_attached():
|
4877
|
-
return subprocess_shell_wrap_exec(*
|
5797
|
+
return subprocess_shell_wrap_exec(*cmd)
|
4878
5798
|
else:
|
4879
|
-
return
|
4880
|
-
|
4881
|
-
|
4882
|
-
def prepare_subprocess_invocation(
|
4883
|
-
*args: str,
|
4884
|
-
env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
|
4885
|
-
extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
|
4886
|
-
quiet: bool = False,
|
4887
|
-
shell: bool = False,
|
4888
|
-
**kwargs: ta.Any,
|
4889
|
-
) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
|
4890
|
-
log.debug('prepare_subprocess_invocation: args=%r', args)
|
4891
|
-
if extra_env:
|
4892
|
-
log.debug('prepare_subprocess_invocation: extra_env=%r', extra_env)
|
4893
|
-
|
4894
|
-
if extra_env:
|
4895
|
-
env = {**(env if env is not None else os.environ), **extra_env}
|
4896
|
-
|
4897
|
-
if quiet and 'stderr' not in kwargs:
|
4898
|
-
if not log.isEnabledFor(logging.DEBUG):
|
4899
|
-
kwargs['stderr'] = subprocess.DEVNULL
|
4900
|
-
|
4901
|
-
if not shell:
|
4902
|
-
args = subprocess_maybe_shell_wrap_exec(*args)
|
4903
|
-
|
4904
|
-
return args, dict(
|
4905
|
-
env=env,
|
4906
|
-
shell=shell,
|
4907
|
-
**kwargs,
|
4908
|
-
)
|
5799
|
+
return cmd
|
4909
5800
|
|
4910
5801
|
|
4911
5802
|
##
|
4912
5803
|
|
4913
5804
|
|
4914
|
-
|
4915
|
-
|
4916
|
-
|
4917
|
-
|
4918
|
-
|
4919
|
-
|
4920
|
-
|
4921
|
-
|
4922
|
-
|
4923
|
-
|
5805
|
+
def subprocess_close(
|
5806
|
+
proc: subprocess.Popen,
|
5807
|
+
timeout: ta.Optional[float] = None,
|
5808
|
+
) -> None:
|
5809
|
+
# TODO: terminate, sleep, kill
|
5810
|
+
if proc.stdout:
|
5811
|
+
proc.stdout.close()
|
5812
|
+
if proc.stderr:
|
5813
|
+
proc.stderr.close()
|
5814
|
+
if proc.stdin:
|
5815
|
+
proc.stdin.close()
|
4924
5816
|
|
4925
|
-
|
4926
|
-
end_time = time.time()
|
4927
|
-
elapsed_s = end_time - start_time
|
4928
|
-
log.debug('subprocess_common_context.finally: elapsed_s=%f args=%r', elapsed_s, args)
|
5817
|
+
proc.wait(timeout)
|
4929
5818
|
|
4930
5819
|
|
4931
5820
|
##
|
4932
5821
|
|
4933
5822
|
|
4934
|
-
|
4935
|
-
|
4936
|
-
|
4937
|
-
|
4938
|
-
|
4939
|
-
|
4940
|
-
|
4941
|
-
|
5823
|
+
class AbstractSubprocesses(abc.ABC): # noqa
|
5824
|
+
DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = log
|
5825
|
+
|
5826
|
+
def __init__(
|
5827
|
+
self,
|
5828
|
+
*,
|
5829
|
+
log: ta.Optional[logging.Logger] = None,
|
5830
|
+
try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
|
5831
|
+
) -> None:
|
5832
|
+
super().__init__()
|
5833
|
+
|
5834
|
+
self._log = log if log is not None else self.DEFAULT_LOGGER
|
5835
|
+
self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS
|
5836
|
+
|
5837
|
+
#
|
4942
5838
|
|
5839
|
+
def prepare_args(
|
5840
|
+
self,
|
5841
|
+
*cmd: str,
|
5842
|
+
env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
|
5843
|
+
extra_env: ta.Optional[ta.Mapping[str, ta.Any]] = None,
|
5844
|
+
quiet: bool = False,
|
5845
|
+
shell: bool = False,
|
5846
|
+
**kwargs: ta.Any,
|
5847
|
+
) -> ta.Tuple[ta.Tuple[ta.Any, ...], ta.Dict[str, ta.Any]]:
|
5848
|
+
if self._log:
|
5849
|
+
self._log.debug('Subprocesses.prepare_args: cmd=%r', cmd)
|
5850
|
+
if extra_env:
|
5851
|
+
self._log.debug('Subprocesses.prepare_args: extra_env=%r', extra_env)
|
4943
5852
|
|
4944
|
-
|
4945
|
-
|
4946
|
-
**kwargs: ta.Any,
|
4947
|
-
) -> bytes:
|
4948
|
-
args, kwargs = prepare_subprocess_invocation(*args, **kwargs)
|
4949
|
-
with subprocess_common_context(*args, **kwargs):
|
4950
|
-
return subprocess.check_output(args, **kwargs)
|
5853
|
+
if extra_env:
|
5854
|
+
env = {**(env if env is not None else os.environ), **extra_env}
|
4951
5855
|
|
5856
|
+
if quiet and 'stderr' not in kwargs:
|
5857
|
+
if self._log and not self._log.isEnabledFor(logging.DEBUG):
|
5858
|
+
kwargs['stderr'] = subprocess.DEVNULL
|
4952
5859
|
|
4953
|
-
|
4954
|
-
|
5860
|
+
if not shell:
|
5861
|
+
cmd = subprocess_maybe_shell_wrap_exec(*cmd)
|
4955
5862
|
|
5863
|
+
return cmd, dict(
|
5864
|
+
env=env,
|
5865
|
+
shell=shell,
|
5866
|
+
**kwargs,
|
5867
|
+
)
|
4956
5868
|
|
4957
|
-
|
5869
|
+
@contextlib.contextmanager
|
5870
|
+
def wrap_call(self, *cmd: ta.Any, **kwargs: ta.Any) -> ta.Iterator[None]:
|
5871
|
+
start_time = time.time()
|
5872
|
+
try:
|
5873
|
+
if self._log:
|
5874
|
+
self._log.debug('Subprocesses.wrap_call.try: cmd=%r', cmd)
|
5875
|
+
yield
|
4958
5876
|
|
5877
|
+
except Exception as exc: # noqa
|
5878
|
+
if self._log:
|
5879
|
+
self._log.debug('Subprocesses.wrap_call.except: exc=%r', exc)
|
5880
|
+
raise
|
4959
5881
|
|
4960
|
-
|
4961
|
-
|
4962
|
-
|
4963
|
-
|
5882
|
+
finally:
|
5883
|
+
end_time = time.time()
|
5884
|
+
elapsed_s = end_time - start_time
|
5885
|
+
if self._log:
|
5886
|
+
self._log.debug('sSubprocesses.wrap_call.finally: elapsed_s=%f cmd=%r', elapsed_s, cmd)
|
4964
5887
|
|
5888
|
+
@contextlib.contextmanager
|
5889
|
+
def prepare_and_wrap(
|
5890
|
+
self,
|
5891
|
+
*cmd: ta.Any,
|
5892
|
+
**kwargs: ta.Any,
|
5893
|
+
) -> ta.Iterator[ta.Tuple[
|
5894
|
+
ta.Tuple[ta.Any, ...],
|
5895
|
+
ta.Dict[str, ta.Any],
|
5896
|
+
]]:
|
5897
|
+
cmd, kwargs = self.prepare_args(*cmd, **kwargs)
|
5898
|
+
with self.wrap_call(*cmd, **kwargs):
|
5899
|
+
yield cmd, kwargs
|
4965
5900
|
|
4966
|
-
|
4967
|
-
fn: ta.Callable[..., T],
|
4968
|
-
*args: ta.Any,
|
4969
|
-
try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
|
4970
|
-
**kwargs: ta.Any,
|
4971
|
-
) -> ta.Union[T, Exception]:
|
4972
|
-
try:
|
4973
|
-
return fn(*args, **kwargs)
|
4974
|
-
except try_exceptions as e: # noqa
|
4975
|
-
if log.isEnabledFor(logging.DEBUG):
|
4976
|
-
log.exception('command failed')
|
4977
|
-
return e
|
4978
|
-
|
4979
|
-
|
4980
|
-
def subprocess_try_call(
|
4981
|
-
*args: str,
|
4982
|
-
try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
|
4983
|
-
**kwargs: ta.Any,
|
4984
|
-
) -> bool:
|
4985
|
-
if isinstance(_subprocess_try_run(
|
4986
|
-
subprocess_check_call,
|
4987
|
-
*args,
|
4988
|
-
try_exceptions=try_exceptions,
|
4989
|
-
**kwargs,
|
4990
|
-
), Exception):
|
4991
|
-
return False
|
4992
|
-
else:
|
4993
|
-
return True
|
5901
|
+
#
|
4994
5902
|
|
5903
|
+
DEFAULT_TRY_EXCEPTIONS: ta.Tuple[ta.Type[Exception], ...] = (
|
5904
|
+
FileNotFoundError,
|
5905
|
+
subprocess.CalledProcessError,
|
5906
|
+
)
|
4995
5907
|
|
4996
|
-
def
|
4997
|
-
|
4998
|
-
|
4999
|
-
|
5000
|
-
|
5001
|
-
|
5002
|
-
|
5003
|
-
|
5004
|
-
try_exceptions=
|
5005
|
-
|
5006
|
-
|
5007
|
-
|
5008
|
-
|
5009
|
-
|
5908
|
+
+    def try_fn(
+            self,
+            fn: ta.Callable[..., T],
+            *cmd: str,
+            try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
+            **kwargs: ta.Any,
+    ) -> ta.Union[T, Exception]:
+        if try_exceptions is None:
+            try_exceptions = self._try_exceptions
+
+        try:
+            return fn(*cmd, **kwargs)
+
+        except try_exceptions as e:  # noqa
+            if self._log and self._log.isEnabledFor(logging.DEBUG):
+                self._log.exception('command failed')
+            return e

+    async def async_try_fn(
+            self,
+            fn: ta.Callable[..., ta.Awaitable[T]],
+            *cmd: ta.Any,
+            try_exceptions: ta.Optional[ta.Tuple[ta.Type[Exception], ...]] = None,
+            **kwargs: ta.Any,
+    ) -> ta.Union[T, Exception]:
+        if try_exceptions is None:
+            try_exceptions = self._try_exceptions
+
+        try:
+            return await fn(*cmd, **kwargs)

-
-
-
+        except try_exceptions as e:  # noqa
+            if self._log and self._log.isEnabledFor(logging.DEBUG):
+                self._log.exception('command failed')
+            return e


 ##


-
-
-
-
-
-
-
-
-
- if proc.stdin:
- proc.stdin.close()
+class Subprocesses(AbstractSubprocesses):
+    def check_call(
+            self,
+            *cmd: str,
+            stdout: ta.Any = sys.stderr,
+            **kwargs: ta.Any,
+    ) -> None:
+        with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs):  # noqa
+            subprocess.check_call(cmd, **kwargs)

-
+    def check_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bytes:
+        with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs):  # noqa
+            return subprocess.check_output(cmd, **kwargs)
+
+    def check_output_str(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> str:
+        return self.check_output(*cmd, **kwargs).decode().strip()
+
+    #
+
+    def try_call(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bool:
+        if isinstance(self.try_fn(self.check_call, *cmd, **kwargs), Exception):
+            return False
+        else:
+            return True
+
+    def try_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[bytes]:
+        if isinstance(ret := self.try_fn(self.check_output, *cmd, **kwargs), Exception):
+            return None
+        else:
+            return ret
+
+    def try_output_str(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[str]:
+        if (ret := self.try_output(*cmd, **kwargs)) is None:
+            return None
+        else:
+            return ret.decode().strip()
+
+
+subprocesses = Subprocesses()
+
+
+########################################
+# ../commands/local.py
+
+
+class LocalCommandExecutor(CommandExecutor):
+    def __init__(
+            self,
+            *,
+            command_executors: CommandExecutorMap,
+    ) -> None:
+        super().__init__()
+
+        self._command_executors = command_executors
+
+    async def execute(self, cmd: Command) -> Command.Output:
+        ce: CommandExecutor = self._command_executors[type(cmd)]
+        return await ce.execute(cmd)


 ########################################
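A minimal usage sketch of the synchronous Subprocesses helpers added above. It is illustrative only and not part of the diffed package; the import path is an assumption about how the bundled single-file script would be loaded.

# Illustrative sketch, assuming manage.py is importable as a module.
from ominfra.scripts.manage import subprocesses  # assumed import path

# try_call swallows failures and returns a bool instead of raising.
if subprocesses.try_call('git', '--version'):
    # try_output_str returns None on failure, otherwise the stripped stdout text.
    print(subprocesses.try_output_str('git', 'rev-parse', '--short', 'HEAD'))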
@@ -5409,7 +6402,7 @@ class RemoteCommandExecutor(CommandExecutor):
             self,
             cmd: Command,
             *,
-            log: ta.Optional[logging.Logger] = None,
+            log: ta.Optional[logging.Logger] = None,  # noqa
             omit_exc_object: bool = False,
     ) -> CommandOutputOrException:
         try:
@@ -5430,44 +6423,16 @@ class RemoteCommandExecutor(CommandExecutor):


 ########################################
- #
-
-
- ##
+# ../system/config.py


- @
-
-
- shell: bool = False,
- timeout: ta.Optional[float] = None,
- **kwargs: ta.Any,
- ) -> ta.AsyncGenerator[asyncio.subprocess.Process, None]:
- fac: ta.Any
- if shell:
- fac = functools.partial(
- asyncio.create_subprocess_shell,
- check.single(cmd),
- )
- else:
- fac = functools.partial(
- asyncio.create_subprocess_exec,
- *cmd,
- )
+@dc.dataclass(frozen=True)
+class SystemConfig:
+    platform: ta.Optional[Platform] = None

- with subprocess_common_context(
- *cmd,
- shell=shell,
- timeout=timeout,
- **kwargs,
- ):
- proc: asyncio.subprocess.Process
- proc = await fac(**kwargs)
- try:
- yield proc

-
-
+########################################
+# ../../../omlish/lite/asyncio/subprocesses.py


 ##
@@ -5583,146 +6548,156 @@ class AsyncioProcessCommunicator:
         return await asyncio_maybe_timeout(self._communicate(input), timeout)


- async def asyncio_subprocess_communicate(
- proc: asyncio.subprocess.Process,
- input: ta.Any = None,  # noqa
- timeout: ta.Optional[float] = None,
- ) -> ta.Tuple[ta.Optional[bytes], ta.Optional[bytes]]:
- return await AsyncioProcessCommunicator(proc).communicate(input, timeout)  # noqa
-
-
- async def asyncio_subprocess_run(
- *args: str,
- input: ta.Any = None,  # noqa
- timeout: ta.Optional[float] = None,
- check: bool = False,  # noqa
- capture_output: ta.Optional[bool] = None,
- **kwargs: ta.Any,
- ) -> ta.Tuple[ta.Optional[bytes], ta.Optional[bytes]]:
- if capture_output:
- kwargs.setdefault('stdout', subprocess.PIPE)
- kwargs.setdefault('stderr', subprocess.PIPE)
-
- args, kwargs = prepare_subprocess_invocation(*args, **kwargs)
-
- proc: asyncio.subprocess.Process
- async with asyncio_subprocess_popen(*args, **kwargs) as proc:
- stdout, stderr = await asyncio_subprocess_communicate(proc, input, timeout)
-
- if check and proc.returncode:
- raise subprocess.CalledProcessError(
- proc.returncode,
- args,
- output=stdout,
- stderr=stderr,
- )
-
- return stdout, stderr
-
-
 ##


-
-
-
-
-
-
- ) ->
-
- *args,
- stdout=stdout,
- input=input,
- timeout=timeout,
- check=True,
- **kwargs,
- )
-
-
- async def asyncio_subprocess_check_output(
- *args: str,
- input: ta.Any = None,  # noqa
- timeout: ta.Optional[float] = None,
- **kwargs: ta.Any,
- ) -> bytes:
- stdout, stderr = await asyncio_subprocess_run(
- *args,
- stdout=asyncio.subprocess.PIPE,
- input=input,
- timeout=timeout,
- check=True,
- **kwargs,
- )
+class AsyncioSubprocesses(AbstractSubprocesses):
+    async def communicate(
+            self,
+            proc: asyncio.subprocess.Process,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+    ) -> ta.Tuple[ta.Optional[bytes], ta.Optional[bytes]]:
+        return await AsyncioProcessCommunicator(proc).communicate(input, timeout)  # noqa

-
+    #

+    @contextlib.asynccontextmanager
+    async def popen(
+            self,
+            *cmd: str,
+            shell: bool = False,
+            timeout: ta.Optional[float] = None,
+            **kwargs: ta.Any,
+    ) -> ta.AsyncGenerator[asyncio.subprocess.Process, None]:
+        fac: ta.Any
+        if shell:
+            fac = functools.partial(
+                asyncio.create_subprocess_shell,
+                check.single(cmd),
+            )
+        else:
+            fac = functools.partial(
+                asyncio.create_subprocess_exec,
+                *cmd,
+            )

-
-
+        with self.prepare_and_wrap( *cmd, shell=shell, **kwargs) as (cmd, kwargs):  # noqa
+            proc: asyncio.subprocess.Process = await fac(**kwargs)
+            try:
+                yield proc

+            finally:
+                await asyncio_maybe_timeout(proc.wait(), timeout)

-
+    #

+    @dc.dataclass(frozen=True)
+    class RunOutput:
+        proc: asyncio.subprocess.Process
+        stdout: ta.Optional[bytes]
+        stderr: ta.Optional[bytes]

- async def
-
-
-
-
-
-
-
-
- if
-
-
-
-
- async def asyncio_subprocess_try_call(
- *args: str,
- try_exceptions: ta.Tuple[ta.Type[Exception], ...] = DEFAULT_SUBPROCESS_TRY_EXCEPTIONS,
- **kwargs: ta.Any,
- ) -> bool:
- if isinstance(await _asyncio_subprocess_try_run(
- asyncio_subprocess_check_call,
- *args,
- try_exceptions=try_exceptions,
- **kwargs,
- ), Exception):
- return False
- else:
- return True
+    async def run(
+            self,
+            *cmd: str,
+            input: ta.Any = None,  # noqa
+            timeout: ta.Optional[float] = None,
+            check: bool = False,  # noqa
+            capture_output: ta.Optional[bool] = None,
+            **kwargs: ta.Any,
+    ) -> RunOutput:
+        if capture_output:
+            kwargs.setdefault('stdout', subprocess.PIPE)
+            kwargs.setdefault('stderr', subprocess.PIPE)

+        proc: asyncio.subprocess.Process
+        async with self.popen(*cmd, **kwargs) as proc:
+            stdout, stderr = await self.communicate(proc, input, timeout)
+
+        if check and proc.returncode:
+            raise subprocess.CalledProcessError(
+                proc.returncode,
+                cmd,
+                output=stdout,
+                stderr=stderr,
+            )

-
-
-
-
- )
- if isinstance(ret := await _asyncio_subprocess_try_run(
- asyncio_subprocess_check_output,
- *args,
- try_exceptions=try_exceptions,
- **kwargs,
- ), Exception):
- return None
- else:
- return ret
+        return self.RunOutput(
+            proc,
+            stdout,
+            stderr,
+        )

+    #

- async def
-
-
+    async def check_call(
+            self,
+            *cmd: str,
+            stdout: ta.Any = sys.stderr,
+            **kwargs: ta.Any,
+    ) -> None:
+        with self.prepare_and_wrap(*cmd, stdout=stdout, check=True, **kwargs) as (cmd, kwargs):  # noqa
+            await self.run(*cmd, **kwargs)

+    async def check_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bytes:
+        with self.prepare_and_wrap(*cmd, stdout=subprocess.PIPE, check=True, **kwargs) as (cmd, kwargs):  # noqa
+            return check.not_none((await self.run(*cmd, **kwargs)).stdout)

-
-
+    async def check_output_str(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> str:
+        return (await self.check_output(*cmd, **kwargs)).decode().strip()

+    #

-
-
-
+    async def try_call(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bool:
+        if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
+            return False
+        else:
+            return True
+
+    async def try_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[bytes]:
+        if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
+            return None
+        else:
+            return ret
+
+    async def try_output_str(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[str]:
+        if (ret := await self.try_output(*cmd, **kwargs)) is None:
+            return None
+        else:
+            return ret.decode().strip()
+
+
+asyncio_subprocesses = AsyncioSubprocesses()
+
+
+########################################
+# ../../../omdev/interp/inspect.py
+
+
+@dc.dataclass(frozen=True)
+class InterpInspection:
+    exe: str
     version: Version

     version_str: str
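A minimal usage sketch of the async counterpart introduced above, the module-level asyncio_subprocesses singleton. Illustrative only; it assumes the names are in scope as they are inside the bundled script.

# Illustrative sketch of AsyncioSubprocesses, assuming asyncio_subprocesses is in scope.
import asyncio

async def main() -> None:
    # run() returns a RunOutput with the process plus captured stdout/stderr.
    out = await asyncio_subprocesses.run('uname', '-a', capture_output=True, check=True)
    print(out.stdout)
    # try_output_str() returns None instead of raising on failure.
    print(await asyncio_subprocesses.try_output_str('git', 'rev-parse', 'HEAD'))

asyncio.run(main())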
@@ -5790,7 +6765,7 @@ class InterpInspector:
         return cls._build_inspection(sys.executable, eval(cls._INSPECTION_CODE))  # noqa

     async def _inspect(self, exe: str) -> InterpInspection:
-        output = await
+        output = await asyncio_subprocesses.check_output(exe, '-c', f'print({self._INSPECTION_CODE})', quiet=True)
         return self._build_inspection(exe, output.decode())

     async def inspect(self, exe: str) -> ta.Optional[InterpInspection]:
@@ -5811,6 +6786,21 @@ class InterpInspector:
 INTERP_INSPECTOR = InterpInspector()


+########################################
+# ../bootstrap.py
+
+
+@dc.dataclass(frozen=True)
+class MainBootstrap:
+    main_config: MainConfig = MainConfig()
+
+    deploy_config: DeployConfig = DeployConfig()
+
+    remote_config: RemoteConfig = RemoteConfig()
+
+    system_config: SystemConfig = SystemConfig()
+
+
 ########################################
 # ../commands/subprocess.py

@@ -5849,7 +6839,7 @@ class SubprocessCommand(Command['SubprocessCommand.Output']):
 class SubprocessCommandExecutor(CommandExecutor[SubprocessCommand, SubprocessCommand.Output]):
     async def execute(self, cmd: SubprocessCommand) -> SubprocessCommand.Output:
         proc: asyncio.subprocess.Process
-        async with
+        async with asyncio_subprocesses.popen(
             *subprocess_maybe_shell_wrap_exec(*cmd.cmd),

             shell=cmd.shell,
@@ -5863,7 +6853,7 @@ class SubprocessCommandExecutor(CommandExecutor[SubprocessCommand, SubprocessCom
             timeout=cmd.timeout,
         ) as proc:
             start_time = time.time()
-            stdout, stderr = await
+            stdout, stderr = await asyncio_subprocesses.communicate(
                 proc,
                 input=cmd.input,
                 timeout=cmd.timeout,
@@ -5882,146 +6872,190 @@ class SubprocessCommandExecutor(CommandExecutor[SubprocessCommand, SubprocessCom


 ########################################
- # ../
+# ../deploy/git.py
+"""
+TODO:
+ - 'repos'?
+
+git/github.com/wrmsr/omlish <- bootstrap repo
+ - shallow clone off bootstrap into /apps
+
+github.com/wrmsr/omlish@rev
+"""


 ##


-
-
-
-
+@dc.dataclass(frozen=True)
+class DeployGitRepo:
+    host: ta.Optional[str] = None
+    username: ta.Optional[str] = None
+    path: ta.Optional[str] = None

- def
-
- self.
+    def __post_init__(self) -> None:
+        check.not_in('..', check.non_empty_str(self.host))
+        check.not_in('.', check.non_empty_str(self.path))
+
+
+@dc.dataclass(frozen=True)
+class DeployGitSpec:
+    repo: DeployGitRepo
+    rev: DeployRev


 ##


- class
+class DeployGitManager(DeployPathOwner):
     def __init__(
             self,
-
+            *,
+            deploy_home: DeployHome,
     ) -> None:
         super().__init__()

- self.
-
- self.__bootstrap: ta.Optional[MainBootstrap] = None
- self.__injector: ta.Optional[Injector] = None
-
- @property
- def _bootstrap(self) -> MainBootstrap:
- return check.not_none(self.__bootstrap)
+        self._deploy_home = deploy_home
+        self._dir = os.path.join(deploy_home, 'git')

-
- def _injector(self) -> Injector:
- return check.not_none(self.__injector)
-
- #
+        self._repo_dirs: ta.Dict[DeployGitRepo, DeployGitManager.RepoDir] = {}

- def
-
-
-
- sig: int = signal.SIGINT,
- code: int = 1,
- ) -> None:
- time.sleep(delay_s)
+    def get_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
+        return {
+            DeployPath.parse('git'),
+        }

-
-
+    class RepoDir:
+        def __init__(
+                self,
+                git: 'DeployGitManager',
+                repo: DeployGitRepo,
+        ) -> None:
+            super().__init__()

-
+            self._git = git
+            self._repo = repo
+            self._dir = os.path.join(
+                self._git._dir,  # noqa
+                check.non_empty_str(repo.host),
+                check.non_empty_str(repo.path),
+            )

-
-
-
- return None
+        @property
+        def repo(self) -> DeployGitRepo:
+            return self._repo

-
-
-
-
-
+        @property
+        def url(self) -> str:
+            if self._repo.username is not None:
+                return f'{self._repo.username}@{self._repo.host}:{self._repo.path}'
+            else:
+                return f'https://{self._repo.host}/{self._repo.path}'

-
+        async def _call(self, *cmd: str) -> None:
+            await asyncio_subprocesses.check_call(
+                *cmd,
+                cwd=self._dir,
+            )

-
+        @async_cached_nullary
+        async def init(self) -> None:
+            os.makedirs(self._dir, exist_ok=True)
+            if os.path.exists(os.path.join(self._dir, '.git')):
+                return

-
+            await self._call('git', 'init')
+            await self._call('git', 'remote', 'add', 'origin', self.url)

-
+        async def fetch(self, rev: DeployRev) -> None:
+            await self.init()
+            await self._call('git', 'fetch', '--depth=1', 'origin', rev)

-
-
- return _RemoteLogHandler(self._chan)
+        async def checkout(self, rev: DeployRev, dst_dir: str) -> None:
+            check.state(not os.path.exists(dst_dir))

-
+            await self.fetch(rev)

-
-
- check.none(self.__injector)
+            # FIXME: temp dir swap
+            os.makedirs(dst_dir)

-
+            dst_call = functools.partial(asyncio_subprocesses.check_call, cwd=dst_dir)
+            await dst_call('git', 'init')

-
+            await dst_call('git', 'remote', 'add', 'local', self._dir)
+            await dst_call('git', 'fetch', '--depth=1', 'local', rev)
+            await dst_call('git', 'checkout', rev)

-
-
+    def get_repo_dir(self, repo: DeployGitRepo) -> RepoDir:
+        try:
+            return self._repo_dirs[repo]
+        except KeyError:
+            repo_dir = self._repo_dirs[repo] = DeployGitManager.RepoDir(self, repo)
+            return repo_dir

-
+    async def checkout(self, spec: DeployGitSpec, dst_dir: str) -> None:
+        await self.get_repo_dir(spec.repo).checkout(spec.rev, dst_dir)

- self._chan.set_marshaler(self._injector[ObjMarshalerManager])

-
+########################################
+# ../deploy/venvs.py
+"""
+TODO:
+ - interp
+ - share more code with pyproject?
+"""

- if self._bootstrap.remote_config.set_pgid:
- if os.getpgid(0) != os.getpid():
- log.debug('Setting pgid')
- os.setpgid(0, 0)

-
-
-
+class DeployVenvManager(DeployPathOwner):
+    def __init__(
+            self,
+            *,
+            deploy_home: DeployHome,
+    ) -> None:
+        super().__init__()

- self.
+        self._deploy_home = deploy_home
+        self._dir = os.path.join(deploy_home, 'venvs')

-
-
-
+    def get_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
+        return {
+            DeployPath.parse('venvs/@app/@tag/'),
+        }

-
+    async def setup_venv(
+            self,
+            app_dir: str,
+            venv_dir: str,
+            *,
+            use_uv: bool = True,
+    ) -> None:
+        sys_exe = 'python3'

-
- await self._setup()
+        await asyncio_subprocesses.check_call(sys_exe, '-m', 'venv', venv_dir)

-
+        #

-
+        venv_exe = os.path.join(venv_dir, 'bin', 'python3')

-
+        #

+        reqs_txt = os.path.join(app_dir, 'requirements.txt')

-
-
+        if os.path.isfile(reqs_txt):
+            if use_uv:
+                await asyncio_subprocesses.check_call(venv_exe, '-m', 'pip', 'install', 'uv')
+                pip_cmd = ['-m', 'uv', 'pip']
+            else:
+                pip_cmd = ['-m', 'pip']

-
- input = await asyncio_open_stream_reader(rt.input)  # noqa
- output = await asyncio_open_stream_writer(rt.output)
+            await asyncio_subprocesses.check_call(venv_exe, *pip_cmd,'install', '-r', reqs_txt)

-
-
-
+    async def setup_app_venv(self, app_tag: DeployAppTag) -> None:
+        await self.setup_venv(
+            os.path.join(self._deploy_home, 'apps', app_tag.app, app_tag.tag),
+            os.path.join(self._deploy_home, 'venvs', app_tag.app, app_tag.tag),
         )

- await _RemoteExecutionMain(chan).run()
-
- asyncio.run(inner())
-

 ########################################
 # ../remote/spawning.py
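A minimal sketch of driving the new deploy managers above by hand. Illustrative only: the deploy home path and repo coordinates are made-up values, and it assumes DeployHome and DeployRev behave as plain string NewTypes.

# Illustrative sketch; '/opt/deploy' and the rev are hypothetical.
import asyncio

async def main() -> None:
    git = DeployGitManager(deploy_home=DeployHome('/opt/deploy'))
    await git.checkout(
        DeployGitSpec(
            repo=DeployGitRepo(host='github.com', path='wrmsr/omlish'),
            rev=DeployRev('master'),
        ),
        '/opt/deploy/apps/omlish/example-tag',  # must not already exist
    )

asyncio.run(main())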
@@ -6099,7 +7133,7 @@ class SubprocessRemoteSpawning(RemoteSpawning):
         if not debug:
             cmd = subprocess_maybe_shell_wrap_exec(*cmd)

-        async with
+        async with asyncio_subprocesses.popen(
             *cmd,
             shell=pc.shell,
             stdin=subprocess.PIPE,
@@ -6161,10 +7195,10 @@ class SystemPackageManager(abc.ABC):

 class BrewSystemPackageManager(SystemPackageManager):
     async def update(self) -> None:
-        await
+        await asyncio_subprocesses.check_call('brew', 'update')

     async def upgrade(self) -> None:
-        await
+        await asyncio_subprocesses.check_call('brew', 'upgrade')

     async def install(self, *packages: SystemPackageOrStr) -> None:
         es: ta.List[str] = []
@@ -6173,11 +7207,11 @@ class BrewSystemPackageManager(SystemPackageManager):
                 es.append(p.name + (f'@{p.version}' if p.version is not None else ''))
             else:
                 es.append(p)
-        await
+        await asyncio_subprocesses.check_call('brew', 'install', *es)

     async def query(self, *packages: SystemPackageOrStr) -> ta.Mapping[str, SystemPackage]:
         pns = [p.name if isinstance(p, SystemPackage) else p for p in packages]
-        o = await
+        o = await asyncio_subprocesses.check_output('brew', 'info', '--json', *pns)
         j = json.loads(o.decode())
         d: ta.Dict[str, SystemPackage] = {}
         for e in j:
@@ -6196,25 +7230,24 @@ class AptSystemPackageManager(SystemPackageManager):
     }

     async def update(self) -> None:
-        await
+        await asyncio_subprocesses.check_call('sudo', 'apt', 'update', env={**os.environ, **self._APT_ENV})

     async def upgrade(self) -> None:
-        await
+        await asyncio_subprocesses.check_call('sudo', 'apt', 'upgrade', '-y', env={**os.environ, **self._APT_ENV})

     async def install(self, *packages: SystemPackageOrStr) -> None:
         pns = [p.name if isinstance(p, SystemPackage) else p for p in packages]  # FIXME: versions
-        await
+        await asyncio_subprocesses.check_call('sudo', 'apt', 'install', '-y', *pns, env={**os.environ, **self._APT_ENV})

     async def query(self, *packages: SystemPackageOrStr) -> ta.Mapping[str, SystemPackage]:
         pns = [p.name if isinstance(p, SystemPackage) else p for p in packages]
-
-
- *cmd,
+        out = await asyncio_subprocesses.run(
+            'dpkg-query', '-W', '-f=${Package}=${Version}\n', *pns,
             capture_output=True,
             check=False,
         )
         d: ta.Dict[str, SystemPackage] = {}
-        for l in check.not_none(stdout).decode('utf-8').strip().splitlines():
+        for l in check.not_none(out.stdout).decode('utf-8').strip().splitlines():
             n, v = l.split('=', 1)
             d[n] = SystemPackage(
                 name=n,
@@ -6223,6 +7256,33 @@ class AptSystemPackageManager(SystemPackageManager):
         return d


+class YumSystemPackageManager(SystemPackageManager):
+    async def update(self) -> None:
+        await asyncio_subprocesses.check_call('sudo', 'yum', 'check-update')
+
+    async def upgrade(self) -> None:
+        await asyncio_subprocesses.check_call('sudo', 'yum', 'update')
+
+    async def install(self, *packages: SystemPackageOrStr) -> None:
+        pns = [p.name if isinstance(p, SystemPackage) else p for p in packages]  # FIXME: versions
+        await asyncio_subprocesses.check_call('sudo', 'yum', 'install', *pns)
+
+    async def query(self, *packages: SystemPackageOrStr) -> ta.Mapping[str, SystemPackage]:
+        pns = [p.name if isinstance(p, SystemPackage) else p for p in packages]
+        d: ta.Dict[str, SystemPackage] = {}
+        for pn in pns:
+            out = await asyncio_subprocesses.run(
+                'rpm', '-q', pn,
+                capture_output=True,
+            )
+            if not out.proc.returncode:
+                d[pn] = SystemPackage(
+                    pn,
+                    check.not_none(out.stdout).decode().strip(),
+                )
+        return d
+
+
 ########################################
 # ../../../omdev/interp/providers.py
 """
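A minimal sketch of querying installed packages through the new YumSystemPackageManager added above. Illustrative only; the package names are placeholders.

# Illustrative sketch; 'openssl' and 'not-a-real-package' are placeholder names.
import asyncio

async def main() -> None:
    mgr = YumSystemPackageManager()
    found = await mgr.query('openssl', 'not-a-real-package')
    # query() only returns entries rpm reports as installed.
    for name, pkg in found.items():
        print(name, pkg.version)

asyncio.run(main())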
@@ -6285,139 +7345,342 @@ class RunningInterpProvider(InterpProvider):


 ########################################
- # ../
+# ../commands/inject.py


 ##


-
-
-
-
-
-
-
-
+def bind_command(
+        command_cls: ta.Type[Command],
+        executor_cls: ta.Optional[ta.Type[CommandExecutor]],
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = [
+        inj.bind(CommandRegistration(command_cls), array=True),
+    ]
+
+    if executor_cls is not None:
+        lst.extend([
+            inj.bind(executor_cls, singleton=True),
+            inj.bind(CommandExecutorRegistration(command_cls, executor_cls), array=True),
+        ])
+
+    return inj.as_bindings(*lst)


 ##


-
-
-
- *,
- spawning: RemoteSpawning,
- msh: ObjMarshalerManager,
- payload_file: ta.Optional[RemoteExecutionPayloadFile] = None,
- ) -> None:
- super().__init__()
+@dc.dataclass(frozen=True)
+class _FactoryCommandExecutor(CommandExecutor):
+    factory: ta.Callable[[], CommandExecutor]

-
- self.
- self._payload_file = payload_file
+    def execute(self, i: Command) -> ta.Awaitable[Command.Output]:
+        return self.factory().execute(i)

- #

-
- def _payload_src(self) -> str:
- return get_remote_payload_src(file=self._payload_file)
+##

- @cached_nullary
- def _remote_src(self) -> ta.Sequence[str]:
- return [
- self._payload_src(),
- '_remote_execution_main()',
- ]

-
-
-
+def bind_commands(
+        *,
+        main_config: MainConfig,
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = [
+        inj.bind_array(CommandRegistration),
+        inj.bind_array_type(CommandRegistration, CommandRegistrations),
+
+        inj.bind_array(CommandExecutorRegistration),
+        inj.bind_array_type(CommandExecutorRegistration, CommandExecutorRegistrations),
+
+        inj.bind(build_command_name_map, singleton=True),
+    ]

     #

-
-
- self,
- tgt: RemoteSpawning.Target,
- bs: MainBootstrap,
- ) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
- spawn_src = self._spawn_src()
- remote_src = self._remote_src()
+    def provide_obj_marshaler_installer(cmds: CommandNameMap) -> ObjMarshalerInstaller:
+        return ObjMarshalerInstaller(functools.partial(install_command_marshaling, cmds))

-
- tgt,
- spawn_src,
- debug=bs.main_config.debug,
- ) as proc:
- res = await PyremoteBootstrapDriver(  # noqa
- remote_src,
- PyremoteBootstrapOptions(
- debug=bs.main_config.debug,
- ),
- ).async_run(
- proc.stdout,
- proc.stdin,
- )
+    lst.append(inj.bind(provide_obj_marshaler_installer, array=True))

-
- proc.stdout,
- proc.stdin,
- msh=self._msh,
- )
+    #

-
+    def provide_command_executor_map(
+            injector: Injector,
+            crs: CommandExecutorRegistrations,
+    ) -> CommandExecutorMap:
+        dct: ta.Dict[ta.Type[Command], CommandExecutor] = {}

-
-
-
+        cr: CommandExecutorRegistration
+        for cr in crs:
+            if cr.command_cls in dct:
+                raise KeyError(cr.command_cls)

-
+            factory = functools.partial(injector.provide, cr.executor_cls)
+            if main_config.debug:
+                ce = factory()
+            else:
+                ce = _FactoryCommandExecutor(factory)

+            dct[cr.command_cls] = ce

-
+        return CommandExecutorMap(dct)

+    lst.extend([
+        inj.bind(provide_command_executor_map, singleton=True),
+
+        inj.bind(LocalCommandExecutor, singleton=True, eager=main_config.debug),
+    ])
+
+    #
+
+    lst.extend([
+        bind_command(PingCommand, PingCommandExecutor),
+        bind_command(SubprocessCommand, SubprocessCommandExecutor),
+    ])
+
+    #
+
+    return inj.as_bindings(*lst)
+
+
+########################################
+# ../deploy/apps.py
+
+
+def make_deploy_tag(
+        rev: DeployRev,
+        now: ta.Optional[datetime.datetime] = None,
+) -> DeployTag:
+    if now is None:
+        now = datetime.datetime.utcnow()  # noqa
+    now_fmt = '%Y%m%dT%H%M%S'
+    now_str = now.strftime(now_fmt)
+    return DeployTag('-'.join([rev, now_str]))

-
+
+class DeployAppManager(DeployPathOwner):
     def __init__(
             self,
             *,
-
-
+            deploy_home: DeployHome,
+            git: DeployGitManager,
+            venvs: DeployVenvManager,
     ) -> None:
         super().__init__()

- self.
- self.
+        self._deploy_home = deploy_home
+        self._git = git
+        self._venvs = venvs

-
-
+        self._dir = os.path.join(deploy_home, 'apps')
+
+    def get_deploy_paths(self) -> ta.AbstractSet[DeployPath]:
+        return {
+            DeployPath.parse('apps/@app/@tag'),
+        }
+
+    async def prepare_app(
             self,
-
-
-
-
-
+            app: DeployApp,
+            rev: DeployRev,
+            repo: DeployGitRepo,
+    ):
+        app_tag = DeployAppTag(app, make_deploy_tag(rev))
+        app_dir = os.path.join(self._dir, app, app_tag.tag)

-
- local_chan = RemoteChannelImpl(r1, w0, msh=self._msh)
+        #

-
-
-
+        await self._git.checkout(
+            DeployGitSpec(
+                repo=repo,
+                rev=rev,
+            ),
+            app_dir,
         )
- rch_task = asyncio.create_task(rch.run())  # noqa
- try:
- rce: RemoteCommandExecutor
- async with contextlib.aclosing(RemoteCommandExecutor(local_chan)) as rce:
- await rce.start()

-
+        #

-
-
-
+        await self._venvs.setup_app_venv(app_tag)
+
+
+########################################
+# ../remote/_main.py
+
+
+##
+
+
+class _RemoteExecutionLogHandler(logging.Handler):
+    def __init__(self, fn: ta.Callable[[str], None]) -> None:
+        super().__init__()
+        self._fn = fn
+
+    def emit(self, record):
+        msg = self.format(record)
+        self._fn(msg)
+
+
+##
+
+
+class _RemoteExecutionMain:
+    def __init__(
+            self,
+            chan: RemoteChannel,
+    ) -> None:
+        super().__init__()
+
+        self._chan = chan
+
+        self.__bootstrap: ta.Optional[MainBootstrap] = None
+        self.__injector: ta.Optional[Injector] = None
+
+    @property
+    def _bootstrap(self) -> MainBootstrap:
+        return check.not_none(self.__bootstrap)
+
+    @property
+    def _injector(self) -> Injector:
+        return check.not_none(self.__injector)
+
+    #
+
+    def _timebomb_main(
+            self,
+            delay_s: float,
+            *,
+            sig: int = signal.SIGINT,
+            code: int = 1,
+    ) -> None:
+        time.sleep(delay_s)
+
+        if (pgid := os.getpgid(0)) == os.getpid():
+            os.killpg(pgid, sig)
+
+        os._exit(code)  # noqa
+
+    @cached_nullary
+    def _timebomb_thread(self) -> ta.Optional[threading.Thread]:
+        if (tbd := self._bootstrap.remote_config.timebomb_delay_s) is None:
+            return None
+
+        thr = threading.Thread(
+            target=functools.partial(self._timebomb_main, tbd),
+            name=f'{self.__class__.__name__}.timebomb',
+            daemon=True,
+        )
+
+        thr.start()
+
+        log.debug('Started timebomb thread: %r', thr)
+
+        return thr
+
+    #
+
+    @cached_nullary
+    def _log_handler(self) -> _RemoteLogHandler:
+        return _RemoteLogHandler(self._chan)
+
+    #
+
+    async def _setup(self) -> None:
+        check.none(self.__bootstrap)
+        check.none(self.__injector)
+
+        # Bootstrap
+
+        self.__bootstrap = check.not_none(await self._chan.recv_obj(MainBootstrap))
+
+        if (prd := self._bootstrap.remote_config.pycharm_remote_debug) is not None:
+            pycharm_debug_connect(prd)
+
+        self.__injector = main_bootstrap(self._bootstrap)
+
+        self._chan.set_marshaler(self._injector[ObjMarshalerManager])
+
+        # Post-bootstrap
+
+        if self._bootstrap.remote_config.set_pgid:
+            if os.getpgid(0) != os.getpid():
+                log.debug('Setting pgid')
+                os.setpgid(0, 0)
+
+        if (ds := self._bootstrap.remote_config.deathsig) is not None:
+            log.debug('Setting deathsig: %s', ds)
+            set_process_deathsig(int(signal.Signals[f'SIG{ds.upper()}']))
+
+        self._timebomb_thread()
+
+        if self._bootstrap.remote_config.forward_logging:
+            log.debug('Installing log forwarder')
+            logging.root.addHandler(self._log_handler())
+
+    #
+
+    async def run(self) -> None:
+        await self._setup()
+
+        executor = self._injector[LocalCommandExecutor]
+
+        handler = _RemoteCommandHandler(self._chan, executor)
+
+        await handler.run()
+
+
+def _remote_execution_main() -> None:
+    rt = pyremote_bootstrap_finalize()  # noqa
+
+    async def inner() -> None:
+        input = await asyncio_open_stream_reader(rt.input)  # noqa
+        output = await asyncio_open_stream_writer(rt.output)
+
+        chan = RemoteChannelImpl(
+            input,
+            output,
+        )
+
+        await _RemoteExecutionMain(chan).run()
+
+    asyncio.run(inner())
+
+
+########################################
+# ../system/commands.py
+
+
+##
+
+
+@dc.dataclass(frozen=True)
+class CheckSystemPackageCommand(Command['CheckSystemPackageCommand.Output']):
+    pkgs: ta.Sequence[str] = ()
+
+    def __post_init__(self) -> None:
+        check.not_isinstance(self.pkgs, str)
+
+    @dc.dataclass(frozen=True)
+    class Output(Command.Output):
+        pkgs: ta.Sequence[SystemPackage]
+
+
+class CheckSystemPackageCommandExecutor(CommandExecutor[CheckSystemPackageCommand, CheckSystemPackageCommand.Output]):
+    def __init__(
+            self,
+            *,
+            mgr: SystemPackageManager,
+    ) -> None:
+        super().__init__()
+
+        self._mgr = mgr
+
+    async def execute(self, cmd: CheckSystemPackageCommand) -> CheckSystemPackageCommand.Output:
+        log.info('Checking system package!')
+
+        ret = await self._mgr.query(*cmd.pkgs)
+
+        return CheckSystemPackageCommand.Output(list(ret.values()))


 ########################################
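A minimal sketch of how a new command could be registered through the bind_command() helper added above. HelloCommand and HelloCommandExecutor are hypothetical names used only for illustration; they are not in the package.

# Illustrative sketch following the Command/CommandExecutor pattern in this diff.
@dc.dataclass(frozen=True)
class HelloCommand(Command['HelloCommand.Output']):
    name: str = 'world'

    @dc.dataclass(frozen=True)
    class Output(Command.Output):
        greeting: str


class HelloCommandExecutor(CommandExecutor[HelloCommand, HelloCommand.Output]):
    async def execute(self, cmd: HelloCommand) -> HelloCommand.Output:
        return HelloCommand.Output(f'hello, {cmd.name}!')


# Returns InjectorBindings that register both the command and its executor.
extra_bindings = bind_command(HelloCommand, HelloCommandExecutor)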
@@ -6457,7 +7720,7 @@ class Pyenv:
         return self._root_kw

         if shutil.which('pyenv'):
-            return await
+            return await asyncio_subprocesses.check_output_str('pyenv', 'root')

         d = os.path.expanduser('~/.pyenv')
         if os.path.isdir(d) and os.path.isfile(os.path.join(d, 'bin', 'pyenv')):
@@ -6486,7 +7749,7 @@ class Pyenv:
         if await self.root() is None:
             return []
         ret = []
-        s = await
+        s = await asyncio_subprocesses.check_output_str(await self.exe(), 'install', '--list')
         for l in s.splitlines():
             if not l.startswith(' '):
                 continue
@@ -6501,7 +7764,7 @@ class Pyenv:
             return False
         if not os.path.isdir(os.path.join(root, '.git')):
             return False
-        await
+        await asyncio_subprocesses.check_call('git', 'pull', cwd=root)
         return True

@@ -6592,7 +7855,7 @@ class DarwinPyenvInstallOpts(PyenvInstallOptsProvider):
         cflags = []
         ldflags = []
         for dep in self.BREW_DEPS:
-            dep_prefix = await
+            dep_prefix = await asyncio_subprocesses.check_output_str('brew', '--prefix', dep)
             cflags.append(f'-I{dep_prefix}/include')
             ldflags.append(f'-L{dep_prefix}/lib')
         return PyenvInstallOpts(
@@ -6602,11 +7865,11 @@ class DarwinPyenvInstallOpts(PyenvInstallOptsProvider):

     @async_cached_nullary
     async def brew_tcl_opts(self) -> PyenvInstallOpts:
-        if await
+        if await asyncio_subprocesses.try_output('brew', '--prefix', 'tcl-tk') is None:
             return PyenvInstallOpts()

-        tcl_tk_prefix = await
-        tcl_tk_ver_str = await
+        tcl_tk_prefix = await asyncio_subprocesses.check_output_str('brew', '--prefix', 'tcl-tk')
+        tcl_tk_ver_str = await asyncio_subprocesses.check_output_str('brew', 'ls', '--versions', 'tcl-tk')
         tcl_tk_ver = '.'.join(tcl_tk_ver_str.split()[1].split('.')[:2])

         return PyenvInstallOpts(conf_opts=[
@@ -6727,7 +7990,7 @@ class PyenvVersionInstaller:
             *conf_args,
         ]

-        await
+        await asyncio_subprocesses.check_call(
             *full_args,
             env=env,
         )
@@ -6961,54 +8224,183 @@ class SystemInterpProvider(InterpProvider):
             lst.append((e, ev))
         return lst

-    #
+    #
+
+    async def get_installed_versions(self, spec: InterpSpecifier) -> ta.Sequence[InterpVersion]:
+        return [ev for e, ev in await self.exe_versions()]
+
+    async def get_installed_version(self, version: InterpVersion) -> Interp:
+        for e, ev in await self.exe_versions():
+            if ev != version:
+                continue
+            return Interp(
+                exe=e,
+                version=ev,
+            )
+        raise KeyError(version)
+
+
+########################################
+# ../remote/connection.py
+
+
+##
+
+
+class PyremoteRemoteExecutionConnector:
+    def __init__(
+            self,
+            *,
+            spawning: RemoteSpawning,
+            msh: ObjMarshalerManager,
+            payload_file: ta.Optional[RemoteExecutionPayloadFile] = None,
+    ) -> None:
+        super().__init__()
+
+        self._spawning = spawning
+        self._msh = msh
+        self._payload_file = payload_file
+
+    #
+
+    @cached_nullary
+    def _payload_src(self) -> str:
+        return get_remote_payload_src(file=self._payload_file)
+
+    @cached_nullary
+    def _remote_src(self) -> ta.Sequence[str]:
+        return [
+            self._payload_src(),
+            '_remote_execution_main()',
+        ]
+
+    @cached_nullary
+    def _spawn_src(self) -> str:
+        return pyremote_build_bootstrap_cmd(__package__ or 'manage')
+
+    #
+
+    @contextlib.asynccontextmanager
+    async def connect(
+            self,
+            tgt: RemoteSpawning.Target,
+            bs: MainBootstrap,
+    ) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
+        spawn_src = self._spawn_src()
+        remote_src = self._remote_src()
+
+        async with self._spawning.spawn(
+                tgt,
+                spawn_src,
+                debug=bs.main_config.debug,
+        ) as proc:
+            res = await PyremoteBootstrapDriver(  # noqa
+                remote_src,
+                PyremoteBootstrapOptions(
+                    debug=bs.main_config.debug,
+                ),
+            ).async_run(
+                proc.stdout,
+                proc.stdin,
+            )
+
+            chan = RemoteChannelImpl(
+                proc.stdout,
+                proc.stdin,
+                msh=self._msh,
+            )
+
+            await chan.send_obj(bs)
+
+            rce: RemoteCommandExecutor
+            async with aclosing(RemoteCommandExecutor(chan)) as rce:
+                await rce.start()
+
+                yield rce
+
+
+##
+
+
+class InProcessRemoteExecutionConnector:
+    def __init__(
+            self,
+            *,
+            msh: ObjMarshalerManager,
+            local_executor: LocalCommandExecutor,
+    ) -> None:
+        super().__init__()
+
+        self._msh = msh
+        self._local_executor = local_executor
+
+    @contextlib.asynccontextmanager
+    async def connect(self) -> ta.AsyncGenerator[RemoteCommandExecutor, None]:
+        r0, w0 = asyncio_create_bytes_channel()
+        r1, w1 = asyncio_create_bytes_channel()
+
+        remote_chan = RemoteChannelImpl(r0, w1, msh=self._msh)
+        local_chan = RemoteChannelImpl(r1, w0, msh=self._msh)

-
-
+        rch = _RemoteCommandHandler(
+            remote_chan,
+            self._local_executor,
+        )
+        rch_task = asyncio.create_task(rch.run())  # noqa
+        try:
+            rce: RemoteCommandExecutor
+            async with aclosing(RemoteCommandExecutor(local_chan)) as rce:
+                await rce.start()

-
-
-
-
-
- exe=e,
- version=ev,
- )
- raise KeyError(version)
+                yield rce
+
+        finally:
+            rch.stop()
+            await rch_task


 ########################################
- # ../
+# ../system/inject.py


- def
+def bind_system(
         *,
-
+        system_config: SystemConfig,
 ) -> InjectorBindings:
     lst: ta.List[InjectorBindingOrBindings] = [
- inj.bind(
-
- inj.bind(SubprocessRemoteSpawning, singleton=True),
- inj.bind(RemoteSpawning, to_key=SubprocessRemoteSpawning),
+        inj.bind(system_config),
     ]

     #

-
+    platform = system_config.platform or detect_system_platform()
+    lst.append(inj.bind(platform, key=Platform))
+
+    #
+
+    if isinstance(platform, AmazonLinuxPlatform):
         lst.extend([
- inj.bind(
- inj.bind(
+            inj.bind(YumSystemPackageManager, singleton=True),
+            inj.bind(SystemPackageManager, to_key=YumSystemPackageManager),
         ])
-
+
+    elif isinstance(platform, LinuxPlatform):
+        lst.extend([
+            inj.bind(AptSystemPackageManager, singleton=True),
+            inj.bind(SystemPackageManager, to_key=AptSystemPackageManager),
+        ])
+
+    elif isinstance(platform, DarwinPlatform):
         lst.extend([
- inj.bind(
- inj.bind(
+            inj.bind(BrewSystemPackageManager, singleton=True),
+            inj.bind(SystemPackageManager, to_key=BrewSystemPackageManager),
         ])

     #

-
-
+    lst.extend([
+        bind_command(CheckSystemPackageCommand, CheckSystemPackageCommandExecutor),
+    ])

     #

@@ -7114,189 +8506,248 @@ DEFAULT_INTERP_RESOLVER = InterpResolver([(p.name, p) for p in [
|
|
7114
8506
|
|
7115
8507
|
|
7116
8508
|
########################################
|
7117
|
-
# ../
|
8509
|
+
# ../remote/inject.py
|
7118
8510
|
|
7119
8511
|
|
7120
|
-
|
8512
|
+
def bind_remote(
|
8513
|
+
*,
|
8514
|
+
remote_config: RemoteConfig,
|
8515
|
+
) -> InjectorBindings:
|
8516
|
+
lst: ta.List[InjectorBindingOrBindings] = [
|
8517
|
+
inj.bind(remote_config),
|
7121
8518
|
|
8519
|
+
inj.bind(SubprocessRemoteSpawning, singleton=True),
|
8520
|
+
inj.bind(RemoteSpawning, to_key=SubprocessRemoteSpawning),
|
7122
8521
|
|
7123
|
-
|
7124
|
-
|
7125
|
-
|
7126
|
-
install: bool = False
|
8522
|
+
inj.bind(PyremoteRemoteExecutionConnector, singleton=True),
|
8523
|
+
inj.bind(InProcessRemoteExecutionConnector, singleton=True),
|
8524
|
+
]
|
7127
8525
|
|
7128
|
-
|
7129
|
-
|
7130
|
-
|
7131
|
-
|
7132
|
-
opts: InterpOpts
|
8526
|
+
#
|
8527
|
+
|
8528
|
+
if (pf := remote_config.payload_file) is not None:
|
8529
|
+
lst.append(inj.bind(pf, key=RemoteExecutionPayloadFile))
|
7133
8530
|
|
8531
|
+
#
|
7134
8532
|
|
7135
|
-
|
7136
|
-
async def execute(self, cmd: InterpCommand) -> InterpCommand.Output:
|
7137
|
-
i = InterpSpecifier.parse(check.not_none(cmd.spec))
|
7138
|
-
o = check.not_none(await DEFAULT_INTERP_RESOLVER.resolve(i, install=cmd.install))
|
7139
|
-
return InterpCommand.Output(
|
7140
|
-
exe=o.exe,
|
7141
|
-
version=str(o.version.version),
|
7142
|
-
opts=o.version.opts,
|
7143
|
-
)
|
8533
|
+
return inj.as_bindings(*lst)
|
7144
8534
|
|
7145
8535
|
|
7146
8536
|
########################################
|
7147
|
-
# ../
|
8537
|
+
# ../targets/connection.py
|
7148
8538
|
|
7149
8539
|
|
7150
8540
|
##
|
7151
8541
|
|
7152
8542
|
|
7153
|
-
|
7154
|
-
|
7155
|
-
|
7156
|
-
|
7157
|
-
lst: ta.List[InjectorBindingOrBindings] = [
|
7158
|
-
inj.bind(CommandRegistration(command_cls), array=True),
|
7159
|
-
]
|
8543
|
+
class ManageTargetConnector(abc.ABC):
|
8544
|
+
@abc.abstractmethod
|
8545
|
+
def connect(self, tgt: ManageTarget) -> ta.AsyncContextManager[CommandExecutor]:
|
8546
|
+
raise NotImplementedError
|
7160
8547
|
|
7161
|
-
if executor_cls is not None:
|
7162
|
-
lst.extend([
|
7163
|
-
inj.bind(executor_cls, singleton=True),
|
7164
|
-
inj.bind(CommandExecutorRegistration(command_cls, executor_cls), array=True),
|
7165
|
-
])
|
7166
8548
|
|
7167
|
-
|
8549
|
+
##
|
7168
8550
|
|
7169
8551
|
|
7170
|
-
|
8552
|
+
ManageTargetConnectorMap = ta.NewType('ManageTargetConnectorMap', ta.Mapping[ta.Type[ManageTarget], ManageTargetConnector]) # noqa
|
7171
8553
|
|
7172
8554
|
|
7173
8555
|
@dc.dataclass(frozen=True)
|
7174
|
-
class
|
7175
|
-
|
8556
|
+
class TypeSwitchedManageTargetConnector(ManageTargetConnector):
|
8557
|
+
connectors: ManageTargetConnectorMap
|
7176
8558
|
|
7177
|
-
def
|
7178
|
-
|
8559
|
+
def get_connector(self, ty: ta.Type[ManageTarget]) -> ManageTargetConnector:
|
8560
|
+
for k, v in self.connectors.items():
|
8561
|
+
if issubclass(ty, k):
|
8562
|
+
return v
|
8563
|
+
raise KeyError(ty)
|
8564
|
+
|
8565
|
+
def connect(self, tgt: ManageTarget) -> ta.AsyncContextManager[CommandExecutor]:
|
8566
|
+
return self.get_connector(type(tgt)).connect(tgt)
|
7179
8567
|
|
7180
8568
|
|
7181
8569
|
##
|
7182
8570
|
|
7183
8571
|
|
7184
|
-
|
7185
|
-
|
7186
|
-
|
7187
|
-
|
7188
|
-
|
7189
|
-
|
7190
|
-
inj.bind_array_type(CommandRegistration, CommandRegistrations),
|
8572
|
+
@dc.dataclass(frozen=True)
|
8573
|
+
class LocalManageTargetConnector(ManageTargetConnector):
|
8574
|
+
_local_executor: LocalCommandExecutor
|
8575
|
+
_in_process_connector: InProcessRemoteExecutionConnector
|
8576
|
+
_pyremote_connector: PyremoteRemoteExecutionConnector
|
8577
|
+
_bootstrap: MainBootstrap
|
7191
8578
|
|
7192
|
-
|
7193
|
-
|
8579
|
+
@contextlib.asynccontextmanager
|
8580
|
+
async def connect(self, tgt: ManageTarget) -> ta.AsyncGenerator[CommandExecutor, None]:
|
8581
|
+
lmt = check.isinstance(tgt, LocalManageTarget)
|
7194
8582
|
|
7195
|
-
|
7196
|
-
|
8583
|
+
if isinstance(lmt, InProcessManageTarget):
|
8584
|
+
imt = check.isinstance(lmt, InProcessManageTarget)
|
7197
8585
|
|
7198
|
-
|
8586
|
+
if imt.mode == InProcessManageTarget.Mode.DIRECT:
|
8587
|
+
yield self._local_executor
|
7199
8588
|
|
7200
|
-
|
7201
|
-
|
8589
|
+
elif imt.mode == InProcessManageTarget.Mode.FAKE_REMOTE:
|
8590
|
+
async with self._in_process_connector.connect() as rce:
|
8591
|
+
yield rce
|
7202
8592
|
|
7203
|
-
|
8593
|
+
else:
|
8594
|
+
raise TypeError(imt.mode)
|
8595
|
+
|
8596
|
+
elif isinstance(lmt, SubprocessManageTarget):
|
8597
|
+
async with self._pyremote_connector.connect(
|
8598
|
+
RemoteSpawning.Target(
|
8599
|
+
python=lmt.python,
|
8600
|
+
),
|
8601
|
+
self._bootstrap,
|
8602
|
+
) as rce:
|
8603
|
+
yield rce
|
7204
8604
|
|
7205
|
-
|
8605
|
+
else:
|
8606
|
+
raise TypeError(lmt)
|
7206
8607
|
|
7207
|
-
def provide_command_executor_map(
|
7208
|
-
injector: Injector,
|
7209
|
-
crs: CommandExecutorRegistrations,
|
7210
|
-
) -> CommandExecutorMap:
|
7211
|
-
dct: ta.Dict[ta.Type[Command], CommandExecutor] = {}
|
7212
8608
|
|
7213
|
-
|
7214
|
-
for cr in crs:
|
7215
|
-
if cr.command_cls in dct:
|
7216
|
-
raise KeyError(cr.command_cls)
|
8609
|
+
##
|
7217
8610
|
|
7218
|
-
factory = functools.partial(injector.provide, cr.executor_cls)
|
7219
|
-
if main_config.debug:
|
7220
|
-
ce = factory()
|
7221
|
-
else:
|
7222
|
-
ce = _FactoryCommandExecutor(factory)
|
7223
8611
|
|
7224
|
-
|
8612
|
+
@dc.dataclass(frozen=True)
|
8613
|
+
class DockerManageTargetConnector(ManageTargetConnector):
|
8614
|
+
_pyremote_connector: PyremoteRemoteExecutionConnector
|
8615
|
+
_bootstrap: MainBootstrap
|
7225
8616
|
|
7226
|
-
|
8617
|
+
@contextlib.asynccontextmanager
|
8618
|
+
async def connect(self, tgt: ManageTarget) -> ta.AsyncGenerator[CommandExecutor, None]:
|
8619
|
+
dmt = check.isinstance(tgt, DockerManageTarget)
|
8620
|
+
|
8621
|
+
sh_parts: ta.List[str] = ['docker']
|
8622
|
+
if dmt.image is not None:
|
8623
|
+
sh_parts.extend(['run', '-i', dmt.image])
|
8624
|
+
elif dmt.container_id is not None:
|
8625
|
+
sh_parts.extend(['exec', dmt.container_id])
|
8626
|
+
else:
|
8627
|
+
raise ValueError(dmt)
|
7227
8628
|
|
7228
|
-
|
7229
|
-
|
8629
|
+
async with self._pyremote_connector.connect(
|
8630
|
+
RemoteSpawning.Target(
|
8631
|
+
shell=' '.join(sh_parts),
|
8632
|
+
python=dmt.python,
|
8633
|
+
),
|
8634
|
+
self._bootstrap,
|
8635
|
+
) as rce:
|
8636
|
+
yield rce
|
7230
8637
|
|
7231
|
-
inj.bind(LocalCommandExecutor, singleton=True, eager=main_config.debug),
|
7232
|
-
])
|
7233
8638
|
|
7234
|
-
|
8639
|
+
##
|
7235
8640
|
|
7236
|
-
command_cls: ta.Any
|
7237
|
-
executor_cls: ta.Any
|
7238
|
-
for command_cls, executor_cls in [
|
7239
-            (SubprocessCommand, SubprocessCommandExecutor),
-            (InterpCommand, InterpCommandExecutor),
-    ]:
-        lst.append(bind_command(command_cls, executor_cls))

-
+@dc.dataclass(frozen=True)
+class SshManageTargetConnector(ManageTargetConnector):
+    _pyremote_connector: PyremoteRemoteExecutionConnector
+    _bootstrap: MainBootstrap

-
+    @contextlib.asynccontextmanager
+    async def connect(self, tgt: ManageTarget) -> ta.AsyncGenerator[CommandExecutor, None]:
+        smt = check.isinstance(tgt, SshManageTarget)
+
+        sh_parts: ta.List[str] = ['ssh']
+        if smt.key_file is not None:
+            sh_parts.extend(['-i', smt.key_file])
+        addr = check.not_none(smt.host)
+        if smt.username is not None:
+            addr = f'{smt.username}@{addr}'
+        sh_parts.append(addr)
+
+        async with self._pyremote_connector.connect(
+                RemoteSpawning.Target(
+                    shell=' '.join(sh_parts),
+                    shell_quote=True,
+                    python=smt.python,
+                ),
+                self._bootstrap,
+        ) as rce:
+            yield rce


 ########################################
-# ../deploy/
+# ../deploy/interp.py


-
-) -> InjectorBindings:
-    lst: ta.List[InjectorBindingOrBindings] = [
-        bind_command(DeployCommand, DeployCommandExecutor),
-    ]
+##

-
+
+@dc.dataclass(frozen=True)
+class InterpCommand(Command['InterpCommand.Output']):
+    spec: str
+    install: bool = False
+
+    @dc.dataclass(frozen=True)
+    class Output(Command.Output):
+        exe: str
+        version: str
+        opts: InterpOpts
+
+
+class InterpCommandExecutor(CommandExecutor[InterpCommand, InterpCommand.Output]):
+    async def execute(self, cmd: InterpCommand) -> InterpCommand.Output:
+        i = InterpSpecifier.parse(check.not_none(cmd.spec))
+        o = check.not_none(await DEFAULT_INTERP_RESOLVER.resolve(i, install=cmd.install))
+        return InterpCommand.Output(
+            exe=o.exe,
+            version=str(o.version.version),
+            opts=o.version.opts,
+        )


 ########################################
-# ../
+# ../targets/inject.py


-def
-        *,
-        system_config: SystemConfig,
-) -> InjectorBindings:
+def bind_targets() -> InjectorBindings:
     lst: ta.List[InjectorBindingOrBindings] = [
-        inj.bind(
+        inj.bind(LocalManageTargetConnector, singleton=True),
+        inj.bind(DockerManageTargetConnector, singleton=True),
+        inj.bind(SshManageTargetConnector, singleton=True),
+
+        inj.bind(TypeSwitchedManageTargetConnector, singleton=True),
+        inj.bind(ManageTargetConnector, to_key=TypeSwitchedManageTargetConnector),
     ]

     #

-
-
+    def provide_manage_target_connector_map(injector: Injector) -> ManageTargetConnectorMap:
+        return ManageTargetConnectorMap({
+            LocalManageTarget: injector[LocalManageTargetConnector],
+            DockerManageTarget: injector[DockerManageTargetConnector],
+            SshManageTarget: injector[SshManageTargetConnector],
+        })
+    lst.append(inj.bind(provide_manage_target_connector_map, singleton=True))

     #

-
-        lst.extend([
-            inj.bind(AptSystemPackageManager, singleton=True),
-            inj.bind(SystemPackageManager, to_key=AptSystemPackageManager),
-        ])
+    return inj.as_bindings(*lst)

-    elif platform == 'darwin':
-        lst.extend([
-            inj.bind(BrewSystemPackageManager, singleton=True),
-            inj.bind(SystemPackageManager, to_key=BrewSystemPackageManager),
-        ])

-
+########################################
+# ../deploy/inject.py

-    lst.extend([
-        bind_command(CheckSystemPackageCommand, CheckSystemPackageCommandExecutor),
-    ])

-
+def bind_deploy(
+        *,
+        deploy_config: DeployConfig,
+) -> InjectorBindings:
+    lst: ta.List[InjectorBindingOrBindings] = [
+        inj.bind(deploy_config),
+
+        inj.bind(DeployAppManager, singleton=True),
+        inj.bind(DeployGitManager, singleton=True),
+        inj.bind(DeployVenvManager, singleton=True),
+
+        bind_command(DeployCommand, DeployCommandExecutor),
+        bind_command(InterpCommand, InterpCommandExecutor),
+    ]
+
+    if (dh := deploy_config.deploy_home) is not None:
+        dh = os.path.abspath(os.path.expanduser(dh))
+        lst.append(inj.bind(dh, key=DeployHome))

     return inj.as_bindings(*lst)

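The added `SshManageTargetConnector` reduces an ssh target to a plain shell command string before delegating to the pyremote connector. A minimal standalone sketch of that string assembly follows; the `build_ssh_shell` helper and the sample host, username, and key path are illustrative only and do not exist in the package:

```python
import typing as ta


def build_ssh_shell(
        host: str,
        username: ta.Optional[str] = None,
        key_file: ta.Optional[str] = None,
) -> str:
    # Mirrors the assembly in SshManageTargetConnector.connect above:
    # optional '-i <key>' flag, optional 'user@' prefix, then the host.
    sh_parts: ta.List[str] = ['ssh']
    if key_file is not None:
        sh_parts.extend(['-i', key_file])
    addr = host
    if username is not None:
        addr = f'{username}@{addr}'
    sh_parts.append(addr)
    return ' '.join(sh_parts)


print(build_ssh_shell('example.com', username='deploy', key_file='~/.ssh/id_ed25519'))
# -> ssh -i ~/.ssh/id_ed25519 deploy@example.com
```

The resulting string is handed to `RemoteSpawning.Target(shell=..., shell_quote=True, python=smt.python)`, so the remote interpreter is launched through the ssh-spawned shell rather than locally.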
@@ -7310,9 +8761,13 @@ def bind_system(

 def bind_main(
         *,
-        main_config: MainConfig,
-
-
+        main_config: MainConfig = MainConfig(),
+
+        deploy_config: DeployConfig = DeployConfig(),
+        remote_config: RemoteConfig = RemoteConfig(),
+        system_config: SystemConfig = SystemConfig(),
+
+        main_bootstrap: ta.Optional[MainBootstrap] = None,
 ) -> InjectorBindings:
     lst: ta.List[InjectorBindingOrBindings] = [
         inj.bind(main_config),
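Since every parameter of `bind_main` now carries a default, callers no longer need to construct config objects up front. A rough usage sketch, assuming the names defined in this script (not a standalone program):

```python
# All-defaults wiring: MainConfig(), DeployConfig(), RemoteConfig() and
# SystemConfig() are constructed implicitly by the defaulted parameters.
injector = inj.create_injector(bind_main())

# Individual configs can still be overridden by keyword:
injector = inj.create_injector(bind_main(
    deploy_config=DeployConfig(deploy_home='~/deploys'),
))
```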
@@ -7321,7 +8776,9 @@ def bind_main(
             main_config=main_config,
         ),

-        bind_deploy(
+        bind_deploy(
+            deploy_config=deploy_config,
+        ),

         bind_remote(
             remote_config=remote_config,
@@ -7330,10 +8787,17 @@ def bind_main(
         bind_system(
             system_config=system_config,
         ),
+
+        bind_targets(),
     ]

     #

+    if main_bootstrap is not None:
+        lst.append(inj.bind(main_bootstrap))
+
+    #
+
     def build_obj_marshaler_manager(insts: ObjMarshalerInstallers) -> ObjMarshalerManager:
         msh = ObjMarshalerManager()
         inst: ObjMarshalerInstaller
@@ -7363,8 +8827,12 @@ def main_bootstrap(bs: MainBootstrap) -> Injector:

     injector = inj.create_injector(bind_main(  # noqa
         main_config=bs.main_config,
+
+        deploy_config=bs.deploy_config,
         remote_config=bs.remote_config,
         system_config=bs.system_config,
+
+        main_bootstrap=bs,
     ))

     return injector
@@ -7378,10 +8846,6 @@ class MainCli(ArgparseCli):
     @argparse_command(
         argparse_arg('--_payload-file'),

-        argparse_arg('-s', '--shell'),
-        argparse_arg('-q', '--shell-quote', action='store_true'),
-        argparse_arg('--python', default='python3'),
-
         argparse_arg('--pycharm-debug-port', type=int),
         argparse_arg('--pycharm-debug-host'),
         argparse_arg('--pycharm-debug-version'),
@@ -7390,8 +8854,9 @@ class MainCli(ArgparseCli):

         argparse_arg('--debug', action='store_true'),

-        argparse_arg('--
+        argparse_arg('--deploy-home'),

+        argparse_arg('target'),
         argparse_arg('command', nargs='+'),
     )
     async def run(self) -> None:
@@ -7402,6 +8867,10 @@ class MainCli(ArgparseCli):
                 debug=bool(self.args.debug),
             ),

+            deploy_config=DeployConfig(
+                deploy_home=self.args.deploy_home,
+            ),
+
             remote_config=RemoteConfig(
                 payload_file=self.args._payload_file,  # noqa

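The `--deploy-home` value is forwarded verbatim here; `bind_deploy` (earlier in this diff) expands and absolutizes it before binding it as `DeployHome`. That normalization is plain standard-library behavior, illustrated with a hypothetical path:

```python
import os

dh = '~/deploys'
# expanduser resolves '~' against the current user's home directory, and
# abspath anchors the result, e.g. '/home/deploy/deploys' for user 'deploy'.
dh = os.path.abspath(os.path.expanduser(dh))
print(dh)
```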
@@ -7412,8 +8881,6 @@ class MainCli(ArgparseCli):
                 ) if self.args.pycharm_debug_port is not None else None,

                 timebomb_delay_s=self.args.remote_timebomb_delay_s,
-
-                use_in_process_remote_executor=True,
             ),
         )

@@ -7427,6 +8894,11 @@ class MainCli(ArgparseCli):

         msh = injector[ObjMarshalerManager]

+        ts = self.args.target
+        if not ts.startswith('{'):
+            ts = json.dumps({ts: {}})
+        tgt: ManageTarget = msh.unmarshal_obj(json.loads(ts), ManageTarget)
+
         cmds: ta.List[Command] = []
         cmd: Command
         for c in self.args.command:
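The new positional `target` argument accepts either a bare token or a JSON object; a bare token is wrapped into a single-key object before being unmarshalled into a `ManageTarget`. A small sketch of just that wrapping step (the sample keys are illustrative; the names actually accepted come from the `ManageTarget` subclasses registered with the object marshaler):

```python
import json


def wrap_target_arg(ts: str) -> str:
    # Same transformation as in MainCli.run above: a bare token becomes a
    # single-key JSON object with an empty options dict.
    if not ts.startswith('{'):
        ts = json.dumps({ts: {}})
    return ts


print(wrap_target_arg('local'))                    # {"local": {}}
print(wrap_target_arg('{"ssh": {"host": "h"}}'))   # passed through unchanged
```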
@@ -7437,21 +8909,7 @@ class MainCli(ArgparseCli):

         #

-        async with
-            ce: CommandExecutor
-
-            if self.args.local:
-                ce = injector[LocalCommandExecutor]
-
-            else:
-                tgt = RemoteSpawning.Target(
-                    shell=self.args.shell,
-                    shell_quote=self.args.shell_quote,
-                    python=self.args.python,
-                )
-
-                ce = await es.enter_async_context(injector[RemoteExecutionConnector].connect(tgt, bs))  # noqa
-
+        async with injector[ManageTargetConnector].connect(tgt) as ce:
             async def run_command(cmd: Command) -> None:
                 res = await ce.try_execute(
                     cmd,
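The removed local/shell branching is now a single `ManageTargetConnector.connect(tgt)` call; `bind_targets` (earlier in this diff) wires a `TypeSwitchedManageTargetConnector` over a map from target types to per-type connectors. A self-contained sketch of that dispatch pattern with hypothetical stand-in classes (the real implementation lives in ominfra/manage/targets/connection.py and may differ in detail):

```python
import typing as ta


class Target: ...


class LocalTarget(Target): ...


class SshTarget(Target): ...


class TypeSwitchedConnector:
    """Picks a connector based on the concrete type of the target."""

    def __init__(self, connectors: ta.Mapping[type, str]) -> None:
        self._connectors = dict(connectors)

    def connector_for(self, tgt: Target) -> str:
        # First registered type that the target is an instance of wins.
        for cls, conn in self._connectors.items():
            if isinstance(tgt, cls):
                return conn
        raise TypeError(f'No connector for target type {type(tgt)!r}')


tsc = TypeSwitchedConnector({
    LocalTarget: 'local-connector',
    SshTarget: 'ssh-connector',
})
print(tsc.connector_for(SshTarget()))  # -> ssh-connector
```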