omdev 0.0.0.dev157__py3-none-any.whl → 0.0.0.dev158__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.


omdev/scripts/interp.py CHANGED
@@ -75,7 +75,7 @@ UnparsedVersion = ta.Union['Version', str]
75
75
  UnparsedVersionVar = ta.TypeVar('UnparsedVersionVar', bound=UnparsedVersion)
76
76
  CallableVersionOperator = ta.Callable[['Version', str], bool]
77
77
 
78
- # ../../omlish/lite/subprocesses.py
78
+ # ../../omlish/subprocesses.py
79
79
  SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull'] # ta.TypeAlias
80
80
 
81
81
 
@@ -1036,6 +1036,13 @@ json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON
1036
1036
  json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
1037
1037
 
1038
1038
 
1039
+ ########################################
1040
+ # ../../../omlish/lite/logs.py
1041
+
1042
+
1043
+ log = logging.getLogger(__name__)
1044
+
1045
+
1039
1046
  ########################################
1040
1047
  # ../../../omlish/lite/reflect.py
1041
1048
 
@@ -1158,6 +1165,116 @@ def format_num_bytes(num_bytes: int) -> str:
1158
1165
  return f'{num_bytes / 1024 ** (len(FORMAT_NUM_BYTES_SUFFIXES) - 1):.2f}{FORMAT_NUM_BYTES_SUFFIXES[-1]}'
1159
1166
 
1160
1167
 
1168
+ ########################################
1169
+ # ../../../omlish/logs/filters.py
1170
+
1171
+
1172
+ class TidLogFilter(logging.Filter):
1173
+ def filter(self, record):
1174
+ record.tid = threading.get_native_id()
1175
+ return True
1176
+
1177
+
1178
+ ########################################
1179
+ # ../../../omlish/logs/proxy.py
1180
+
1181
+
1182
+ class ProxyLogFilterer(logging.Filterer):
1183
+ def __init__(self, underlying: logging.Filterer) -> None: # noqa
1184
+ self._underlying = underlying
1185
+
1186
+ @property
1187
+ def underlying(self) -> logging.Filterer:
1188
+ return self._underlying
1189
+
1190
+ @property
1191
+ def filters(self):
1192
+ return self._underlying.filters
1193
+
1194
+ @filters.setter
1195
+ def filters(self, filters):
1196
+ self._underlying.filters = filters
1197
+
1198
+ def addFilter(self, filter): # noqa
1199
+ self._underlying.addFilter(filter)
1200
+
1201
+ def removeFilter(self, filter): # noqa
1202
+ self._underlying.removeFilter(filter)
1203
+
1204
+ def filter(self, record):
1205
+ return self._underlying.filter(record)
1206
+
1207
+
1208
+ class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
1209
+ def __init__(self, underlying: logging.Handler) -> None: # noqa
1210
+ ProxyLogFilterer.__init__(self, underlying)
1211
+
1212
+ _underlying: logging.Handler
1213
+
1214
+ @property
1215
+ def underlying(self) -> logging.Handler:
1216
+ return self._underlying
1217
+
1218
+ def get_name(self):
1219
+ return self._underlying.get_name()
1220
+
1221
+ def set_name(self, name):
1222
+ self._underlying.set_name(name)
1223
+
1224
+ @property
1225
+ def name(self):
1226
+ return self._underlying.name
1227
+
1228
+ @property
1229
+ def level(self):
1230
+ return self._underlying.level
1231
+
1232
+ @level.setter
1233
+ def level(self, level):
1234
+ self._underlying.level = level
1235
+
1236
+ @property
1237
+ def formatter(self):
1238
+ return self._underlying.formatter
1239
+
1240
+ @formatter.setter
1241
+ def formatter(self, formatter):
1242
+ self._underlying.formatter = formatter
1243
+
1244
+ def createLock(self):
1245
+ self._underlying.createLock()
1246
+
1247
+ def acquire(self):
1248
+ self._underlying.acquire()
1249
+
1250
+ def release(self):
1251
+ self._underlying.release()
1252
+
1253
+ def setLevel(self, level):
1254
+ self._underlying.setLevel(level)
1255
+
1256
+ def format(self, record):
1257
+ return self._underlying.format(record)
1258
+
1259
+ def emit(self, record):
1260
+ self._underlying.emit(record)
1261
+
1262
+ def handle(self, record):
1263
+ return self._underlying.handle(record)
1264
+
1265
+ def setFormatter(self, fmt):
1266
+ self._underlying.setFormatter(fmt)
1267
+
1268
+ def flush(self):
1269
+ self._underlying.flush()
1270
+
1271
+ def close(self):
1272
+ self._underlying.close()
1273
+
1274
+ def handleError(self, record):
1275
+ self._underlying.handleError(record)
1276
+
1277
+
1161
1278
  ########################################
1162
1279
  # ../../packaging/specifiers.py
1163
1280
  # Copyright (c) Donald Stufft and individual contributors.
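Note on the block above: ProxyLogFilterer/ProxyLogHandler forward every logging.Filterer/logging.Handler operation (level, formatter, emit, flush, and so on) to the wrapped underlying object, so a proxy can stand in wherever a handler is expected. A minimal sketch of that delegation; the handler and logger names are illustrative, not taken from the release:

    import logging

    inner = logging.StreamHandler()
    proxy = ProxyLogHandler(inner)        # every call below is forwarded to `inner`
    proxy.setLevel(logging.INFO)          # delegates to inner.setLevel()
    proxy.setFormatter(logging.Formatter('%(levelname)s %(message)s'))

    logger = logging.getLogger('example')  # illustrative logger name
    logger.setLevel(logging.INFO)
    logger.addHandler(proxy)
    logger.info('emitted by the underlying StreamHandler via the proxy')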
@@ -1681,32 +1798,31 @@ class SpecifierSet(BaseSpecifier):
1681
1798
 
1682
1799
 
1683
1800
  ########################################
1684
- # ../../../omlish/lite/logs.py
1685
- """
1686
- TODO:
1687
- - translate json keys
1688
- - debug
1689
- """
1690
-
1801
+ # ../../../omlish/lite/runtime.py
1691
1802
 
1692
- log = logging.getLogger(__name__)
1693
1803
 
1804
+ @cached_nullary
1805
+ def is_debugger_attached() -> bool:
1806
+ return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
1694
1807
 
1695
- ##
1696
1808
 
1809
+ REQUIRED_PYTHON_VERSION = (3, 8)
1697
1810
 
1698
- class TidLogFilter(logging.Filter):
1699
1811
 
1700
- def filter(self, record):
1701
- record.tid = threading.get_native_id()
1702
- return True
1812
+ def check_runtime_version() -> None:
1813
+ if sys.version_info < REQUIRED_PYTHON_VERSION:
1814
+ raise OSError(f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}') # noqa
1703
1815
 
1704
1816
 
1705
- ##
1817
+ ########################################
1818
+ # ../../../omlish/logs/json.py
1819
+ """
1820
+ TODO:
1821
+ - translate json keys
1822
+ """
1706
1823
 
1707
1824
 
1708
1825
  class JsonLogFormatter(logging.Formatter):
1709
-
1710
1826
  KEYS: ta.Mapping[str, bool] = {
1711
1827
  'name': False,
1712
1828
  'msg': False,
@@ -1730,6 +1846,18 @@ class JsonLogFormatter(logging.Formatter):
1730
1846
  'process': False,
1731
1847
  }
1732
1848
 
1849
+ def __init__(
1850
+ self,
1851
+ *args: ta.Any,
1852
+ json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
1853
+ **kwargs: ta.Any,
1854
+ ) -> None:
1855
+ super().__init__(*args, **kwargs)
1856
+
1857
+ if json_dumps is None:
1858
+ json_dumps = json_dumps_compact
1859
+ self._json_dumps = json_dumps
1860
+
1733
1861
  def format(self, record: logging.LogRecord) -> str:
1734
1862
  dct = {
1735
1863
  k: v
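The constructor added above makes the serializer pluggable: json_dumps defaults to json_dumps_compact, but any callable taking an object and returning a str can be injected. A hedged sketch; the pretty-printing choice and the default=str fallback are illustrative, not part of the release:

    import functools
    import json
    import logging

    handler = logging.StreamHandler()
    handler.setFormatter(JsonLogFormatter(
        json_dumps=functools.partial(json.dumps, indent=2, default=str),
    ))
    logging.getLogger().addHandler(handler)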
@@ -1737,7 +1865,110 @@ class JsonLogFormatter(logging.Formatter):
1737
1865
  for v in [getattr(record, k)]
1738
1866
  if not (o and v is None)
1739
1867
  }
1740
- return json_dumps_compact(dct)
1868
+ return self._json_dumps(dct)
1869
+
1870
+
1871
+ ########################################
1872
+ # ../types.py
1873
+
1874
+
1875
+ # See https://peps.python.org/pep-3149/
1876
+ INTERP_OPT_GLYPHS_BY_ATTR: ta.Mapping[str, str] = collections.OrderedDict([
1877
+ ('debug', 'd'),
1878
+ ('threaded', 't'),
1879
+ ])
1880
+
1881
+ INTERP_OPT_ATTRS_BY_GLYPH: ta.Mapping[str, str] = collections.OrderedDict(
1882
+ (g, a) for a, g in INTERP_OPT_GLYPHS_BY_ATTR.items()
1883
+ )
1884
+
1885
+
1886
+ @dc.dataclass(frozen=True)
1887
+ class InterpOpts:
1888
+ threaded: bool = False
1889
+ debug: bool = False
1890
+
1891
+ def __str__(self) -> str:
1892
+ return ''.join(g for a, g in INTERP_OPT_GLYPHS_BY_ATTR.items() if getattr(self, a))
1893
+
1894
+ @classmethod
1895
+ def parse(cls, s: str) -> 'InterpOpts':
1896
+ return cls(**{INTERP_OPT_ATTRS_BY_GLYPH[g]: True for g in s})
1897
+
1898
+ @classmethod
1899
+ def parse_suffix(cls, s: str) -> ta.Tuple[str, 'InterpOpts']:
1900
+ kw = {}
1901
+ while s and (a := INTERP_OPT_ATTRS_BY_GLYPH.get(s[-1])):
1902
+ s, kw[a] = s[:-1], True
1903
+ return s, cls(**kw)
1904
+
1905
+
1906
+ @dc.dataclass(frozen=True)
1907
+ class InterpVersion:
1908
+ version: Version
1909
+ opts: InterpOpts
1910
+
1911
+ def __str__(self) -> str:
1912
+ return str(self.version) + str(self.opts)
1913
+
1914
+ @classmethod
1915
+ def parse(cls, s: str) -> 'InterpVersion':
1916
+ s, o = InterpOpts.parse_suffix(s)
1917
+ v = Version(s)
1918
+ return cls(
1919
+ version=v,
1920
+ opts=o,
1921
+ )
1922
+
1923
+ @classmethod
1924
+ def try_parse(cls, s: str) -> ta.Optional['InterpVersion']:
1925
+ try:
1926
+ return cls.parse(s)
1927
+ except (KeyError, InvalidVersion):
1928
+ return None
1929
+
1930
+
1931
+ @dc.dataclass(frozen=True)
1932
+ class InterpSpecifier:
1933
+ specifier: Specifier
1934
+ opts: InterpOpts
1935
+
1936
+ def __str__(self) -> str:
1937
+ return str(self.specifier) + str(self.opts)
1938
+
1939
+ @classmethod
1940
+ def parse(cls, s: str) -> 'InterpSpecifier':
1941
+ s, o = InterpOpts.parse_suffix(s)
1942
+ if not any(s.startswith(o) for o in Specifier.OPERATORS):
1943
+ s = '~=' + s
1944
+ if s.count('.') < 2:
1945
+ s += '.0'
1946
+ return cls(
1947
+ specifier=Specifier(s),
1948
+ opts=o,
1949
+ )
1950
+
1951
+ def contains(self, iv: InterpVersion) -> bool:
1952
+ return self.specifier.contains(iv.version) and self.opts == iv.opts
1953
+
1954
+ def __contains__(self, iv: InterpVersion) -> bool:
1955
+ return self.contains(iv)
1956
+
1957
+
1958
+ @dc.dataclass(frozen=True)
1959
+ class Interp:
1960
+ exe: str
1961
+ version: InterpVersion
1962
+
1963
+
1964
+ ########################################
1965
+ # ../../../omlish/logs/standard.py
1966
+ """
1967
+ TODO:
1968
+ - structured
1969
+ - prefixed
1970
+ - debug
1971
+ """
1741
1972
 
1742
1973
 
1743
1974
  ##
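For orientation, the ../types.py block inlined above parses PEP 3149-style option glyphs off the end of a version or specifier string ('d' for debug, 't' for threaded). A rough sketch of the behavior implied by the code above:

    iv = InterpVersion.parse('3.13.0t')
    # -> version Version('3.13.0'), opts InterpOpts(threaded=True); str(iv) == '3.13.0t'

    spec = InterpSpecifier.parse('3.12')   # no operator and only one dot -> '~=3.12.0'
    assert InterpVersion.parse('3.12.5') in spec
    assert InterpVersion.parse('3.12.5t') not in spec   # opts must match exactly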
@@ -1755,7 +1986,6 @@ STANDARD_LOG_FORMAT_PARTS = [
1755
1986
 
1756
1987
 
1757
1988
  class StandardLogFormatter(logging.Formatter):
1758
-
1759
1989
  @staticmethod
1760
1990
  def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
1761
1991
  return ' '.join(v for k, v in parts)
@@ -1774,107 +2004,8 @@ class StandardLogFormatter(logging.Formatter):
1774
2004
  ##
1775
2005
 
1776
2006
 
1777
- class ProxyLogFilterer(logging.Filterer):
1778
- def __init__(self, underlying: logging.Filterer) -> None: # noqa
1779
- self._underlying = underlying
1780
-
1781
- @property
1782
- def underlying(self) -> logging.Filterer:
1783
- return self._underlying
1784
-
1785
- @property
1786
- def filters(self):
1787
- return self._underlying.filters
1788
-
1789
- @filters.setter
1790
- def filters(self, filters):
1791
- self._underlying.filters = filters
1792
-
1793
- def addFilter(self, filter): # noqa
1794
- self._underlying.addFilter(filter)
1795
-
1796
- def removeFilter(self, filter): # noqa
1797
- self._underlying.removeFilter(filter)
1798
-
1799
- def filter(self, record):
1800
- return self._underlying.filter(record)
1801
-
1802
-
1803
- class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
1804
- def __init__(self, underlying: logging.Handler) -> None: # noqa
1805
- ProxyLogFilterer.__init__(self, underlying)
1806
-
1807
- _underlying: logging.Handler
1808
-
1809
- @property
1810
- def underlying(self) -> logging.Handler:
1811
- return self._underlying
1812
-
1813
- def get_name(self):
1814
- return self._underlying.get_name()
1815
-
1816
- def set_name(self, name):
1817
- self._underlying.set_name(name)
1818
-
1819
- @property
1820
- def name(self):
1821
- return self._underlying.name
1822
-
1823
- @property
1824
- def level(self):
1825
- return self._underlying.level
1826
-
1827
- @level.setter
1828
- def level(self, level):
1829
- self._underlying.level = level
1830
-
1831
- @property
1832
- def formatter(self):
1833
- return self._underlying.formatter
1834
-
1835
- @formatter.setter
1836
- def formatter(self, formatter):
1837
- self._underlying.formatter = formatter
1838
-
1839
- def createLock(self):
1840
- self._underlying.createLock()
1841
-
1842
- def acquire(self):
1843
- self._underlying.acquire()
1844
-
1845
- def release(self):
1846
- self._underlying.release()
1847
-
1848
- def setLevel(self, level):
1849
- self._underlying.setLevel(level)
1850
-
1851
- def format(self, record):
1852
- return self._underlying.format(record)
1853
-
1854
- def emit(self, record):
1855
- self._underlying.emit(record)
1856
-
1857
- def handle(self, record):
1858
- return self._underlying.handle(record)
1859
-
1860
- def setFormatter(self, fmt):
1861
- self._underlying.setFormatter(fmt)
1862
-
1863
- def flush(self):
1864
- self._underlying.flush()
1865
-
1866
- def close(self):
1867
- self._underlying.close()
1868
-
1869
- def handleError(self, record):
1870
- self._underlying.handleError(record)
1871
-
1872
-
1873
- ##
1874
-
1875
-
1876
- class StandardLogHandler(ProxyLogHandler):
1877
- pass
2007
+ class StandardLogHandler(ProxyLogHandler):
2008
+ pass
1878
2009
 
1879
2010
 
1880
2011
  ##
@@ -1951,117 +2082,7 @@ def configure_standard_logging(
1951
2082
 
1952
2083
 
1953
2084
  ########################################
1954
- # ../../../omlish/lite/runtime.py
1955
-
1956
-
1957
- @cached_nullary
1958
- def is_debugger_attached() -> bool:
1959
- return any(frame[1].endswith('pydevd.py') for frame in inspect.stack())
1960
-
1961
-
1962
- REQUIRED_PYTHON_VERSION = (3, 8)
1963
-
1964
-
1965
- def check_runtime_version() -> None:
1966
- if sys.version_info < REQUIRED_PYTHON_VERSION:
1967
- raise OSError(f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}') # noqa
1968
-
1969
-
1970
- ########################################
1971
- # ../types.py
1972
-
1973
-
1974
- # See https://peps.python.org/pep-3149/
1975
- INTERP_OPT_GLYPHS_BY_ATTR: ta.Mapping[str, str] = collections.OrderedDict([
1976
- ('debug', 'd'),
1977
- ('threaded', 't'),
1978
- ])
1979
-
1980
- INTERP_OPT_ATTRS_BY_GLYPH: ta.Mapping[str, str] = collections.OrderedDict(
1981
- (g, a) for a, g in INTERP_OPT_GLYPHS_BY_ATTR.items()
1982
- )
1983
-
1984
-
1985
- @dc.dataclass(frozen=True)
1986
- class InterpOpts:
1987
- threaded: bool = False
1988
- debug: bool = False
1989
-
1990
- def __str__(self) -> str:
1991
- return ''.join(g for a, g in INTERP_OPT_GLYPHS_BY_ATTR.items() if getattr(self, a))
1992
-
1993
- @classmethod
1994
- def parse(cls, s: str) -> 'InterpOpts':
1995
- return cls(**{INTERP_OPT_ATTRS_BY_GLYPH[g]: True for g in s})
1996
-
1997
- @classmethod
1998
- def parse_suffix(cls, s: str) -> ta.Tuple[str, 'InterpOpts']:
1999
- kw = {}
2000
- while s and (a := INTERP_OPT_ATTRS_BY_GLYPH.get(s[-1])):
2001
- s, kw[a] = s[:-1], True
2002
- return s, cls(**kw)
2003
-
2004
-
2005
- @dc.dataclass(frozen=True)
2006
- class InterpVersion:
2007
- version: Version
2008
- opts: InterpOpts
2009
-
2010
- def __str__(self) -> str:
2011
- return str(self.version) + str(self.opts)
2012
-
2013
- @classmethod
2014
- def parse(cls, s: str) -> 'InterpVersion':
2015
- s, o = InterpOpts.parse_suffix(s)
2016
- v = Version(s)
2017
- return cls(
2018
- version=v,
2019
- opts=o,
2020
- )
2021
-
2022
- @classmethod
2023
- def try_parse(cls, s: str) -> ta.Optional['InterpVersion']:
2024
- try:
2025
- return cls.parse(s)
2026
- except (KeyError, InvalidVersion):
2027
- return None
2028
-
2029
-
2030
- @dc.dataclass(frozen=True)
2031
- class InterpSpecifier:
2032
- specifier: Specifier
2033
- opts: InterpOpts
2034
-
2035
- def __str__(self) -> str:
2036
- return str(self.specifier) + str(self.opts)
2037
-
2038
- @classmethod
2039
- def parse(cls, s: str) -> 'InterpSpecifier':
2040
- s, o = InterpOpts.parse_suffix(s)
2041
- if not any(s.startswith(o) for o in Specifier.OPERATORS):
2042
- s = '~=' + s
2043
- if s.count('.') < 2:
2044
- s += '.0'
2045
- return cls(
2046
- specifier=Specifier(s),
2047
- opts=o,
2048
- )
2049
-
2050
- def contains(self, iv: InterpVersion) -> bool:
2051
- return self.specifier.contains(iv.version) and self.opts == iv.opts
2052
-
2053
- def __contains__(self, iv: InterpVersion) -> bool:
2054
- return self.contains(iv)
2055
-
2056
-
2057
- @dc.dataclass(frozen=True)
2058
- class Interp:
2059
- exe: str
2060
- version: InterpVersion
2061
-
2062
-
2063
- ########################################
2064
- # ../../../omlish/lite/subprocesses.py
2085
+ # ../../../omlish/subprocesses.py
2065
2086
 
2066
2087
 
2067
2088
  ##
@@ -2112,8 +2133,8 @@ def subprocess_close(
2112
2133
  ##
2113
2134
 
2114
2135
 
2115
- class AbstractSubprocesses(abc.ABC): # noqa
2116
- DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = log
2136
+ class BaseSubprocesses(abc.ABC): # noqa
2137
+ DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = None
2117
2138
 
2118
2139
  def __init__(
2119
2140
  self,
@@ -2126,6 +2147,9 @@ class AbstractSubprocesses(abc.ABC): # noqa
2126
2147
  self._log = log if log is not None else self.DEFAULT_LOGGER
2127
2148
  self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS
2128
2149
 
2150
+ def set_logger(self, log: ta.Optional[logging.Logger]) -> None:
2151
+ self._log = log
2152
+
2129
2153
  #
2130
2154
 
2131
2155
  def prepare_args(
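Since DEFAULT_LOGGER is now None rather than the module-level log, subprocess logging becomes opt-in: a logger is either passed to the constructor or attached later through the new set_logger(). An illustrative sketch (the logger name is made up):

    import logging

    # `subprocesses` is the module-level Subprocesses() singleton defined below
    subprocesses.set_logger(logging.getLogger('omdev.interp.subprocesses'))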
@@ -2237,23 +2261,25 @@ class AbstractSubprocesses(abc.ABC): # noqa
2237
2261
  ##
2238
2262
 
2239
2263
 
2240
- class Subprocesses(AbstractSubprocesses):
2264
+ class AbstractSubprocesses(BaseSubprocesses, abc.ABC):
2265
+ @abc.abstractmethod
2241
2266
  def check_call(
2242
2267
  self,
2243
2268
  *cmd: str,
2244
2269
  stdout: ta.Any = sys.stderr,
2245
2270
  **kwargs: ta.Any,
2246
2271
  ) -> None:
2247
- with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs): # noqa
2248
- subprocess.check_call(cmd, **kwargs)
2272
+ raise NotImplementedError
2249
2273
 
2274
+ @abc.abstractmethod
2250
2275
  def check_output(
2251
2276
  self,
2252
2277
  *cmd: str,
2253
2278
  **kwargs: ta.Any,
2254
2279
  ) -> bytes:
2255
- with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs): # noqa
2256
- return subprocess.check_output(cmd, **kwargs)
2280
+ raise NotImplementedError
2281
+
2282
+ #
2257
2283
 
2258
2284
  def check_output_str(
2259
2285
  self,
@@ -2295,11 +2321,96 @@ class Subprocesses(AbstractSubprocesses):
2295
2321
  return ret.decode().strip()
2296
2322
 
2297
2323
 
2324
+ ##
2325
+
2326
+
2327
+ class Subprocesses(AbstractSubprocesses):
2328
+ def check_call(
2329
+ self,
2330
+ *cmd: str,
2331
+ stdout: ta.Any = sys.stderr,
2332
+ **kwargs: ta.Any,
2333
+ ) -> None:
2334
+ with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs): # noqa
2335
+ subprocess.check_call(cmd, **kwargs)
2336
+
2337
+ def check_output(
2338
+ self,
2339
+ *cmd: str,
2340
+ **kwargs: ta.Any,
2341
+ ) -> bytes:
2342
+ with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs): # noqa
2343
+ return subprocess.check_output(cmd, **kwargs)
2344
+
2345
+
2298
2346
  subprocesses = Subprocesses()
2299
2347
 
2300
2348
 
2349
+ ##
2350
+
2351
+
2352
+ class AbstractAsyncSubprocesses(BaseSubprocesses):
2353
+ @abc.abstractmethod
2354
+ async def check_call(
2355
+ self,
2356
+ *cmd: str,
2357
+ stdout: ta.Any = sys.stderr,
2358
+ **kwargs: ta.Any,
2359
+ ) -> None:
2360
+ raise NotImplementedError
2361
+
2362
+ @abc.abstractmethod
2363
+ async def check_output(
2364
+ self,
2365
+ *cmd: str,
2366
+ **kwargs: ta.Any,
2367
+ ) -> bytes:
2368
+ raise NotImplementedError
2369
+
2370
+ #
2371
+
2372
+ async def check_output_str(
2373
+ self,
2374
+ *cmd: str,
2375
+ **kwargs: ta.Any,
2376
+ ) -> str:
2377
+ return (await self.check_output(*cmd, **kwargs)).decode().strip()
2378
+
2379
+ #
2380
+
2381
+ async def try_call(
2382
+ self,
2383
+ *cmd: str,
2384
+ **kwargs: ta.Any,
2385
+ ) -> bool:
2386
+ if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
2387
+ return False
2388
+ else:
2389
+ return True
2390
+
2391
+ async def try_output(
2392
+ self,
2393
+ *cmd: str,
2394
+ **kwargs: ta.Any,
2395
+ ) -> ta.Optional[bytes]:
2396
+ if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
2397
+ return None
2398
+ else:
2399
+ return ret
2400
+
2401
+ async def try_output_str(
2402
+ self,
2403
+ *cmd: str,
2404
+ **kwargs: ta.Any,
2405
+ ) -> ta.Optional[str]:
2406
+ if (ret := await self.try_output(*cmd, **kwargs)) is None:
2407
+ return None
2408
+ else:
2409
+ return ret.decode().strip()
2410
+
2411
+
2301
2412
  ########################################
2302
- # ../../../omlish/lite/asyncio/subprocesses.py
2413
+ # ../../../omlish/asyncs/asyncio/subprocesses.py
2303
2414
 
2304
2415
 
2305
2416
  ##
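Net effect of the split above: BaseSubprocesses keeps the shared argument and exception plumbing, AbstractSubprocesses and AbstractAsyncSubprocesses declare the same check_call/check_output surface (plus the *_str and try_* conveniences) for sync and async callers, and Subprocesses remains the concrete synchronous implementation. A rough usage sketch; the git commands are illustrative only:

    # synchronous singleton
    sha = subprocesses.check_output_str('git', 'rev-parse', 'HEAD')
    ok = subprocesses.try_call('git', 'fetch', '--dry-run')

    # asyncio counterpart (asyncio_subprocesses, defined near the end of the script)
    async def describe() -> None:
        sha = await asyncio_subprocesses.check_output_str('git', 'rev-parse', 'HEAD')
        tag = await asyncio_subprocesses.try_output_str('git', 'describe', '--tags')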
@@ -2310,6 +2421,8 @@ class AsyncioProcessCommunicator:
2310
2421
  self,
2311
2422
  proc: asyncio.subprocess.Process,
2312
2423
  loop: ta.Optional[ta.Any] = None,
2424
+ *,
2425
+ log: ta.Optional[logging.Logger] = None,
2313
2426
  ) -> None:
2314
2427
  super().__init__()
2315
2428
 
@@ -2318,6 +2431,7 @@ class AsyncioProcessCommunicator:
2318
2431
 
2319
2432
  self._proc = proc
2320
2433
  self._loop = loop
2434
+ self._log = log
2321
2435
 
2322
2436
  self._transport: asyncio.base_subprocess.BaseSubprocessTransport = check.isinstance(
2323
2437
  proc._transport, # type: ignore # noqa
@@ -2333,19 +2447,19 @@ class AsyncioProcessCommunicator:
2333
2447
  try:
2334
2448
  if input is not None:
2335
2449
  stdin.write(input)
2336
- if self._debug:
2337
- log.debug('%r communicate: feed stdin (%s bytes)', self, len(input))
2450
+ if self._debug and self._log is not None:
2451
+ self._log.debug('%r communicate: feed stdin (%s bytes)', self, len(input))
2338
2452
 
2339
2453
  await stdin.drain()
2340
2454
 
2341
2455
  except (BrokenPipeError, ConnectionResetError) as exc:
2342
2456
  # communicate() ignores BrokenPipeError and ConnectionResetError. write() and drain() can raise these
2343
2457
  # exceptions.
2344
- if self._debug:
2345
- log.debug('%r communicate: stdin got %r', self, exc)
2458
+ if self._debug and self._log is not None:
2459
+ self._log.debug('%r communicate: stdin got %r', self, exc)
2346
2460
 
2347
- if self._debug:
2348
- log.debug('%r communicate: close stdin', self)
2461
+ if self._debug and self._log is not None:
2462
+ self._log.debug('%r communicate: close stdin', self)
2349
2463
 
2350
2464
  stdin.close()
2351
2465
 
@@ -2361,15 +2475,15 @@ class AsyncioProcessCommunicator:
2361
2475
  check.equal(fd, 1)
2362
2476
  stream = check.not_none(self._proc.stdout)
2363
2477
 
2364
- if self._debug:
2478
+ if self._debug and self._log is not None:
2365
2479
  name = 'stdout' if fd == 1 else 'stderr'
2366
- log.debug('%r communicate: read %s', self, name)
2480
+ self._log.debug('%r communicate: read %s', self, name)
2367
2481
 
2368
2482
  output = await stream.read()
2369
2483
 
2370
- if self._debug:
2484
+ if self._debug and self._log is not None:
2371
2485
  name = 'stdout' if fd == 1 else 'stderr'
2372
- log.debug('%r communicate: close %s', self, name)
2486
+ self._log.debug('%r communicate: close %s', self, name)
2373
2487
 
2374
2488
  transport.close()
2375
2489
 
@@ -2418,7 +2532,7 @@ class AsyncioProcessCommunicator:
2418
2532
  ##
2419
2533
 
2420
2534
 
2421
- class AsyncioSubprocesses(AbstractSubprocesses):
2535
+ class AsyncioSubprocesses(AbstractAsyncSubprocesses):
2422
2536
  async def communicate(
2423
2537
  self,
2424
2538
  proc: asyncio.subprocess.Process,
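The communicator now takes an optional keyword-only log and its debug tracing only fires when a logger is actually supplied, replacing the old module-level log; AsyncioSubprocesses likewise now derives from AbstractAsyncSubprocesses. A minimal sketch, assuming the communicate() coroutine mirrors asyncio.subprocess.Process.communicate() as the debug messages above suggest:

    import asyncio
    import logging

    async def run_cat() -> bytes:
        proc = await asyncio.create_subprocess_exec(
            'cat',
            stdin=asyncio.subprocess.PIPE,
            stdout=asyncio.subprocess.PIPE,
        )
        comm = AsyncioProcessCommunicator(proc, log=logging.getLogger(__name__))
        stdout, _ = await comm.communicate(b'hello')   # communicate() signature assumed
        return stdout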
@@ -2515,45 +2629,6 @@ class AsyncioSubprocesses(AbstractSubprocesses):
2515
2629
  with self.prepare_and_wrap(*cmd, stdout=subprocess.PIPE, check=True, **kwargs) as (cmd, kwargs): # noqa
2516
2630
  return check.not_none((await self.run(*cmd, **kwargs)).stdout)
2517
2631
 
2518
- async def check_output_str(
2519
- self,
2520
- *cmd: str,
2521
- **kwargs: ta.Any,
2522
- ) -> str:
2523
- return (await self.check_output(*cmd, **kwargs)).decode().strip()
2524
-
2525
- #
2526
-
2527
- async def try_call(
2528
- self,
2529
- *cmd: str,
2530
- **kwargs: ta.Any,
2531
- ) -> bool:
2532
- if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
2533
- return False
2534
- else:
2535
- return True
2536
-
2537
- async def try_output(
2538
- self,
2539
- *cmd: str,
2540
- **kwargs: ta.Any,
2541
- ) -> ta.Optional[bytes]:
2542
- if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
2543
- return None
2544
- else:
2545
- return ret
2546
-
2547
- async def try_output_str(
2548
- self,
2549
- *cmd: str,
2550
- **kwargs: ta.Any,
2551
- ) -> ta.Optional[str]:
2552
- if (ret := await self.try_output(*cmd, **kwargs)) is None:
2553
- return None
2554
- else:
2555
- return ret.decode().strip()
2556
-
2557
2632
 
2558
2633
  asyncio_subprocesses = AsyncioSubprocesses()
2559
2634