omdev 0.0.0.dev157__py3-none-any.whl → 0.0.0.dev158__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of omdev might be problematic.
- omdev/amalg/amalg.py +1 -1
- omdev/cexts/cmake.py +1 -1
- omdev/cexts/scan.py +1 -1
- omdev/git.py +1 -1
- omdev/interp/cli.py +1 -1
- omdev/interp/inspect.py +1 -1
- omdev/interp/pyenv.py +1 -1
- omdev/manifests/main.py +1 -1
- omdev/precheck/lite.py +1 -1
- omdev/precheck/main.py +1 -1
- omdev/pyproject/cli.py +2 -2
- omdev/pyproject/pkg.py +1 -1
- omdev/pyproject/venvs.py +1 -1
- omdev/revisions.py +1 -1
- omdev/scripts/interp.py +363 -288
- omdev/scripts/pyproject.py +494 -419
- omdev/tools/docker.py +1 -1
- omdev/tools/git.py +1 -1
- omdev/tools/mkrelimp.py +1 -1
- omdev/tools/sqlrepl.py +1 -1
- {omdev-0.0.0.dev157.dist-info → omdev-0.0.0.dev158.dist-info}/METADATA +2 -2
- {omdev-0.0.0.dev157.dist-info → omdev-0.0.0.dev158.dist-info}/RECORD +26 -26
- {omdev-0.0.0.dev157.dist-info → omdev-0.0.0.dev158.dist-info}/LICENSE +0 -0
- {omdev-0.0.0.dev157.dist-info → omdev-0.0.0.dev158.dist-info}/WHEEL +0 -0
- {omdev-0.0.0.dev157.dist-info → omdev-0.0.0.dev158.dist-info}/entry_points.txt +0 -0
- {omdev-0.0.0.dev157.dist-info → omdev-0.0.0.dev158.dist-info}/top_level.txt +0 -0
omdev/scripts/pyproject.py
CHANGED
@@ -115,7 +115,7 @@ CallableVersionOperator = ta.Callable[['Version', str], bool]
 # ../../omlish/argparse/cli.py
 ArgparseCommandFn = ta.Callable[[], ta.Optional[int]]  # ta.TypeAlias
 
-# ../../omlish/
+# ../../omlish/subprocesses.py
 SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias
 
 
@@ -2328,6 +2328,13 @@ json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON
 json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
 
 
+########################################
+# ../../../omlish/lite/logs.py
+
+
+log = logging.getLogger(__name__)
+
+
 ########################################
 # ../../../omlish/lite/reflect.py
 
@@ -2450,6 +2457,116 @@ def format_num_bytes(num_bytes: int) -> str:
     return f'{num_bytes / 1024 ** (len(FORMAT_NUM_BYTES_SUFFIXES) - 1):.2f}{FORMAT_NUM_BYTES_SUFFIXES[-1]}'
 
 
+########################################
+# ../../../omlish/logs/filters.py
+
+
+class TidLogFilter(logging.Filter):
+    def filter(self, record):
+        record.tid = threading.get_native_id()
+        return True
+
+
+########################################
+# ../../../omlish/logs/proxy.py
+
+
+class ProxyLogFilterer(logging.Filterer):
+    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
+        self._underlying = underlying
+
+    @property
+    def underlying(self) -> logging.Filterer:
+        return self._underlying
+
+    @property
+    def filters(self):
+        return self._underlying.filters
+
+    @filters.setter
+    def filters(self, filters):
+        self._underlying.filters = filters
+
+    def addFilter(self, filter):  # noqa
+        self._underlying.addFilter(filter)
+
+    def removeFilter(self, filter):  # noqa
+        self._underlying.removeFilter(filter)
+
+    def filter(self, record):
+        return self._underlying.filter(record)
+
+
+class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
+    def __init__(self, underlying: logging.Handler) -> None:  # noqa
+        ProxyLogFilterer.__init__(self, underlying)
+
+    _underlying: logging.Handler
+
+    @property
+    def underlying(self) -> logging.Handler:
+        return self._underlying
+
+    def get_name(self):
+        return self._underlying.get_name()
+
+    def set_name(self, name):
+        self._underlying.set_name(name)
+
+    @property
+    def name(self):
+        return self._underlying.name
+
+    @property
+    def level(self):
+        return self._underlying.level
+
+    @level.setter
+    def level(self, level):
+        self._underlying.level = level
+
+    @property
+    def formatter(self):
+        return self._underlying.formatter
+
+    @formatter.setter
+    def formatter(self, formatter):
+        self._underlying.formatter = formatter
+
+    def createLock(self):
+        self._underlying.createLock()
+
+    def acquire(self):
+        self._underlying.acquire()
+
+    def release(self):
+        self._underlying.release()
+
+    def setLevel(self, level):
+        self._underlying.setLevel(level)
+
+    def format(self, record):
+        return self._underlying.format(record)
+
+    def emit(self, record):
+        self._underlying.emit(record)
+
+    def handle(self, record):
+        return self._underlying.handle(record)
+
+    def setFormatter(self, fmt):
+        self._underlying.setFormatter(fmt)
+
+    def flush(self):
+        self._underlying.flush()
+
+    def close(self):
+        self._underlying.close()
+
+    def handleError(self, record):
+        self._underlying.handleError(record)
+
+
 ########################################
 # ../../cexts/magic.py
 
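Usage note (not part of the diff): the hunk above vendors TidLogFilter and the ProxyLogHandler delegate from omlish/logs/filters.py and omlish/logs/proxy.py. A minimal sketch of how they compose, assuming the amalgamated omdev.scripts.pyproject module imports cleanly:

    import logging

    from omdev.scripts.pyproject import ProxyLogHandler, TidLogFilter  # assumed import location

    inner = logging.StreamHandler()
    handler = ProxyLogHandler(inner)   # delegates emit/format/flush/... to the wrapped handler
    handler.addFilter(TidLogFilter())  # stamps each record with record.tid via threading.get_native_id()

    log = logging.getLogger('demo')
    log.addHandler(handler)
    log.warning('hello')               # handled by the wrapped StreamHandler; the record carries a tid attribute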
@@ -3207,6 +3324,83 @@ class SpecifierSet(BaseSpecifier):
         return iter(filtered)
 
 
+########################################
+# ../reqs.py
+"""
+TODO:
+ - embed pip._internal.req.parse_requirements, add additional env stuff? breaks compat with raw pip
+"""
+
+
+class RequirementsRewriter:
+    def __init__(
+            self,
+            venv: ta.Optional[str] = None,
+    ) -> None:
+        super().__init__()
+        self._venv = venv
+
+    @cached_nullary
+    def _tmp_dir(self) -> str:
+        return tempfile.mkdtemp('-omlish-reqs')
+
+    VENV_MAGIC = '# @omlish-venv'
+
+    def rewrite_file(self, in_file: str) -> str:
+        with open(in_file) as f:
+            src = f.read()
+
+        in_lines = src.splitlines(keepends=True)
+        out_lines = []
+
+        for l in in_lines:
+            if self.VENV_MAGIC in l:
+                lp, _, rp = l.partition(self.VENV_MAGIC)
+                rp = rp.partition('#')[0]
+                omit = False
+                for v in rp.split():
+                    if v[0] == '!':
+                        if self._venv is not None and self._venv == v[1:]:
+                            omit = True
+                            break
+                    else:
+                        raise NotImplementedError
+
+                if omit:
+                    out_lines.append('# OMITTED: ' + l)
+                    continue
+
+            out_req = self.rewrite(l.rstrip('\n'), for_file=True)
+            out_lines.append(out_req + '\n')
+
+        out_file = os.path.join(self._tmp_dir(), os.path.basename(in_file))
+        if os.path.exists(out_file):
+            raise Exception(f'file exists: {out_file}')
+
+        with open(out_file, 'w') as f:
+            f.write(''.join(out_lines))
+        log.info('Rewrote requirements file %s to %s', in_file, out_file)
+        return out_file
+
+    def rewrite(self, in_req: str, *, for_file: bool = False) -> str:
+        if in_req.strip().startswith('-r'):
+            l = in_req.strip()
+            lp, _, rp = l.partition(' ')
+            if lp == '-r':
+                inc_in_file, _, rest = rp.partition(' ')
+            else:
+                inc_in_file, rest = lp[2:], rp
+
+            inc_out_file = self.rewrite_file(inc_in_file)
+            if for_file:
+                return ' '.join(['-r ', inc_out_file, rest])
+            else:
+                return '-r' + inc_out_file
+
+        else:
+            return in_req
+
+
 ########################################
 # ../../../omlish/argparse/cli.py
 """
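Usage note (not part of the diff): RequirementsRewriter above drives the '# @omlish-venv' magic comment. A hedged sketch; the import location is an assumption, but the class itself is defined in the omdev/scripts/pyproject.py this diff modifies:

    import pathlib
    import tempfile

    from omdev.scripts.pyproject import RequirementsRewriter  # assumed import location

    reqs = pathlib.Path(tempfile.mkdtemp()) / 'requirements.txt'
    reqs.write_text(
        'requests\n'
        'pytest  # @omlish-venv !docker\n'  # hypothetical marker: skip this line for the "docker" venv
    )

    out = RequirementsRewriter(venv='docker').rewrite_file(str(reqs))
    print(pathlib.Path(out).read_text())  # the pytest line is emitted as '# OMITTED: ...'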
@@ -3482,311 +3676,41 @@ class ArgparseCli:
 
 
 ########################################
-# ../../../omlish/lite/
+# ../../../omlish/lite/marshal.py
 """
 TODO:
- -
- -
+ - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
+ - namedtuple
+ - literals
+ - newtypes?
 """
 
 
-log = logging.getLogger(__name__)
-
-
 ##
 
 
-
-
-
-
-        return True
+@dc.dataclass(frozen=True)
+class ObjMarshalOptions:
+    raw_bytes: bool = False
+    nonstrict_dataclasses: bool = False
 
 
-
+class ObjMarshaler(abc.ABC):
+    @abc.abstractmethod
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        raise NotImplementedError
 
+    @abc.abstractmethod
+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        raise NotImplementedError
 
-class JsonLogFormatter(logging.Formatter):
 
-
-
-
-        'args': False,
-        'levelname': False,
-        'levelno': False,
-        'pathname': False,
-        'filename': False,
-        'module': False,
-        'exc_info': True,
-        'exc_text': True,
-        'stack_info': True,
-        'lineno': False,
-        'funcName': False,
-        'created': False,
-        'msecs': False,
-        'relativeCreated': False,
-        'thread': False,
-        'threadName': False,
-        'processName': False,
-        'process': False,
-    }
+class NopObjMarshaler(ObjMarshaler):
+    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o
 
-    def
-
-            k: v
-            for k, o in self.KEYS.items()
-            for v in [getattr(record, k)]
-            if not (o and v is None)
-        }
-        return json_dumps_compact(dct)
-
-
-##
-
-
-STANDARD_LOG_FORMAT_PARTS = [
-    ('asctime', '%(asctime)-15s'),
-    ('process', 'pid=%(process)-6s'),
-    ('thread', 'tid=%(thread)x'),
-    ('levelname', '%(levelname)s'),
-    ('name', '%(name)s'),
-    ('separator', '::'),
-    ('message', '%(message)s'),
-]
-
-
-class StandardLogFormatter(logging.Formatter):
-
-    @staticmethod
-    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
-        return ' '.join(v for k, v in parts)
-
-    converter = datetime.datetime.fromtimestamp  # type: ignore
-
-    def formatTime(self, record, datefmt=None):
-        ct = self.converter(record.created)  # type: ignore
-        if datefmt:
-            return ct.strftime(datefmt)  # noqa
-        else:
-            t = ct.strftime('%Y-%m-%d %H:%M:%S')
-            return '%s.%03d' % (t, record.msecs)  # noqa
-
-
-##
-
-
-class ProxyLogFilterer(logging.Filterer):
-    def __init__(self, underlying: logging.Filterer) -> None:  # noqa
-        self._underlying = underlying
-
-    @property
-    def underlying(self) -> logging.Filterer:
-        return self._underlying
-
-    @property
-    def filters(self):
-        return self._underlying.filters
-
-    @filters.setter
-    def filters(self, filters):
-        self._underlying.filters = filters
-
-    def addFilter(self, filter):  # noqa
-        self._underlying.addFilter(filter)
-
-    def removeFilter(self, filter):  # noqa
-        self._underlying.removeFilter(filter)
-
-    def filter(self, record):
-        return self._underlying.filter(record)
-
-
-class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
-    def __init__(self, underlying: logging.Handler) -> None:  # noqa
-        ProxyLogFilterer.__init__(self, underlying)
-
-    _underlying: logging.Handler
-
-    @property
-    def underlying(self) -> logging.Handler:
-        return self._underlying
-
-    def get_name(self):
-        return self._underlying.get_name()
-
-    def set_name(self, name):
-        self._underlying.set_name(name)
-
-    @property
-    def name(self):
-        return self._underlying.name
-
-    @property
-    def level(self):
-        return self._underlying.level
-
-    @level.setter
-    def level(self, level):
-        self._underlying.level = level
-
-    @property
-    def formatter(self):
-        return self._underlying.formatter
-
-    @formatter.setter
-    def formatter(self, formatter):
-        self._underlying.formatter = formatter
-
-    def createLock(self):
-        self._underlying.createLock()
-
-    def acquire(self):
-        self._underlying.acquire()
-
-    def release(self):
-        self._underlying.release()
-
-    def setLevel(self, level):
-        self._underlying.setLevel(level)
-
-    def format(self, record):
-        return self._underlying.format(record)
-
-    def emit(self, record):
-        self._underlying.emit(record)
-
-    def handle(self, record):
-        return self._underlying.handle(record)
-
-    def setFormatter(self, fmt):
-        self._underlying.setFormatter(fmt)
-
-    def flush(self):
-        self._underlying.flush()
-
-    def close(self):
-        self._underlying.close()
-
-    def handleError(self, record):
-        self._underlying.handleError(record)
-
-
-##
-
-
-class StandardLogHandler(ProxyLogHandler):
-    pass
-
-
-##
-
-
-@contextlib.contextmanager
-def _locking_logging_module_lock() -> ta.Iterator[None]:
-    if hasattr(logging, '_acquireLock'):
-        logging._acquireLock()  # noqa
-        try:
-            yield
-        finally:
-            logging._releaseLock()  # type: ignore  # noqa
-
-    elif hasattr(logging, '_lock'):
-        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
-        with logging._lock:  # noqa
-            yield
-
-    else:
-        raise Exception("Can't find lock in logging module")
-
-
-def configure_standard_logging(
-        level: ta.Union[int, str] = logging.INFO,
-        *,
-        json: bool = False,
-        target: ta.Optional[logging.Logger] = None,
-        force: bool = False,
-        handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
-) -> ta.Optional[StandardLogHandler]:
-    with _locking_logging_module_lock():
-        if target is None:
-            target = logging.root
-
-        #
-
-        if not force:
-            if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
-                return None
-
-        #
-
-        if handler_factory is not None:
-            handler = handler_factory()
-        else:
-            handler = logging.StreamHandler()
-
-        #
-
-        formatter: logging.Formatter
-        if json:
-            formatter = JsonLogFormatter()
-        else:
-            formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
-        handler.setFormatter(formatter)
-
-        #
-
-        handler.addFilter(TidLogFilter())
-
-        #
-
-        target.addHandler(handler)
-
-        #
-
-        if level is not None:
-            target.setLevel(level)
-
-        #
-
-        return StandardLogHandler(handler)
-
-
-########################################
-# ../../../omlish/lite/marshal.py
-"""
-TODO:
- - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
- - namedtuple
- - literals
- - newtypes?
-"""
-
-
-##
-
-
-@dc.dataclass(frozen=True)
-class ObjMarshalOptions:
-    raw_bytes: bool = False
-    nonstrict_dataclasses: bool = False
-
-
-class ObjMarshaler(abc.ABC):
-    @abc.abstractmethod
-    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        raise NotImplementedError
-
-    @abc.abstractmethod
-    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        raise NotImplementedError
-
-
-class NopObjMarshaler(ObjMarshaler):
-    def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return o
-
-    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-        return o
+    def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+        return o
 
 
 @dc.dataclass()
@@ -4213,6 +4137,60 @@ def check_runtime_version() -> None:
         raise OSError(f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}')  # noqa
 
 
+########################################
+# ../../../omlish/logs/json.py
+"""
+TODO:
+ - translate json keys
+"""
+
+
+class JsonLogFormatter(logging.Formatter):
+    KEYS: ta.Mapping[str, bool] = {
+        'name': False,
+        'msg': False,
+        'args': False,
+        'levelname': False,
+        'levelno': False,
+        'pathname': False,
+        'filename': False,
+        'module': False,
+        'exc_info': True,
+        'exc_text': True,
+        'stack_info': True,
+        'lineno': False,
+        'funcName': False,
+        'created': False,
+        'msecs': False,
+        'relativeCreated': False,
+        'thread': False,
+        'threadName': False,
+        'processName': False,
+        'process': False,
+    }
+
+    def __init__(
+            self,
+            *args: ta.Any,
+            json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
+            **kwargs: ta.Any,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+
+        if json_dumps is None:
+            json_dumps = json_dumps_compact
+        self._json_dumps = json_dumps
+
+    def format(self, record: logging.LogRecord) -> str:
+        dct = {
+            k: v
+            for k, o in self.KEYS.items()
+            for v in [getattr(record, k)]
+            if not (o and v is None)
+        }
+        return self._json_dumps(dct)
+
+
 ########################################
 # ../../interp/types.py
 
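Usage note (not part of the diff): the JsonLogFormatter added above emits one JSON object per record. A hedged sketch; json.dumps is passed explicitly so the snippet stands alone, while the script itself defaults to its json_dumps_compact helper:

    import io
    import json
    import logging

    from omdev.scripts.pyproject import JsonLogFormatter  # assumed import location

    buf = io.StringIO()
    handler = logging.StreamHandler(buf)
    handler.setFormatter(JsonLogFormatter(json_dumps=json.dumps))

    log = logging.getLogger('json-demo')
    log.addHandler(handler)
    log.error('boom')

    print(buf.getvalue())  # {"name": "json-demo", "msg": "boom", ...}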
@@ -4394,95 +4372,138 @@ class PyprojectConfigPreparer:
     def prepare_config(self, dct: ta.Mapping[str, ta.Any]) -> PyprojectConfig:
         pcfg: PyprojectConfig = unmarshal_obj(dct, PyprojectConfig)
 
-        ivs = dict(self._inherit_venvs(pcfg.venvs or {}))
-        for k, v in ivs.items():
-            v = dc.replace(v, srcs=self._resolve_srcs(v.srcs or [], pcfg.srcs or {}))
-            v = dc.replace(v, interp=self._fixup_interp(v.interp))
-            ivs[k] = v
+        ivs = dict(self._inherit_venvs(pcfg.venvs or {}))
+        for k, v in ivs.items():
+            v = dc.replace(v, srcs=self._resolve_srcs(v.srcs or [], pcfg.srcs or {}))
+            v = dc.replace(v, interp=self._fixup_interp(v.interp))
+            ivs[k] = v
+
+        pcfg = dc.replace(pcfg, venvs=ivs)
+        return pcfg
+
+
+########################################
+# ../../../omlish/logs/standard.py
+"""
+TODO:
+ - structured
+ - prefixed
+ - debug
+"""
+
+
+##
+
+
+STANDARD_LOG_FORMAT_PARTS = [
+    ('asctime', '%(asctime)-15s'),
+    ('process', 'pid=%(process)-6s'),
+    ('thread', 'tid=%(thread)x'),
+    ('levelname', '%(levelname)s'),
+    ('name', '%(name)s'),
+    ('separator', '::'),
+    ('message', '%(message)s'),
+]
+
+
+class StandardLogFormatter(logging.Formatter):
+    @staticmethod
+    def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
+        return ' '.join(v for k, v in parts)
+
+    converter = datetime.datetime.fromtimestamp  # type: ignore
+
+    def formatTime(self, record, datefmt=None):
+        ct = self.converter(record.created)  # type: ignore
+        if datefmt:
+            return ct.strftime(datefmt)  # noqa
+        else:
+            t = ct.strftime('%Y-%m-%d %H:%M:%S')
+            return '%s.%03d' % (t, record.msecs)  # noqa
+
+
+##
+
+
+class StandardLogHandler(ProxyLogHandler):
+    pass
+
+
+##
+
+
+@contextlib.contextmanager
+def _locking_logging_module_lock() -> ta.Iterator[None]:
+    if hasattr(logging, '_acquireLock'):
+        logging._acquireLock()  # noqa
+        try:
+            yield
+        finally:
+            logging._releaseLock()  # type: ignore  # noqa
 
-
-
+    elif hasattr(logging, '_lock'):
+        # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
+        with logging._lock:  # noqa
+            yield
 
+    else:
+        raise Exception("Can't find lock in logging module")
 
-########################################
-# ../reqs.py
-"""
-TODO:
- - embed pip._internal.req.parse_requirements, add additional env stuff? breaks compat with raw pip
-"""
 
+def configure_standard_logging(
+        level: ta.Union[int, str] = logging.INFO,
+        *,
+        json: bool = False,
+        target: ta.Optional[logging.Logger] = None,
+        force: bool = False,
+        handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
+) -> ta.Optional[StandardLogHandler]:
+    with _locking_logging_module_lock():
+        if target is None:
+            target = logging.root
 
-
-    def __init__(
-            self,
-            venv: ta.Optional[str] = None,
-    ) -> None:
-        super().__init__()
-        self._venv = venv
+        #
 
-
-
-
+        if not force:
+            if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
+                return None
 
-
+        #
 
-
-
-
+        if handler_factory is not None:
+            handler = handler_factory()
+        else:
+            handler = logging.StreamHandler()
 
-
-        out_lines = []
+        #
 
-
-
-
-
-
-
-                    if v[0] == '!':
-                        if self._venv is not None and self._venv == v[1:]:
-                            omit = True
-                            break
-                    else:
-                        raise NotImplementedError
+        formatter: logging.Formatter
+        if json:
+            formatter = JsonLogFormatter()
+        else:
+            formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
+        handler.setFormatter(formatter)
 
-
-                    out_lines.append('# OMITTED: ' + l)
-                    continue
+        #
 
-
-            out_lines.append(out_req + '\n')
+        handler.addFilter(TidLogFilter())
 
-
-        if os.path.exists(out_file):
-            raise Exception(f'file exists: {out_file}')
+        #
 
-
-            f.write(''.join(out_lines))
-        log.info('Rewrote requirements file %s to %s', in_file, out_file)
-        return out_file
+        target.addHandler(handler)
 
-
-        if in_req.strip().startswith('-r'):
-            l = in_req.strip()
-            lp, _, rp = l.partition(' ')
-            if lp == '-r':
-                inc_in_file, _, rest = rp.partition(' ')
-            else:
-                inc_in_file, rest = lp[2:], rp
+        #
 
-
-
-                return ' '.join(['-r ', inc_out_file, rest])
-            else:
-                return '-r' + inc_out_file
+        if level is not None:
+            target.setLevel(level)
 
-
-
+        #
+
+        return StandardLogHandler(handler)
 
 
 ########################################
-# ../../../omlish/
+# ../../../omlish/subprocesses.py
 
 
 ##
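Usage note (not part of the diff): configure_standard_logging keeps its signature after the move to omlish/logs/standard.py. A hedged call sketch, assuming the amalgamated module is importable:

    import logging

    from omdev.scripts.pyproject import configure_standard_logging  # assumed import location

    # Installs a StandardLogHandler on the root logger (or returns None if one is already
    # installed and force=False), wiring in TidLogFilter and the standard or JSON formatter.
    handler = configure_standard_logging(logging.DEBUG, json=False)

    logging.getLogger(__name__).info('configured')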
@@ -4533,8 +4554,8 @@ def subprocess_close(
 ##
 
 
-class
-DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] =
+class BaseSubprocesses(abc.ABC):  # noqa
+    DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = None
 
     def __init__(
             self,
@@ -4547,6 +4568,9 @@ class AbstractSubprocesses(abc.ABC):  # noqa
         self._log = log if log is not None else self.DEFAULT_LOGGER
         self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS
 
+    def set_logger(self, log: ta.Optional[logging.Logger]) -> None:
+        self._log = log
+
     #
 
     def prepare_args(
@@ -4658,23 +4682,25 @@ class AbstractSubprocesses(abc.ABC):  # noqa
 ##
 
 
-class
+class AbstractSubprocesses(BaseSubprocesses, abc.ABC):
+    @abc.abstractmethod
     def check_call(
             self,
             *cmd: str,
             stdout: ta.Any = sys.stderr,
             **kwargs: ta.Any,
     ) -> None:
-
-            subprocess.check_call(cmd, **kwargs)
+        raise NotImplementedError
 
+    @abc.abstractmethod
     def check_output(
             self,
             *cmd: str,
             **kwargs: ta.Any,
     ) -> bytes:
-
-
+        raise NotImplementedError
+
+    #
 
     def check_output_str(
             self,
@@ -4716,9 +4742,94 @@ class Subprocesses(AbstractSubprocesses):
         return ret.decode().strip()
 
 
+##
+
+
+class Subprocesses(AbstractSubprocesses):
+    def check_call(
+            self,
+            *cmd: str,
+            stdout: ta.Any = sys.stderr,
+            **kwargs: ta.Any,
+    ) -> None:
+        with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs):  # noqa
+            subprocess.check_call(cmd, **kwargs)
+
+    def check_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bytes:
+        with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs):  # noqa
+            return subprocess.check_output(cmd, **kwargs)
+
+
 subprocesses = Subprocesses()
 
 
+##
+
+
+class AbstractAsyncSubprocesses(BaseSubprocesses):
+    @abc.abstractmethod
+    async def check_call(
+            self,
+            *cmd: str,
+            stdout: ta.Any = sys.stderr,
+            **kwargs: ta.Any,
+    ) -> None:
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    async def check_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bytes:
+        raise NotImplementedError
+
+    #
+
+    async def check_output_str(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> str:
+        return (await self.check_output(*cmd, **kwargs)).decode().strip()
+
+    #
+
+    async def try_call(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> bool:
+        if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
+            return False
+        else:
+            return True
+
+    async def try_output(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[bytes]:
+        if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
+            return None
+        else:
+            return ret
+
+    async def try_output_str(
+            self,
+            *cmd: str,
+            **kwargs: ta.Any,
+    ) -> ta.Optional[str]:
+        if (ret := await self.try_output(*cmd, **kwargs)) is None:
+            return None
+        else:
+            return ret.decode().strip()
+
+
 ########################################
 # ../../git.py
 """
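Usage note (not part of the diff): after the split into BaseSubprocesses, the abstract AbstractSubprocesses, and the concrete Subprocesses above, the module-level subprocesses instance keeps its old surface, and set_logger (added earlier in this diff) can swap the logger in afterwards. A hedged sketch, import location assumed:

    import logging

    from omdev.scripts.pyproject import subprocesses  # assumed import location

    subprocesses.set_logger(logging.getLogger('subproc'))  # new in this release

    print(subprocesses.check_output_str('echo', 'hi'))  # 'hi'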
@@ -5121,7 +5232,7 @@ def get_git_status(
 
 
 ########################################
-# ../../../omlish/
+# ../../../omlish/asyncs/asyncio/subprocesses.py
 
 
 ##
@@ -5132,6 +5243,8 @@ class AsyncioProcessCommunicator:
             self,
             proc: asyncio.subprocess.Process,
             loop: ta.Optional[ta.Any] = None,
+            *,
+            log: ta.Optional[logging.Logger] = None,
     ) -> None:
         super().__init__()
 
@@ -5140,6 +5253,7 @@ class AsyncioProcessCommunicator:
 
         self._proc = proc
         self._loop = loop
+        self._log = log
 
         self._transport: asyncio.base_subprocess.BaseSubprocessTransport = check.isinstance(
             proc._transport,  # type: ignore  # noqa
@@ -5155,19 +5269,19 @@ class AsyncioProcessCommunicator:
         try:
             if input is not None:
                 stdin.write(input)
-                if self._debug:
-
+                if self._debug and self._log is not None:
+                    self._log.debug('%r communicate: feed stdin (%s bytes)', self, len(input))
 
             await stdin.drain()
 
         except (BrokenPipeError, ConnectionResetError) as exc:
             # communicate() ignores BrokenPipeError and ConnectionResetError. write() and drain() can raise these
             # exceptions.
-            if self._debug:
-
+            if self._debug and self._log is not None:
+                self._log.debug('%r communicate: stdin got %r', self, exc)
 
-        if self._debug:
-
+        if self._debug and self._log is not None:
+            self._log.debug('%r communicate: close stdin', self)
 
         stdin.close()
 
@@ -5183,15 +5297,15 @@ class AsyncioProcessCommunicator:
             check.equal(fd, 1)
             stream = check.not_none(self._proc.stdout)
 
-        if self._debug:
+        if self._debug and self._log is not None:
             name = 'stdout' if fd == 1 else 'stderr'
-
+            self._log.debug('%r communicate: read %s', self, name)
 
         output = await stream.read()
 
-        if self._debug:
+        if self._debug and self._log is not None:
             name = 'stdout' if fd == 1 else 'stderr'
-
+            self._log.debug('%r communicate: close %s', self, name)
 
         transport.close()
 
@@ -5240,7 +5354,7 @@ class AsyncioProcessCommunicator:
 ##
 
 
-class AsyncioSubprocesses(
+class AsyncioSubprocesses(AbstractAsyncSubprocesses):
     async def communicate(
             self,
             proc: asyncio.subprocess.Process,
@@ -5337,45 +5451,6 @@ class AsyncioSubprocesses(AbstractSubprocesses):
         with self.prepare_and_wrap(*cmd, stdout=subprocess.PIPE, check=True, **kwargs) as (cmd, kwargs):  # noqa
             return check.not_none((await self.run(*cmd, **kwargs)).stdout)
 
-    async def check_output_str(
-            self,
-            *cmd: str,
-            **kwargs: ta.Any,
-    ) -> str:
-        return (await self.check_output(*cmd, **kwargs)).decode().strip()
-
-    #
-
-    async def try_call(
-            self,
-            *cmd: str,
-            **kwargs: ta.Any,
-    ) -> bool:
-        if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
-            return False
-        else:
-            return True
-
-    async def try_output(
-            self,
-            *cmd: str,
-            **kwargs: ta.Any,
-    ) -> ta.Optional[bytes]:
-        if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
-            return None
-        else:
-            return ret
-
-    async def try_output_str(
-            self,
-            *cmd: str,
-            **kwargs: ta.Any,
-    ) -> ta.Optional[str]:
-        if (ret := await self.try_output(*cmd, **kwargs)) is None:
-            return None
-        else:
-            return ret.decode().strip()
-
 
 asyncio_subprocesses = AsyncioSubprocesses()
 
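Usage note (not part of the diff): the async helpers removed here (check_output_str, try_call, try_output, try_output_str) now live on the AbstractAsyncSubprocesses base added earlier in this diff, so the asyncio_subprocesses surface is unchanged. A hedged sketch, import location assumed:

    import asyncio

    from omdev.scripts.pyproject import asyncio_subprocesses  # assumed import location


    async def main() -> None:
        # Inherited from AbstractAsyncSubprocesses rather than defined on AsyncioSubprocesses itself.
        out = await asyncio_subprocesses.try_output_str('git', 'rev-parse', 'HEAD')
        print(out if out is not None else 'not a git repo')


    asyncio.run(main())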