omdev 0.0.0.dev156-py3-none-any.whl → 0.0.0.dev158-py3-none-any.whl

This diff covers publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release.


This version of omdev might be problematic.

@@ -115,7 +115,7 @@ CallableVersionOperator = ta.Callable[['Version', str], bool]
115
115
  # ../../omlish/argparse/cli.py
116
116
  ArgparseCommandFn = ta.Callable[[], ta.Optional[int]] # ta.TypeAlias
117
117
 
118
- # ../../omlish/lite/subprocesses.py
118
+ # ../../omlish/subprocesses.py
119
119
  SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull'] # ta.TypeAlias
120
120
 
121
121
 
@@ -1823,8 +1823,8 @@ class _CachedNullary(_AbstractCachedNullary):
1823
1823
  return self._value
1824
1824
 
1825
1825
 
1826
- def cached_nullary(fn): # ta.Callable[..., T]) -> ta.Callable[..., T]:
1827
- return _CachedNullary(fn)
1826
+ def cached_nullary(fn: CallableT) -> CallableT:
1827
+ return _CachedNullary(fn) # type: ignore
1828
1828
 
1829
1829
 
1830
1830
  def static_init(fn: CallableT) -> CallableT:
@@ -2328,6 +2328,13 @@ json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON
2328
2328
  json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
2329
2329
 
2330
2330
 
2331
+ ########################################
2332
+ # ../../../omlish/lite/logs.py
2333
+
2334
+
2335
+ log = logging.getLogger(__name__)
2336
+
2337
+
2331
2338
  ########################################
2332
2339
  # ../../../omlish/lite/reflect.py
2333
2340
 
@@ -2450,6 +2457,116 @@ def format_num_bytes(num_bytes: int) -> str:
2450
2457
  return f'{num_bytes / 1024 ** (len(FORMAT_NUM_BYTES_SUFFIXES) - 1):.2f}{FORMAT_NUM_BYTES_SUFFIXES[-1]}'
2451
2458
 
2452
2459
 
2460
+ ########################################
2461
+ # ../../../omlish/logs/filters.py
2462
+
2463
+
2464
+ class TidLogFilter(logging.Filter):
2465
+ def filter(self, record):
2466
+ record.tid = threading.get_native_id()
2467
+ return True
2468
+
2469
+
2470
+ ########################################
2471
+ # ../../../omlish/logs/proxy.py
2472
+
2473
+
2474
+ class ProxyLogFilterer(logging.Filterer):
2475
+ def __init__(self, underlying: logging.Filterer) -> None: # noqa
2476
+ self._underlying = underlying
2477
+
2478
+ @property
2479
+ def underlying(self) -> logging.Filterer:
2480
+ return self._underlying
2481
+
2482
+ @property
2483
+ def filters(self):
2484
+ return self._underlying.filters
2485
+
2486
+ @filters.setter
2487
+ def filters(self, filters):
2488
+ self._underlying.filters = filters
2489
+
2490
+ def addFilter(self, filter): # noqa
2491
+ self._underlying.addFilter(filter)
2492
+
2493
+ def removeFilter(self, filter): # noqa
2494
+ self._underlying.removeFilter(filter)
2495
+
2496
+ def filter(self, record):
2497
+ return self._underlying.filter(record)
2498
+
2499
+
2500
+ class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
2501
+ def __init__(self, underlying: logging.Handler) -> None: # noqa
2502
+ ProxyLogFilterer.__init__(self, underlying)
2503
+
2504
+ _underlying: logging.Handler
2505
+
2506
+ @property
2507
+ def underlying(self) -> logging.Handler:
2508
+ return self._underlying
2509
+
2510
+ def get_name(self):
2511
+ return self._underlying.get_name()
2512
+
2513
+ def set_name(self, name):
2514
+ self._underlying.set_name(name)
2515
+
2516
+ @property
2517
+ def name(self):
2518
+ return self._underlying.name
2519
+
2520
+ @property
2521
+ def level(self):
2522
+ return self._underlying.level
2523
+
2524
+ @level.setter
2525
+ def level(self, level):
2526
+ self._underlying.level = level
2527
+
2528
+ @property
2529
+ def formatter(self):
2530
+ return self._underlying.formatter
2531
+
2532
+ @formatter.setter
2533
+ def formatter(self, formatter):
2534
+ self._underlying.formatter = formatter
2535
+
2536
+ def createLock(self):
2537
+ self._underlying.createLock()
2538
+
2539
+ def acquire(self):
2540
+ self._underlying.acquire()
2541
+
2542
+ def release(self):
2543
+ self._underlying.release()
2544
+
2545
+ def setLevel(self, level):
2546
+ self._underlying.setLevel(level)
2547
+
2548
+ def format(self, record):
2549
+ return self._underlying.format(record)
2550
+
2551
+ def emit(self, record):
2552
+ self._underlying.emit(record)
2553
+
2554
+ def handle(self, record):
2555
+ return self._underlying.handle(record)
2556
+
2557
+ def setFormatter(self, fmt):
2558
+ self._underlying.setFormatter(fmt)
2559
+
2560
+ def flush(self):
2561
+ self._underlying.flush()
2562
+
2563
+ def close(self):
2564
+ self._underlying.close()
2565
+
2566
+ def handleError(self, record):
2567
+ self._underlying.handleError(record)
2568
+
2569
+
2453
2570
  ########################################
2454
2571
  # ../../cexts/magic.py
2455
2572
 
@@ -3207,6 +3324,83 @@ class SpecifierSet(BaseSpecifier):
3207
3324
  return iter(filtered)
3208
3325
 
3209
3326
 
3327
+ ########################################
3328
+ # ../reqs.py
3329
+ """
3330
+ TODO:
3331
+ - embed pip._internal.req.parse_requirements, add additional env stuff? breaks compat with raw pip
3332
+ """
3333
+
3334
+
3335
+ class RequirementsRewriter:
3336
+ def __init__(
3337
+ self,
3338
+ venv: ta.Optional[str] = None,
3339
+ ) -> None:
3340
+ super().__init__()
3341
+ self._venv = venv
3342
+
3343
+ @cached_nullary
3344
+ def _tmp_dir(self) -> str:
3345
+ return tempfile.mkdtemp('-omlish-reqs')
3346
+
3347
+ VENV_MAGIC = '# @omlish-venv'
3348
+
3349
+ def rewrite_file(self, in_file: str) -> str:
3350
+ with open(in_file) as f:
3351
+ src = f.read()
3352
+
3353
+ in_lines = src.splitlines(keepends=True)
3354
+ out_lines = []
3355
+
3356
+ for l in in_lines:
3357
+ if self.VENV_MAGIC in l:
3358
+ lp, _, rp = l.partition(self.VENV_MAGIC)
3359
+ rp = rp.partition('#')[0]
3360
+ omit = False
3361
+ for v in rp.split():
3362
+ if v[0] == '!':
3363
+ if self._venv is not None and self._venv == v[1:]:
3364
+ omit = True
3365
+ break
3366
+ else:
3367
+ raise NotImplementedError
3368
+
3369
+ if omit:
3370
+ out_lines.append('# OMITTED: ' + l)
3371
+ continue
3372
+
3373
+ out_req = self.rewrite(l.rstrip('\n'), for_file=True)
3374
+ out_lines.append(out_req + '\n')
3375
+
3376
+ out_file = os.path.join(self._tmp_dir(), os.path.basename(in_file))
3377
+ if os.path.exists(out_file):
3378
+ raise Exception(f'file exists: {out_file}')
3379
+
3380
+ with open(out_file, 'w') as f:
3381
+ f.write(''.join(out_lines))
3382
+ log.info('Rewrote requirements file %s to %s', in_file, out_file)
3383
+ return out_file
3384
+
3385
+ def rewrite(self, in_req: str, *, for_file: bool = False) -> str:
3386
+ if in_req.strip().startswith('-r'):
3387
+ l = in_req.strip()
3388
+ lp, _, rp = l.partition(' ')
3389
+ if lp == '-r':
3390
+ inc_in_file, _, rest = rp.partition(' ')
3391
+ else:
3392
+ inc_in_file, rest = lp[2:], rp
3393
+
3394
+ inc_out_file = self.rewrite_file(inc_in_file)
3395
+ if for_file:
3396
+ return ' '.join(['-r ', inc_out_file, rest])
3397
+ else:
3398
+ return '-r' + inc_out_file
3399
+
3400
+ else:
3401
+ return in_req
3402
+
3403
+
3210
3404
  ########################################
3211
3405
  # ../../../omlish/argparse/cli.py
3212
3406
  """
@@ -3316,6 +3510,8 @@ def _get_argparse_arg_ann_kwargs(ann: ta.Any) -> ta.Mapping[str, ta.Any]:
3316
3510
  return {'action': 'store_true'}
3317
3511
  elif ann is list:
3318
3512
  return {'action': 'append'}
3513
+ elif is_optional_alias(ann):
3514
+ return _get_argparse_arg_ann_kwargs(get_optional_alias_arg(ann))
3319
3515
  else:
3320
3516
  raise TypeError(ann)
3321
3517
 
@@ -3480,315 +3676,46 @@ class ArgparseCli:
3480
3676
 
3481
3677
 
3482
3678
  ########################################
3483
- # ../../../omlish/lite/logs.py
3679
+ # ../../../omlish/lite/marshal.py
3484
3680
  """
3485
3681
  TODO:
3486
- - translate json keys
3487
- - debug
3682
+ - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
3683
+ - namedtuple
3684
+ - literals
3685
+ - newtypes?
3488
3686
  """
3489
3687
 
3490
3688
 
3491
- log = logging.getLogger(__name__)
3492
-
3493
-
3494
3689
  ##
3495
3690
 
3496
3691
 
3497
- class TidLogFilter(logging.Filter):
3692
+ @dc.dataclass(frozen=True)
3693
+ class ObjMarshalOptions:
3694
+ raw_bytes: bool = False
3695
+ nonstrict_dataclasses: bool = False
3498
3696
 
3499
- def filter(self, record):
3500
- record.tid = threading.get_native_id()
3501
- return True
3502
3697
 
3698
+ class ObjMarshaler(abc.ABC):
3699
+ @abc.abstractmethod
3700
+ def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
3701
+ raise NotImplementedError
3503
3702
 
3504
- ##
3703
+ @abc.abstractmethod
3704
+ def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
3705
+ raise NotImplementedError
3505
3706
 
3506
3707
 
3507
- class JsonLogFormatter(logging.Formatter):
3708
+ class NopObjMarshaler(ObjMarshaler):
3709
+ def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
3710
+ return o
3508
3711
 
3509
- KEYS: ta.Mapping[str, bool] = {
3510
- 'name': False,
3511
- 'msg': False,
3512
- 'args': False,
3513
- 'levelname': False,
3514
- 'levelno': False,
3515
- 'pathname': False,
3516
- 'filename': False,
3517
- 'module': False,
3518
- 'exc_info': True,
3519
- 'exc_text': True,
3520
- 'stack_info': True,
3521
- 'lineno': False,
3522
- 'funcName': False,
3523
- 'created': False,
3524
- 'msecs': False,
3525
- 'relativeCreated': False,
3526
- 'thread': False,
3527
- 'threadName': False,
3528
- 'processName': False,
3529
- 'process': False,
3530
- }
3712
+ def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
3713
+ return o
3531
3714
 
3532
- def format(self, record: logging.LogRecord) -> str:
3533
- dct = {
3534
- k: v
3535
- for k, o in self.KEYS.items()
3536
- for v in [getattr(record, k)]
3537
- if not (o and v is None)
3538
- }
3539
- return json_dumps_compact(dct)
3540
3715
 
3541
-
3542
- ##
3543
-
3544
-
3545
- STANDARD_LOG_FORMAT_PARTS = [
3546
- ('asctime', '%(asctime)-15s'),
3547
- ('process', 'pid=%(process)-6s'),
3548
- ('thread', 'tid=%(thread)x'),
3549
- ('levelname', '%(levelname)s'),
3550
- ('name', '%(name)s'),
3551
- ('separator', '::'),
3552
- ('message', '%(message)s'),
3553
- ]
3554
-
3555
-
3556
- class StandardLogFormatter(logging.Formatter):
3557
-
3558
- @staticmethod
3559
- def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
3560
- return ' '.join(v for k, v in parts)
3561
-
3562
- converter = datetime.datetime.fromtimestamp # type: ignore
3563
-
3564
- def formatTime(self, record, datefmt=None):
3565
- ct = self.converter(record.created) # type: ignore
3566
- if datefmt:
3567
- return ct.strftime(datefmt) # noqa
3568
- else:
3569
- t = ct.strftime('%Y-%m-%d %H:%M:%S')
3570
- return '%s.%03d' % (t, record.msecs) # noqa
3571
-
3572
-
3573
- ##
3574
-
3575
-
3576
- class ProxyLogFilterer(logging.Filterer):
3577
- def __init__(self, underlying: logging.Filterer) -> None: # noqa
3578
- self._underlying = underlying
3579
-
3580
- @property
3581
- def underlying(self) -> logging.Filterer:
3582
- return self._underlying
3583
-
3584
- @property
3585
- def filters(self):
3586
- return self._underlying.filters
3587
-
3588
- @filters.setter
3589
- def filters(self, filters):
3590
- self._underlying.filters = filters
3591
-
3592
- def addFilter(self, filter): # noqa
3593
- self._underlying.addFilter(filter)
3594
-
3595
- def removeFilter(self, filter): # noqa
3596
- self._underlying.removeFilter(filter)
3597
-
3598
- def filter(self, record):
3599
- return self._underlying.filter(record)
3600
-
3601
-
3602
- class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
3603
- def __init__(self, underlying: logging.Handler) -> None: # noqa
3604
- ProxyLogFilterer.__init__(self, underlying)
3605
-
3606
- _underlying: logging.Handler
3607
-
3608
- @property
3609
- def underlying(self) -> logging.Handler:
3610
- return self._underlying
3611
-
3612
- def get_name(self):
3613
- return self._underlying.get_name()
3614
-
3615
- def set_name(self, name):
3616
- self._underlying.set_name(name)
3617
-
3618
- @property
3619
- def name(self):
3620
- return self._underlying.name
3621
-
3622
- @property
3623
- def level(self):
3624
- return self._underlying.level
3625
-
3626
- @level.setter
3627
- def level(self, level):
3628
- self._underlying.level = level
3629
-
3630
- @property
3631
- def formatter(self):
3632
- return self._underlying.formatter
3633
-
3634
- @formatter.setter
3635
- def formatter(self, formatter):
3636
- self._underlying.formatter = formatter
3637
-
3638
- def createLock(self):
3639
- self._underlying.createLock()
3640
-
3641
- def acquire(self):
3642
- self._underlying.acquire()
3643
-
3644
- def release(self):
3645
- self._underlying.release()
3646
-
3647
- def setLevel(self, level):
3648
- self._underlying.setLevel(level)
3649
-
3650
- def format(self, record):
3651
- return self._underlying.format(record)
3652
-
3653
- def emit(self, record):
3654
- self._underlying.emit(record)
3655
-
3656
- def handle(self, record):
3657
- return self._underlying.handle(record)
3658
-
3659
- def setFormatter(self, fmt):
3660
- self._underlying.setFormatter(fmt)
3661
-
3662
- def flush(self):
3663
- self._underlying.flush()
3664
-
3665
- def close(self):
3666
- self._underlying.close()
3667
-
3668
- def handleError(self, record):
3669
- self._underlying.handleError(record)
3670
-
3671
-
3672
- ##
3673
-
3674
-
3675
- class StandardLogHandler(ProxyLogHandler):
3676
- pass
3677
-
3678
-
3679
- ##
3680
-
3681
-
3682
- @contextlib.contextmanager
3683
- def _locking_logging_module_lock() -> ta.Iterator[None]:
3684
- if hasattr(logging, '_acquireLock'):
3685
- logging._acquireLock() # noqa
3686
- try:
3687
- yield
3688
- finally:
3689
- logging._releaseLock() # type: ignore # noqa
3690
-
3691
- elif hasattr(logging, '_lock'):
3692
- # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
3693
- with logging._lock: # noqa
3694
- yield
3695
-
3696
- else:
3697
- raise Exception("Can't find lock in logging module")
3698
-
3699
-
3700
- def configure_standard_logging(
3701
- level: ta.Union[int, str] = logging.INFO,
3702
- *,
3703
- json: bool = False,
3704
- target: ta.Optional[logging.Logger] = None,
3705
- force: bool = False,
3706
- handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
3707
- ) -> ta.Optional[StandardLogHandler]:
3708
- with _locking_logging_module_lock():
3709
- if target is None:
3710
- target = logging.root
3711
-
3712
- #
3713
-
3714
- if not force:
3715
- if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
3716
- return None
3717
-
3718
- #
3719
-
3720
- if handler_factory is not None:
3721
- handler = handler_factory()
3722
- else:
3723
- handler = logging.StreamHandler()
3724
-
3725
- #
3726
-
3727
- formatter: logging.Formatter
3728
- if json:
3729
- formatter = JsonLogFormatter()
3730
- else:
3731
- formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
3732
- handler.setFormatter(formatter)
3733
-
3734
- #
3735
-
3736
- handler.addFilter(TidLogFilter())
3737
-
3738
- #
3739
-
3740
- target.addHandler(handler)
3741
-
3742
- #
3743
-
3744
- if level is not None:
3745
- target.setLevel(level)
3746
-
3747
- #
3748
-
3749
- return StandardLogHandler(handler)
3750
-
3751
-
3752
- ########################################
3753
- # ../../../omlish/lite/marshal.py
3754
- """
3755
- TODO:
3756
- - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
3757
- - namedtuple
3758
- - literals
3759
- """
3760
-
3761
-
3762
- ##
3763
-
3764
-
3765
- @dc.dataclass(frozen=True)
3766
- class ObjMarshalOptions:
3767
- raw_bytes: bool = False
3768
- nonstrict_dataclasses: bool = False
3769
-
3770
-
3771
- class ObjMarshaler(abc.ABC):
3772
- @abc.abstractmethod
3773
- def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
3774
- raise NotImplementedError
3775
-
3776
- @abc.abstractmethod
3777
- def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
3778
- raise NotImplementedError
3779
-
3780
-
3781
- class NopObjMarshaler(ObjMarshaler):
3782
- def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
3783
- return o
3784
-
3785
- def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
3786
- return o
3787
-
3788
-
3789
- @dc.dataclass()
3790
- class ProxyObjMarshaler(ObjMarshaler):
3791
- m: ta.Optional[ObjMarshaler] = None
3716
+ @dc.dataclass()
3717
+ class ProxyObjMarshaler(ObjMarshaler):
3718
+ m: ta.Optional[ObjMarshaler] = None
3792
3719
 
3793
3720
  def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
3794
3721
  return check.not_none(self.m).marshal(o, ctx)
@@ -4210,6 +4137,60 @@ def check_runtime_version() -> None:
4210
4137
  raise OSError(f'Requires python {REQUIRED_PYTHON_VERSION}, got {sys.version_info} from {sys.executable}') # noqa
4211
4138
 
4212
4139
 
4140
+ ########################################
4141
+ # ../../../omlish/logs/json.py
4142
+ """
4143
+ TODO:
4144
+ - translate json keys
4145
+ """
4146
+
4147
+
4148
+ class JsonLogFormatter(logging.Formatter):
4149
+ KEYS: ta.Mapping[str, bool] = {
4150
+ 'name': False,
4151
+ 'msg': False,
4152
+ 'args': False,
4153
+ 'levelname': False,
4154
+ 'levelno': False,
4155
+ 'pathname': False,
4156
+ 'filename': False,
4157
+ 'module': False,
4158
+ 'exc_info': True,
4159
+ 'exc_text': True,
4160
+ 'stack_info': True,
4161
+ 'lineno': False,
4162
+ 'funcName': False,
4163
+ 'created': False,
4164
+ 'msecs': False,
4165
+ 'relativeCreated': False,
4166
+ 'thread': False,
4167
+ 'threadName': False,
4168
+ 'processName': False,
4169
+ 'process': False,
4170
+ }
4171
+
4172
+ def __init__(
4173
+ self,
4174
+ *args: ta.Any,
4175
+ json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
4176
+ **kwargs: ta.Any,
4177
+ ) -> None:
4178
+ super().__init__(*args, **kwargs)
4179
+
4180
+ if json_dumps is None:
4181
+ json_dumps = json_dumps_compact
4182
+ self._json_dumps = json_dumps
4183
+
4184
+ def format(self, record: logging.LogRecord) -> str:
4185
+ dct = {
4186
+ k: v
4187
+ for k, o in self.KEYS.items()
4188
+ for v in [getattr(record, k)]
4189
+ if not (o and v is None)
4190
+ }
4191
+ return self._json_dumps(dct)
4192
+
4193
+
4213
4194
  ########################################
4214
4195
  # ../../interp/types.py
4215
4196
 
@@ -4391,95 +4372,138 @@ class PyprojectConfigPreparer:
4391
4372
  def prepare_config(self, dct: ta.Mapping[str, ta.Any]) -> PyprojectConfig:
4392
4373
  pcfg: PyprojectConfig = unmarshal_obj(dct, PyprojectConfig)
4393
4374
 
4394
- ivs = dict(self._inherit_venvs(pcfg.venvs or {}))
4395
- for k, v in ivs.items():
4396
- v = dc.replace(v, srcs=self._resolve_srcs(v.srcs or [], pcfg.srcs or {}))
4397
- v = dc.replace(v, interp=self._fixup_interp(v.interp))
4398
- ivs[k] = v
4375
+ ivs = dict(self._inherit_venvs(pcfg.venvs or {}))
4376
+ for k, v in ivs.items():
4377
+ v = dc.replace(v, srcs=self._resolve_srcs(v.srcs or [], pcfg.srcs or {}))
4378
+ v = dc.replace(v, interp=self._fixup_interp(v.interp))
4379
+ ivs[k] = v
4380
+
4381
+ pcfg = dc.replace(pcfg, venvs=ivs)
4382
+ return pcfg
4383
+
4384
+
4385
+ ########################################
4386
+ # ../../../omlish/logs/standard.py
4387
+ """
4388
+ TODO:
4389
+ - structured
4390
+ - prefixed
4391
+ - debug
4392
+ """
4393
+
4394
+
4395
+ ##
4396
+
4397
+
4398
+ STANDARD_LOG_FORMAT_PARTS = [
4399
+ ('asctime', '%(asctime)-15s'),
4400
+ ('process', 'pid=%(process)-6s'),
4401
+ ('thread', 'tid=%(thread)x'),
4402
+ ('levelname', '%(levelname)s'),
4403
+ ('name', '%(name)s'),
4404
+ ('separator', '::'),
4405
+ ('message', '%(message)s'),
4406
+ ]
4407
+
4408
+
4409
+ class StandardLogFormatter(logging.Formatter):
4410
+ @staticmethod
4411
+ def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
4412
+ return ' '.join(v for k, v in parts)
4413
+
4414
+ converter = datetime.datetime.fromtimestamp # type: ignore
4415
+
4416
+ def formatTime(self, record, datefmt=None):
4417
+ ct = self.converter(record.created) # type: ignore
4418
+ if datefmt:
4419
+ return ct.strftime(datefmt) # noqa
4420
+ else:
4421
+ t = ct.strftime('%Y-%m-%d %H:%M:%S')
4422
+ return '%s.%03d' % (t, record.msecs) # noqa
4423
+
4424
+
4425
+ ##
4426
+
4427
+
4428
+ class StandardLogHandler(ProxyLogHandler):
4429
+ pass
4430
+
4431
+
4432
+ ##
4433
+
4434
+
4435
+ @contextlib.contextmanager
4436
+ def _locking_logging_module_lock() -> ta.Iterator[None]:
4437
+ if hasattr(logging, '_acquireLock'):
4438
+ logging._acquireLock() # noqa
4439
+ try:
4440
+ yield
4441
+ finally:
4442
+ logging._releaseLock() # type: ignore # noqa
4399
4443
 
4400
- pcfg = dc.replace(pcfg, venvs=ivs)
4401
- return pcfg
4444
+ elif hasattr(logging, '_lock'):
4445
+ # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
4446
+ with logging._lock: # noqa
4447
+ yield
4402
4448
 
4449
+ else:
4450
+ raise Exception("Can't find lock in logging module")
4403
4451
 
4404
- ########################################
4405
- # ../reqs.py
4406
- """
4407
- TODO:
4408
- - embed pip._internal.req.parse_requirements, add additional env stuff? breaks compat with raw pip
4409
- """
4410
4452
 
4453
+ def configure_standard_logging(
4454
+ level: ta.Union[int, str] = logging.INFO,
4455
+ *,
4456
+ json: bool = False,
4457
+ target: ta.Optional[logging.Logger] = None,
4458
+ force: bool = False,
4459
+ handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
4460
+ ) -> ta.Optional[StandardLogHandler]:
4461
+ with _locking_logging_module_lock():
4462
+ if target is None:
4463
+ target = logging.root
4411
4464
 
4412
- class RequirementsRewriter:
4413
- def __init__(
4414
- self,
4415
- venv: ta.Optional[str] = None,
4416
- ) -> None:
4417
- super().__init__()
4418
- self._venv = venv
4465
+ #
4419
4466
 
4420
- @cached_nullary
4421
- def _tmp_dir(self) -> str:
4422
- return tempfile.mkdtemp('-omlish-reqs')
4467
+ if not force:
4468
+ if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
4469
+ return None
4423
4470
 
4424
- VENV_MAGIC = '# @omlish-venv'
4471
+ #
4425
4472
 
4426
- def rewrite_file(self, in_file: str) -> str:
4427
- with open(in_file) as f:
4428
- src = f.read()
4473
+ if handler_factory is not None:
4474
+ handler = handler_factory()
4475
+ else:
4476
+ handler = logging.StreamHandler()
4429
4477
 
4430
- in_lines = src.splitlines(keepends=True)
4431
- out_lines = []
4478
+ #
4432
4479
 
4433
- for l in in_lines:
4434
- if self.VENV_MAGIC in l:
4435
- lp, _, rp = l.partition(self.VENV_MAGIC)
4436
- rp = rp.partition('#')[0]
4437
- omit = False
4438
- for v in rp.split():
4439
- if v[0] == '!':
4440
- if self._venv is not None and self._venv == v[1:]:
4441
- omit = True
4442
- break
4443
- else:
4444
- raise NotImplementedError
4480
+ formatter: logging.Formatter
4481
+ if json:
4482
+ formatter = JsonLogFormatter()
4483
+ else:
4484
+ formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
4485
+ handler.setFormatter(formatter)
4445
4486
 
4446
- if omit:
4447
- out_lines.append('# OMITTED: ' + l)
4448
- continue
4487
+ #
4449
4488
 
4450
- out_req = self.rewrite(l.rstrip('\n'), for_file=True)
4451
- out_lines.append(out_req + '\n')
4489
+ handler.addFilter(TidLogFilter())
4452
4490
 
4453
- out_file = os.path.join(self._tmp_dir(), os.path.basename(in_file))
4454
- if os.path.exists(out_file):
4455
- raise Exception(f'file exists: {out_file}')
4491
+ #
4456
4492
 
4457
- with open(out_file, 'w') as f:
4458
- f.write(''.join(out_lines))
4459
- log.info('Rewrote requirements file %s to %s', in_file, out_file)
4460
- return out_file
4493
+ target.addHandler(handler)
4461
4494
 
4462
- def rewrite(self, in_req: str, *, for_file: bool = False) -> str:
4463
- if in_req.strip().startswith('-r'):
4464
- l = in_req.strip()
4465
- lp, _, rp = l.partition(' ')
4466
- if lp == '-r':
4467
- inc_in_file, _, rest = rp.partition(' ')
4468
- else:
4469
- inc_in_file, rest = lp[2:], rp
4495
+ #
4470
4496
 
4471
- inc_out_file = self.rewrite_file(inc_in_file)
4472
- if for_file:
4473
- return ' '.join(['-r ', inc_out_file, rest])
4474
- else:
4475
- return '-r' + inc_out_file
4497
+ if level is not None:
4498
+ target.setLevel(level)
4476
4499
 
4477
- else:
4478
- return in_req
4500
+ #
4501
+
4502
+ return StandardLogHandler(handler)
4479
4503
 
4480
4504
 
4481
4505
  ########################################
4482
- # ../../../omlish/lite/subprocesses.py
4506
+ # ../../../omlish/subprocesses.py
4483
4507
 
4484
4508
 
4485
4509
  ##
@@ -4530,8 +4554,8 @@ def subprocess_close(
4530
4554
  ##
4531
4555
 
4532
4556
 
4533
- class AbstractSubprocesses(abc.ABC): # noqa
4534
- DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = log
4557
+ class BaseSubprocesses(abc.ABC): # noqa
4558
+ DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = None
4535
4559
 
4536
4560
  def __init__(
4537
4561
  self,
@@ -4544,6 +4568,9 @@ class AbstractSubprocesses(abc.ABC): # noqa
4544
4568
  self._log = log if log is not None else self.DEFAULT_LOGGER
4545
4569
  self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS
4546
4570
 
4571
+ def set_logger(self, log: ta.Optional[logging.Logger]) -> None:
4572
+ self._log = log
4573
+
4547
4574
  #
4548
4575
 
4549
4576
  def prepare_args(
@@ -4655,23 +4682,25 @@ class AbstractSubprocesses(abc.ABC): # noqa
4655
4682
  ##
4656
4683
 
4657
4684
 
4658
- class Subprocesses(AbstractSubprocesses):
4685
+ class AbstractSubprocesses(BaseSubprocesses, abc.ABC):
4686
+ @abc.abstractmethod
4659
4687
  def check_call(
4660
4688
  self,
4661
4689
  *cmd: str,
4662
4690
  stdout: ta.Any = sys.stderr,
4663
4691
  **kwargs: ta.Any,
4664
4692
  ) -> None:
4665
- with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs): # noqa
4666
- subprocess.check_call(cmd, **kwargs)
4693
+ raise NotImplementedError
4667
4694
 
4695
+ @abc.abstractmethod
4668
4696
  def check_output(
4669
4697
  self,
4670
4698
  *cmd: str,
4671
4699
  **kwargs: ta.Any,
4672
4700
  ) -> bytes:
4673
- with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs): # noqa
4674
- return subprocess.check_output(cmd, **kwargs)
4701
+ raise NotImplementedError
4702
+
4703
+ #
4675
4704
 
4676
4705
  def check_output_str(
4677
4706
  self,
@@ -4713,9 +4742,94 @@ class Subprocesses(AbstractSubprocesses):
4713
4742
  return ret.decode().strip()
4714
4743
 
4715
4744
 
4745
+ ##
4746
+
4747
+
4748
+ class Subprocesses(AbstractSubprocesses):
4749
+ def check_call(
4750
+ self,
4751
+ *cmd: str,
4752
+ stdout: ta.Any = sys.stderr,
4753
+ **kwargs: ta.Any,
4754
+ ) -> None:
4755
+ with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs): # noqa
4756
+ subprocess.check_call(cmd, **kwargs)
4757
+
4758
+ def check_output(
4759
+ self,
4760
+ *cmd: str,
4761
+ **kwargs: ta.Any,
4762
+ ) -> bytes:
4763
+ with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs): # noqa
4764
+ return subprocess.check_output(cmd, **kwargs)
4765
+
4766
+
4716
4767
  subprocesses = Subprocesses()
4717
4768
 
4718
4769
 
4770
+ ##
4771
+
4772
+
4773
+ class AbstractAsyncSubprocesses(BaseSubprocesses):
4774
+ @abc.abstractmethod
4775
+ async def check_call(
4776
+ self,
4777
+ *cmd: str,
4778
+ stdout: ta.Any = sys.stderr,
4779
+ **kwargs: ta.Any,
4780
+ ) -> None:
4781
+ raise NotImplementedError
4782
+
4783
+ @abc.abstractmethod
4784
+ async def check_output(
4785
+ self,
4786
+ *cmd: str,
4787
+ **kwargs: ta.Any,
4788
+ ) -> bytes:
4789
+ raise NotImplementedError
4790
+
4791
+ #
4792
+
4793
+ async def check_output_str(
4794
+ self,
4795
+ *cmd: str,
4796
+ **kwargs: ta.Any,
4797
+ ) -> str:
4798
+ return (await self.check_output(*cmd, **kwargs)).decode().strip()
4799
+
4800
+ #
4801
+
4802
+ async def try_call(
4803
+ self,
4804
+ *cmd: str,
4805
+ **kwargs: ta.Any,
4806
+ ) -> bool:
4807
+ if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
4808
+ return False
4809
+ else:
4810
+ return True
4811
+
4812
+ async def try_output(
4813
+ self,
4814
+ *cmd: str,
4815
+ **kwargs: ta.Any,
4816
+ ) -> ta.Optional[bytes]:
4817
+ if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
4818
+ return None
4819
+ else:
4820
+ return ret
4821
+
4822
+ async def try_output_str(
4823
+ self,
4824
+ *cmd: str,
4825
+ **kwargs: ta.Any,
4826
+ ) -> ta.Optional[str]:
4827
+ if (ret := await self.try_output(*cmd, **kwargs)) is None:
4828
+ return None
4829
+ else:
4830
+ return ret.decode().strip()
4831
+
4832
+
4719
4833
  ########################################
4720
4834
  # ../../git.py
4721
4835
  """
@@ -5118,7 +5232,7 @@ def get_git_status(
5118
5232
 
5119
5233
 
5120
5234
  ########################################
5121
- # ../../../omlish/lite/asyncio/subprocesses.py
5235
+ # ../../../omlish/asyncs/asyncio/subprocesses.py
5122
5236
 
5123
5237
 
5124
5238
  ##
@@ -5129,6 +5243,8 @@ class AsyncioProcessCommunicator:
5129
5243
  self,
5130
5244
  proc: asyncio.subprocess.Process,
5131
5245
  loop: ta.Optional[ta.Any] = None,
5246
+ *,
5247
+ log: ta.Optional[logging.Logger] = None,
5132
5248
  ) -> None:
5133
5249
  super().__init__()
5134
5250
 
@@ -5137,6 +5253,7 @@ class AsyncioProcessCommunicator:
5137
5253
 
5138
5254
  self._proc = proc
5139
5255
  self._loop = loop
5256
+ self._log = log
5140
5257
 
5141
5258
  self._transport: asyncio.base_subprocess.BaseSubprocessTransport = check.isinstance(
5142
5259
  proc._transport, # type: ignore # noqa
@@ -5152,19 +5269,19 @@ class AsyncioProcessCommunicator:
5152
5269
  try:
5153
5270
  if input is not None:
5154
5271
  stdin.write(input)
5155
- if self._debug:
5156
- log.debug('%r communicate: feed stdin (%s bytes)', self, len(input))
5272
+ if self._debug and self._log is not None:
5273
+ self._log.debug('%r communicate: feed stdin (%s bytes)', self, len(input))
5157
5274
 
5158
5275
  await stdin.drain()
5159
5276
 
5160
5277
  except (BrokenPipeError, ConnectionResetError) as exc:
5161
5278
  # communicate() ignores BrokenPipeError and ConnectionResetError. write() and drain() can raise these
5162
5279
  # exceptions.
5163
- if self._debug:
5164
- log.debug('%r communicate: stdin got %r', self, exc)
5280
+ if self._debug and self._log is not None:
5281
+ self._log.debug('%r communicate: stdin got %r', self, exc)
5165
5282
 
5166
- if self._debug:
5167
- log.debug('%r communicate: close stdin', self)
5283
+ if self._debug and self._log is not None:
5284
+ self._log.debug('%r communicate: close stdin', self)
5168
5285
 
5169
5286
  stdin.close()
5170
5287
 
@@ -5180,15 +5297,15 @@ class AsyncioProcessCommunicator:
5180
5297
  check.equal(fd, 1)
5181
5298
  stream = check.not_none(self._proc.stdout)
5182
5299
 
5183
- if self._debug:
5300
+ if self._debug and self._log is not None:
5184
5301
  name = 'stdout' if fd == 1 else 'stderr'
5185
- log.debug('%r communicate: read %s', self, name)
5302
+ self._log.debug('%r communicate: read %s', self, name)
5186
5303
 
5187
5304
  output = await stream.read()
5188
5305
 
5189
- if self._debug:
5306
+ if self._debug and self._log is not None:
5190
5307
  name = 'stdout' if fd == 1 else 'stderr'
5191
- log.debug('%r communicate: close %s', self, name)
5308
+ self._log.debug('%r communicate: close %s', self, name)
5192
5309
 
5193
5310
  transport.close()
5194
5311
 
@@ -5237,7 +5354,7 @@ class AsyncioProcessCommunicator:
5237
5354
  ##
5238
5355
 
5239
5356
 
5240
- class AsyncioSubprocesses(AbstractSubprocesses):
5357
+ class AsyncioSubprocesses(AbstractAsyncSubprocesses):
5241
5358
  async def communicate(
5242
5359
  self,
5243
5360
  proc: asyncio.subprocess.Process,
@@ -5334,45 +5451,6 @@ class AsyncioSubprocesses(AbstractSubprocesses):
5334
5451
  with self.prepare_and_wrap(*cmd, stdout=subprocess.PIPE, check=True, **kwargs) as (cmd, kwargs): # noqa
5335
5452
  return check.not_none((await self.run(*cmd, **kwargs)).stdout)
5336
5453
 
5337
- async def check_output_str(
5338
- self,
5339
- *cmd: str,
5340
- **kwargs: ta.Any,
5341
- ) -> str:
5342
- return (await self.check_output(*cmd, **kwargs)).decode().strip()
5343
-
5344
- #
5345
-
5346
- async def try_call(
5347
- self,
5348
- *cmd: str,
5349
- **kwargs: ta.Any,
5350
- ) -> bool:
5351
- if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
5352
- return False
5353
- else:
5354
- return True
5355
-
5356
- async def try_output(
5357
- self,
5358
- *cmd: str,
5359
- **kwargs: ta.Any,
5360
- ) -> ta.Optional[bytes]:
5361
- if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
5362
- return None
5363
- else:
5364
- return ret
5365
-
5366
- async def try_output_str(
5367
- self,
5368
- *cmd: str,
5369
- **kwargs: ta.Any,
5370
- ) -> ta.Optional[str]:
5371
- if (ret := await self.try_output(*cmd, **kwargs)) is None:
5372
- return None
5373
- else:
5374
- return ret.decode().strip()
5375
-
5376
5454
 
5377
5455
  asyncio_subprocesses = AsyncioSubprocesses()
5378
5456
 
@@ -6505,6 +6583,7 @@ class PyenvVersionInstaller:
6505
6583
  self._version,
6506
6584
  ]
6507
6585
 
6586
+ full_args: ta.List[str]
6508
6587
  if self._given_install_name is not None:
6509
6588
  full_args = [
6510
6589
  os.path.join(check.not_none(await self._pyenv.root()), 'plugins', 'python-build', 'bin', 'python-build'), # noqa