ominfra 0.0.0.dev157__py3-none-any.whl → 0.0.0.dev158__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those package versions as published.
@@ -78,7 +78,7 @@ ConfigMapping = ta.Mapping[str, ta.Any]
  # ../../../threadworkers.py
  ThreadWorkerT = ta.TypeVar('ThreadWorkerT', bound='ThreadWorker')

- # ../../../../omlish/lite/subprocesses.py
+ # ../../../../omlish/subprocesses.py
  SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias


@@ -1437,6 +1437,13 @@ json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON
  json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)


+ ########################################
+ # ../../../../../omlish/lite/logs.py
+
+
+ log = logging.getLogger(__name__)
+
+
  ########################################
  # ../../../../../omlish/lite/reflect.py

@@ -1559,6 +1566,116 @@ def format_num_bytes(num_bytes: int) -> str:
      return f'{num_bytes / 1024 ** (len(FORMAT_NUM_BYTES_SUFFIXES) - 1):.2f}{FORMAT_NUM_BYTES_SUFFIXES[-1]}'


+ ########################################
+ # ../../../../../omlish/logs/filters.py
+
+
+ class TidLogFilter(logging.Filter):
+     def filter(self, record):
+         record.tid = threading.get_native_id()
+         return True
+
+
+ ########################################
+ # ../../../../../omlish/logs/proxy.py
+
+
+ class ProxyLogFilterer(logging.Filterer):
+     def __init__(self, underlying: logging.Filterer) -> None:  # noqa
+         self._underlying = underlying
+
+     @property
+     def underlying(self) -> logging.Filterer:
+         return self._underlying
+
+     @property
+     def filters(self):
+         return self._underlying.filters
+
+     @filters.setter
+     def filters(self, filters):
+         self._underlying.filters = filters
+
+     def addFilter(self, filter):  # noqa
+         self._underlying.addFilter(filter)
+
+     def removeFilter(self, filter):  # noqa
+         self._underlying.removeFilter(filter)
+
+     def filter(self, record):
+         return self._underlying.filter(record)
+
+
+ class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
+     def __init__(self, underlying: logging.Handler) -> None:  # noqa
+         ProxyLogFilterer.__init__(self, underlying)
+
+     _underlying: logging.Handler
+
+     @property
+     def underlying(self) -> logging.Handler:
+         return self._underlying
+
+     def get_name(self):
+         return self._underlying.get_name()
+
+     def set_name(self, name):
+         self._underlying.set_name(name)
+
+     @property
+     def name(self):
+         return self._underlying.name
+
+     @property
+     def level(self):
+         return self._underlying.level
+
+     @level.setter
+     def level(self, level):
+         self._underlying.level = level
+
+     @property
+     def formatter(self):
+         return self._underlying.formatter
+
+     @formatter.setter
+     def formatter(self, formatter):
+         self._underlying.formatter = formatter
+
+     def createLock(self):
+         self._underlying.createLock()
+
+     def acquire(self):
+         self._underlying.acquire()
+
+     def release(self):
+         self._underlying.release()
+
+     def setLevel(self, level):
+         self._underlying.setLevel(level)
+
+     def format(self, record):
+         return self._underlying.format(record)
+
+     def emit(self, record):
+         self._underlying.emit(record)
+
+     def handle(self, record):
+         return self._underlying.handle(record)
+
+     def setFormatter(self, fmt):
+         self._underlying.setFormatter(fmt)
+
+     def flush(self):
+         self._underlying.flush()
+
+     def close(self):
+         self._underlying.close()
+
+     def handleError(self, record):
+         self._underlying.handleError(record)
+
+
  ########################################
  # ../../../../../omlish/os/pidfile.py

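
The filter and proxy classes added above are pure delegation: ProxyLogHandler forwards every logging.Handler call to a wrapped handler, and TidLogFilter stamps each record with the native thread id. A minimal sketch of how they compose, assuming they are importable from the new omlish.logs.filters and omlish.logs.proxy modules named in the file headers (illustrative only, not part of the diff):

import logging

from omlish.logs.filters import TidLogFilter  # assumed import paths, per the
from omlish.logs.proxy import ProxyLogHandler  # new file headers above

underlying = logging.StreamHandler()
underlying.setFormatter(logging.Formatter('%(levelname)s tid=%(tid)s :: %(message)s'))

handler = ProxyLogHandler(underlying)  # every Handler call is forwarded to `underlying`
handler.addFilter(TidLogFilter())      # filter lands on the underlying handler, tagging records with `tid`

logger = logging.getLogger('demo')
logger.addHandler(handler)
logger.setLevel(logging.INFO)
logger.info('hello')  # -> e.g. "INFO tid=12345 :: hello"
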
@@ -1988,6 +2105,52 @@ class AwsDataclassMeta:
          return AwsDataclassMeta.Converters(d2a, a2d)


+ ########################################
+ # ../cursor.py
+
+
+ class JournalctlToAwsCursor:
+     def __init__(
+             self,
+             cursor_file: ta.Optional[str] = None,
+             *,
+             ensure_locked: ta.Optional[ta.Callable[[], None]] = None,
+     ) -> None:
+         super().__init__()
+         self._cursor_file = cursor_file
+         self._ensure_locked = ensure_locked
+
+     #
+
+     def get(self) -> ta.Optional[str]:
+         if self._ensure_locked is not None:
+             self._ensure_locked()
+
+         if not (cf := self._cursor_file):
+             return None
+         cf = os.path.expanduser(cf)
+
+         try:
+             with open(cf) as f:
+                 return f.read().strip()
+         except FileNotFoundError:
+             return None
+
+     def set(self, cursor: str) -> None:
+         if self._ensure_locked is not None:
+             self._ensure_locked()
+
+         if not (cf := self._cursor_file):
+             return
+         cf = os.path.expanduser(cf)
+
+         log.info('Writing cursor file %s : %s', cf, cursor)
+         with open(ncf := cf + '.next', 'w') as f:
+             f.write(cursor)
+
+         os.rename(ncf, cf)
+
+
  ########################################
  # ../../../../../omlish/io/buffers.py

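
A short, illustrative sketch of the relocated cursor helper; the import path is a guess based on the ../cursor.py header (in the amalgamated script the class is defined inline), and a temporary directory stands in for a real cursor file:

import tempfile

# Hypothetical import path; adjust to wherever the package actually exposes it.
from ominfra.clouds.aws.journald2aws.cursor import JournalctlToAwsCursor

with tempfile.TemporaryDirectory() as td:
    cur = JournalctlToAwsCursor(cursor_file=f'{td}/cursor')
    assert cur.get() is None            # a missing cursor file reads as None
    cur.set('s=abc123;i=42')            # writes '<file>.next', then renames it into place
    assert cur.get() == 's=abc123;i=42'
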
@@ -2283,333 +2446,63 @@ class aclosing(contextlib.AbstractAsyncContextManager): # noqa


  ########################################
- # ../../../../../omlish/lite/logs.py
+ # ../../../../../omlish/lite/marshal.py
  """
  TODO:
-  - translate json keys
-  - debug
+  - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
+  - namedtuple
+  - literals
+  - newtypes?
  """


- log = logging.getLogger(__name__)
-
-
  ##


- class TidLogFilter(logging.Filter):
+ @dc.dataclass(frozen=True)
+ class ObjMarshalOptions:
+     raw_bytes: bool = False
+     nonstrict_dataclasses: bool = False

-     def filter(self, record):
-         record.tid = threading.get_native_id()
-         return True

+ class ObjMarshaler(abc.ABC):
+     @abc.abstractmethod
+     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         raise NotImplementedError

- ##
+     @abc.abstractmethod
+     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         raise NotImplementedError


- class JsonLogFormatter(logging.Formatter):
+ class NopObjMarshaler(ObjMarshaler):
+     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return o

-     KEYS: ta.Mapping[str, bool] = {
-         'name': False,
-         'msg': False,
-         'args': False,
-         'levelname': False,
-         'levelno': False,
-         'pathname': False,
-         'filename': False,
-         'module': False,
-         'exc_info': True,
-         'exc_text': True,
-         'stack_info': True,
-         'lineno': False,
-         'funcName': False,
-         'created': False,
-         'msecs': False,
-         'relativeCreated': False,
-         'thread': False,
-         'threadName': False,
-         'processName': False,
-         'process': False,
-     }
+     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return o

-     def format(self, record: logging.LogRecord) -> str:
-         dct = {
-             k: v
-             for k, o in self.KEYS.items()
-             for v in [getattr(record, k)]
-             if not (o and v is None)
-         }
-         return json_dumps_compact(dct)

+ @dc.dataclass()
+ class ProxyObjMarshaler(ObjMarshaler):
+     m: ta.Optional[ObjMarshaler] = None

- ##
+     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return check.not_none(self.m).marshal(o, ctx)

+     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return check.not_none(self.m).unmarshal(o, ctx)

- STANDARD_LOG_FORMAT_PARTS = [
-     ('asctime', '%(asctime)-15s'),
-     ('process', 'pid=%(process)-6s'),
-     ('thread', 'tid=%(thread)x'),
-     ('levelname', '%(levelname)s'),
-     ('name', '%(name)s'),
-     ('separator', '::'),
-     ('message', '%(message)s'),
- ]

+ @dc.dataclass(frozen=True)
+ class CastObjMarshaler(ObjMarshaler):
+     ty: type

- class StandardLogFormatter(logging.Formatter):
+     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return o

-     @staticmethod
-     def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
-         return ' '.join(v for k, v in parts)
-
-     converter = datetime.datetime.fromtimestamp  # type: ignore
-
-     def formatTime(self, record, datefmt=None):
-         ct = self.converter(record.created)  # type: ignore
-         if datefmt:
-             return ct.strftime(datefmt)  # noqa
-         else:
-             t = ct.strftime('%Y-%m-%d %H:%M:%S')
-             return '%s.%03d' % (t, record.msecs)  # noqa
-
-
- ##
-
-
- class ProxyLogFilterer(logging.Filterer):
-     def __init__(self, underlying: logging.Filterer) -> None:  # noqa
-         self._underlying = underlying
-
-     @property
-     def underlying(self) -> logging.Filterer:
-         return self._underlying
-
-     @property
-     def filters(self):
-         return self._underlying.filters
-
-     @filters.setter
-     def filters(self, filters):
-         self._underlying.filters = filters
-
-     def addFilter(self, filter):  # noqa
-         self._underlying.addFilter(filter)
-
-     def removeFilter(self, filter):  # noqa
-         self._underlying.removeFilter(filter)
-
-     def filter(self, record):
-         return self._underlying.filter(record)
-
-
- class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
-     def __init__(self, underlying: logging.Handler) -> None:  # noqa
-         ProxyLogFilterer.__init__(self, underlying)
-
-     _underlying: logging.Handler
-
-     @property
-     def underlying(self) -> logging.Handler:
-         return self._underlying
-
-     def get_name(self):
-         return self._underlying.get_name()
-
-     def set_name(self, name):
-         self._underlying.set_name(name)
-
-     @property
-     def name(self):
-         return self._underlying.name
-
-     @property
-     def level(self):
-         return self._underlying.level
-
-     @level.setter
-     def level(self, level):
-         self._underlying.level = level
-
-     @property
-     def formatter(self):
-         return self._underlying.formatter
-
-     @formatter.setter
-     def formatter(self, formatter):
-         self._underlying.formatter = formatter
-
-     def createLock(self):
-         self._underlying.createLock()
-
-     def acquire(self):
-         self._underlying.acquire()
-
-     def release(self):
-         self._underlying.release()
-
-     def setLevel(self, level):
-         self._underlying.setLevel(level)
-
-     def format(self, record):
-         return self._underlying.format(record)
-
-     def emit(self, record):
-         self._underlying.emit(record)
-
-     def handle(self, record):
-         return self._underlying.handle(record)
-
-     def setFormatter(self, fmt):
-         self._underlying.setFormatter(fmt)
-
-     def flush(self):
-         self._underlying.flush()
-
-     def close(self):
-         self._underlying.close()
-
-     def handleError(self, record):
-         self._underlying.handleError(record)
-
-
- ##
-
-
- class StandardLogHandler(ProxyLogHandler):
-     pass
-
-
- ##
-
-
- @contextlib.contextmanager
- def _locking_logging_module_lock() -> ta.Iterator[None]:
-     if hasattr(logging, '_acquireLock'):
-         logging._acquireLock()  # noqa
-         try:
-             yield
-         finally:
-             logging._releaseLock()  # type: ignore  # noqa
-
-     elif hasattr(logging, '_lock'):
-         # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
-         with logging._lock:  # noqa
-             yield
-
-     else:
-         raise Exception("Can't find lock in logging module")
-
-
- def configure_standard_logging(
-         level: ta.Union[int, str] = logging.INFO,
-         *,
-         json: bool = False,
-         target: ta.Optional[logging.Logger] = None,
-         force: bool = False,
-         handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
- ) -> ta.Optional[StandardLogHandler]:
-     with _locking_logging_module_lock():
-         if target is None:
-             target = logging.root
-
-         #
-
-         if not force:
-             if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
-                 return None
-
-         #
-
-         if handler_factory is not None:
-             handler = handler_factory()
-         else:
-             handler = logging.StreamHandler()
-
-         #
-
-         formatter: logging.Formatter
-         if json:
-             formatter = JsonLogFormatter()
-         else:
-             formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
-         handler.setFormatter(formatter)
-
-         #
-
-         handler.addFilter(TidLogFilter())
-
-         #
-
-         target.addHandler(handler)
-
-         #
-
-         if level is not None:
-             target.setLevel(level)
-
-         #
-
-         return StandardLogHandler(handler)
-
-
- ########################################
- # ../../../../../omlish/lite/marshal.py
- """
- TODO:
-  - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
-  - namedtuple
-  - literals
-  - newtypes?
- """
-
-
- ##
-
-
- @dc.dataclass(frozen=True)
- class ObjMarshalOptions:
-     raw_bytes: bool = False
-     nonstrict_dataclasses: bool = False
-
-
- class ObjMarshaler(abc.ABC):
-     @abc.abstractmethod
-     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         raise NotImplementedError
-
-     @abc.abstractmethod
-     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         raise NotImplementedError
-
-
- class NopObjMarshaler(ObjMarshaler):
-     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return o
-
-     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return o
-
-
- @dc.dataclass()
- class ProxyObjMarshaler(ObjMarshaler):
-     m: ta.Optional[ObjMarshaler] = None
-
-     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return check.not_none(self.m).marshal(o, ctx)
-
-     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return check.not_none(self.m).unmarshal(o, ctx)
-
-
- @dc.dataclass(frozen=True)
- class CastObjMarshaler(ObjMarshaler):
-     ty: type
-
-     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return o
-
-     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return self.ty(o)
+     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return self.ty(o)


  class DynamicObjMarshaler(ObjMarshaler):
@@ -3015,49 +2908,57 @@ def check_runtime_version() -> None:


  ########################################
- # ../cursor.py
+ # ../../../../../omlish/logs/json.py
+ """
+ TODO:
+  - translate json keys
+ """


- class JournalctlToAwsCursor:
+ class JsonLogFormatter(logging.Formatter):
+     KEYS: ta.Mapping[str, bool] = {
+         'name': False,
+         'msg': False,
+         'args': False,
+         'levelname': False,
+         'levelno': False,
+         'pathname': False,
+         'filename': False,
+         'module': False,
+         'exc_info': True,
+         'exc_text': True,
+         'stack_info': True,
+         'lineno': False,
+         'funcName': False,
+         'created': False,
+         'msecs': False,
+         'relativeCreated': False,
+         'thread': False,
+         'threadName': False,
+         'processName': False,
+         'process': False,
+     }
+
      def __init__(
              self,
-             cursor_file: ta.Optional[str] = None,
-             *,
-             ensure_locked: ta.Optional[ta.Callable[[], None]] = None,
+             *args: ta.Any,
+             json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
+             **kwargs: ta.Any,
      ) -> None:
-         super().__init__()
-         self._cursor_file = cursor_file
-         self._ensure_locked = ensure_locked
-
-     #
-
-     def get(self) -> ta.Optional[str]:
-         if self._ensure_locked is not None:
-             self._ensure_locked()
-
-         if not (cf := self._cursor_file):
-             return None
-         cf = os.path.expanduser(cf)
-
-         try:
-             with open(cf) as f:
-                 return f.read().strip()
-         except FileNotFoundError:
-             return None
-
-     def set(self, cursor: str) -> None:
-         if self._ensure_locked is not None:
-             self._ensure_locked()
-
-         if not (cf := self._cursor_file):
-             return
-         cf = os.path.expanduser(cf)
+         super().__init__(*args, **kwargs)

-         log.info('Writing cursor file %s : %s', cf, cursor)
-         with open(ncf := cf + '.next', 'w') as f:
-             f.write(cursor)
+         if json_dumps is None:
+             json_dumps = json_dumps_compact
+         self._json_dumps = json_dumps

-         os.rename(ncf, cf)
+     def format(self, record: logging.LogRecord) -> str:
+         dct = {
+             k: v
+             for k, o in self.KEYS.items()
+             for v in [getattr(record, k)]
+             if not (o and v is None)
+         }
+         return self._json_dumps(dct)


  ########################################
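
Compared with the old lite/logs.py version, the relocated JsonLogFormatter now takes an injectable json_dumps callable and only falls back to json_dumps_compact when none is given. A sketch, assuming the class is importable from the new omlish.logs.json module named in the header above:

import json
import logging

from omlish.logs.json import JsonLogFormatter  # assumed path, per the file header

handler = logging.StreamHandler()
handler.setFormatter(JsonLogFormatter(json_dumps=lambda o: json.dumps(o, sort_keys=True)))

logger = logging.getLogger('demo')
logger.addHandler(handler)
logger.warning('hello')  # emits one JSON object per log record
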
@@ -3575,7 +3476,127 @@ class ThreadWorkerGroup:


  ########################################
- # ../../../../../omlish/lite/subprocesses.py
+ # ../../../../../omlish/logs/standard.py
+ """
+ TODO:
+  - structured
+  - prefixed
+  - debug
+ """
+
+
+ ##
+
+
+ STANDARD_LOG_FORMAT_PARTS = [
+     ('asctime', '%(asctime)-15s'),
+     ('process', 'pid=%(process)-6s'),
+     ('thread', 'tid=%(thread)x'),
+     ('levelname', '%(levelname)s'),
+     ('name', '%(name)s'),
+     ('separator', '::'),
+     ('message', '%(message)s'),
+ ]
+
+
+ class StandardLogFormatter(logging.Formatter):
+     @staticmethod
+     def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
+         return ' '.join(v for k, v in parts)
+
+     converter = datetime.datetime.fromtimestamp  # type: ignore
+
+     def formatTime(self, record, datefmt=None):
+         ct = self.converter(record.created)  # type: ignore
+         if datefmt:
+             return ct.strftime(datefmt)  # noqa
+         else:
+             t = ct.strftime('%Y-%m-%d %H:%M:%S')
+             return '%s.%03d' % (t, record.msecs)  # noqa
+
+
+ ##
+
+
+ class StandardLogHandler(ProxyLogHandler):
+     pass
+
+
+ ##
+
+
+ @contextlib.contextmanager
+ def _locking_logging_module_lock() -> ta.Iterator[None]:
+     if hasattr(logging, '_acquireLock'):
+         logging._acquireLock()  # noqa
+         try:
+             yield
+         finally:
+             logging._releaseLock()  # type: ignore  # noqa
+
+     elif hasattr(logging, '_lock'):
+         # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
+         with logging._lock:  # noqa
+             yield
+
+     else:
+         raise Exception("Can't find lock in logging module")
+
+
+ def configure_standard_logging(
+         level: ta.Union[int, str] = logging.INFO,
+         *,
+         json: bool = False,
+         target: ta.Optional[logging.Logger] = None,
+         force: bool = False,
+         handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
+ ) -> ta.Optional[StandardLogHandler]:
+     with _locking_logging_module_lock():
+         if target is None:
+             target = logging.root
+
+         #
+
+         if not force:
+             if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
+                 return None
+
+         #
+
+         if handler_factory is not None:
+             handler = handler_factory()
+         else:
+             handler = logging.StreamHandler()
+
+         #
+
+         formatter: logging.Formatter
+         if json:
+             formatter = JsonLogFormatter()
+         else:
+             formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
+         handler.setFormatter(formatter)
+
+         #
+
+         handler.addFilter(TidLogFilter())
+
+         #
+
+         target.addHandler(handler)
+
+         #
+
+         if level is not None:
+             target.setLevel(level)
+
+         #
+
+         return StandardLogHandler(handler)
+
+
+ ########################################
+ # ../../../../../omlish/subprocesses.py


  ##
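
configure_standard_logging keeps its behavior after the move: it installs a single StandardLogHandler on the target logger (root by default) and returns None if one is already present and force is not set. A sketch of the typical call, assuming the new omlish.logs.standard module path shown above:

import logging

from omlish.logs.standard import configure_standard_logging  # assumed path

# Returns the installed StandardLogHandler, or None if logging was already configured.
handler = configure_standard_logging(logging.INFO, json=False)

logging.getLogger(__name__).info('standard logging configured')
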
@@ -3626,8 +3647,8 @@ def subprocess_close(
  ##


- class AbstractSubprocesses(abc.ABC):  # noqa
-     DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = log
+ class BaseSubprocesses(abc.ABC):  # noqa
+     DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = None

      def __init__(
              self,
@@ -3640,6 +3661,9 @@ class AbstractSubprocesses(abc.ABC): # noqa
          self._log = log if log is not None else self.DEFAULT_LOGGER
          self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS

+     def set_logger(self, log: ta.Optional[logging.Logger]) -> None:
+         self._log = log
+
      #

      def prepare_args(
@@ -3751,23 +3775,25 @@ class AbstractSubprocesses(abc.ABC): # noqa
  ##


- class Subprocesses(AbstractSubprocesses):
+ class AbstractSubprocesses(BaseSubprocesses, abc.ABC):
+     @abc.abstractmethod
      def check_call(
              self,
              *cmd: str,
              stdout: ta.Any = sys.stderr,
              **kwargs: ta.Any,
      ) -> None:
-         with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs):  # noqa
-             subprocess.check_call(cmd, **kwargs)
+         raise NotImplementedError

+     @abc.abstractmethod
      def check_output(
              self,
              *cmd: str,
              **kwargs: ta.Any,
      ) -> bytes:
-         with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs):  # noqa
-             return subprocess.check_output(cmd, **kwargs)
+         raise NotImplementedError
+
+     #


      def check_output_str(
@@ -3809,9 +3835,94 @@ class Subprocesses(AbstractSubprocesses):
              return ret.decode().strip()


+ ##
+
+
+ class Subprocesses(AbstractSubprocesses):
+     def check_call(
+             self,
+             *cmd: str,
+             stdout: ta.Any = sys.stderr,
+             **kwargs: ta.Any,
+     ) -> None:
+         with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs):  # noqa
+             subprocess.check_call(cmd, **kwargs)
+
+     def check_output(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> bytes:
+         with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs):  # noqa
+             return subprocess.check_output(cmd, **kwargs)
+
+
  subprocesses = Subprocesses()


+ ##
+
+
+ class AbstractAsyncSubprocesses(BaseSubprocesses):
+     @abc.abstractmethod
+     async def check_call(
+             self,
+             *cmd: str,
+             stdout: ta.Any = sys.stderr,
+             **kwargs: ta.Any,
+     ) -> None:
+         raise NotImplementedError
+
+     @abc.abstractmethod
+     async def check_output(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> bytes:
+         raise NotImplementedError
+
+     #
+
+     async def check_output_str(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> str:
+         return (await self.check_output(*cmd, **kwargs)).decode().strip()
+
+     #
+
+     async def try_call(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> bool:
+         if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
+             return False
+         else:
+             return True
+
+     async def try_output(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> ta.Optional[bytes]:
+         if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
+             return None
+         else:
+             return ret
+
+     async def try_output_str(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> ta.Optional[str]:
+         if (ret := await self.try_output(*cmd, **kwargs)) is None:
+             return None
+         else:
+             return ret.decode().strip()
+
+
  ########################################
  # ../poster.py
  """