ominfra 0.0.0.dev156__py3-none-any.whl → 0.0.0.dev158__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -78,7 +78,7 @@ ConfigMapping = ta.Mapping[str, ta.Any]
  # ../../../threadworkers.py
  ThreadWorkerT = ta.TypeVar('ThreadWorkerT', bound='ThreadWorker')

- # ../../../../omlish/lite/subprocesses.py
+ # ../../../../omlish/subprocesses.py
  SubprocessChannelOption = ta.Literal['pipe', 'stdout', 'devnull']  # ta.TypeAlias


@@ -932,8 +932,8 @@ class _CachedNullary(_AbstractCachedNullary):
          return self._value


- def cached_nullary(fn):  # ta.Callable[..., T]) -> ta.Callable[..., T]:
-     return _CachedNullary(fn)
+ def cached_nullary(fn: CallableT) -> CallableT:
+     return _CachedNullary(fn)  # type: ignore


  def static_init(fn: CallableT) -> CallableT:
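The retyped `cached_nullary` above behaves as before: the returned callable computes the wrapped zero-argument function once and then keeps returning the cached value. A hedged usage sketch (the `load_config` function is hypothetical, not part of the package):

    @cached_nullary
    def load_config() -> dict:
        print('loading')  # runs only on the first call
        return {'answer': 42}

    assert load_config() is load_config()  # later calls return the cached object
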
@@ -1437,6 +1437,13 @@ json_dump_compact: ta.Callable[..., bytes] = functools.partial(json.dump, **JSON
  json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)


+ ########################################
+ # ../../../../../omlish/lite/logs.py
+
+
+ log = logging.getLogger(__name__)
+
+
  ########################################
  # ../../../../../omlish/lite/reflect.py

@@ -1559,6 +1566,116 @@ def format_num_bytes(num_bytes: int) -> str:
      return f'{num_bytes / 1024 ** (len(FORMAT_NUM_BYTES_SUFFIXES) - 1):.2f}{FORMAT_NUM_BYTES_SUFFIXES[-1]}'


+ ########################################
+ # ../../../../../omlish/logs/filters.py
+
+
+ class TidLogFilter(logging.Filter):
+     def filter(self, record):
+         record.tid = threading.get_native_id()
+         return True
+
+
+ ########################################
+ # ../../../../../omlish/logs/proxy.py
+
+
+ class ProxyLogFilterer(logging.Filterer):
+     def __init__(self, underlying: logging.Filterer) -> None:  # noqa
+         self._underlying = underlying
+
+     @property
+     def underlying(self) -> logging.Filterer:
+         return self._underlying
+
+     @property
+     def filters(self):
+         return self._underlying.filters
+
+     @filters.setter
+     def filters(self, filters):
+         self._underlying.filters = filters
+
+     def addFilter(self, filter):  # noqa
+         self._underlying.addFilter(filter)
+
+     def removeFilter(self, filter):  # noqa
+         self._underlying.removeFilter(filter)
+
+     def filter(self, record):
+         return self._underlying.filter(record)
+
+
+ class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
+     def __init__(self, underlying: logging.Handler) -> None:  # noqa
+         ProxyLogFilterer.__init__(self, underlying)
+
+     _underlying: logging.Handler
+
+     @property
+     def underlying(self) -> logging.Handler:
+         return self._underlying
+
+     def get_name(self):
+         return self._underlying.get_name()
+
+     def set_name(self, name):
+         self._underlying.set_name(name)
+
+     @property
+     def name(self):
+         return self._underlying.name
+
+     @property
+     def level(self):
+         return self._underlying.level
+
+     @level.setter
+     def level(self, level):
+         self._underlying.level = level
+
+     @property
+     def formatter(self):
+         return self._underlying.formatter
+
+     @formatter.setter
+     def formatter(self, formatter):
+         self._underlying.formatter = formatter
+
+     def createLock(self):
+         self._underlying.createLock()
+
+     def acquire(self):
+         self._underlying.acquire()
+
+     def release(self):
+         self._underlying.release()
+
+     def setLevel(self, level):
+         self._underlying.setLevel(level)
+
+     def format(self, record):
+         return self._underlying.format(record)
+
+     def emit(self, record):
+         self._underlying.emit(record)
+
+     def handle(self, record):
+         return self._underlying.handle(record)
+
+     def setFormatter(self, fmt):
+         self._underlying.setFormatter(fmt)
+
+     def flush(self):
+         self._underlying.flush()
+
+     def close(self):
+         self._underlying.close()
+
+     def handleError(self, record):
+         self._underlying.handleError(record)
+
+
  ########################################
  # ../../../../../omlish/os/pidfile.py
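The `ProxyLogHandler` added above forwards every `logging.Handler` attribute and method to an underlying handler, so the proxy can be installed on a logger and behaves exactly like the handler it wraps. A hedged usage sketch (the logger name and format string are illustrative only):

    import logging

    inner = logging.StreamHandler()
    proxy = ProxyLogHandler(inner)
    proxy.setFormatter(logging.Formatter('%(levelname)s %(message)s'))  # delegated to `inner`

    logger = logging.getLogger('example')
    logger.addHandler(proxy)
    logger.warning('hello')  # emitted through the wrapped StreamHandler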
 
@@ -1988,6 +2105,52 @@ class AwsDataclassMeta:
          return AwsDataclassMeta.Converters(d2a, a2d)


+ ########################################
+ # ../cursor.py
+
+
+ class JournalctlToAwsCursor:
+     def __init__(
+             self,
+             cursor_file: ta.Optional[str] = None,
+             *,
+             ensure_locked: ta.Optional[ta.Callable[[], None]] = None,
+     ) -> None:
+         super().__init__()
+         self._cursor_file = cursor_file
+         self._ensure_locked = ensure_locked
+
+     #
+
+     def get(self) -> ta.Optional[str]:
+         if self._ensure_locked is not None:
+             self._ensure_locked()
+
+         if not (cf := self._cursor_file):
+             return None
+         cf = os.path.expanduser(cf)
+
+         try:
+             with open(cf) as f:
+                 return f.read().strip()
+         except FileNotFoundError:
+             return None
+
+     def set(self, cursor: str) -> None:
+         if self._ensure_locked is not None:
+             self._ensure_locked()
+
+         if not (cf := self._cursor_file):
+             return
+         cf = os.path.expanduser(cf)
+
+         log.info('Writing cursor file %s : %s', cf, cursor)
+         with open(ncf := cf + '.next', 'w') as f:
+             f.write(cursor)
+
+         os.rename(ncf, cf)
+
+
  ########################################
  # ../../../../../omlish/io/buffers.py
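The `JournalctlToAwsCursor` added above persists the last-seen journalctl cursor: `get()` reads the cursor file (returning None if it does not exist), and `set()` writes a `.next` sibling file and renames it into place so the update is atomic. A hedged usage sketch (the path and cursor string are made up; real callers pass an `ensure_locked` callback tied to the process lock):

    cursor = JournalctlToAwsCursor('~/.ominfra/journalctl-cursor')

    last = cursor.get()           # None until a cursor has been stored
    cursor.set('s=abc123;i=1f')   # writes the '.next' file, then os.rename()s it over the cursor file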
 
@@ -2283,340 +2446,71 @@ class aclosing(contextlib.AbstractAsyncContextManager): # noqa


  ########################################
- # ../../../../../omlish/lite/logs.py
+ # ../../../../../omlish/lite/marshal.py
  """
  TODO:
-  - translate json keys
-  - debug
+  - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
+  - namedtuple
+  - literals
+  - newtypes?
  """


- log = logging.getLogger(__name__)
+ ##


- ##
+ @dc.dataclass(frozen=True)
+ class ObjMarshalOptions:
+     raw_bytes: bool = False
+     nonstrict_dataclasses: bool = False


- class TidLogFilter(logging.Filter):
+ class ObjMarshaler(abc.ABC):
+     @abc.abstractmethod
+     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         raise NotImplementedError

-     def filter(self, record):
-         record.tid = threading.get_native_id()
-         return True
+     @abc.abstractmethod
+     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         raise NotImplementedError


- ##
+ class NopObjMarshaler(ObjMarshaler):
+     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return o

+     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return o

- class JsonLogFormatter(logging.Formatter):

-     KEYS: ta.Mapping[str, bool] = {
-         'name': False,
-         'msg': False,
-         'args': False,
-         'levelname': False,
-         'levelno': False,
-         'pathname': False,
-         'filename': False,
-         'module': False,
-         'exc_info': True,
-         'exc_text': True,
-         'stack_info': True,
-         'lineno': False,
-         'funcName': False,
-         'created': False,
-         'msecs': False,
-         'relativeCreated': False,
-         'thread': False,
-         'threadName': False,
-         'processName': False,
-         'process': False,
-     }
+ @dc.dataclass()
+ class ProxyObjMarshaler(ObjMarshaler):
+     m: ta.Optional[ObjMarshaler] = None

-     def format(self, record: logging.LogRecord) -> str:
-         dct = {
-             k: v
-             for k, o in self.KEYS.items()
-             for v in [getattr(record, k)]
-             if not (o and v is None)
-         }
-         return json_dumps_compact(dct)
+     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return check.not_none(self.m).marshal(o, ctx)

+     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return check.not_none(self.m).unmarshal(o, ctx)

- ##

+ @dc.dataclass(frozen=True)
+ class CastObjMarshaler(ObjMarshaler):
+     ty: type

- STANDARD_LOG_FORMAT_PARTS = [
-     ('asctime', '%(asctime)-15s'),
-     ('process', 'pid=%(process)-6s'),
-     ('thread', 'tid=%(thread)x'),
-     ('levelname', '%(levelname)s'),
-     ('name', '%(name)s'),
-     ('separator', '::'),
-     ('message', '%(message)s'),
- ]
+     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return o

+     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return self.ty(o)

- class StandardLogFormatter(logging.Formatter):

-     @staticmethod
-     def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
-         return ' '.join(v for k, v in parts)
+ class DynamicObjMarshaler(ObjMarshaler):
+     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return ctx.manager.marshal_obj(o, opts=ctx.options)

-     converter = datetime.datetime.fromtimestamp  # type: ignore
-
-     def formatTime(self, record, datefmt=None):
-         ct = self.converter(record.created)  # type: ignore
-         if datefmt:
-             return ct.strftime(datefmt)  # noqa
-         else:
-             t = ct.strftime('%Y-%m-%d %H:%M:%S')
-             return '%s.%03d' % (t, record.msecs)  # noqa
-
-
- ##
-
-
- class ProxyLogFilterer(logging.Filterer):
-     def __init__(self, underlying: logging.Filterer) -> None:  # noqa
-         self._underlying = underlying
-
-     @property
-     def underlying(self) -> logging.Filterer:
-         return self._underlying
-
-     @property
-     def filters(self):
-         return self._underlying.filters
-
-     @filters.setter
-     def filters(self, filters):
-         self._underlying.filters = filters
-
-     def addFilter(self, filter):  # noqa
-         self._underlying.addFilter(filter)
-
-     def removeFilter(self, filter):  # noqa
-         self._underlying.removeFilter(filter)
-
-     def filter(self, record):
-         return self._underlying.filter(record)
-
-
- class ProxyLogHandler(ProxyLogFilterer, logging.Handler):
-     def __init__(self, underlying: logging.Handler) -> None:  # noqa
-         ProxyLogFilterer.__init__(self, underlying)
-
-     _underlying: logging.Handler
-
-     @property
-     def underlying(self) -> logging.Handler:
-         return self._underlying
-
-     def get_name(self):
-         return self._underlying.get_name()
-
-     def set_name(self, name):
-         self._underlying.set_name(name)
-
-     @property
-     def name(self):
-         return self._underlying.name
-
-     @property
-     def level(self):
-         return self._underlying.level
-
-     @level.setter
-     def level(self, level):
-         self._underlying.level = level
-
-     @property
-     def formatter(self):
-         return self._underlying.formatter
-
-     @formatter.setter
-     def formatter(self, formatter):
-         self._underlying.formatter = formatter
-
-     def createLock(self):
-         self._underlying.createLock()
-
-     def acquire(self):
-         self._underlying.acquire()
-
-     def release(self):
-         self._underlying.release()
-
-     def setLevel(self, level):
-         self._underlying.setLevel(level)
-
-     def format(self, record):
-         return self._underlying.format(record)
-
-     def emit(self, record):
-         self._underlying.emit(record)
-
-     def handle(self, record):
-         return self._underlying.handle(record)
-
-     def setFormatter(self, fmt):
-         self._underlying.setFormatter(fmt)
-
-     def flush(self):
-         self._underlying.flush()
-
-     def close(self):
-         self._underlying.close()
-
-     def handleError(self, record):
-         self._underlying.handleError(record)
-
-
- ##
-
-
- class StandardLogHandler(ProxyLogHandler):
-     pass
-
-
- ##
-
-
- @contextlib.contextmanager
- def _locking_logging_module_lock() -> ta.Iterator[None]:
-     if hasattr(logging, '_acquireLock'):
-         logging._acquireLock()  # noqa
-         try:
-             yield
-         finally:
-             logging._releaseLock()  # type: ignore  # noqa
-
-     elif hasattr(logging, '_lock'):
-         # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
-         with logging._lock:  # noqa
-             yield
-
-     else:
-         raise Exception("Can't find lock in logging module")
-
-
- def configure_standard_logging(
-         level: ta.Union[int, str] = logging.INFO,
-         *,
-         json: bool = False,
-         target: ta.Optional[logging.Logger] = None,
-         force: bool = False,
-         handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
- ) -> ta.Optional[StandardLogHandler]:
-     with _locking_logging_module_lock():
-         if target is None:
-             target = logging.root
-
-         #
-
-         if not force:
-             if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
-                 return None
-
-         #
-
-         if handler_factory is not None:
-             handler = handler_factory()
-         else:
-             handler = logging.StreamHandler()
-
-         #
-
-         formatter: logging.Formatter
-         if json:
-             formatter = JsonLogFormatter()
-         else:
-             formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
-         handler.setFormatter(formatter)
-
-         #
-
-         handler.addFilter(TidLogFilter())
-
-         #
-
-         target.addHandler(handler)
-
-         #
-
-         if level is not None:
-             target.setLevel(level)
-
-         #
-
-         return StandardLogHandler(handler)
-
-
- ########################################
- # ../../../../../omlish/lite/marshal.py
- """
- TODO:
-  - pickle stdlib objs? have to pin to 3.8 pickle protocol, will be cross-version
-  - namedtuple
-  - literals
- """
-
-
- ##
-
-
- @dc.dataclass(frozen=True)
- class ObjMarshalOptions:
-     raw_bytes: bool = False
-     nonstrict_dataclasses: bool = False
-
-
- class ObjMarshaler(abc.ABC):
-     @abc.abstractmethod
-     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         raise NotImplementedError
-
-     @abc.abstractmethod
-     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         raise NotImplementedError
-
-
- class NopObjMarshaler(ObjMarshaler):
-     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return o
-
-     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return o
-
-
- @dc.dataclass()
- class ProxyObjMarshaler(ObjMarshaler):
-     m: ta.Optional[ObjMarshaler] = None
-
-     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return check.not_none(self.m).marshal(o, ctx)
-
-     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return check.not_none(self.m).unmarshal(o, ctx)
-
-
- @dc.dataclass(frozen=True)
- class CastObjMarshaler(ObjMarshaler):
-     ty: type
-
-     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return o
-
-     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return self.ty(o)
-
-
- class DynamicObjMarshaler(ObjMarshaler):
-     def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return ctx.manager.marshal_obj(o, opts=ctx.options)
-
-     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
-         return o
+     def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
+         return o


  @dc.dataclass(frozen=True)
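Each `ObjMarshaler` above converts between a live object and a JSON-compatible value, receiving an `ObjMarshalContext` so nested values can be delegated back to the manager (as `DynamicObjMarshaler` does). A hedged sketch of a custom marshaler for a hypothetical wrapper type; how such a marshaler would be registered is not shown in this hunk:

    import dataclasses as dc
    import typing as ta


    @dc.dataclass(frozen=True)
    class Wrapper:
        value: ta.Any


    class WrapperObjMarshaler(ObjMarshaler):
        def marshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
            # Delegate the wrapped value so nested types are handled by their own marshalers.
            return {'value': ctx.manager.marshal_obj(o.value, opts=ctx.options)}

        def unmarshal(self, o: ta.Any, ctx: 'ObjMarshalContext') -> ta.Any:
            return Wrapper(o['value'])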
@@ -3014,49 +2908,57 @@ def check_runtime_version() -> None:


  ########################################
- # ../cursor.py
+ # ../../../../../omlish/logs/json.py
+ """
+ TODO:
+  - translate json keys
+ """


- class JournalctlToAwsCursor:
+ class JsonLogFormatter(logging.Formatter):
+     KEYS: ta.Mapping[str, bool] = {
+         'name': False,
+         'msg': False,
+         'args': False,
+         'levelname': False,
+         'levelno': False,
+         'pathname': False,
+         'filename': False,
+         'module': False,
+         'exc_info': True,
+         'exc_text': True,
+         'stack_info': True,
+         'lineno': False,
+         'funcName': False,
+         'created': False,
+         'msecs': False,
+         'relativeCreated': False,
+         'thread': False,
+         'threadName': False,
+         'processName': False,
+         'process': False,
+     }
+
      def __init__(
          self,
-             cursor_file: ta.Optional[str] = None,
-             *,
-             ensure_locked: ta.Optional[ta.Callable[[], None]] = None,
+             *args: ta.Any,
+             json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
+             **kwargs: ta.Any,
  ) -> None:
-         super().__init__()
-         self._cursor_file = cursor_file
-         self._ensure_locked = ensure_locked
-
-     #
-
-     def get(self) -> ta.Optional[str]:
-         if self._ensure_locked is not None:
-             self._ensure_locked()
-
-         if not (cf := self._cursor_file):
-             return None
-         cf = os.path.expanduser(cf)
-
-         try:
-             with open(cf) as f:
-                 return f.read().strip()
-         except FileNotFoundError:
-             return None
-
-     def set(self, cursor: str) -> None:
-         if self._ensure_locked is not None:
-             self._ensure_locked()
-
-         if not (cf := self._cursor_file):
-             return
-         cf = os.path.expanduser(cf)
+         super().__init__(*args, **kwargs)

-         log.info('Writing cursor file %s : %s', cf, cursor)
-         with open(ncf := cf + '.next', 'w') as f:
-             f.write(cursor)
+         if json_dumps is None:
+             json_dumps = json_dumps_compact
+         self._json_dumps = json_dumps

-         os.rename(ncf, cf)
+     def format(self, record: logging.LogRecord) -> str:
+         dct = {
+             k: v
+             for k, o in self.KEYS.items()
+             for v in [getattr(record, k)]
+             if not (o and v is None)
+         }
+         return self._json_dumps(dct)


  ########################################
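The relocated `JsonLogFormatter` above now accepts an optional `json_dumps` callable and falls back to `json_dumps_compact`, so the serializer can be swapped without subclassing. A hedged usage sketch (the logger name and the `default=str` choice are illustrative):

    import json
    import logging

    handler = logging.StreamHandler()
    handler.setFormatter(JsonLogFormatter(json_dumps=lambda dct: json.dumps(dct, default=str)))
    logging.getLogger('example.json').addHandler(handler)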
@@ -3574,7 +3476,127 @@ class ThreadWorkerGroup:


  ########################################
- # ../../../../../omlish/lite/subprocesses.py
+ # ../../../../../omlish/logs/standard.py
+ """
+ TODO:
+  - structured
+  - prefixed
+  - debug
+ """
+
+
+ ##
+
+
+ STANDARD_LOG_FORMAT_PARTS = [
+     ('asctime', '%(asctime)-15s'),
+     ('process', 'pid=%(process)-6s'),
+     ('thread', 'tid=%(thread)x'),
+     ('levelname', '%(levelname)s'),
+     ('name', '%(name)s'),
+     ('separator', '::'),
+     ('message', '%(message)s'),
+ ]
+
+
+ class StandardLogFormatter(logging.Formatter):
+     @staticmethod
+     def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
+         return ' '.join(v for k, v in parts)
+
+     converter = datetime.datetime.fromtimestamp  # type: ignore
+
+     def formatTime(self, record, datefmt=None):
+         ct = self.converter(record.created)  # type: ignore
+         if datefmt:
+             return ct.strftime(datefmt)  # noqa
+         else:
+             t = ct.strftime('%Y-%m-%d %H:%M:%S')
+             return '%s.%03d' % (t, record.msecs)  # noqa
+
+
+ ##
+
+
+ class StandardLogHandler(ProxyLogHandler):
+     pass
+
+
+ ##
+
+
+ @contextlib.contextmanager
+ def _locking_logging_module_lock() -> ta.Iterator[None]:
+     if hasattr(logging, '_acquireLock'):
+         logging._acquireLock()  # noqa
+         try:
+             yield
+         finally:
+             logging._releaseLock()  # type: ignore  # noqa
+
+     elif hasattr(logging, '_lock'):
+         # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
+         with logging._lock:  # noqa
+             yield
+
+     else:
+         raise Exception("Can't find lock in logging module")
+
+
+ def configure_standard_logging(
+         level: ta.Union[int, str] = logging.INFO,
+         *,
+         json: bool = False,
+         target: ta.Optional[logging.Logger] = None,
+         force: bool = False,
+         handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
+ ) -> ta.Optional[StandardLogHandler]:
+     with _locking_logging_module_lock():
+         if target is None:
+             target = logging.root
+
+         #
+
+         if not force:
+             if any(isinstance(h, StandardLogHandler) for h in list(target.handlers)):
+                 return None
+
+         #
+
+         if handler_factory is not None:
+             handler = handler_factory()
+         else:
+             handler = logging.StreamHandler()
+
+         #
+
+         formatter: logging.Formatter
+         if json:
+             formatter = JsonLogFormatter()
+         else:
+             formatter = StandardLogFormatter(StandardLogFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS))
+         handler.setFormatter(formatter)
+
+         #
+
+         handler.addFilter(TidLogFilter())
+
+         #
+
+         target.addHandler(handler)
+
+         #
+
+         if level is not None:
+             target.setLevel(level)
+
+         #
+
+         return StandardLogHandler(handler)
+
+
+ ########################################
+ # ../../../../../omlish/subprocesses.py


  ##
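`configure_standard_logging` above takes the logging module lock, installs a single `StandardLogHandler` on the target logger (skipping the work if one is already present and `force` is false), and returns the handler it installed or None. A hedged usage sketch:

    handler = configure_standard_logging('INFO', json=True)        # first call installs a handler

    assert configure_standard_logging('INFO', json=True) is None   # repeat call without force=True is a no-op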
@@ -3625,8 +3647,8 @@ def subprocess_close(
  ##


- class AbstractSubprocesses(abc.ABC):  # noqa
-     DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = log
+ class BaseSubprocesses(abc.ABC):  # noqa
+     DEFAULT_LOGGER: ta.ClassVar[ta.Optional[logging.Logger]] = None

      def __init__(
          self,
@@ -3639,6 +3661,9 @@ class AbstractSubprocesses(abc.ABC): # noqa
          self._log = log if log is not None else self.DEFAULT_LOGGER
          self._try_exceptions = try_exceptions if try_exceptions is not None else self.DEFAULT_TRY_EXCEPTIONS

+     def set_logger(self, log: ta.Optional[logging.Logger]) -> None:
+         self._log = log
+
      #

      def prepare_args(
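With `DEFAULT_LOGGER` now defaulting to None on `BaseSubprocesses`, the new `set_logger` hook lets callers attach a logger after construction. A hedged usage sketch using the module-level `subprocesses` instance defined later in this file (whether and what it logs per call depends on code outside this hunk):

    import logging

    subprocesses.set_logger(logging.getLogger('ominfra.subprocesses'))
    subprocesses.check_call('true')  # subsequent invocations can report through the attached logger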
@@ -3750,23 +3775,25 @@ class AbstractSubprocesses(abc.ABC): # noqa
  ##


- class Subprocesses(AbstractSubprocesses):
+ class AbstractSubprocesses(BaseSubprocesses, abc.ABC):
+     @abc.abstractmethod
      def check_call(
              self,
              *cmd: str,
              stdout: ta.Any = sys.stderr,
              **kwargs: ta.Any,
      ) -> None:
-         with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs):  # noqa
-             subprocess.check_call(cmd, **kwargs)
+         raise NotImplementedError

+     @abc.abstractmethod
      def check_output(
              self,
              *cmd: str,
              **kwargs: ta.Any,
      ) -> bytes:
-         with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs):  # noqa
-             return subprocess.check_output(cmd, **kwargs)
+         raise NotImplementedError
+
+     #

      def check_output_str(
              self,
@@ -3808,9 +3835,94 @@ class Subprocesses(AbstractSubprocesses):
              return ret.decode().strip()


+ ##
+
+
+ class Subprocesses(AbstractSubprocesses):
+     def check_call(
+             self,
+             *cmd: str,
+             stdout: ta.Any = sys.stderr,
+             **kwargs: ta.Any,
+     ) -> None:
+         with self.prepare_and_wrap(*cmd, stdout=stdout, **kwargs) as (cmd, kwargs):  # noqa
+             subprocess.check_call(cmd, **kwargs)
+
+     def check_output(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> bytes:
+         with self.prepare_and_wrap(*cmd, **kwargs) as (cmd, kwargs):  # noqa
+             return subprocess.check_output(cmd, **kwargs)
+
+
  subprocesses = Subprocesses()


+ ##
+
+
+ class AbstractAsyncSubprocesses(BaseSubprocesses):
+     @abc.abstractmethod
+     async def check_call(
+             self,
+             *cmd: str,
+             stdout: ta.Any = sys.stderr,
+             **kwargs: ta.Any,
+     ) -> None:
+         raise NotImplementedError
+
+     @abc.abstractmethod
+     async def check_output(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> bytes:
+         raise NotImplementedError
+
+     #
+
+     async def check_output_str(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> str:
+         return (await self.check_output(*cmd, **kwargs)).decode().strip()
+
+     #
+
+     async def try_call(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> bool:
+         if isinstance(await self.async_try_fn(self.check_call, *cmd, **kwargs), Exception):
+             return False
+         else:
+             return True
+
+     async def try_output(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> ta.Optional[bytes]:
+         if isinstance(ret := await self.async_try_fn(self.check_output, *cmd, **kwargs), Exception):
+             return None
+         else:
+             return ret
+
+     async def try_output_str(
+             self,
+             *cmd: str,
+             **kwargs: ta.Any,
+     ) -> ta.Optional[str]:
+         if (ret := await self.try_output(*cmd, **kwargs)) is None:
+             return None
+         else:
+             return ret.decode().strip()
+
+
  ########################################
  # ../poster.py
  """