ominfra 0.0.0.dev433-py3-none-any.whl → 0.0.0.dev501-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

This version of ominfra has been flagged as a potentially problematic release.

Files changed (29)
  1. ominfra/README.md +26 -0
  2. ominfra/__about__.py +5 -2
  3. ominfra/clouds/aws/instancetypes/cache.json.gz +0 -0
  4. ominfra/clouds/aws/journald2aws/main.py +1 -1
  5. ominfra/clouds/aws/models/{base.py → base/__init__.py} +6 -0
  6. ominfra/clouds/aws/models/base/_dataclasses.py +721 -0
  7. ominfra/clouds/aws/models/gen/cli.py +2 -1
  8. ominfra/clouds/aws/models/gen/gen.py +16 -7
  9. ominfra/clouds/aws/models/services/{ec2.py → ec2/__init__.py} +123 -1
  10. ominfra/clouds/aws/models/services/ec2/_dataclasses.py +30654 -0
  11. ominfra/clouds/aws/models/services/{lambda_.py → lambda_/__init__.py} +139 -1
  12. ominfra/clouds/aws/models/services/lambda_/_dataclasses.py +4182 -0
  13. ominfra/clouds/aws/models/services/{rds.py → rds/__init__.py} +244 -78
  14. ominfra/clouds/aws/models/services/rds/_dataclasses.py +8231 -0
  15. ominfra/clouds/aws/models/services/{s3.py → s3/__init__.py} +9 -1
  16. ominfra/clouds/aws/models/services/s3/_dataclasses.py +5014 -0
  17. ominfra/manage/bootstrap_.py +1 -1
  18. ominfra/manage/main.py +1 -2
  19. ominfra/manage/targets/bestpython.sh +1 -1
  20. ominfra/scripts/journald2aws.py +748 -71
  21. ominfra/scripts/manage.py +824 -99
  22. ominfra/scripts/supervisor.py +925 -123
  23. ominfra/supervisor/main.py +1 -1
  24. {ominfra-0.0.0.dev433.dist-info → ominfra-0.0.0.dev501.dist-info}/METADATA +7 -5
  25. {ominfra-0.0.0.dev433.dist-info → ominfra-0.0.0.dev501.dist-info}/RECORD +29 -23
  26. {ominfra-0.0.0.dev433.dist-info → ominfra-0.0.0.dev501.dist-info}/WHEEL +0 -0
  27. {ominfra-0.0.0.dev433.dist-info → ominfra-0.0.0.dev501.dist-info}/entry_points.txt +0 -0
  28. {ominfra-0.0.0.dev433.dist-info → ominfra-0.0.0.dev501.dist-info}/licenses/LICENSE +0 -0
  29. {ominfra-0.0.0.dev433.dist-info → ominfra-0.0.0.dev501.dist-info}/top_level.txt +0 -0
@@ -54,11 +54,64 @@ if sys.version_info < (3, 8):
54
54
  raise OSError(f'Requires python (3, 8), got {sys.version_info} from {sys.executable}') # noqa
55
55
 
56
56
 
57
+ def __omlish_amalg__(): # noqa
58
+ return dict(
59
+ src_files=[
60
+ dict(path='../../../../omlish/configs/types.py', sha1='f7a5584cd6eccb77d18d729796072a162e9a8790'),
61
+ dict(path='../../../../omlish/formats/ini/sections.py', sha1='731c92cce82e183d1d4bdc23fc781fad62187394'),
62
+ dict(path='../../../../omlish/formats/toml/parser.py', sha1='73dac82289350ab951c4bcdbfe61167fa221f26f'),
63
+ dict(path='../../../../omlish/formats/toml/writer.py', sha1='6ea41d7e724bb1dcf6bd84b88993ff4e8798e021'),
64
+ dict(path='../../../../omlish/io/readers.py', sha1='4b19ab4a87f2fa2a6f6c3cad7e1f3892b7cbd3a4'),
65
+ dict(path='../../../../omlish/lite/abstract.py', sha1='a2fc3f3697fa8de5247761e9d554e70176f37aac'),
66
+ dict(path='../../../../omlish/lite/asyncs.py', sha1='b3f2251c56617ce548abf9c333ac996b63edb23e'),
67
+ dict(path='../../../../omlish/lite/attrops.py', sha1='c1ebfb8573d766d34593c452a2377208d02726dc'),
68
+ dict(path='../../../../omlish/lite/cached.py', sha1='0c33cf961ac8f0727284303c7a30c5ea98f714f2'),
69
+ dict(path='../../../../omlish/lite/check.py', sha1='bb6b6b63333699b84462951a854d99ae83195b94'),
70
+ dict(path='../../../../omlish/lite/contextmanagers.py', sha1='993f5ed96d3410f739a20363f55670d5e5267fa3'),
71
+ dict(path='../../../../omlish/lite/json.py', sha1='57eeddc4d23a17931e00284ffa5cb6e3ce089486'),
72
+ dict(path='../../../../omlish/lite/objects.py', sha1='9566bbf3530fd71fcc56321485216b592fae21e9'),
73
+ dict(path='../../../../omlish/lite/reflect.py', sha1='c4fec44bf144e9d93293c996af06f6c65fc5e63d'),
74
+ dict(path='../../../../omlish/lite/strings.py', sha1='89831ecbc34ad80e118a865eceb390ed399dc4d6'),
75
+ dict(path='../../../../omlish/logs/levels.py', sha1='91405563d082a5eba874da82aac89d83ce7b6152'),
76
+ dict(path='../../../../omlish/logs/std/filters.py', sha1='f36aab646d84d31e295b33aaaaa6f8b67ff38b3d'),
77
+ dict(path='../../../../omlish/logs/std/proxy.py', sha1='3e7301a2aa351127f9c85f61b2f85dcc3f15aafb'),
78
+ dict(path='../../../../omlish/logs/warnings.py', sha1='c4eb694b24773351107fcc058f3620f1dbfb6799'),
79
+ dict(path='../../../../omlish/os/pidfiles/pidfile.py', sha1='7c3c6d4674855bfc4f1d7fd77b8fa40f36581535'),
80
+ dict(path='../../../../omlish/subprocesses/utils.py', sha1='2210d90ab1bfc75642aa2f4caad662368900aa1c'),
81
+ dict(path='../auth.py', sha1='b1ac1a5e03d4e9e38957a54e346943c6dcc964a1'),
82
+ dict(path='../dataclasses.py', sha1='8e950d7815904588fed284889392cbb0b1002605'),
83
+ dict(path='../../../../omlish/configs/formats.py', sha1='9bc4f953b4b8700f6f109e6f49e2d70f8e48ce7c'),
84
+ dict(path='../../../../omlish/io/buffers.py', sha1='45a5f79c6d71f02ab82082a48d63ebbd10959031'),
85
+ dict(path='../../../../omlish/lite/marshal.py', sha1='96348f5f2a26dc27d842d33cc3927e9da163436b'),
86
+ dict(path='../../../../omlish/lite/runtime.py', sha1='2e752a27ae2bf89b1bb79b4a2da522a3ec360c70'),
87
+ dict(path='../../../../omlish/logs/infos.py', sha1='4dd104bd468a8c438601dd0bbda619b47d2f1620'),
88
+ dict(path='../../../../omlish/logs/std/json.py', sha1='2a75553131e4d5331bb0cedde42aa183f403fc3b'),
89
+ dict(path='../logs.py', sha1='5a4fad522508bdc1b790f1d5234a87f319c9da2d'),
90
+ dict(path='../../../../omlish/lite/configs.py', sha1='c8602e0e197ef1133e7e8e248935ac745bfd46cb'),
91
+ dict(path='../../../../omlish/logs/contexts.py', sha1='1000a6d5ddfb642865ca532e34b1d50759781cf0'),
92
+ dict(path='../../../../omlish/logs/std/standard.py', sha1='5c97c1b9f7ead58d6127d047b873398f708f288d'),
93
+ dict(path='../../../../omlish/subprocesses/wrap.py', sha1='8a9b7d2255481fae15c05f5624b0cdc0766f4b3f'),
94
+ dict(path='../../../../omlish/logs/base.py', sha1='8d06faee05fead6b1dd98c9035a5b042af4aebb1'),
95
+ dict(path='../../../../omlish/logs/std/records.py', sha1='8bbf6ef9eccb3a012c6ca416ddf3969450fd8fc9'),
96
+ dict(path='../../../../omlish/logs/asyncs.py', sha1='ab11b70033d9f2e9a4e70254185aa1c6130c6077'),
97
+ dict(path='../../../../omlish/logs/std/loggers.py', sha1='a569179445d6a8a942b5dcfad1d1f77702868803'),
98
+ dict(path='../../../../omlish/logs/modules.py', sha1='dd7d5f8e63fe8829dfb49460f3929ab64b68ee14'),
99
+ dict(path='cursor.py', sha1='00f1c62e16e4c85b20658eaf33c0bedf22c9e18f'),
100
+ dict(path='../../../journald/messages.py', sha1='6f2d2eeedb71723b1c6631ad2e634b473b297696'),
101
+ dict(path='../../../threadworkers.py', sha1='e3413436070b66faeb3e6974dc9a75cd8a949ad7'),
102
+ dict(path='poster.py', sha1='275770a4e60ea5777053c9044e37d71397c3ed20'),
103
+ dict(path='../../../journald/tailer.py', sha1='1c37a6bbde32e8556d26b94b439f14869731ac01'),
104
+ dict(path='driver.py', sha1='a9353a417fc4e57f29e7f04038c3bf4668f36dbd'),
105
+ dict(path='main.py', sha1='8cb83e241c0b9dc9f1fc0cd727254f6e4a070c58'),
106
+ ],
107
+ )
108
+
109
+
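The added __omlish_amalg__() manifest records every source file baked into the amalgamated script together with a sha1 digest. As a rough illustration only (the check_amalg_manifest helper below is hypothetical, and it assumes the sha1 values are hex digests of each file's raw bytes, resolved relative to the original module's source directory):

    import hashlib
    import os.path

    def check_amalg_manifest(manifest: dict, base_dir: str) -> list:
        # Return the recorded paths whose current contents no longer match their sha1.
        stale = []
        for ent in manifest['src_files']:
            with open(os.path.join(base_dir, ent['path']), 'rb') as f:
                digest = hashlib.sha1(f.read()).hexdigest()
            if digest != ent['sha1']:
                stale.append(ent['path'])
        return stale

    # e.g. check_amalg_manifest(__omlish_amalg__(), some_source_dir)  # some_source_dir is a placeholder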
57
110
  ########################################
58
111
 
59
112
 
60
113
  # ../../../../omlish/configs/types.py
61
- ConfigMap = ta.Mapping[str, ta.Any]
114
+ ConfigMap = ta.Mapping[str, ta.Any] # ta.TypeAlias
62
115
 
63
116
  # ../../../../omlish/formats/ini/sections.py
64
117
  IniSectionSettingsMap = ta.Mapping[str, ta.Mapping[str, ta.Union[str, ta.Sequence[str]]]] # ta.TypeAlias
@@ -68,7 +121,7 @@ TomlParseFloat = ta.Callable[[str], ta.Any] # ta.TypeAlias
68
121
  TomlKey = ta.Tuple[str, ...] # ta.TypeAlias
69
122
  TomlPos = int # ta.TypeAlias
70
123
 
71
- # ../../../../omlish/lite/attrops.py
124
+ # ../../../../omlish/lite/abstract.py
72
125
  T = ta.TypeVar('T')
73
126
 
74
127
  # ../../../../omlish/lite/cached.py
@@ -1200,6 +1253,36 @@ class TomlWriter:
1200
1253
  return out.getvalue()
1201
1254
 
1202
1255
 
1256
+ ########################################
1257
+ # ../../../../../omlish/io/readers.py
1258
+
1259
+
1260
+ ##
1261
+
1262
+
1263
+ class RawBytesReader(ta.Protocol):
1264
+ def read1(self, n: int = -1, /) -> bytes: ...
1265
+
1266
+
1267
+ class BufferedBytesReader(RawBytesReader, ta.Protocol):
1268
+ def read(self, n: int = -1, /) -> bytes: ...
1269
+
1270
+ def readall(self) -> bytes: ...
1271
+
1272
+
1273
+ #
1274
+
1275
+
1276
+ class AsyncRawBytesReader(ta.Protocol):
1277
+ def read1(self, n: int = -1, /) -> ta.Awaitable[bytes]: ...
1278
+
1279
+
1280
+ class AsyncBufferedBytesReader(AsyncRawBytesReader, ta.Protocol):
1281
+ def read(self, n: int = -1, /) -> ta.Awaitable[bytes]: ...
1282
+
1283
+ def readall(self) -> ta.Awaitable[bytes]: ...
1284
+
1285
+
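These are structural typing.Protocol definitions, so any object exposing a compatible read1() (plus read()/readall() for the buffered variants) satisfies them without inheriting from them. A minimal sketch of a conforming raw reader, assuming nothing beyond the protocols shown above (the BytesRawReader name is made up for illustration):

    import io

    class BytesRawReader:
        # Wraps a BytesIO and exposes only read1(), which is all RawBytesReader requires.
        def __init__(self, data: bytes) -> None:
            self._io = io.BytesIO(data)

        def read1(self, n: int = -1, /) -> bytes:
            return self._io.read1(n)

    raw = BytesRawReader(b'hello')  # usable anywhere a RawBytesReader is expected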
1203
1286
  ########################################
1204
1287
  # ../../../../../omlish/lite/abstract.py
1205
1288
 
@@ -1215,25 +1298,49 @@ def is_abstract_method(obj: ta.Any) -> bool:
1215
1298
  return bool(getattr(obj, _IS_ABSTRACT_METHOD_ATTR, False))
1216
1299
 
1217
1300
 
1218
- def update_abstracts(cls, *, force=False):
1301
+ def compute_abstract_methods(cls: type) -> ta.FrozenSet[str]:
1302
+ # ~> https://github.com/python/cpython/blob/f3476c6507381ca860eec0989f53647b13517423/Modules/_abc.c#L358
1303
+
1304
+ # Stage 1: direct abstract methods
1305
+
1306
+ abstracts = {
1307
+ a
1308
+ # Get items as a list to avoid mutation issues during iteration
1309
+ for a, v in list(cls.__dict__.items())
1310
+ if is_abstract_method(v)
1311
+ }
1312
+
1313
+ # Stage 2: inherited abstract methods
1314
+
1315
+ for base in cls.__bases__:
1316
+ # Get __abstractmethods__ from base if it exists
1317
+ if (base_abstracts := getattr(base, _ABSTRACT_METHODS_ATTR, None)) is None:
1318
+ continue
1319
+
1320
+ # Iterate over abstract methods in base
1321
+ for key in base_abstracts:
1322
+ # Check if this class has an attribute with this name
1323
+ try:
1324
+ value = getattr(cls, key)
1325
+ except AttributeError:
1326
+ # Attribute not found in this class, skip
1327
+ continue
1328
+
1329
+ # Check if it's still abstract
1330
+ if is_abstract_method(value):
1331
+ abstracts.add(key)
1332
+
1333
+ return frozenset(abstracts)
1334
+
1335
+
1336
+ def update_abstracts(cls: ta.Type[T], *, force: bool = False) -> ta.Type[T]:
1219
1337
  if not force and not hasattr(cls, _ABSTRACT_METHODS_ATTR):
1220
1338
  # Per stdlib: We check for __abstractmethods__ here because cls might by a C implementation or a python
1221
1339
  # implementation (especially during testing), and we want to handle both cases.
1222
1340
  return cls
1223
1341
 
1224
- abstracts: ta.Set[str] = set()
1225
-
1226
- for scls in cls.__bases__:
1227
- for name in getattr(scls, _ABSTRACT_METHODS_ATTR, ()):
1228
- value = getattr(cls, name, None)
1229
- if getattr(value, _IS_ABSTRACT_METHOD_ATTR, False):
1230
- abstracts.add(name)
1231
-
1232
- for name, value in cls.__dict__.items():
1233
- if getattr(value, _IS_ABSTRACT_METHOD_ATTR, False):
1234
- abstracts.add(name)
1235
-
1236
- setattr(cls, _ABSTRACT_METHODS_ATTR, frozenset(abstracts))
1342
+ abstracts = compute_abstract_methods(cls)
1343
+ setattr(cls, _ABSTRACT_METHODS_ATTR, abstracts)
1237
1344
  return cls
1238
1345
 
1239
1346
 
@@ -1287,23 +1394,26 @@ class Abstract:
1287
1394
  super().__init_subclass__(**kwargs)
1288
1395
 
1289
1396
  if not (Abstract in cls.__bases__ or abc.ABC in cls.__bases__):
1290
- ams = {a: cls for a, o in cls.__dict__.items() if is_abstract_method(o)}
1397
+ if ams := compute_abstract_methods(cls):
1398
+ amd = {
1399
+ a: mcls
1400
+ for mcls in cls.__mro__[::-1]
1401
+ for a in ams
1402
+ if a in mcls.__dict__
1403
+ }
1291
1404
 
1292
- seen = set(cls.__dict__)
1293
- for b in cls.__bases__:
1294
- ams.update({a: b for a in set(getattr(b, _ABSTRACT_METHODS_ATTR, [])) - seen}) # noqa
1295
- seen.update(dir(b))
1296
-
1297
- if ams:
1298
1405
  raise AbstractTypeError(
1299
1406
  f'Cannot subclass abstract class {cls.__name__} with abstract methods: ' +
1300
1407
  ', '.join(sorted([
1301
1408
  '.'.join([
1302
- *([m] if (m := getattr(c, '__module__')) else []),
1303
- getattr(c, '__qualname__', getattr(c, '__name__')),
1409
+ *([
1410
+ *([m] if (m := getattr(c, '__module__')) else []),
1411
+ getattr(c, '__qualname__', getattr(c, '__name__')),
1412
+ ] if c is not None else '?'),
1304
1413
  a,
1305
1414
  ])
1306
- for a, c in ams.items()
1415
+ for a in ams
1416
+ for c in [amd.get(a)]
1307
1417
  ])),
1308
1418
  )
1309
1419
 
@@ -1320,6 +1430,150 @@ class Abstract:
1320
1430
  update_abstracts(cls, force=True)
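A sketch of how the reworked abstract machinery behaves at class-creation time, assuming is_abstract_method() keys off the standard __isabstractmethod__ flag set by abc.abstractmethod (the example classes are made up; Abstract and AbstractTypeError are the names defined in this script):

    import abc

    class Greeter(Abstract):
        @abc.abstractmethod
        def greet(self) -> str: ...

    class Hello(Greeter):      # fine: compute_abstract_methods() finds nothing left abstract
        def greet(self) -> str:
            return 'hello'

    # class Broken(Greeter):   # would raise AbstractTypeError when the class is created,
    #     pass                 # since 'greet' is still abstract in its inherited set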
1321
1431
 
1322
1432
 
1433
+ ########################################
1434
+ # ../../../../../omlish/lite/asyncs.py
1435
+
1436
+
1437
+ ##
1438
+
1439
+
1440
+ async def opt_await(aw: ta.Optional[ta.Awaitable[T]]) -> ta.Optional[T]:
1441
+ return (await aw if aw is not None else None)
1442
+
1443
+
1444
+ async def async_list(ai: ta.AsyncIterable[T]) -> ta.List[T]:
1445
+ return [v async for v in ai]
1446
+
1447
+
1448
+ async def async_enumerate(ai: ta.AsyncIterable[T]) -> ta.AsyncIterable[ta.Tuple[int, T]]:
1449
+ i = 0
1450
+ async for e in ai:
1451
+ yield (i, e)
1452
+ i += 1
1453
+
1454
+
1455
+ ##
1456
+
1457
+
1458
+ def as_async(fn: ta.Callable[..., T], *, wrap: bool = False) -> ta.Callable[..., ta.Awaitable[T]]:
1459
+ async def inner(*args, **kwargs):
1460
+ return fn(*args, **kwargs)
1461
+
1462
+ return functools.wraps(fn)(inner) if wrap else inner
1463
+
1464
+
1465
+ ##
1466
+
1467
+
1468
+ class SyncAwaitCoroutineNotTerminatedError(Exception):
1469
+ pass
1470
+
1471
+
1472
+ def sync_await(aw: ta.Awaitable[T]) -> T:
1473
+ """
1474
+ Allows for the synchronous execution of async functions which will never actually *externally* await anything. These
1475
+ functions are allowed to await any number of other functions - including contextmanagers and generators - so long as
1476
+ nothing ever actually 'leaks' out of the function, presumably to an event loop.
1477
+ """
1478
+
1479
+ ret = missing = object()
1480
+
1481
+ async def thunk():
1482
+ nonlocal ret
1483
+
1484
+ ret = await aw
1485
+
1486
+ cr = thunk()
1487
+ try:
1488
+ try:
1489
+ cr.send(None)
1490
+ except StopIteration:
1491
+ pass
1492
+
1493
+ if ret is missing or cr.cr_await is not None or cr.cr_running:
1494
+ raise SyncAwaitCoroutineNotTerminatedError('Not terminated')
1495
+
1496
+ finally:
1497
+ cr.close()
1498
+
1499
+ return ta.cast(T, ret)
1500
+
1501
+
1502
+ #
1503
+
1504
+
1505
+ def sync_aiter(ai: ta.AsyncIterator[T]) -> ta.Iterator[T]:
1506
+ while True:
1507
+ try:
1508
+ o = sync_await(ai.__anext__())
1509
+ except StopAsyncIteration:
1510
+ break
1511
+ yield o
1512
+
1513
+
1514
+ def sync_async_list(ai: ta.AsyncIterable[T]) -> ta.List[T]:
1515
+ """
1516
+ Uses `sync_await` to synchronously read the full contents of a function call returning an async iterator, given that
1517
+ the function never externally awaits anything.
1518
+ """
1519
+
1520
+ lst: ta.Optional[ta.List[T]] = None
1521
+
1522
+ async def inner():
1523
+ nonlocal lst
1524
+
1525
+ lst = [v async for v in ai]
1526
+
1527
+ sync_await(inner())
1528
+
1529
+ if not isinstance(lst, list):
1530
+ raise TypeError(lst)
1531
+
1532
+ return lst
1533
+
1534
+
1535
+ #
1536
+
1537
+
1538
+ @ta.final
1539
+ class SyncAwaitContextManager(ta.Generic[T]):
1540
+ def __init__(self, acm: ta.AsyncContextManager[T]) -> None:
1541
+ self._acm = acm
1542
+
1543
+ def __repr__(self) -> str:
1544
+ return f'{self.__class__.__name__}({self._acm!r})'
1545
+
1546
+ def __enter__(self) -> T:
1547
+ return sync_await(self._acm.__aenter__())
1548
+
1549
+ def __exit__(self, exc_type, exc_val, exc_tb):
1550
+ return sync_await(self._acm.__aexit__(exc_type, exc_val, exc_tb))
1551
+
1552
+
1553
+ sync_async_with = SyncAwaitContextManager
1554
+
1555
+
1556
+ ##
1557
+
1558
+
1559
+ @ta.final
1560
+ class SyncToAsyncContextManager(ta.Generic[T]):
1561
+ def __init__(self, cm: ta.ContextManager[T]) -> None:
1562
+ self._cm = cm
1563
+
1564
+ def __repr__(self) -> str:
1565
+ return f'{self.__class__.__name__}({self._cm!r})'
1566
+
1567
+ async def __aenter__(self) -> T:
1568
+ return self._cm.__enter__()
1569
+
1570
+ async def __aexit__(self, exc_type, exc_value, traceback, /):
1571
+ return self._cm.__exit__(exc_type, exc_value, traceback)
1572
+
1573
+
1574
+ as_async_context_manager = SyncToAsyncContextManager
1575
+
1576
+
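A rough usage sketch of the helpers above: sync_await drives a coroutine that never actually suspends on anything external, and the iterator and context-manager wrappers build on it (the toy add and gen functions are hypothetical):

    async def add(a: int, b: int) -> int:
        return a + b            # awaits nothing externally

    assert sync_await(add(1, 2)) == 3

    async def gen():
        for i in range(3):
            yield i

    assert sync_async_list(gen()) == [0, 1, 2]
    assert list(sync_aiter(gen())) == [0, 1, 2]

    # as_async_context_manager lets a plain context manager be used under 'async with':
    #   async with as_async_context_manager(open('x.txt')) as f: ...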
1323
1577
  ########################################
1324
1578
  # ../../../../../omlish/lite/attrops.py
1325
1579
  """
@@ -1328,6 +1582,8 @@ TODO:
1328
1582
  - per-attr repr transform / filter
1329
1583
  - __ne__ ? cases where it still matters
1330
1584
  - ordering ?
1585
+ - repr_filter: ta.Union[ta.Callable[[ta.Any], ta.Optional[str]], ta.Literal['not_none', 'truthy']]] ?
1586
+ - unify repr/repr_fn/repr_filter
1331
1587
  """
1332
1588
 
1333
1589
 
@@ -1345,6 +1601,8 @@ class AttrOps(ta.Generic[T]):
1345
1601
  display: ta.Optional[str] = None,
1346
1602
 
1347
1603
  repr: bool = True, # noqa
1604
+ repr_fn: ta.Optional[ta.Callable[[ta.Any], ta.Optional[str]]] = None,
1605
+
1348
1606
  hash: bool = True, # noqa
1349
1607
  eq: bool = True,
1350
1608
  ) -> None:
@@ -1359,6 +1617,8 @@ class AttrOps(ta.Generic[T]):
1359
1617
  self._display = display
1360
1618
 
1361
1619
  self._repr = repr
1620
+ self._repr_fn = repr_fn
1621
+
1362
1622
  self._hash = hash
1363
1623
  self._eq = eq
1364
1624
 
@@ -1366,21 +1626,30 @@ class AttrOps(ta.Generic[T]):
1366
1626
  def of(
1367
1627
  cls,
1368
1628
  o: ta.Union[
1369
- str,
1370
- ta.Tuple[str, str],
1371
1629
  'AttrOps.Attr',
1630
+ str,
1631
+ ta.Tuple[str, ta.Union[str, ta.Mapping[str, ta.Any]]],
1632
+ ta.Mapping[str, ta.Any],
1372
1633
  ],
1373
1634
  ) -> 'AttrOps.Attr':
1374
1635
  if isinstance(o, AttrOps.Attr):
1375
1636
  return o
1376
1637
  elif isinstance(o, str):
1377
1638
  return cls(o)
1639
+ elif isinstance(o, tuple):
1640
+ name, x = o
1641
+ kw: ta.Mapping[str, ta.Any]
1642
+ if isinstance(x, str):
1643
+ kw = dict(display=x)
1644
+ elif isinstance(x, ta.Mapping):
1645
+ kw = x
1646
+ else:
1647
+ raise TypeError(x)
1648
+ return cls(name, **kw)
1649
+ elif isinstance(o, ta.Mapping):
1650
+ return cls(**o)
1378
1651
  else:
1379
- name, disp = o
1380
- return cls(
1381
- name,
1382
- display=disp,
1383
- )
1652
+ raise TypeError(o)
1384
1653
 
1385
1654
  @property
1386
1655
  def name(self) -> str:
@@ -1398,19 +1667,34 @@ class AttrOps(ta.Generic[T]):
1398
1667
  def eq(self) -> bool:
1399
1668
  return self._eq
1400
1669
 
1670
+ @staticmethod
1671
+ def opt_repr(o: ta.Any) -> ta.Optional[str]:
1672
+ return repr(o) if o is not None else None
1673
+
1674
+ @staticmethod
1675
+ def truthy_repr(o: ta.Any) -> ta.Optional[str]:
1676
+ return repr(o) if o else None
1677
+
1678
+ #
1679
+
1401
1680
  @ta.overload
1402
1681
  def __init__(
1403
1682
  self,
1404
1683
  *attrs: ta.Sequence[ta.Union[
1405
1684
  str,
1406
- ta.Tuple[str, str],
1685
+ ta.Tuple[str, ta.Union[str, ta.Mapping[str, ta.Any]]],
1686
+ ta.Mapping[str, ta.Any],
1407
1687
  Attr,
1408
1688
  ]],
1689
+
1409
1690
  with_module: bool = False,
1410
1691
  use_qualname: bool = False,
1411
1692
  with_id: bool = False,
1693
+ terse: bool = False,
1412
1694
  repr_filter: ta.Optional[ta.Callable[[ta.Any], bool]] = None,
1413
1695
  recursive: bool = False,
1696
+
1697
+ cache_hash: ta.Union[bool, str] = False,
1414
1698
  subtypes_eq: bool = False,
1415
1699
  ) -> None:
1416
1700
  ...
@@ -1420,16 +1704,20 @@ class AttrOps(ta.Generic[T]):
1420
1704
  self,
1421
1705
  attrs_fn: ta.Callable[[T], ta.Tuple[ta.Union[
1422
1706
  ta.Any,
1423
- ta.Tuple[str, ta.Any],
1707
+ ta.Tuple[ta.Any, ta.Union[str, ta.Mapping[str, ta.Any]]],
1424
1708
  Attr,
1425
1709
  ], ...]],
1426
1710
  /,
1427
1711
  *,
1712
+
1428
1713
  with_module: bool = False,
1429
1714
  use_qualname: bool = False,
1430
1715
  with_id: bool = False,
1716
+ terse: bool = False,
1431
1717
  repr_filter: ta.Optional[ta.Callable[[ta.Any], bool]] = None,
1432
1718
  recursive: bool = False,
1719
+
1720
+ cache_hash: ta.Union[bool, str] = False,
1433
1721
  subtypes_eq: bool = False,
1434
1722
  ) -> None:
1435
1723
  ...
@@ -1437,11 +1725,15 @@ class AttrOps(ta.Generic[T]):
1437
1725
  def __init__(
1438
1726
  self,
1439
1727
  *args,
1728
+
1440
1729
  with_module=False,
1441
1730
  use_qualname=False,
1442
1731
  with_id=False,
1732
+ terse=False,
1443
1733
  repr_filter=None,
1444
1734
  recursive=False,
1735
+
1736
+ cache_hash=False,
1445
1737
  subtypes_eq=False,
1446
1738
  ) -> None:
1447
1739
  if args and len(args) == 1 and callable(args[0]):
@@ -1452,8 +1744,11 @@ class AttrOps(ta.Generic[T]):
1452
1744
  self._with_module: bool = with_module
1453
1745
  self._use_qualname: bool = use_qualname
1454
1746
  self._with_id: bool = with_id
1747
+ self._terse: bool = terse
1455
1748
  self._repr_filter: ta.Optional[ta.Callable[[ta.Any], bool]] = repr_filter
1456
1749
  self._recursive: bool = recursive
1750
+
1751
+ self._cache_hash: ta.Union[bool, str] = cache_hash
1457
1752
  self._subtypes_eq: bool = subtypes_eq
1458
1753
 
1459
1754
  @property
@@ -1488,20 +1783,27 @@ class AttrOps(ta.Generic[T]):
1488
1783
 
1489
1784
  attrs: ta.List[AttrOps.Attr] = []
1490
1785
  for o in raw:
1491
- if isinstance(o, AttrOps.Attr):
1492
- attrs.append(o)
1786
+ if isinstance(o, (AttrOps.Attr, ta.Mapping)):
1787
+ attrs.append(AttrOps.Attr.of(o))
1493
1788
  continue
1494
1789
 
1790
+ kw: ta.Mapping[str, ta.Any]
1495
1791
  if isinstance(o, tuple):
1496
- disp, cap, = o
1792
+ cap, x = o
1793
+ if isinstance(x, str):
1794
+ kw = dict(display=x)
1795
+ elif isinstance(x, ta.Mapping):
1796
+ kw = x
1797
+ else:
1798
+ raise TypeError(x)
1497
1799
  else:
1498
- disp, cap = None, o
1800
+ cap, kw = o, {}
1499
1801
 
1500
1802
  path = tuple(rec(cap))
1501
1803
 
1502
1804
  attrs.append(AttrOps.Attr(
1503
1805
  '.'.join(path),
1504
- display=disp,
1806
+ **kw,
1505
1807
  ))
1506
1808
 
1507
1809
  return attrs
@@ -1518,19 +1820,27 @@ class AttrOps(ta.Generic[T]):
1518
1820
  pass
1519
1821
 
1520
1822
  def _repr(o: T) -> str:
1521
- vs = ', '.join(
1522
- f'{a._display}={v!r}' # noqa
1523
- for a in self._attrs
1524
- if a._repr # noqa
1525
- for v in [getattr(o, a._name)] # noqa
1526
- if self._repr_filter is None or self._repr_filter(v)
1527
- )
1823
+ vs: ta.List[str] = []
1824
+ for a in self._attrs:
1825
+ if not a._repr: # noqa
1826
+ continue
1827
+ v = getattr(o, a._name) # noqa
1828
+ if self._repr_filter is not None and not self._repr_filter(v):
1829
+ continue
1830
+ if (rfn := a._repr_fn) is None: # noqa
1831
+ rfn = repr
1832
+ if (vr := rfn(v)) is None:
1833
+ continue
1834
+ if self._terse:
1835
+ vs.append(vr)
1836
+ else:
1837
+ vs.append(f'{a._display}={vr}') # noqa
1528
1838
 
1529
1839
  return (
1530
1840
  f'{o.__class__.__module__ + "." if self._with_module else ""}'
1531
1841
  f'{o.__class__.__qualname__ if self._use_qualname else o.__class__.__name__}'
1532
- f'{("@" + hex(id(o))[2:]) if self._with_id else ""}'
1533
- f'({vs})'
1842
+ f'{("@" + hex(id(o))[2:]) if self._with_id else ""}' # noqa
1843
+ f'({", ".join(vs)})'
1534
1844
  )
1535
1845
 
1536
1846
  if self._recursive:
@@ -1555,6 +1865,8 @@ class AttrOps(ta.Generic[T]):
1555
1865
 
1556
1866
  #
1557
1867
 
1868
+ _DEFAULT_CACHED_HASH_ATTR: ta.ClassVar[str] = '__cached_hash__'
1869
+
1558
1870
  _hash: ta.Callable[[T], int]
1559
1871
 
1560
1872
  @property
@@ -1564,13 +1876,33 @@ class AttrOps(ta.Generic[T]):
1564
1876
  except AttributeError:
1565
1877
  pass
1566
1878
 
1567
- def _hash(o: T) -> int:
1879
+ def _calc_hash(o: T) -> int:
1568
1880
  return hash(tuple(
1569
1881
  getattr(o, a._name) # noqa
1570
1882
  for a in self._attrs
1571
1883
  if a._hash # noqa
1572
1884
  ))
1573
1885
 
1886
+ if (ch := self._cache_hash) is not False:
1887
+ if ch is True:
1888
+ cha = self._DEFAULT_CACHED_HASH_ATTR
1889
+ elif isinstance(ch, str):
1890
+ cha = ch
1891
+ else:
1892
+ raise TypeError(ch)
1893
+
1894
+ def _cached_hash(o: T) -> int:
1895
+ try:
1896
+ return object.__getattribute__(o, cha)
1897
+ except AttributeError:
1898
+ object.__setattr__(o, cha, h := _calc_hash(o))
1899
+ return h
1900
+
1901
+ _hash = _cached_hash
1902
+
1903
+ else:
1904
+ _hash = _calc_hash
1905
+
1574
1906
  self._hash = _hash
1575
1907
  return _hash
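For reference, the reworked Attr.of accepts several spec shapes, and cache_hash memoizes the computed hash on the instance (under '__cached_hash__' by default, or a caller-supplied attribute name). A small sketch of the accepted spec forms, using only the constructor arguments shown above (values are illustrative):

    a1 = AttrOps.Attr.of('host')                                 # bare attribute name
    a2 = AttrOps.Attr.of(('port', 'p'))                          # (name, display)
    a3 = AttrOps.Attr.of(('token', {'repr_fn': lambda v: repr(v) if v is not None else None}))
    a4 = AttrOps.Attr.of({'name': 'password', 'repr': False})    # full kwargs mapping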
1576
1908
 
@@ -1711,6 +2043,62 @@ def async_cached_nullary(fn): # ta.Callable[..., T]) -> ta.Callable[..., T]:
1711
2043
  return _AsyncCachedNullary(fn)
1712
2044
 
1713
2045
 
2046
+ ##
2047
+
2048
+
2049
+ cached_property = functools.cached_property
2050
+
2051
+
2052
+ class _cached_property: # noqa
2053
+ """Backported to pick up https://github.com/python/cpython/commit/056dfc71dce15f81887f0bd6da09d6099d71f979 ."""
2054
+
2055
+ def __init__(self, func):
2056
+ self.func = func
2057
+ self.attrname = None # noqa
2058
+ self.__doc__ = func.__doc__
2059
+ self.__module__ = func.__module__
2060
+
2061
+ _NOT_FOUND = object()
2062
+
2063
+ def __set_name__(self, owner, name):
2064
+ if self.attrname is None:
2065
+ self.attrname = name # noqa
2066
+ elif name != self.attrname:
2067
+ raise TypeError(
2068
+ f'Cannot assign the same cached_property to two different names ({self.attrname!r} and {name!r}).',
2069
+ )
2070
+
2071
+ def __get__(self, instance, owner=None):
2072
+ if instance is None:
2073
+ return self
2074
+ if self.attrname is None:
2075
+ raise TypeError('Cannot use cached_property instance without calling __set_name__ on it.')
2076
+
2077
+ try:
2078
+ cache = instance.__dict__
2079
+ except AttributeError: # not all objects have __dict__ (e.g. class defines slots)
2080
+ raise TypeError(
2081
+ f"No '__dict__' attribute on {type(instance).__name__!r} instance to cache {self.attrname!r} property.",
2082
+ ) from None
2083
+
2084
+ val = cache.get(self.attrname, self._NOT_FOUND)
2085
+
2086
+ if val is self._NOT_FOUND:
2087
+ val = self.func(instance)
2088
+ try:
2089
+ cache[self.attrname] = val
2090
+ except TypeError:
2091
+ raise TypeError(
2092
+ f"The '__dict__' attribute on {type(instance).__name__!r} instance does not support item "
2093
+ f"assignment for caching {self.attrname!r} property.",
2094
+ ) from None
2095
+
2096
+ return val
2097
+
2098
+
2099
+ globals()['cached_property'] = _cached_property
2100
+
2101
+
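A behavior sketch of the backported _cached_property (installed over the cached_property alias via globals() above): the computed value is stored in the instance __dict__ under the property's name, so later lookups bypass the descriptor entirely (the Expensive class is illustrative):

    class Expensive:
        def __init__(self) -> None:
            self.calls = 0

        @cached_property
        def value(self) -> int:
            self.calls += 1
            return 42

    e = Expensive()
    assert e.value == 42 and e.value == 42
    assert e.calls == 1                 # computed only once
    assert e.__dict__['value'] == 42    # cached directly on the instance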
1714
2102
  ########################################
1715
2103
  # ../../../../../omlish/lite/check.py
1716
2104
  """
@@ -2250,7 +2638,7 @@ class ExitStacked:
2250
2638
  es.__enter__()
2251
2639
  try:
2252
2640
  self._enter_contexts()
2253
- except Exception: # noqa
2641
+ except BaseException: # noqa
2254
2642
  es.__exit__(*sys.exc_info())
2255
2643
  raise
2256
2644
  return self
@@ -2261,7 +2649,7 @@ class ExitStacked:
2261
2649
  return None
2262
2650
  try:
2263
2651
  self._exit_contexts()
2264
- except Exception: # noqa
2652
+ except BaseException: # noqa
2265
2653
  es.__exit__(*sys.exc_info())
2266
2654
  raise
2267
2655
  return es.__exit__(exc_type, exc_val, exc_tb)
@@ -2309,7 +2697,7 @@ class AsyncExitStacked:
2309
2697
  await es.__aenter__()
2310
2698
  try:
2311
2699
  await self._async_enter_contexts()
2312
- except Exception: # noqa
2700
+ except BaseException: # noqa
2313
2701
  await es.__aexit__(*sys.exc_info())
2314
2702
  raise
2315
2703
  return self
@@ -2320,7 +2708,7 @@ class AsyncExitStacked:
2320
2708
  return None
2321
2709
  try:
2322
2710
  await self._async_exit_contexts()
2323
- except Exception: # noqa
2711
+ except BaseException: # noqa
2324
2712
  await es.__aexit__(*sys.exc_info())
2325
2713
  raise
2326
2714
  return await es.__aexit__(exc_type, exc_val, exc_tb)
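The switch from 'except Exception' to 'except BaseException' in the enter/exit paths matters for KeyboardInterrupt and SystemExit, which do not derive from Exception: with the broader clause the partially entered exit stack is still unwound before such exceptions propagate. A standalone illustration of the distinction (independent of the classes above):

    def cleanup_on(handler_type: type) -> bool:
        try:
            raise KeyboardInterrupt
        except handler_type:
            return True             # cleanup path taken

    assert cleanup_on(BaseException) is True
    try:
        cleanup_on(Exception)       # KeyboardInterrupt is not an Exception ...
    except KeyboardInterrupt:
        pass                        # ... so it escapes the narrower handler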
@@ -3747,6 +4135,10 @@ DEFAULT_CONFIG_RENDERER = SwitchedConfigRenderer(DEFAULT_CONFIG_RENDERERS)
3747
4135
 
3748
4136
  ########################################
3749
4137
  # ../../../../../omlish/io/buffers.py
4138
+ """
4139
+ TODO:
4140
+ - overhaul and just coro-ify pyio?
4141
+ """
3750
4142
 
3751
4143
 
3752
4144
  ##
@@ -3925,6 +4317,9 @@ class ReadableListBuffer:
3925
4317
 
3926
4318
  self._lst: list[bytes] = []
3927
4319
 
4320
+ def __bool__(self) -> ta.NoReturn:
4321
+ raise TypeError("Use 'buf is not None' or 'len(buf)'.")
4322
+
3928
4323
  def __len__(self) -> int:
3929
4324
  return sum(map(len, self._lst))
3930
4325
 
@@ -3950,6 +4345,9 @@ class ReadableListBuffer:
3950
4345
 
3951
4346
  def read(self, n: ta.Optional[int] = None) -> ta.Optional[bytes]:
3952
4347
  if n is None:
4348
+ if not self._lst:
4349
+ return b''
4350
+
3953
4351
  o = b''.join(self._lst)
3954
4352
  self._lst = []
3955
4353
  return o
@@ -3988,6 +4386,110 @@ class ReadableListBuffer:
3988
4386
  r = self.read_until_(delim)
3989
4387
  return r if isinstance(r, bytes) else None
3990
4388
 
4389
+ #
4390
+
4391
+ DEFAULT_BUFFERED_READER_CHUNK_SIZE: ta.ClassVar[int] = -1
4392
+
4393
+ @ta.final
4394
+ class _BufferedBytesReader(BufferedBytesReader):
4395
+ def __init__(
4396
+ self,
4397
+ raw: RawBytesReader,
4398
+ buf: 'ReadableListBuffer',
4399
+ *,
4400
+ chunk_size: ta.Optional[int] = None,
4401
+ ) -> None:
4402
+ self._raw = raw
4403
+ self._buf = buf
4404
+ self._chunk_size = chunk_size or ReadableListBuffer.DEFAULT_BUFFERED_READER_CHUNK_SIZE
4405
+
4406
+ def read1(self, n: int = -1, /) -> bytes:
4407
+ if n < 0:
4408
+ n = self._chunk_size
4409
+ if not n:
4410
+ return b''
4411
+ if 0 < n <= len(self._buf):
4412
+ return self._buf.read(n) or b''
4413
+ return self._raw.read1(n)
4414
+
4415
+ def read(self, /, n: int = -1) -> bytes:
4416
+ if n < 0:
4417
+ return self.readall()
4418
+ while len(self._buf) < n:
4419
+ if not (b := self._raw.read1(n)):
4420
+ break
4421
+ self._buf.feed(b)
4422
+ return self._buf.read(n) or b''
4423
+
4424
+ def readall(self) -> bytes:
4425
+ buf = io.BytesIO()
4426
+ buf.write(self._buf.read() or b'')
4427
+ while (b := self._raw.read1(self._chunk_size)):
4428
+ buf.write(b)
4429
+ return buf.getvalue()
4430
+
4431
+ def new_buffered_reader(
4432
+ self,
4433
+ raw: RawBytesReader,
4434
+ *,
4435
+ chunk_size: ta.Optional[int] = None,
4436
+ ) -> BufferedBytesReader:
4437
+ return self._BufferedBytesReader(
4438
+ raw,
4439
+ self,
4440
+ chunk_size=chunk_size,
4441
+ )
4442
+
4443
+ @ta.final
4444
+ class _AsyncBufferedBytesReader(AsyncBufferedBytesReader):
4445
+ def __init__(
4446
+ self,
4447
+ raw: AsyncRawBytesReader,
4448
+ buf: 'ReadableListBuffer',
4449
+ *,
4450
+ chunk_size: ta.Optional[int] = None,
4451
+ ) -> None:
4452
+ self._raw = raw
4453
+ self._buf = buf
4454
+ self._chunk_size = chunk_size or ReadableListBuffer.DEFAULT_BUFFERED_READER_CHUNK_SIZE
4455
+
4456
+ async def read1(self, n: int = -1, /) -> bytes:
4457
+ if n < 0:
4458
+ n = self._chunk_size
4459
+ if not n:
4460
+ return b''
4461
+ if 0 < n <= len(self._buf):
4462
+ return self._buf.read(n) or b''
4463
+ return await self._raw.read1(n)
4464
+
4465
+ async def read(self, /, n: int = -1) -> bytes:
4466
+ if n < 0:
4467
+ return await self.readall()
4468
+ while len(self._buf) < n:
4469
+ if not (b := await self._raw.read1(n)):
4470
+ break
4471
+ self._buf.feed(b)
4472
+ return self._buf.read(n) or b''
4473
+
4474
+ async def readall(self) -> bytes:
4475
+ buf = io.BytesIO()
4476
+ buf.write(self._buf.read() or b'')
4477
+ while b := await self._raw.read1(self._chunk_size):
4478
+ buf.write(b)
4479
+ return buf.getvalue()
4480
+
4481
+ def new_async_buffered_reader(
4482
+ self,
4483
+ raw: AsyncRawBytesReader,
4484
+ *,
4485
+ chunk_size: ta.Optional[int] = None,
4486
+ ) -> AsyncBufferedBytesReader:
4487
+ return self._AsyncBufferedBytesReader(
4488
+ raw,
4489
+ self,
4490
+ chunk_size=chunk_size,
4491
+ )
4492
+
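A small usage sketch of the new buffered-reader factories, assuming any raw source with a read1() method (here an io.BytesIO):

    import io

    raw = io.BytesIO(b'hello world')
    br = ReadableListBuffer().new_buffered_reader(raw, chunk_size=4)

    assert br.read(5) == b'hello'   # loops read1() until 5 bytes are buffered
    assert br.read1() == b' wor'    # at most one raw read1() of chunk_size bytes
    assert br.readall() == b'ld'    # drains the buffer, then the raw reader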
3991
4493
 
3992
4494
  ##
3993
4495
 
@@ -5552,6 +6054,9 @@ class CaptureLoggingContextImpl(CaptureLoggingContext):
5552
6054
  self._infos[type(info)] = info
5553
6055
  return self
5554
6056
 
6057
+ def get_infos(self) -> ta.Mapping[ta.Type[LoggingContextInfo], LoggingContextInfo]:
6058
+ return self._infos
6059
+
5555
6060
  def get_info(self, ty: ta.Type[LoggingContextInfoT]) -> ta.Optional[LoggingContextInfoT]:
5556
6061
  return self._infos.get(ty)
5557
6062
 
@@ -5574,7 +6079,7 @@ class CaptureLoggingContextImpl(CaptureLoggingContext):
5574
6079
  _stack_offset: int
5575
6080
  _stack_info: bool
5576
6081
 
5577
- def inc_stack_offset(self, ofs: int = 1) -> 'CaptureLoggingContext':
6082
+ def inc_stack_offset(self, ofs: int = 1) -> 'CaptureLoggingContextImpl':
5578
6083
  if hasattr(self, '_stack_offset'):
5579
6084
  self._stack_offset += ofs
5580
6085
  return self
@@ -5606,10 +6111,9 @@ class CaptureLoggingContextImpl(CaptureLoggingContext):
5606
6111
 
5607
6112
 
5608
6113
  ########################################
5609
- # ../../../../../omlish/logs/standard.py
6114
+ # ../../../../../omlish/logs/std/standard.py
5610
6115
  """
5611
6116
  TODO:
5612
- - !! move to std !!
5613
6117
  - structured
5614
6118
  - prefixed
5615
6119
  - debug
@@ -5785,6 +6289,11 @@ class AnyLogger(Abstract, ta.Generic[T]):
5785
6289
 
5786
6290
  ##
5787
6291
 
6292
+ # This will be 1 for [Sync]Logger and 0 for AsyncLogger - in sync loggers these methods remain present on the stack,
6293
+ # in async loggers they return a coroutine to be awaited and thus aren't actually present when said coroutine is
6294
+ # awaited.
6295
+ _level_proxy_method_stack_offset: int
6296
+
5788
6297
  @ta.overload
5789
6298
  def log(self, level: LogLevel, msg: str, *args: ta.Any, **kwargs: ta.Any) -> T:
5790
6299
  ...
@@ -5799,7 +6308,14 @@ class AnyLogger(Abstract, ta.Generic[T]):
5799
6308
 
5800
6309
  @ta.final
5801
6310
  def log(self, level: LogLevel, *args, **kwargs):
5802
- return self._log(CaptureLoggingContextImpl(level, stack_offset=1), *args, **kwargs)
6311
+ return self._log(
6312
+ CaptureLoggingContextImpl(
6313
+ level,
6314
+ stack_offset=self._level_proxy_method_stack_offset,
6315
+ ),
6316
+ *args,
6317
+ **kwargs,
6318
+ )
5803
6319
 
5804
6320
  #
5805
6321
 
@@ -5817,7 +6333,14 @@ class AnyLogger(Abstract, ta.Generic[T]):
5817
6333
 
5818
6334
  @ta.final
5819
6335
  def debug(self, *args, **kwargs):
5820
- return self._log(CaptureLoggingContextImpl(NamedLogLevel.DEBUG, stack_offset=1), *args, **kwargs)
6336
+ return self._log(
6337
+ CaptureLoggingContextImpl(
6338
+ NamedLogLevel.DEBUG,
6339
+ stack_offset=self._level_proxy_method_stack_offset,
6340
+ ),
6341
+ *args,
6342
+ **kwargs,
6343
+ )
5821
6344
 
5822
6345
  #
5823
6346
 
@@ -5835,7 +6358,14 @@ class AnyLogger(Abstract, ta.Generic[T]):
5835
6358
 
5836
6359
  @ta.final
5837
6360
  def info(self, *args, **kwargs):
5838
- return self._log(CaptureLoggingContextImpl(NamedLogLevel.INFO, stack_offset=1), *args, **kwargs)
6361
+ return self._log(
6362
+ CaptureLoggingContextImpl(
6363
+ NamedLogLevel.INFO,
6364
+ stack_offset=self._level_proxy_method_stack_offset,
6365
+ ),
6366
+ *args,
6367
+ **kwargs,
6368
+ )
5839
6369
 
5840
6370
  #
5841
6371
 
@@ -5853,7 +6383,14 @@ class AnyLogger(Abstract, ta.Generic[T]):
5853
6383
 
5854
6384
  @ta.final
5855
6385
  def warning(self, *args, **kwargs):
5856
- return self._log(CaptureLoggingContextImpl(NamedLogLevel.WARNING, stack_offset=1), *args, **kwargs)
6386
+ return self._log(
6387
+ CaptureLoggingContextImpl(
6388
+ NamedLogLevel.WARNING,
6389
+ stack_offset=self._level_proxy_method_stack_offset,
6390
+ ),
6391
+ *args,
6392
+ **kwargs,
6393
+ )
5857
6394
 
5858
6395
  #
5859
6396
 
@@ -5871,7 +6408,14 @@ class AnyLogger(Abstract, ta.Generic[T]):
5871
6408
 
5872
6409
  @ta.final
5873
6410
  def error(self, *args, **kwargs):
5874
- return self._log(CaptureLoggingContextImpl(NamedLogLevel.ERROR, stack_offset=1), *args, **kwargs)
6411
+ return self._log(
6412
+ CaptureLoggingContextImpl(
6413
+ NamedLogLevel.ERROR,
6414
+ stack_offset=self._level_proxy_method_stack_offset,
6415
+ ),
6416
+ *args,
6417
+ **kwargs,
6418
+ )
5875
6419
 
5876
6420
  #
5877
6421
 
@@ -5889,7 +6433,15 @@ class AnyLogger(Abstract, ta.Generic[T]):
5889
6433
 
5890
6434
  @ta.final
5891
6435
  def exception(self, *args, exc_info: LoggingExcInfoArg = True, **kwargs):
5892
- return self._log(CaptureLoggingContextImpl(NamedLogLevel.ERROR, exc_info=exc_info, stack_offset=1), *args, **kwargs) # noqa
6436
+ return self._log(
6437
+ CaptureLoggingContextImpl(
6438
+ NamedLogLevel.ERROR,
6439
+ exc_info=exc_info,
6440
+ stack_offset=self._level_proxy_method_stack_offset,
6441
+ ),
6442
+ *args,
6443
+ **kwargs,
6444
+ )
5893
6445
 
5894
6446
  #
5895
6447
 
@@ -5907,24 +6459,53 @@ class AnyLogger(Abstract, ta.Generic[T]):
5907
6459
 
5908
6460
  @ta.final
5909
6461
  def critical(self, *args, **kwargs):
5910
- return self._log(CaptureLoggingContextImpl(NamedLogLevel.CRITICAL, stack_offset=1), *args, **kwargs)
6462
+ return self._log(
6463
+ CaptureLoggingContextImpl(
6464
+ NamedLogLevel.CRITICAL,
6465
+ stack_offset=self._level_proxy_method_stack_offset,
6466
+ ),
6467
+ *args,
6468
+ **kwargs,
6469
+ )
5911
6470
 
5912
6471
  ##
5913
6472
 
5914
6473
  @abc.abstractmethod
5915
- def _log(self, ctx: CaptureLoggingContext, msg: ta.Union[str, tuple, LoggingMsgFn], *args: ta.Any, **kwargs: ta.Any) -> T: # noqa
6474
+ def _log(
6475
+ self,
6476
+ ctx: CaptureLoggingContext,
6477
+ msg: ta.Union[str, tuple, LoggingMsgFn],
6478
+ *args: ta.Any,
6479
+ **kwargs: ta.Any,
6480
+ ) -> T:
5916
6481
  raise NotImplementedError
5917
6482
 
5918
6483
 
5919
6484
  class Logger(AnyLogger[None], Abstract):
6485
+ _level_proxy_method_stack_offset: int = 1
6486
+
5920
6487
  @abc.abstractmethod
5921
- def _log(self, ctx: CaptureLoggingContext, msg: ta.Union[str, tuple, LoggingMsgFn], *args: ta.Any, **kwargs: ta.Any) -> None: # noqa
6488
+ def _log(
6489
+ self,
6490
+ ctx: CaptureLoggingContext,
6491
+ msg: ta.Union[str, tuple, LoggingMsgFn],
6492
+ *args: ta.Any,
6493
+ **kwargs: ta.Any,
6494
+ ) -> None:
5922
6495
  raise NotImplementedError
5923
6496
 
5924
6497
 
5925
6498
  class AsyncLogger(AnyLogger[ta.Awaitable[None]], Abstract):
6499
+ _level_proxy_method_stack_offset: int = 0
6500
+
5926
6501
  @abc.abstractmethod
5927
- def _log(self, ctx: CaptureLoggingContext, msg: ta.Union[str, tuple, LoggingMsgFn], *args: ta.Any, **kwargs: ta.Any) -> ta.Awaitable[None]: # noqa
6502
+ def _log(
6503
+ self,
6504
+ ctx: CaptureLoggingContext,
6505
+ msg: ta.Union[str, tuple, LoggingMsgFn],
6506
+ *args: ta.Any,
6507
+ **kwargs: ta.Any,
6508
+ ) -> ta.Awaitable[None]:
5928
6509
  raise NotImplementedError
5929
6510
 
5930
6511
 
@@ -5939,13 +6520,25 @@ class AnyNopLogger(AnyLogger[T], Abstract):
5939
6520
 
5940
6521
  @ta.final
5941
6522
  class NopLogger(AnyNopLogger[None], Logger):
5942
- def _log(self, ctx: CaptureLoggingContext, msg: ta.Union[str, tuple, LoggingMsgFn], *args: ta.Any, **kwargs: ta.Any) -> None: # noqa
6523
+ def _log(
6524
+ self,
6525
+ ctx: CaptureLoggingContext,
6526
+ msg: ta.Union[str, tuple, LoggingMsgFn],
6527
+ *args: ta.Any,
6528
+ **kwargs: ta.Any,
6529
+ ) -> None:
5943
6530
  pass
5944
6531
 
5945
6532
 
5946
6533
  @ta.final
5947
6534
  class AsyncNopLogger(AnyNopLogger[ta.Awaitable[None]], AsyncLogger):
5948
- async def _log(self, ctx: CaptureLoggingContext, msg: ta.Union[str, tuple, LoggingMsgFn], *args: ta.Any, **kwargs: ta.Any) -> None: # noqa
6535
+ async def _log(
6536
+ self,
6537
+ ctx: CaptureLoggingContext,
6538
+ msg: ta.Union[str, tuple, LoggingMsgFn],
6539
+ *args: ta.Any,
6540
+ **kwargs: ta.Any,
6541
+ ) -> None:
5949
6542
  pass
5950
6543
 
5951
6544
 
@@ -6607,6 +7200,70 @@ class LogRecordLoggingContext(LoggingContext):
6607
7200
  return self._infos.get(ty)
6608
7201
 
6609
7202
 
7203
+ ########################################
7204
+ # ../../../../../omlish/logs/asyncs.py
7205
+
7206
+
7207
+ ##
7208
+
7209
+
7210
+ class AsyncLoggerToLogger(Logger):
7211
+ def __init__(self, u: AsyncLogger) -> None:
7212
+ super().__init__()
7213
+
7214
+ self._u = u
7215
+
7216
+ def get_effective_level(self) -> LogLevel:
7217
+ return self._u.get_effective_level()
7218
+
7219
+ def _log(
7220
+ self,
7221
+ ctx: CaptureLoggingContext,
7222
+ msg: ta.Union[str, tuple, LoggingMsgFn],
7223
+ *args: ta.Any,
7224
+ **kwargs: ta.Any,
7225
+ ) -> None:
7226
+ # Nope out early to avoid sync_await if possible - don't bother in the LoggerToAsyncLogger.
7227
+ if not self.is_enabled_for(ctx.must_get_info(LoggingContextInfos.Level).level):
7228
+ return
7229
+
7230
+ # Note: we hardcode the stack offset of sync_await (which is 2 - sync_await + sync_await.thunk). In non-lite
7231
+ # code, lang.sync_await uses a cext if present to avoid being on the py stack, which would obviously complicate
7232
+ # this, but this is lite code so we will always have the non-c version.
7233
+ sync_await(
7234
+ self._u._log( # noqa
7235
+ check.isinstance(ctx, CaptureLoggingContextImpl).inc_stack_offset(3),
7236
+ msg,
7237
+ *args,
7238
+ **kwargs,
7239
+ ),
7240
+ )
7241
+
7242
+
7243
+ class LoggerToAsyncLogger(AsyncLogger):
7244
+ def __init__(self, u: Logger) -> None:
7245
+ super().__init__()
7246
+
7247
+ self._u = u
7248
+
7249
+ def get_effective_level(self) -> LogLevel:
7250
+ return self._u.get_effective_level()
7251
+
7252
+ async def _log(
7253
+ self,
7254
+ ctx: CaptureLoggingContext,
7255
+ msg: ta.Union[str, tuple, LoggingMsgFn],
7256
+ *args: ta.Any,
7257
+ **kwargs: ta.Any,
7258
+ ) -> None:
7259
+ return self._u._log( # noqa
7260
+ check.isinstance(ctx, CaptureLoggingContextImpl).inc_stack_offset(),
7261
+ msg,
7262
+ *args,
7263
+ **kwargs,
7264
+ )
7265
+
7266
+
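These adapters bridge the two logger flavors: LoggerToAsyncLogger exposes a synchronous Logger behind the awaitable AsyncLogger interface, while AsyncLoggerToLogger drives an AsyncLogger to completion synchronously via sync_await. A rough usage sketch using the StdLogger defined just below:

    import asyncio
    import logging

    slog = StdLogger(logging.getLogger(__name__))   # plain synchronous Logger
    alog = LoggerToAsyncLogger(slog)                # AsyncLogger view of the same logger

    async def work() -> None:
        await alog.info('starting %s', 'work')      # logging calls are awaited

    asyncio.run(work())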
6610
7267
  ########################################
6611
7268
  # ../../../../../omlish/logs/std/loggers.py
6612
7269
 
@@ -6630,7 +7287,12 @@ class StdLogger(Logger):
6630
7287
  def get_effective_level(self) -> LogLevel:
6631
7288
  return self._std.getEffectiveLevel()
6632
7289
 
6633
- def _log(self, ctx: CaptureLoggingContext, msg: ta.Union[str, tuple, LoggingMsgFn], *args: ta.Any) -> None:
7290
+ def _log(
7291
+ self,
7292
+ ctx: CaptureLoggingContext,
7293
+ msg: ta.Union[str, tuple, LoggingMsgFn],
7294
+ *args: ta.Any,
7295
+ ) -> None:
6634
7296
  if not self.is_enabled_for(ctx.must_get_info(LoggingContextInfos.Level).level):
6635
7297
  return
6636
7298
 
@@ -6655,8 +7317,23 @@ class StdLogger(Logger):
6655
7317
  ##
6656
7318
 
6657
7319
 
7320
+ def _get_module_std_logger(mod_globals: ta.Mapping[str, ta.Any]) -> logging.Logger:
7321
+ return logging.getLogger(mod_globals.get('__name__'))
7322
+
7323
+
6658
7324
  def get_module_logger(mod_globals: ta.Mapping[str, ta.Any]) -> Logger:
6659
- return StdLogger(logging.getLogger(mod_globals.get('__name__'))) # noqa
7325
+ return StdLogger(_get_module_std_logger(mod_globals))
7326
+
7327
+
7328
+ def get_module_async_logger(mod_globals: ta.Mapping[str, ta.Any]) -> AsyncLogger:
7329
+ return LoggerToAsyncLogger(get_module_logger(mod_globals))
7330
+
7331
+
7332
+ def get_module_loggers(mod_globals: ta.Mapping[str, ta.Any]) -> ta.Tuple[Logger, AsyncLogger]:
7333
+ return (
7334
+ log := get_module_logger(mod_globals),
7335
+ LoggerToAsyncLogger(log),
7336
+ )
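Typical module-level wiring with these helpers, as a sketch: passing the module's globals() names the underlying std logger after __name__:

    log = get_module_logger(globals())           # synchronous Logger
    alog = get_module_async_logger(globals())    # AsyncLogger wrapper

    # Or both at once, sharing the same underlying StdLogger:
    log, alog = get_module_loggers(globals())

    log.info('module %s loaded', __name__)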
6660
7337
 
6661
7338
 
6662
7339
  ########################################