omdev 0.0.0.dev416__py3-none-any.whl → 0.0.0.dev500__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of omdev might be problematic.

Files changed (211)
  1. omdev/{.manifests.json → .omlish-manifests.json} +23 -47
  2. omdev/README.md +51 -0
  3. omdev/__about__.py +12 -8
  4. omdev/amalg/cli/main.py +1 -2
  5. omdev/amalg/gen/gen.py +49 -6
  6. omdev/amalg/gen/imports.py +1 -1
  7. omdev/amalg/gen/manifests.py +1 -1
  8. omdev/amalg/gen/resources.py +1 -1
  9. omdev/amalg/gen/srcfiles.py +26 -3
  10. omdev/amalg/gen/strip.py +1 -1
  11. omdev/amalg/gen/types.py +1 -1
  12. omdev/amalg/gen/typing.py +1 -1
  13. omdev/amalg/info.py +32 -0
  14. omdev/cache/compute/storage.py +3 -1
  15. omdev/cache/data/actions.py +1 -1
  16. omdev/cache/data/cache.py +2 -2
  17. omdev/cache/data/specs.py +1 -1
  18. omdev/cexts/_boilerplate.cc +2 -3
  19. omdev/cexts/_distutils/build_ext.py +5 -2
  20. omdev/cexts/_distutils/compilers/ccompiler.py +5 -2
  21. omdev/cexts/_distutils/compilers/options.py +3 -0
  22. omdev/cexts/_distutils/compilers/unixccompiler.py +6 -2
  23. omdev/cexts/_distutils/dir_util.py +6 -2
  24. omdev/cexts/_distutils/errors.py +3 -0
  25. omdev/cexts/_distutils/extension.py +3 -0
  26. omdev/cexts/_distutils/file_util.py +6 -2
  27. omdev/cexts/_distutils/modified.py +3 -0
  28. omdev/cexts/_distutils/spawn.py +6 -2
  29. omdev/cexts/_distutils/sysconfig.py +3 -0
  30. omdev/cexts/_distutils/util.py +6 -2
  31. omdev/cexts/_distutils/version.py +3 -0
  32. omdev/cexts/cmake.py +5 -3
  33. omdev/cexts/scan.py +1 -2
  34. omdev/ci/cache.py +7 -3
  35. omdev/ci/cli.py +6 -4
  36. omdev/ci/docker/buildcaching.py +3 -1
  37. omdev/ci/docker/cache.py +2 -1
  38. omdev/ci/docker/cacheserved/cache.py +4 -1
  39. omdev/ci/docker/cacheserved/manifests.py +2 -2
  40. omdev/ci/docker/dataserver.py +2 -2
  41. omdev/ci/docker/imagepulling.py +2 -1
  42. omdev/ci/docker/packing.py +1 -1
  43. omdev/ci/docker/repositories.py +2 -1
  44. omdev/ci/github/api/clients.py +8 -4
  45. omdev/ci/github/api/v1/client.py +4 -1
  46. omdev/ci/github/api/v2/api.py +2 -0
  47. omdev/ci/github/api/v2/azure.py +4 -1
  48. omdev/ci/github/api/v2/client.py +4 -1
  49. omdev/cli/clicli.py +37 -7
  50. omdev/clipboard/clipboard.py +1 -1
  51. omdev/cmake.py +2 -1
  52. omdev/cmdlog/cli.py +1 -2
  53. omdev/dataclasses/_dumping.py +1960 -0
  54. omdev/dataclasses/_template.py +22 -0
  55. omdev/dataclasses/cli.py +7 -2
  56. omdev/dataclasses/codegen.py +342 -62
  57. omdev/dataclasses/dumping.py +200 -0
  58. omdev/dataserver/handlers.py +3 -2
  59. omdev/dataserver/targets.py +2 -2
  60. omdev/imgur.py +2 -2
  61. omdev/interp/cli.py +1 -1
  62. omdev/interp/inspect.py +2 -1
  63. omdev/interp/providers/base.py +3 -2
  64. omdev/interp/providers/standalone.py +4 -1
  65. omdev/interp/providers/system.py +2 -2
  66. omdev/interp/pyenv/install.py +2 -1
  67. omdev/interp/pyenv/provider.py +2 -2
  68. omdev/interp/types.py +3 -2
  69. omdev/interp/uv/provider.py +40 -2
  70. omdev/interp/uv/uv.py +2 -2
  71. omdev/interp/venvs.py +3 -2
  72. omdev/irc/messages/base.py +50 -0
  73. omdev/irc/messages/formats.py +92 -0
  74. omdev/irc/messages/messages.py +775 -0
  75. omdev/irc/messages/parsing.py +99 -0
  76. omdev/irc/numerics/formats.py +97 -0
  77. omdev/irc/numerics/numerics.py +865 -0
  78. omdev/irc/numerics/types.py +59 -0
  79. omdev/irc/protocol/LICENSE +11 -0
  80. omdev/irc/protocol/__init__.py +61 -0
  81. omdev/irc/protocol/consts.py +6 -0
  82. omdev/irc/protocol/errors.py +30 -0
  83. omdev/irc/protocol/message.py +21 -0
  84. omdev/irc/protocol/nuh.py +55 -0
  85. omdev/irc/protocol/parsing.py +158 -0
  86. omdev/irc/protocol/rendering.py +153 -0
  87. omdev/irc/protocol/tags.py +102 -0
  88. omdev/irc/protocol/utils.py +30 -0
  89. omdev/manifests/_dumping.py +529 -136
  90. omdev/manifests/building.py +6 -3
  91. omdev/manifests/main.py +1 -1
  92. omdev/markdown/__init__.py +0 -0
  93. omdev/markdown/incparse.py +116 -0
  94. omdev/markdown/tokens.py +51 -0
  95. omdev/oci/data.py +2 -2
  96. omdev/oci/datarefs.py +2 -2
  97. omdev/oci/media.py +2 -2
  98. omdev/oci/repositories.py +3 -2
  99. omdev/packaging/marshal.py +9 -9
  100. omdev/packaging/requires.py +6 -6
  101. omdev/packaging/revisions.py +5 -2
  102. omdev/packaging/specifiers.py +41 -42
  103. omdev/packaging/versions.py +10 -10
  104. omdev/packaging/wheelfile.py +4 -2
  105. omdev/precheck/blanklines.py +66 -0
  106. omdev/precheck/caches.py +1 -1
  107. omdev/precheck/imports.py +14 -1
  108. omdev/precheck/lite.py +2 -2
  109. omdev/precheck/main.py +5 -5
  110. omdev/precheck/unicode.py +39 -15
  111. omdev/py/asts/__init__.py +0 -0
  112. omdev/py/asts/parents.py +28 -0
  113. omdev/py/asts/toplevel.py +123 -0
  114. omdev/py/asts/visitors.py +18 -0
  115. omdev/py/attrdocs.py +6 -7
  116. omdev/py/bracepy.py +12 -4
  117. omdev/py/docstrings/numpydoc.py +4 -4
  118. omdev/py/reprs.py +32 -0
  119. omdev/py/scripts/execstat.py +31 -26
  120. omdev/py/srcheaders.py +1 -1
  121. omdev/py/tokens/__init__.py +0 -0
  122. omdev/{tokens → py/tokens}/utils.py +2 -1
  123. omdev/py/tools/importscan.py +2 -2
  124. omdev/py/tools/mkrelimp.py +3 -4
  125. omdev/py/tools/pipdepup.py +686 -0
  126. omdev/pyproject/cli.py +1 -1
  127. omdev/pyproject/pkg.py +197 -48
  128. omdev/pyproject/reqs.py +36 -10
  129. omdev/pyproject/tools/__init__.py +0 -0
  130. omdev/pyproject/tools/aboutdeps.py +60 -0
  131. omdev/pyproject/venvs.py +12 -2
  132. omdev/rs/__init__.py +0 -0
  133. omdev/scripts/ci.py +9551 -6982
  134. omdev/scripts/interp.py +1323 -892
  135. omdev/scripts/lib/__init__.py +0 -0
  136. omdev/scripts/lib/inject.py +2086 -0
  137. omdev/scripts/lib/logs.py +2175 -0
  138. omdev/scripts/lib/marshal.py +1731 -0
  139. omdev/scripts/pyproject.py +4979 -1874
  140. omdev/tools/docker.py +19 -7
  141. omdev/tools/git/cli.py +56 -16
  142. omdev/tools/git/messages.py +2 -2
  143. omdev/tools/json/cli.py +6 -6
  144. omdev/tools/json/formats.py +2 -0
  145. omdev/tools/json/parsing.py +5 -5
  146. omdev/tools/json/processing.py +6 -3
  147. omdev/tools/json/rendering.py +2 -2
  148. omdev/tools/jsonview/cli.py +49 -65
  149. omdev/tools/jsonview/resources/jsonview.html.j2 +43 -0
  150. omdev/tools/pawk/README.md +195 -0
  151. omdev/tools/pawk/pawk.py +2 -2
  152. omdev/tools/pip.py +8 -0
  153. omdev/tui/__init__.py +0 -0
  154. omdev/tui/apps/__init__.py +0 -0
  155. omdev/tui/apps/edit/__init__.py +0 -0
  156. omdev/tui/apps/edit/main.py +167 -0
  157. omdev/tui/apps/irc/__init__.py +0 -0
  158. omdev/tui/apps/irc/__main__.py +4 -0
  159. omdev/tui/apps/irc/app.py +286 -0
  160. omdev/tui/apps/irc/client.py +187 -0
  161. omdev/tui/apps/irc/commands.py +175 -0
  162. omdev/tui/apps/irc/main.py +26 -0
  163. omdev/tui/apps/markdown/__init__.py +0 -0
  164. omdev/tui/apps/markdown/__main__.py +11 -0
  165. omdev/{ptk → tui/apps}/markdown/cli.py +5 -7
  166. omdev/tui/rich/__init__.py +46 -0
  167. omdev/tui/rich/console2.py +20 -0
  168. omdev/tui/rich/markdown2.py +186 -0
  169. omdev/tui/textual/__init__.py +265 -0
  170. omdev/tui/textual/app2.py +16 -0
  171. omdev/tui/textual/autocomplete/LICENSE +21 -0
  172. omdev/tui/textual/autocomplete/__init__.py +33 -0
  173. omdev/tui/textual/autocomplete/matching.py +226 -0
  174. omdev/tui/textual/autocomplete/paths.py +202 -0
  175. omdev/tui/textual/autocomplete/widget.py +612 -0
  176. omdev/tui/textual/debug/__init__.py +10 -0
  177. omdev/tui/textual/debug/dominfo.py +151 -0
  178. omdev/tui/textual/debug/screen.py +24 -0
  179. omdev/tui/textual/devtools.py +187 -0
  180. omdev/tui/textual/drivers2.py +55 -0
  181. omdev/tui/textual/logging2.py +20 -0
  182. omdev/tui/textual/types.py +45 -0
  183. {omdev-0.0.0.dev416.dist-info → omdev-0.0.0.dev500.dist-info}/METADATA +18 -12
  184. omdev-0.0.0.dev500.dist-info/RECORD +386 -0
  185. omdev/ptk/__init__.py +0 -103
  186. omdev/ptk/apps/ncdu.py +0 -167
  187. omdev/ptk/confirm.py +0 -60
  188. omdev/ptk/markdown/LICENSE +0 -22
  189. omdev/ptk/markdown/__init__.py +0 -10
  190. omdev/ptk/markdown/__main__.py +0 -11
  191. omdev/ptk/markdown/border.py +0 -94
  192. omdev/ptk/markdown/markdown.py +0 -390
  193. omdev/ptk/markdown/parser.py +0 -42
  194. omdev/ptk/markdown/styles.py +0 -29
  195. omdev/ptk/markdown/tags.py +0 -299
  196. omdev/ptk/markdown/utils.py +0 -366
  197. omdev/pyproject/cexts.py +0 -110
  198. omdev/tools/antlr/__main__.py +0 -11
  199. omdev/tools/antlr/cli.py +0 -62
  200. omdev/tools/antlr/consts.py +0 -7
  201. omdev/tools/antlr/gen.py +0 -188
  202. omdev-0.0.0.dev416.dist-info/RECORD +0 -332
  203. /omdev/{ptk/apps → irc}/__init__.py +0 -0
  204. /omdev/{tokens → irc/messages}/__init__.py +0 -0
  205. /omdev/{tools/antlr → irc/numerics}/__init__.py +0 -0
  206. /omdev/{tokens → py/tokens}/all.py +0 -0
  207. /omdev/{tokens → py/tokens}/tokenizert.py +0 -0
  208. {omdev-0.0.0.dev416.dist-info → omdev-0.0.0.dev500.dist-info}/WHEEL +0 -0
  209. {omdev-0.0.0.dev416.dist-info → omdev-0.0.0.dev500.dist-info}/entry_points.txt +0 -0
  210. {omdev-0.0.0.dev416.dist-info → omdev-0.0.0.dev500.dist-info}/licenses/LICENSE +0 -0
  211. {omdev-0.0.0.dev416.dist-info → omdev-0.0.0.dev500.dist-info}/top_level.txt +0 -0
omdev/scripts/lib/logs.py (new file)
@@ -0,0 +1,2175 @@
1
+ #!/usr/bin/env python3
2
+ # noinspection DuplicatedCode
3
+ # @omlish-lite
4
+ # @omlish-script
5
+ # @omlish-generated
6
+ # @omlish-amalg-output ../../../omlish/logs/_amalg.py
7
+ # @omlish-git-diff-omit
8
+ # ruff: noqa: N802 UP006 UP007 UP036 UP045 UP046
9
+ import abc
10
+ import collections.abc
11
+ import contextlib
12
+ import datetime
13
+ import functools
14
+ import io
15
+ import json
16
+ import logging
17
+ import os.path
18
+ import sys
19
+ import threading
20
+ import time
21
+ import traceback
22
+ import types
23
+ import typing as ta
24
+
25
+
26
+ ########################################
27
+
28
+
29
+ if sys.version_info < (3, 8):
30
+ raise OSError(f'Requires python (3, 8), got {sys.version_info} from {sys.executable}') # noqa
31
+
32
+
33
+ def __omlish_amalg__(): # noqa
34
+ return dict(
35
+ src_files=[
36
+ dict(path='../lite/abstract.py', sha1='a2fc3f3697fa8de5247761e9d554e70176f37aac'),
37
+ dict(path='../lite/json.py', sha1='57eeddc4d23a17931e00284ffa5cb6e3ce089486'),
38
+ dict(path='levels.py', sha1='91405563d082a5eba874da82aac89d83ce7b6152'),
39
+ dict(path='std/filters.py', sha1='f36aab646d84d31e295b33aaaaa6f8b67ff38b3d'),
40
+ dict(path='std/proxy.py', sha1='3e7301a2aa351127f9c85f61b2f85dcc3f15aafb'),
41
+ dict(path='warnings.py', sha1='c4eb694b24773351107fcc058f3620f1dbfb6799'),
42
+ dict(path='infos.py', sha1='4dd104bd468a8c438601dd0bbda619b47d2f1620'),
43
+ dict(path='std/json.py', sha1='2a75553131e4d5331bb0cedde42aa183f403fc3b'),
44
+ dict(path='contexts.py', sha1='1000a6d5ddfb642865ca532e34b1d50759781cf0'),
45
+ dict(path='std/standard.py', sha1='5c97c1b9f7ead58d6127d047b873398f708f288d'),
46
+ dict(path='base.py', sha1='8d06faee05fead6b1dd98c9035a5b042af4aebb1'),
47
+ dict(path='std/records.py', sha1='8bbf6ef9eccb3a012c6ca416ddf3969450fd8fc9'),
48
+ dict(path='std/loggers.py', sha1='a569179445d6a8a942b5dcfad1d1f77702868803'),
49
+ dict(path='_amalg.py', sha1='ae5189de25ab155651a5b2f21dd0baf6eb4f3916'),
50
+ ],
51
+ )
52
+
53
+
54
+ ########################################
55
+
56
+
57
+ # ../lite/abstract.py
58
+ T = ta.TypeVar('T')
59
+
60
+ # levels.py
61
+ LogLevel = int # ta.TypeAlias
62
+
63
+ # infos.py
64
+ LoggingMsgFn = ta.Callable[[], ta.Union[str, tuple]] # ta.TypeAlias
65
+ LoggingExcInfoTuple = ta.Tuple[ta.Type[BaseException], BaseException, ta.Optional[types.TracebackType]] # ta.TypeAlias
66
+ LoggingExcInfo = ta.Union[BaseException, LoggingExcInfoTuple] # ta.TypeAlias
67
+ LoggingExcInfoArg = ta.Union[LoggingExcInfo, bool, None] # ta.TypeAlias
68
+ LoggingContextInfo = ta.Any # ta.TypeAlias
69
+
70
+ # contexts.py
71
+ LoggingContextInfoT = ta.TypeVar('LoggingContextInfoT', bound=LoggingContextInfo)
72
+
73
+
74
+ ########################################
75
+ # ../../lite/abstract.py
76
+
77
+
78
+ ##
79
+
80
+
81
+ _ABSTRACT_METHODS_ATTR = '__abstractmethods__'
82
+ _IS_ABSTRACT_METHOD_ATTR = '__isabstractmethod__'
83
+
84
+
85
+ def is_abstract_method(obj: ta.Any) -> bool:
86
+ return bool(getattr(obj, _IS_ABSTRACT_METHOD_ATTR, False))
87
+
88
+
89
+ def compute_abstract_methods(cls: type) -> ta.FrozenSet[str]:
90
+ # ~> https://github.com/python/cpython/blob/f3476c6507381ca860eec0989f53647b13517423/Modules/_abc.c#L358
91
+
92
+ # Stage 1: direct abstract methods
93
+
94
+ abstracts = {
95
+ a
96
+ # Get items as a list to avoid mutation issues during iteration
97
+ for a, v in list(cls.__dict__.items())
98
+ if is_abstract_method(v)
99
+ }
100
+
101
+ # Stage 2: inherited abstract methods
102
+
103
+ for base in cls.__bases__:
104
+ # Get __abstractmethods__ from base if it exists
105
+ if (base_abstracts := getattr(base, _ABSTRACT_METHODS_ATTR, None)) is None:
106
+ continue
107
+
108
+ # Iterate over abstract methods in base
109
+ for key in base_abstracts:
110
+ # Check if this class has an attribute with this name
111
+ try:
112
+ value = getattr(cls, key)
113
+ except AttributeError:
114
+ # Attribute not found in this class, skip
115
+ continue
116
+
117
+ # Check if it's still abstract
118
+ if is_abstract_method(value):
119
+ abstracts.add(key)
120
+
121
+ return frozenset(abstracts)
122
+
123
+
124
+ def update_abstracts(cls: ta.Type[T], *, force: bool = False) -> ta.Type[T]:
125
+ if not force and not hasattr(cls, _ABSTRACT_METHODS_ATTR):
126
+ # Per stdlib: We check for __abstractmethods__ here because cls might by a C implementation or a python
127
+ # implementation (especially during testing), and we want to handle both cases.
128
+ return cls
129
+
130
+ abstracts = compute_abstract_methods(cls)
131
+ setattr(cls, _ABSTRACT_METHODS_ATTR, abstracts)
132
+ return cls
133
+
134
+
135
+ #
136
+
137
+
138
+ class AbstractTypeError(TypeError):
139
+ pass
140
+
141
+
142
+ _FORCE_ABSTRACT_ATTR = '__forceabstract__'
143
+
144
+
145
+ class Abstract:
146
+ """
147
+ Different from, but interoperable with, abc.ABC / abc.ABCMeta:
148
+
149
+ - This raises AbstractTypeError during class creation, not instance instantiation - unless Abstract or abc.ABC are
150
+ explicitly present in the class's direct bases.
151
+ - This will forbid instantiation of classes with Abstract in their direct bases even if there are no
152
+ abstractmethods left on the class.
153
+ - This is a mixin, not a metaclass.
154
+ - As it is not an ABCMeta, this does not support virtual base classes. As a result, operations like `isinstance`
155
+ and `issubclass` are ~7x faster.
156
+ - It additionally enforces a base class order of (Abstract, abc.ABC) to preemptively prevent common mro conflicts.
157
+
158
+ If not mixed-in with an ABCMeta, it will update __abstractmethods__ itself.
159
+ """
160
+
161
+ __slots__ = ()
162
+
163
+ __abstractmethods__: ta.ClassVar[ta.FrozenSet[str]] = frozenset()
164
+
165
+ #
166
+
167
+ def __forceabstract__(self):
168
+ raise TypeError
169
+
170
+ # This is done manually, rather than through @abc.abstractmethod, to mask it from static analysis.
171
+ setattr(__forceabstract__, _IS_ABSTRACT_METHOD_ATTR, True)
172
+
173
+ #
174
+
175
+ def __init_subclass__(cls, **kwargs: ta.Any) -> None:
176
+ setattr(
177
+ cls,
178
+ _FORCE_ABSTRACT_ATTR,
179
+ getattr(Abstract, _FORCE_ABSTRACT_ATTR) if Abstract in cls.__bases__ else False,
180
+ )
181
+
182
+ super().__init_subclass__(**kwargs)
183
+
184
+ if not (Abstract in cls.__bases__ or abc.ABC in cls.__bases__):
185
+ if ams := compute_abstract_methods(cls):
186
+ amd = {
187
+ a: mcls
188
+ for mcls in cls.__mro__[::-1]
189
+ for a in ams
190
+ if a in mcls.__dict__
191
+ }
192
+
193
+ raise AbstractTypeError(
194
+ f'Cannot subclass abstract class {cls.__name__} with abstract methods: ' +
195
+ ', '.join(sorted([
196
+ '.'.join([
197
+ *([
198
+ *([m] if (m := getattr(c, '__module__')) else []),
199
+ getattr(c, '__qualname__', getattr(c, '__name__')),
200
+ ] if c is not None else '?'),
201
+ a,
202
+ ])
203
+ for a in ams
204
+ for c in [amd.get(a)]
205
+ ])),
206
+ )
207
+
208
+ xbi = (Abstract, abc.ABC) # , ta.Generic ?
209
+ bis = [(cls.__bases__.index(b), b) for b in xbi if b in cls.__bases__]
210
+ if bis != sorted(bis):
211
+ raise TypeError(
212
+ f'Abstract subclass {cls.__name__} must have proper base class order of '
213
+ f'({", ".join(getattr(b, "__name__") for b in xbi)}), got: '
214
+ f'({", ".join(getattr(b, "__name__") for _, b in sorted(bis))})',
215
+ )
216
+
217
+ if not isinstance(cls, abc.ABCMeta):
218
+ update_abstracts(cls, force=True)
219
+
220
+
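As the docstring above describes, Abstract rejects incomplete subclasses at class-definition time rather than at instantiation. A minimal sketch of that behavior (the Greeter classes are illustrative, and the import path is assumed from the file list above):

    import abc

    from omdev.scripts.lib.logs import Abstract, AbstractTypeError

    class Greeter(Abstract):
        @abc.abstractmethod
        def greet(self) -> str:
            raise NotImplementedError

    # Forgetting to implement greet() fails as soon as the class statement runs,
    # not when an instance is constructed.
    try:
        class BadGreeter(Greeter):
            pass
    except AbstractTypeError as e:
        print('rejected at class creation:', e)

    class GoodGreeter(Greeter):
        def greet(self) -> str:
            return 'hi'

    print(GoodGreeter().greet())  # hi
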
221
+ ########################################
222
+ # ../../lite/json.py
223
+
224
+
225
+ ##
226
+
227
+
228
+ JSON_PRETTY_INDENT = 2
229
+
230
+ JSON_PRETTY_KWARGS: ta.Mapping[str, ta.Any] = dict(
231
+ indent=JSON_PRETTY_INDENT,
232
+ )
233
+
234
+ json_dump_pretty: ta.Callable[..., None] = functools.partial(json.dump, **JSON_PRETTY_KWARGS)
235
+ json_dumps_pretty: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_PRETTY_KWARGS)
236
+
237
+
238
+ ##
239
+
240
+
241
+ JSON_COMPACT_SEPARATORS = (',', ':')
242
+
243
+ JSON_COMPACT_KWARGS: ta.Mapping[str, ta.Any] = dict(
244
+ indent=None,
245
+ separators=JSON_COMPACT_SEPARATORS,
246
+ )
247
+
248
+ json_dump_compact: ta.Callable[..., None] = functools.partial(json.dump, **JSON_COMPACT_KWARGS)
249
+ json_dumps_compact: ta.Callable[..., str] = functools.partial(json.dumps, **JSON_COMPACT_KWARGS)
250
+
251
+
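The two pairs of helpers above are just functools.partial wrappers over the stdlib json functions; for example (import path assumed):

    from omdev.scripts.lib.logs import json_dumps_compact, json_dumps_pretty

    obj = {'a': 1, 'b': [2, 3]}
    print(json_dumps_compact(obj))  # {"a":1,"b":[2,3]}
    print(json_dumps_pretty(obj))   # same data, two-space indented
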
252
+ ########################################
253
+ # ../levels.py
254
+
255
+
256
+ ##
257
+
258
+
259
+ @ta.final
260
+ class NamedLogLevel(int):
261
+ # logging.getLevelNamesMapping (or, as that is unavailable <3.11, logging._nameToLevel) includes the deprecated
262
+ # aliases.
263
+ _NAMES_BY_INT: ta.ClassVar[ta.Mapping[LogLevel, str]] = dict(sorted(logging._levelToName.items(), key=lambda t: -t[0])) # noqa
264
+
265
+ _INTS_BY_NAME: ta.ClassVar[ta.Mapping[str, LogLevel]] = {v: k for k, v in _NAMES_BY_INT.items()}
266
+
267
+ _NAME_INT_PAIRS: ta.ClassVar[ta.Sequence[ta.Tuple[str, LogLevel]]] = list(_INTS_BY_NAME.items())
268
+
269
+ #
270
+
271
+ _CACHE: ta.ClassVar[ta.MutableMapping[int, 'NamedLogLevel']] = {}
272
+
273
+ @ta.overload
274
+ def __new__(cls, name: str, offset: int = 0, /) -> 'NamedLogLevel':
275
+ ...
276
+
277
+ @ta.overload
278
+ def __new__(cls, i: int, /) -> 'NamedLogLevel':
279
+ ...
280
+
281
+ def __new__(cls, x, offset=0, /):
282
+ if isinstance(x, str):
283
+ return cls(cls._INTS_BY_NAME[x.upper()] + offset)
284
+ elif not offset and (c := cls._CACHE.get(x)) is not None:
285
+ return c
286
+ else:
287
+ return super().__new__(cls, x + offset)
288
+
289
+ #
290
+
291
+ _name_and_offset: ta.Tuple[str, int]
292
+
293
+ @property
294
+ def name_and_offset(self) -> ta.Tuple[str, int]:
295
+ try:
296
+ return self._name_and_offset
297
+ except AttributeError:
298
+ pass
299
+
300
+ if (n := self._NAMES_BY_INT.get(self)) is not None:
301
+ t = (n, 0)
302
+ else:
303
+ for n, i in self._NAME_INT_PAIRS: # noqa
304
+ if self >= i:
305
+ t = (n, (self - i))
306
+ break
307
+ else:
308
+ t = ('NOTSET', int(self))
309
+
310
+ self._name_and_offset = t
311
+ return t
312
+
313
+ @property
314
+ def exact_name(self) -> ta.Optional[str]:
315
+ n, o = self.name_and_offset
316
+ return n if not o else None
317
+
318
+ @property
319
+ def effective_name(self) -> str:
320
+ n, _ = self.name_and_offset
321
+ return n
322
+
323
+ #
324
+
325
+ def __str__(self) -> str:
326
+ return self.exact_name or f'{self.effective_name}{int(self):+}'
327
+
328
+ def __repr__(self) -> str:
329
+ n, o = self.name_and_offset
330
+ return f'{self.__class__.__name__}({n!r}{f", {int(o)}" if o else ""})'
331
+
332
+ #
333
+
334
+ CRITICAL: ta.ClassVar['NamedLogLevel']
335
+ ERROR: ta.ClassVar['NamedLogLevel']
336
+ WARNING: ta.ClassVar['NamedLogLevel']
337
+ INFO: ta.ClassVar['NamedLogLevel']
338
+ DEBUG: ta.ClassVar['NamedLogLevel']
339
+ NOTSET: ta.ClassVar['NamedLogLevel']
340
+
341
+
342
+ NamedLogLevel.CRITICAL = NamedLogLevel(logging.CRITICAL)
343
+ NamedLogLevel.ERROR = NamedLogLevel(logging.ERROR)
344
+ NamedLogLevel.WARNING = NamedLogLevel(logging.WARNING)
345
+ NamedLogLevel.INFO = NamedLogLevel(logging.INFO)
346
+ NamedLogLevel.DEBUG = NamedLogLevel(logging.DEBUG)
347
+ NamedLogLevel.NOTSET = NamedLogLevel(logging.NOTSET)
348
+
349
+
350
+ NamedLogLevel._CACHE.update({i: NamedLogLevel(i) for i in NamedLogLevel._NAMES_BY_INT}) # noqa
351
+
352
+
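Because NamedLogLevel subclasses int, it can be passed anywhere a stdlib level is expected while still decomposing into a base name plus an offset. A short sketch (import path assumed):

    import logging

    from omdev.scripts.lib.logs import NamedLogLevel

    lvl = NamedLogLevel('WARNING', 3)             # name plus offset
    assert int(lvl) == logging.WARNING + 3        # still a plain int level
    assert lvl.name_and_offset == ('WARNING', 3)
    assert lvl.effective_name == 'WARNING'
    assert lvl.exact_name is None                 # 33 is not an exact named level

    assert NamedLogLevel(logging.ERROR).exact_name == 'ERROR'
    assert NamedLogLevel.DEBUG == logging.DEBUG
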
353
+ ########################################
354
+ # ../std/filters.py
355
+
356
+
357
+ ##
358
+
359
+
360
+ class TidLoggingFilter(logging.Filter):
361
+ def filter(self, record):
362
+ # FIXME: handle better - missing from wasm and cosmos
363
+ if hasattr(threading, 'get_native_id'):
364
+ record.tid = threading.get_native_id()
365
+ else:
366
+ record.tid = '?'
367
+ return True
368
+
369
+
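The filter only attaches a tid attribute to each record; to surface it, reference %(tid)s in a handler's format string. For example (import path assumed):

    import logging

    from omdev.scripts.lib.logs import TidLoggingFilter

    handler = logging.StreamHandler()
    handler.addFilter(TidLoggingFilter())
    handler.setFormatter(logging.Formatter('%(levelname)s tid=%(tid)s :: %(message)s'))

    log = logging.getLogger('tid-demo')
    log.addHandler(handler)
    log.setLevel(logging.INFO)
    log.info('hello')  # e.g. "INFO tid=48121 :: hello"
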
370
+ ########################################
371
+ # ../std/proxy.py
372
+
373
+
374
+ ##
375
+
376
+
377
+ class ProxyLoggingFilterer(logging.Filterer):
378
+ def __init__(self, underlying: logging.Filterer) -> None: # noqa
379
+ self._underlying = underlying
380
+
381
+ @property
382
+ def underlying(self) -> logging.Filterer:
383
+ return self._underlying
384
+
385
+ @property
386
+ def filters(self):
387
+ return self._underlying.filters
388
+
389
+ @filters.setter
390
+ def filters(self, filters):
391
+ self._underlying.filters = filters
392
+
393
+ def addFilter(self, filter): # noqa
394
+ self._underlying.addFilter(filter)
395
+
396
+ def removeFilter(self, filter): # noqa
397
+ self._underlying.removeFilter(filter)
398
+
399
+ def filter(self, record):
400
+ return self._underlying.filter(record)
401
+
402
+
403
+ class ProxyLoggingHandler(ProxyLoggingFilterer, logging.Handler):
404
+ def __init__(self, underlying: logging.Handler) -> None: # noqa
405
+ ProxyLoggingFilterer.__init__(self, underlying)
406
+
407
+ _underlying: logging.Handler
408
+
409
+ @property
410
+ def underlying(self) -> logging.Handler:
411
+ return self._underlying
412
+
413
+ def get_name(self):
414
+ return self._underlying.get_name()
415
+
416
+ def set_name(self, name):
417
+ self._underlying.set_name(name)
418
+
419
+ @property
420
+ def name(self): # type: ignore[override]
421
+ return self._underlying.name
422
+
423
+ @property
424
+ def level(self):
425
+ return self._underlying.level
426
+
427
+ @level.setter
428
+ def level(self, level):
429
+ self._underlying.level = level
430
+
431
+ @property
432
+ def formatter(self):
433
+ return self._underlying.formatter
434
+
435
+ @formatter.setter
436
+ def formatter(self, formatter):
437
+ self._underlying.formatter = formatter
438
+
439
+ def createLock(self):
440
+ self._underlying.createLock()
441
+
442
+ def acquire(self):
443
+ self._underlying.acquire()
444
+
445
+ def release(self):
446
+ self._underlying.release()
447
+
448
+ def setLevel(self, level):
449
+ self._underlying.setLevel(level)
450
+
451
+ def format(self, record):
452
+ return self._underlying.format(record)
453
+
454
+ def emit(self, record):
455
+ self._underlying.emit(record)
456
+
457
+ def handle(self, record):
458
+ return self._underlying.handle(record)
459
+
460
+ def setFormatter(self, fmt):
461
+ self._underlying.setFormatter(fmt)
462
+
463
+ def flush(self):
464
+ self._underlying.flush()
465
+
466
+ def close(self):
467
+ self._underlying.close()
468
+
469
+ def handleError(self, record):
470
+ self._underlying.handleError(record)
471
+
472
+
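ProxyLoggingFilterer and ProxyLoggingHandler forward every attribute and method to the wrapped object, so all state continues to live on the underlying handler. A brief sketch (import path assumed):

    import logging

    from omdev.scripts.lib.logs import ProxyLoggingHandler

    inner = logging.StreamHandler()
    proxy = ProxyLoggingHandler(inner)

    proxy.setFormatter(logging.Formatter('%(name)s: %(message)s'))
    assert proxy.formatter is inner.formatter  # state lives on the wrapped handler

    log = logging.getLogger('proxy-demo')
    log.addHandler(proxy)
    log.warning('routed through the proxy')    # emitted by the underlying StreamHandler
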
473
+ ########################################
474
+ # ../warnings.py
475
+
476
+
477
+ ##
478
+
479
+
480
+ class LoggingSetupWarning(Warning):
481
+ pass
482
+
483
+
484
+ ########################################
485
+ # ../infos.py
486
+ """
487
+ TODO:
488
+ - remove redundant info fields only present for std adaptation (Level.name, ...)
489
+ """
490
+
491
+
492
+ ##
493
+
494
+
495
+ def logging_context_info(cls):
496
+ return cls
497
+
498
+
499
+ @ta.final
500
+ class LoggingContextInfos:
501
+ def __new__(cls, *args, **kwargs): # noqa
502
+ raise TypeError
503
+
504
+ #
505
+
506
+ @logging_context_info
507
+ @ta.final
508
+ class Name(ta.NamedTuple):
509
+ name: str
510
+
511
+ @logging_context_info
512
+ @ta.final
513
+ class Level(ta.NamedTuple):
514
+ level: NamedLogLevel
515
+ name: str
516
+
517
+ @classmethod
518
+ def build(cls, level: int) -> 'LoggingContextInfos.Level':
519
+ nl: NamedLogLevel = level if level.__class__ is NamedLogLevel else NamedLogLevel(level) # type: ignore[assignment] # noqa
520
+ return cls(
521
+ level=nl,
522
+ name=logging.getLevelName(nl),
523
+ )
524
+
525
+ @logging_context_info
526
+ @ta.final
527
+ class Msg(ta.NamedTuple):
528
+ msg: str
529
+ args: ta.Union[tuple, ta.Mapping[ta.Any, ta.Any], None]
530
+
531
+ @classmethod
532
+ def build(
533
+ cls,
534
+ msg: ta.Union[str, tuple, LoggingMsgFn],
535
+ *args: ta.Any,
536
+ ) -> 'LoggingContextInfos.Msg':
537
+ s: str
538
+ a: ta.Any
539
+
540
+ if callable(msg):
541
+ if args:
542
+ raise TypeError(f'Must not provide both a message function and args: {msg=} {args=}')
543
+ x = msg()
544
+ if isinstance(x, str):
545
+ s, a = x, ()
546
+ elif isinstance(x, tuple):
547
+ if x:
548
+ s, a = x[0], x[1:]
549
+ else:
550
+ s, a = '', ()
551
+ else:
552
+ raise TypeError(x)
553
+
554
+ elif isinstance(msg, tuple):
555
+ if args:
556
+ raise TypeError(f'Must not provide both a tuple message and args: {msg=} {args=}')
557
+ if msg:
558
+ s, a = msg[0], msg[1:]
559
+ else:
560
+ s, a = '', ()
561
+
562
+ elif isinstance(msg, str):
563
+ s, a = msg, args
564
+
565
+ else:
566
+ raise TypeError(msg)
567
+
568
+ # https://github.com/python/cpython/blob/e709361fc87d0d9ab9c58033a0a7f2fef0ad43d2/Lib/logging/__init__.py#L307 # noqa
569
+ if a and len(a) == 1 and isinstance(a[0], collections.abc.Mapping) and a[0]:
570
+ a = a[0]
571
+
572
+ return cls(
573
+ msg=s,
574
+ args=a,
575
+ )
576
+
577
+ @logging_context_info
578
+ @ta.final
579
+ class Extra(ta.NamedTuple):
580
+ extra: ta.Mapping[ta.Any, ta.Any]
581
+
582
+ @logging_context_info
583
+ @ta.final
584
+ class Time(ta.NamedTuple):
585
+ ns: int
586
+ secs: float
587
+ msecs: float
588
+ relative_secs: float
589
+
590
+ @classmethod
591
+ def get_std_start_ns(cls) -> int:
592
+ x: ta.Any = logging._startTime # type: ignore[attr-defined] # noqa
593
+
594
+ # Before 3.13.0b1 this will be `time.time()`, a float of seconds. After that, it will be `time.time_ns()`,
595
+ # an int.
596
+ #
597
+ # See:
598
+ # - https://github.com/python/cpython/commit/1316692e8c7c1e1f3b6639e51804f9db5ed892ea
599
+ #
600
+ if isinstance(x, float):
601
+ return int(x * 1e9)
602
+ else:
603
+ return x
604
+
605
+ @classmethod
606
+ def build(
607
+ cls,
608
+ ns: int,
609
+ *,
610
+ start_ns: ta.Optional[int] = None,
611
+ ) -> 'LoggingContextInfos.Time':
612
+ # https://github.com/python/cpython/commit/1316692e8c7c1e1f3b6639e51804f9db5ed892ea
613
+ secs = ns / 1e9 # ns to float seconds
614
+
615
+ # Get the number of whole milliseconds (0-999) in the fractional part of seconds.
616
+ # Eg: 1_677_903_920_999_998_503 ns --> 999_998_503 ns--> 999 ms
617
+ # Convert to float by adding 0.0 for historical reasons. See gh-89047
618
+ msecs = (ns % 1_000_000_000) // 1_000_000 + 0.0
619
+
620
+ # https://github.com/python/cpython/commit/1500a23f33f5a6d052ff1ef6383d9839928b8ff1
621
+ if msecs == 999.0 and int(secs) != ns // 1_000_000_000:
622
+ # ns -> sec conversion can round up, e.g:
623
+ # 1_677_903_920_999_999_900 ns --> 1_677_903_921.0 sec
624
+ msecs = 0.0
625
+
626
+ if start_ns is None:
627
+ start_ns = cls.get_std_start_ns()
628
+ relative_secs = (ns - start_ns) / 1e6
629
+
630
+ return cls(
631
+ ns=ns,
632
+ secs=secs,
633
+ msecs=msecs,
634
+ relative_secs=relative_secs,
635
+ )
636
+
637
+ @logging_context_info
638
+ @ta.final
639
+ class Exc(ta.NamedTuple):
640
+ info: LoggingExcInfo
641
+ info_tuple: LoggingExcInfoTuple
642
+
643
+ @classmethod
644
+ def build(
645
+ cls,
646
+ arg: LoggingExcInfoArg = False,
647
+ ) -> ta.Optional['LoggingContextInfos.Exc']:
648
+ if arg is True:
649
+ sys_exc_info = sys.exc_info()
650
+ if sys_exc_info[0] is not None:
651
+ arg = sys_exc_info
652
+ else:
653
+ arg = None
654
+ elif arg is False:
655
+ arg = None
656
+ if arg is None:
657
+ return None
658
+
659
+ info: LoggingExcInfo = arg
660
+ if isinstance(info, BaseException):
661
+ info_tuple: LoggingExcInfoTuple = (type(info), info, info.__traceback__) # noqa
662
+ else:
663
+ info_tuple = info
664
+
665
+ return cls(
666
+ info=info,
667
+ info_tuple=info_tuple,
668
+ )
669
+
670
+ @logging_context_info
671
+ @ta.final
672
+ class Caller(ta.NamedTuple):
673
+ file_path: str
674
+ line_no: int
675
+ func_name: str
676
+ stack_info: ta.Optional[str]
677
+
678
+ @classmethod
679
+ def is_internal_frame(cls, frame: types.FrameType) -> bool:
680
+ file_path = os.path.normcase(frame.f_code.co_filename)
681
+
682
+ # Yes, really.
683
+ # https://github.com/python/cpython/blob/e709361fc87d0d9ab9c58033a0a7f2fef0ad43d2/Lib/logging/__init__.py#L204 # noqa
684
+ # https://github.com/python/cpython/commit/5ca6d7469be53960843df39bb900e9c3359f127f
685
+ if 'importlib' in file_path and '_bootstrap' in file_path:
686
+ return True
687
+
688
+ return False
689
+
690
+ @classmethod
691
+ def find_frame(cls, stack_offset: int = 0) -> ta.Optional[types.FrameType]:
692
+ f: ta.Optional[types.FrameType] = sys._getframe(2 + stack_offset) # noqa
693
+
694
+ while f is not None:
695
+ # NOTE: We don't check __file__ like stdlib since we may be running amalgamated - we rely on careful,
696
+ # manual stack_offset management.
697
+ if hasattr(f, 'f_code'):
698
+ return f
699
+
700
+ f = f.f_back
701
+
702
+ return None
703
+
704
+ @classmethod
705
+ def build(
706
+ cls,
707
+ stack_offset: int = 0,
708
+ *,
709
+ stack_info: bool = False,
710
+ ) -> ta.Optional['LoggingContextInfos.Caller']:
711
+ if (f := cls.find_frame(stack_offset + 1)) is None:
712
+ return None
713
+
714
+ # https://github.com/python/cpython/blob/08e9794517063c8cd92c48714071b1d3c60b71bd/Lib/logging/__init__.py#L1616-L1623 # noqa
715
+ sinfo = None
716
+ if stack_info:
717
+ sio = io.StringIO()
718
+ traceback.print_stack(f, file=sio)
719
+ sinfo = sio.getvalue()
720
+ sio.close()
721
+ if sinfo[-1] == '\n':
722
+ sinfo = sinfo[:-1]
723
+
724
+ return cls(
725
+ file_path=f.f_code.co_filename,
726
+ line_no=f.f_lineno or 0,
727
+ func_name=f.f_code.co_name,
728
+ stack_info=sinfo,
729
+ )
730
+
731
+ @logging_context_info
732
+ @ta.final
733
+ class SourceFile(ta.NamedTuple):
734
+ file_name: str
735
+ module: str
736
+
737
+ @classmethod
738
+ def build(cls, caller_file_path: ta.Optional[str]) -> ta.Optional['LoggingContextInfos.SourceFile']:
739
+ if caller_file_path is None:
740
+ return None
741
+
742
+ # https://github.com/python/cpython/blob/e709361fc87d0d9ab9c58033a0a7f2fef0ad43d2/Lib/logging/__init__.py#L331-L336 # noqa
743
+ try:
744
+ file_name = os.path.basename(caller_file_path)
745
+ module = os.path.splitext(file_name)[0]
746
+ except (TypeError, ValueError, AttributeError):
747
+ return None
748
+
749
+ return cls(
750
+ file_name=file_name,
751
+ module=module,
752
+ )
753
+
754
+ @logging_context_info
755
+ @ta.final
756
+ class Thread(ta.NamedTuple):
757
+ ident: int
758
+ native_id: ta.Optional[int]
759
+ name: str
760
+
761
+ @classmethod
762
+ def build(cls) -> 'LoggingContextInfos.Thread':
763
+ return cls(
764
+ ident=threading.get_ident(),
765
+ native_id=threading.get_native_id() if hasattr(threading, 'get_native_id') else None,
766
+ name=threading.current_thread().name,
767
+ )
768
+
769
+ @logging_context_info
770
+ @ta.final
771
+ class Process(ta.NamedTuple):
772
+ pid: int
773
+
774
+ @classmethod
775
+ def build(cls) -> 'LoggingContextInfos.Process':
776
+ return cls(
777
+ pid=os.getpid(),
778
+ )
779
+
780
+ @logging_context_info
781
+ @ta.final
782
+ class Multiprocessing(ta.NamedTuple):
783
+ process_name: str
784
+
785
+ @classmethod
786
+ def build(cls) -> ta.Optional['LoggingContextInfos.Multiprocessing']:
787
+ # https://github.com/python/cpython/blob/e709361fc87d0d9ab9c58033a0a7f2fef0ad43d2/Lib/logging/__init__.py#L355-L364 # noqa
788
+ if (mp := sys.modules.get('multiprocessing')) is None:
789
+ return None
790
+
791
+ return cls(
792
+ process_name=mp.current_process().name,
793
+ )
794
+
795
+ @logging_context_info
796
+ @ta.final
797
+ class AsyncioTask(ta.NamedTuple):
798
+ name: str
799
+
800
+ @classmethod
801
+ def build(cls) -> ta.Optional['LoggingContextInfos.AsyncioTask']:
802
+ # https://github.com/python/cpython/blob/e709361fc87d0d9ab9c58033a0a7f2fef0ad43d2/Lib/logging/__init__.py#L372-L377 # noqa
803
+ if (asyncio := sys.modules.get('asyncio')) is None:
804
+ return None
805
+
806
+ try:
807
+ task = asyncio.current_task()
808
+ except Exception: # noqa
809
+ return None
810
+
811
+ if task is None:
812
+ return None
813
+
814
+ return cls(
815
+ name=task.get_name(), # Always non-None
816
+ )
817
+
818
+
819
+ ##
820
+
821
+
822
+ class UnexpectedLoggingStartTimeWarning(LoggingSetupWarning):
823
+ pass
824
+
825
+
826
+ def _check_logging_start_time() -> None:
827
+ if (x := LoggingContextInfos.Time.get_std_start_ns()) < (t := time.time()):
828
+ import warnings # noqa
829
+
830
+ warnings.warn(
831
+ f'Unexpected logging start time detected: '
832
+ f'get_std_start_ns={x}, '
833
+ f'time.time()={t}',
834
+ UnexpectedLoggingStartTimeWarning,
835
+ )
836
+
837
+
838
+ _check_logging_start_time()
839
+
840
+
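The builders above mirror stdlib LogRecord conventions: messages may be a format string plus args, a pre-built tuple, or a zero-argument callable, and timestamps are decomposed from nanoseconds the same way LogRecord does. A small sketch of what they produce (import path assumed):

    import logging
    import time

    from omdev.scripts.lib.logs import LoggingContextInfos

    # The three accepted message forms all normalize to the same Msg info.
    m1 = LoggingContextInfos.Msg.build('hello %s', 'world')
    m2 = LoggingContextInfos.Msg.build(('hello %s', 'world'))
    m3 = LoggingContextInfos.Msg.build(lambda: ('hello %s', 'world'))
    assert m1 == m2 == m3 == LoggingContextInfos.Msg(msg='hello %s', args=('world',))

    # Any int level is wrapped in a NamedLogLevel and paired with its stdlib name.
    lv = LoggingContextInfos.Level.build(logging.WARNING)
    assert lv.name == 'WARNING' and lv.level == logging.WARNING

    # Time is built from a nanosecond timestamp.
    t = LoggingContextInfos.Time.build(time.time_ns())
    assert 0.0 <= t.msecs < 1000.0
    assert abs(t.secs - time.time()) < 1.0
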
841
+ ########################################
842
+ # ../std/json.py
843
+ """
844
+ TODO:
845
+ - translate json keys
846
+ """
847
+
848
+
849
+ ##
850
+
851
+
852
+ class JsonLoggingFormatter(logging.Formatter):
853
+ KEYS: ta.Mapping[str, bool] = {
854
+ 'name': False,
855
+ 'msg': False,
856
+ 'args': False,
857
+ 'levelname': False,
858
+ 'levelno': False,
859
+ 'pathname': False,
860
+ 'filename': False,
861
+ 'module': False,
862
+ 'exc_info': True,
863
+ 'exc_text': True,
864
+ 'stack_info': True,
865
+ 'lineno': False,
866
+ 'funcName': False,
867
+ 'created': False,
868
+ 'msecs': False,
869
+ 'relativeCreated': False,
870
+ 'thread': False,
871
+ 'threadName': False,
872
+ 'processName': False,
873
+ 'process': False,
874
+ }
875
+
876
+ def __init__(
877
+ self,
878
+ *args: ta.Any,
879
+ json_dumps: ta.Optional[ta.Callable[[ta.Any], str]] = None,
880
+ **kwargs: ta.Any,
881
+ ) -> None:
882
+ super().__init__(*args, **kwargs)
883
+
884
+ if json_dumps is None:
885
+ json_dumps = json_dumps_compact
886
+ self._json_dumps = json_dumps
887
+
888
+ def format(self, record: logging.LogRecord) -> str:
889
+ dct = {
890
+ k: v
891
+ for k, o in self.KEYS.items()
892
+ for v in [getattr(record, k)]
893
+ if not (o and v is None)
894
+ }
895
+ return self._json_dumps(dct)
896
+
897
+
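JsonLoggingFormatter serializes a fixed set of LogRecord attributes rather than a rendered message string. Wiring it up by hand looks like this (configure_standard_logging below does the equivalent when json=True; import path assumed):

    import logging

    from omdev.scripts.lib.logs import JsonLoggingFormatter

    handler = logging.StreamHandler()
    handler.setFormatter(JsonLoggingFormatter())

    log = logging.getLogger('json-demo')
    log.addHandler(handler)
    log.setLevel(logging.INFO)

    # Emits one compact JSON object per record, e.g.:
    #   {"name":"json-demo","msg":"user %s logged in","args":["alice"],"levelname":"INFO",...}
    log.info('user %s logged in', 'alice')
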
898
+ ########################################
899
+ # ../contexts.py
900
+
901
+
902
+ ##
903
+
904
+
905
+ class LoggingContext(Abstract):
906
+ @abc.abstractmethod
907
+ def get_info(self, ty: ta.Type[LoggingContextInfoT]) -> ta.Optional[LoggingContextInfoT]:
908
+ raise NotImplementedError
909
+
910
+ @ta.final
911
+ def __getitem__(self, ty: ta.Type[LoggingContextInfoT]) -> ta.Optional[LoggingContextInfoT]:
912
+ return self.get_info(ty)
913
+
914
+ @ta.final
915
+ def must_get_info(self, ty: ta.Type[LoggingContextInfoT]) -> LoggingContextInfoT:
916
+ if (info := self.get_info(ty)) is None:
917
+ raise TypeError(f'LoggingContextInfo absent: {ty}')
918
+ return info
919
+
920
+
921
+ @ta.final
922
+ class SimpleLoggingContext(LoggingContext):
923
+ def __init__(self, *infos: LoggingContextInfo) -> None:
924
+ self._infos: ta.Dict[ta.Type[LoggingContextInfo], LoggingContextInfo] = {type(i): i for i in infos}
925
+
926
+ def get_info(self, ty: ta.Type[LoggingContextInfoT]) -> ta.Optional[LoggingContextInfoT]:
927
+ return self._infos.get(ty)
928
+
929
+
930
+ ##
931
+
932
+
933
+ class CaptureLoggingContext(LoggingContext, Abstract):
934
+ @abc.abstractmethod
935
+ def set_basic(
936
+ self,
937
+ name: str,
938
+
939
+ msg: ta.Union[str, tuple, LoggingMsgFn],
940
+ args: tuple,
941
+ ) -> 'CaptureLoggingContext':
942
+ raise NotImplementedError
943
+
944
+ #
945
+
946
+ class AlreadyCapturedError(Exception):
947
+ pass
948
+
949
+ class NotCapturedError(Exception):
950
+ pass
951
+
952
+ @abc.abstractmethod
953
+ def capture(self) -> None:
954
+ """Must be cooperatively called only from the expected locations."""
955
+
956
+ raise NotImplementedError
957
+
958
+
959
+ @ta.final
960
+ class CaptureLoggingContextImpl(CaptureLoggingContext):
961
+ @ta.final
962
+ class NOT_SET: # noqa
963
+ def __new__(cls, *args, **kwargs): # noqa
964
+ raise TypeError
965
+
966
+ #
967
+
968
+ def __init__(
969
+ self,
970
+ level: LogLevel,
971
+ *,
972
+ time_ns: ta.Optional[int] = None,
973
+
974
+ exc_info: LoggingExcInfoArg = False,
975
+
976
+ caller: ta.Union[LoggingContextInfos.Caller, ta.Type[NOT_SET], None] = NOT_SET,
977
+ stack_offset: int = 0,
978
+ stack_info: bool = False,
979
+ ) -> None:
980
+ if time_ns is None:
981
+ time_ns = time.time_ns()
982
+
983
+ # Done early to not trample on sys.exc_info()
984
+ exc = LoggingContextInfos.Exc.build(exc_info)
985
+
986
+ self._infos: ta.Dict[ta.Type[LoggingContextInfo], LoggingContextInfo] = {}
987
+ self._set_info(
988
+ LoggingContextInfos.Level.build(level),
989
+ exc,
990
+ LoggingContextInfos.Time.build(time_ns),
991
+ )
992
+
993
+ if caller is not CaptureLoggingContextImpl.NOT_SET:
994
+ self._infos[LoggingContextInfos.Caller] = caller
995
+ else:
996
+ self._stack_offset = stack_offset
997
+ self._stack_info = stack_info
998
+
999
+ def _set_info(self, *infos: ta.Optional[LoggingContextInfo]) -> 'CaptureLoggingContextImpl':
1000
+ for info in infos:
1001
+ if info is not None:
1002
+ self._infos[type(info)] = info
1003
+ return self
1004
+
1005
+ def get_infos(self) -> ta.Mapping[ta.Type[LoggingContextInfo], LoggingContextInfo]:
1006
+ return self._infos
1007
+
1008
+ def get_info(self, ty: ta.Type[LoggingContextInfoT]) -> ta.Optional[LoggingContextInfoT]:
1009
+ return self._infos.get(ty)
1010
+
1011
+ ##
1012
+
1013
+ def set_basic(
1014
+ self,
1015
+ name: str,
1016
+
1017
+ msg: ta.Union[str, tuple, LoggingMsgFn],
1018
+ args: tuple,
1019
+ ) -> 'CaptureLoggingContextImpl':
1020
+ return self._set_info(
1021
+ LoggingContextInfos.Name(name),
1022
+ LoggingContextInfos.Msg.build(msg, *args),
1023
+ )
1024
+
1025
+ ##
1026
+
1027
+ _stack_offset: int
1028
+ _stack_info: bool
1029
+
1030
+ def inc_stack_offset(self, ofs: int = 1) -> 'CaptureLoggingContextImpl':
1031
+ if hasattr(self, '_stack_offset'):
1032
+ self._stack_offset += ofs
1033
+ return self
1034
+
1035
+ _has_captured: bool = False
1036
+
1037
+ def capture(self) -> None:
1038
+ if self._has_captured:
1039
+ raise CaptureLoggingContextImpl.AlreadyCapturedError
1040
+ self._has_captured = True
1041
+
1042
+ if LoggingContextInfos.Caller not in self._infos:
1043
+ self._set_info(LoggingContextInfos.Caller.build(
1044
+ self._stack_offset + 1,
1045
+ stack_info=self._stack_info,
1046
+ ))
1047
+
1048
+ if (caller := self[LoggingContextInfos.Caller]) is not None:
1049
+ self._set_info(LoggingContextInfos.SourceFile.build(
1050
+ caller.file_path,
1051
+ ))
1052
+
1053
+ self._set_info(
1054
+ LoggingContextInfos.Thread.build(),
1055
+ LoggingContextInfos.Process.build(),
1056
+ LoggingContextInfos.Multiprocessing.build(),
1057
+ LoggingContextInfos.AsyncioTask.build(),
1058
+ )
1059
+
1060
+
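CaptureLoggingContextImpl is what the logger front-end below hands to _log(): the constructor snapshots level, time, and exc_info immediately, set_basic() attaches the message, and capture() resolves caller/thread/process info exactly once. A sketch of driving it by hand (import path assumed; real callers are the logger methods, which manage stack offsets for you):

    import logging

    from omdev.scripts.lib.logs import CaptureLoggingContextImpl, LoggingContextInfos

    def emit_by_hand() -> None:
        ctx = CaptureLoggingContextImpl(logging.INFO, stack_offset=0)
        ctx.set_basic('demo', 'hello %s', ('world',))
        ctx.capture()  # resolves Caller/Thread/Process/etc. infos

        assert ctx.must_get_info(LoggingContextInfos.Msg).msg == 'hello %s'
        assert ctx.must_get_info(LoggingContextInfos.Level).level == logging.INFO
        assert ctx[LoggingContextInfos.Thread] is not None

        try:
            ctx.capture()
        except CaptureLoggingContextImpl.AlreadyCapturedError:
            pass  # a context may only be captured once

    emit_by_hand()
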
1061
+ ########################################
1062
+ # ../std/standard.py
1063
+ """
1064
+ TODO:
1065
+ - structured
1066
+ - prefixed
1067
+ - debug
1068
+ - optional noisy? noisy will never be lite - some kinda configure_standard callback mechanism?
1069
+ """
1070
+
1071
+
1072
+ ##
1073
+
1074
+
1075
+ STANDARD_LOG_FORMAT_PARTS = [
1076
+ ('asctime', '%(asctime)-15s'),
1077
+ ('process', 'pid=%(process)s'),
1078
+ ('thread', 'tid=%(thread)x'),
1079
+ ('levelname', '%(levelname)s'),
1080
+ ('name', '%(name)s'),
1081
+ ('separator', '::'),
1082
+ ('message', '%(message)s'),
1083
+ ]
1084
+
1085
+
1086
+ class StandardLoggingFormatter(logging.Formatter):
1087
+ @staticmethod
1088
+ def build_log_format(parts: ta.Iterable[ta.Tuple[str, str]]) -> str:
1089
+ return ' '.join(v for k, v in parts)
1090
+
1091
+ converter = datetime.datetime.fromtimestamp # type: ignore
1092
+
1093
+ def formatTime(self, record, datefmt=None):
1094
+ ct = self.converter(record.created)
1095
+ if datefmt:
1096
+ return ct.strftime(datefmt) # noqa
1097
+ else:
1098
+ t = ct.strftime('%Y-%m-%d %H:%M:%S')
1099
+ return '%s.%03d' % (t, record.msecs) # noqa
1100
+
1101
+
1102
+ ##
1103
+
1104
+
1105
+ class StandardConfiguredLoggingHandler(ProxyLoggingHandler):
1106
+ def __init_subclass__(cls, **kwargs):
1107
+ raise TypeError('This class serves only as a marker and should not be subclassed.')
1108
+
1109
+
1110
+ ##
1111
+
1112
+
1113
+ @contextlib.contextmanager
1114
+ def _locking_logging_module_lock() -> ta.Iterator[None]:
1115
+ if hasattr(logging, '_acquireLock'):
1116
+ logging._acquireLock() # noqa
1117
+ try:
1118
+ yield
1119
+ finally:
1120
+ logging._releaseLock() # type: ignore # noqa
1121
+
1122
+ elif hasattr(logging, '_lock'):
1123
+ # https://github.com/python/cpython/commit/74723e11109a320e628898817ab449b3dad9ee96
1124
+ with logging._lock: # noqa
1125
+ yield
1126
+
1127
+ else:
1128
+ raise Exception("Can't find lock in logging module")
1129
+
1130
+
1131
+ def configure_standard_logging(
1132
+ level: ta.Union[int, str] = logging.INFO,
1133
+ *,
1134
+ target: ta.Optional[logging.Logger] = None,
1135
+
1136
+ force: bool = False,
1137
+
1138
+ handler_factory: ta.Optional[ta.Callable[[], logging.Handler]] = None,
1139
+
1140
+ formatter: ta.Optional[logging.Formatter] = None, # noqa
1141
+ json: bool = False,
1142
+ ) -> ta.Optional[StandardConfiguredLoggingHandler]:
1143
+ with _locking_logging_module_lock():
1144
+ if target is None:
1145
+ target = logging.root
1146
+
1147
+ #
1148
+
1149
+ if not force:
1150
+ if any(isinstance(h, StandardConfiguredLoggingHandler) for h in list(target.handlers)):
1151
+ return None
1152
+
1153
+ #
1154
+
1155
+ if handler_factory is not None:
1156
+ handler = handler_factory()
1157
+ else:
1158
+ handler = logging.StreamHandler()
1159
+
1160
+ #
1161
+
1162
+ if formatter is None:
1163
+ if json:
1164
+ formatter = JsonLoggingFormatter()
1165
+ else:
1166
+ formatter = StandardLoggingFormatter(StandardLoggingFormatter.build_log_format(STANDARD_LOG_FORMAT_PARTS)) # noqa
1167
+ handler.setFormatter(formatter)
1168
+
1169
+ #
1170
+
1171
+ handler.addFilter(TidLoggingFilter())
1172
+
1173
+ #
1174
+
1175
+ target.addHandler(handler)
1176
+
1177
+ #
1178
+
1179
+ if level is not None:
1180
+ target.setLevel(level)
1181
+
1182
+ #
1183
+
1184
+ return StandardConfiguredLoggingHandler(handler)
1185
+
1186
+
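Typical usage is a single configure_standard_logging() call at program startup; json=True swaps in the JsonLoggingFormatter shown earlier, and force=True reinstalls the handler even if one appears to be present. A sketch (import path assumed):

    import logging

    from omdev.scripts.lib.logs import configure_standard_logging

    configure_standard_logging(logging.DEBUG)
    logging.getLogger('app').info('formatted with asctime/pid/tid prefixes')

    # Or, for one compact JSON object per line instead of the standard format:
    # configure_standard_logging(logging.INFO, json=True, force=True)
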
1187
+ ########################################
1188
+ # ../base.py
1189
+
1190
+
1191
+ ##
1192
+
1193
+
1194
+ class AnyLogger(Abstract, ta.Generic[T]):
1195
+ def is_enabled_for(self, level: LogLevel) -> bool:
1196
+ return level >= self.get_effective_level()
1197
+
1198
+ @abc.abstractmethod
1199
+ def get_effective_level(self) -> LogLevel:
1200
+ raise NotImplementedError
1201
+
1202
+ #
1203
+
1204
+ @ta.final
1205
+ def isEnabledFor(self, level: LogLevel) -> bool: # noqa
1206
+ return self.is_enabled_for(level)
1207
+
1208
+ @ta.final
1209
+ def getEffectiveLevel(self) -> LogLevel: # noqa
1210
+ return self.get_effective_level()
1211
+
1212
+ ##
1213
+
1214
+ # This will be 1 for [Sync]Logger and 0 for AsyncLogger - in sync loggers these methods remain present on the stack,
1215
+ # in async loggers they return a coroutine to be awaited and thus aren't actually present when said coroutine is
1216
+ # awaited.
1217
+ _level_proxy_method_stack_offset: int
1218
+
1219
+ @ta.overload
1220
+ def log(self, level: LogLevel, msg: str, *args: ta.Any, **kwargs: ta.Any) -> T:
1221
+ ...
1222
+
1223
+ @ta.overload
1224
+ def log(self, level: LogLevel, msg: ta.Tuple[ta.Any, ...], **kwargs: ta.Any) -> T:
1225
+ ...
1226
+
1227
+ @ta.overload
1228
+ def log(self, level: LogLevel, msg_fn: LoggingMsgFn, **kwargs: ta.Any) -> T:
1229
+ ...
1230
+
1231
+ @ta.final
1232
+ def log(self, level: LogLevel, *args, **kwargs):
1233
+ return self._log(
1234
+ CaptureLoggingContextImpl(
1235
+ level,
1236
+ stack_offset=self._level_proxy_method_stack_offset,
1237
+ ),
1238
+ *args,
1239
+ **kwargs,
1240
+ )
1241
+
1242
+ #
1243
+
1244
+ @ta.overload
1245
+ def debug(self, msg: str, *args: ta.Any, **kwargs: ta.Any) -> T:
1246
+ ...
1247
+
1248
+ @ta.overload
1249
+ def debug(self, msg: ta.Tuple[ta.Any, ...], **kwargs: ta.Any) -> T:
1250
+ ...
1251
+
1252
+ @ta.overload
1253
+ def debug(self, msg_fn: LoggingMsgFn, **kwargs: ta.Any) -> T:
1254
+ ...
1255
+
1256
+ @ta.final
1257
+ def debug(self, *args, **kwargs):
1258
+ return self._log(
1259
+ CaptureLoggingContextImpl(
1260
+ NamedLogLevel.DEBUG,
1261
+ stack_offset=self._level_proxy_method_stack_offset,
1262
+ ),
1263
+ *args,
1264
+ **kwargs,
1265
+ )
1266
+
1267
+ #
1268
+
1269
+ @ta.overload
1270
+ def info(self, msg: str, *args: ta.Any, **kwargs: ta.Any) -> T:
1271
+ ...
1272
+
1273
+ @ta.overload
1274
+ def info(self, msg: ta.Tuple[ta.Any, ...], **kwargs: ta.Any) -> T:
1275
+ ...
1276
+
1277
+ @ta.overload
1278
+ def info(self, msg_fn: LoggingMsgFn, **kwargs: ta.Any) -> T:
1279
+ ...
1280
+
1281
+ @ta.final
1282
+ def info(self, *args, **kwargs):
1283
+ return self._log(
1284
+ CaptureLoggingContextImpl(
1285
+ NamedLogLevel.INFO,
1286
+ stack_offset=self._level_proxy_method_stack_offset,
1287
+ ),
1288
+ *args,
1289
+ **kwargs,
1290
+ )
1291
+
1292
+ #
1293
+
1294
+ @ta.overload
1295
+ def warning(self, msg: str, *args: ta.Any, **kwargs: ta.Any) -> T:
1296
+ ...
1297
+
1298
+ @ta.overload
1299
+ def warning(self, msg: ta.Tuple[ta.Any, ...], **kwargs: ta.Any) -> T:
1300
+ ...
1301
+
1302
+ @ta.overload
1303
+ def warning(self, msg_fn: LoggingMsgFn, **kwargs: ta.Any) -> T:
1304
+ ...
1305
+
1306
+ @ta.final
1307
+ def warning(self, *args, **kwargs):
1308
+ return self._log(
1309
+ CaptureLoggingContextImpl(
1310
+ NamedLogLevel.WARNING,
1311
+ stack_offset=self._level_proxy_method_stack_offset,
1312
+ ),
1313
+ *args,
1314
+ **kwargs,
1315
+ )
1316
+
1317
+ #
1318
+
1319
+ @ta.overload
1320
+ def error(self, msg: str, *args: ta.Any, **kwargs: ta.Any) -> T:
1321
+ ...
1322
+
1323
+ @ta.overload
1324
+ def error(self, msg: ta.Tuple[ta.Any, ...], **kwargs: ta.Any) -> T:
1325
+ ...
1326
+
1327
+ @ta.overload
1328
+ def error(self, msg_fn: LoggingMsgFn, **kwargs: ta.Any) -> T:
1329
+ ...
1330
+
1331
+ @ta.final
1332
+ def error(self, *args, **kwargs):
1333
+ return self._log(
1334
+ CaptureLoggingContextImpl(
1335
+ NamedLogLevel.ERROR,
1336
+ stack_offset=self._level_proxy_method_stack_offset,
1337
+ ),
1338
+ *args,
1339
+ **kwargs,
1340
+ )
1341
+
1342
+ #
1343
+
1344
+ @ta.overload
1345
+ def exception(self, msg: str, *args: ta.Any, exc_info: LoggingExcInfoArg = True, **kwargs: ta.Any) -> T:
1346
+ ...
1347
+
1348
+ @ta.overload
1349
+ def exception(self, msg: ta.Tuple[ta.Any, ...], *, exc_info: LoggingExcInfoArg = True, **kwargs: ta.Any) -> T:
1350
+ ...
1351
+
1352
+ @ta.overload
1353
+ def exception(self, msg_fn: LoggingMsgFn, *, exc_info: LoggingExcInfoArg = True, **kwargs: ta.Any) -> T:
1354
+ ...
1355
+
1356
+ @ta.final
1357
+ def exception(self, *args, exc_info: LoggingExcInfoArg = True, **kwargs):
1358
+ return self._log(
1359
+ CaptureLoggingContextImpl(
1360
+ NamedLogLevel.ERROR,
1361
+ exc_info=exc_info,
1362
+ stack_offset=self._level_proxy_method_stack_offset,
1363
+ ),
1364
+ *args,
1365
+ **kwargs,
1366
+ )
1367
+
1368
+ #
1369
+
1370
+ @ta.overload
1371
+ def critical(self, msg: str, *args: ta.Any, **kwargs: ta.Any) -> T:
1372
+ ...
1373
+
1374
+ @ta.overload
1375
+ def critical(self, msg: ta.Tuple[ta.Any, ...], **kwargs: ta.Any) -> T:
1376
+ ...
1377
+
1378
+ @ta.overload
1379
+ def critical(self, msg_fn: LoggingMsgFn, **kwargs: ta.Any) -> T:
1380
+ ...
1381
+
1382
+ @ta.final
1383
+ def critical(self, *args, **kwargs):
1384
+ return self._log(
1385
+ CaptureLoggingContextImpl(
1386
+ NamedLogLevel.CRITICAL,
1387
+ stack_offset=self._level_proxy_method_stack_offset,
1388
+ ),
1389
+ *args,
1390
+ **kwargs,
1391
+ )
1392
+
1393
+ ##
1394
+
1395
+ @abc.abstractmethod
1396
+ def _log(
1397
+ self,
1398
+ ctx: CaptureLoggingContext,
1399
+ msg: ta.Union[str, tuple, LoggingMsgFn],
1400
+ *args: ta.Any,
1401
+ **kwargs: ta.Any,
1402
+ ) -> T:
1403
+ raise NotImplementedError
1404
+
1405
+
1406
+ class Logger(AnyLogger[None], Abstract):
1407
+ _level_proxy_method_stack_offset: int = 1
1408
+
1409
+ @abc.abstractmethod
1410
+ def _log(
1411
+ self,
1412
+ ctx: CaptureLoggingContext,
1413
+ msg: ta.Union[str, tuple, LoggingMsgFn],
1414
+ *args: ta.Any,
1415
+ **kwargs: ta.Any,
1416
+ ) -> None:
1417
+ raise NotImplementedError
1418
+
1419
+
1420
+ class AsyncLogger(AnyLogger[ta.Awaitable[None]], Abstract):
1421
+ _level_proxy_method_stack_offset: int = 0
1422
+
1423
+ @abc.abstractmethod
1424
+ def _log(
1425
+ self,
1426
+ ctx: CaptureLoggingContext,
1427
+ msg: ta.Union[str, tuple, LoggingMsgFn],
1428
+ *args: ta.Any,
1429
+ **kwargs: ta.Any,
1430
+ ) -> ta.Awaitable[None]:
1431
+ raise NotImplementedError
1432
+
1433
+
1434
+ ##
1435
+
1436
+
1437
+ class AnyNopLogger(AnyLogger[T], Abstract):
1438
+ @ta.final
1439
+ def get_effective_level(self) -> LogLevel:
1440
+ return -999
1441
+
1442
+
1443
+ @ta.final
1444
+ class NopLogger(AnyNopLogger[None], Logger):
1445
+ def _log(
1446
+ self,
1447
+ ctx: CaptureLoggingContext,
1448
+ msg: ta.Union[str, tuple, LoggingMsgFn],
1449
+ *args: ta.Any,
1450
+ **kwargs: ta.Any,
1451
+ ) -> None:
1452
+ pass
1453
+
1454
+
1455
+ @ta.final
1456
+ class AsyncNopLogger(AnyNopLogger[ta.Awaitable[None]], AsyncLogger):
1457
+ async def _log(
1458
+ self,
1459
+ ctx: CaptureLoggingContext,
1460
+ msg: ta.Union[str, tuple, LoggingMsgFn],
1461
+ *args: ta.Any,
1462
+ **kwargs: ta.Any,
1463
+ ) -> None:
1464
+ pass
1465
+
1466
+
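Concrete loggers only need get_effective_level() and _log(); every level method above funnels into _log() with a pre-built capture context. A minimal synchronous sketch (PrintLogger is illustrative, not how the package's own std adapter works; import path assumed):

    import logging

    from omdev.scripts.lib.logs import (
        CaptureLoggingContext,
        Logger,
        LoggingContextInfos,
        NopLogger,
    )

    class PrintLogger(Logger):
        def get_effective_level(self) -> int:
            return logging.INFO

        def _log(self, ctx: CaptureLoggingContext, msg, *args, **kwargs) -> None:
            ctx.set_basic('print', msg, args)
            ctx.capture()
            m = ctx.must_get_info(LoggingContextInfos.Msg)
            print(m.msg % m.args if m.args else m.msg)

    log = PrintLogger()
    log.info('hello %s', 'world')                 # hello world
    assert not log.is_enabled_for(logging.DEBUG)

    NopLogger().error('silently dropped')
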
1467
+ ########################################
1468
+ # ../std/records.py
1469
+ """
1470
+ TODO:
1471
+ - TypedDict?
1472
+ """
1473
+
1474
+
1475
+ ##
1476
+
1477
+
1478
+ class LoggingContextInfoRecordAdapters:
1479
+ # Ref:
1480
+ # - https://docs.python.org/3/library/logging.html#logrecord-attributes
1481
+ #
1482
+ # LogRecord:
1483
+ # - https://github.com/python/cpython/blob/39b2f82717a69dde7212bc39b673b0f55c99e6a3/Lib/logging/__init__.py#L276 (3.8) # noqa
1484
+ # - https://github.com/python/cpython/blob/f070f54c5f4a42c7c61d1d5d3b8f3b7203b4a0fb/Lib/logging/__init__.py#L286 (~3.14) # noqa
1485
+ #
1486
+
1487
+ def __new__(cls, *args, **kwargs): # noqa
1488
+ raise TypeError
1489
+
1490
+ class Adapter(Abstract, ta.Generic[T]):
1491
+ @property
1492
+ @abc.abstractmethod
1493
+ def info_cls(self) -> ta.Type[LoggingContextInfo]:
1494
+ raise NotImplementedError
1495
+
1496
+ #
1497
+
1498
+ @ta.final
1499
+ class NOT_SET: # noqa
1500
+ def __new__(cls, *args, **kwargs): # noqa
1501
+ raise TypeError
1502
+
1503
+ class RecordAttr(ta.NamedTuple):
1504
+ name: str
1505
+ type: ta.Any
1506
+ default: ta.Any
1507
+
1508
+ # @abc.abstractmethod
1509
+ record_attrs: ta.ClassVar[ta.Mapping[str, RecordAttr]]
1510
+
1511
+ @property
1512
+ @abc.abstractmethod
1513
+ def _record_attrs(self) -> ta.Union[
1514
+ ta.Mapping[str, ta.Any],
1515
+ ta.Mapping[str, ta.Tuple[ta.Any, ta.Any]],
1516
+ ]:
1517
+ raise NotImplementedError
1518
+
1519
+ #
1520
+
1521
+ @abc.abstractmethod
1522
+ def context_to_record(self, ctx: LoggingContext) -> ta.Mapping[str, ta.Any]:
1523
+ raise NotImplementedError
1524
+
1525
+ #
1526
+
1527
+ @abc.abstractmethod
1528
+ def record_to_info(self, rec: logging.LogRecord) -> ta.Optional[T]:
1529
+ raise NotImplementedError
1530
+
1531
+ #
1532
+
1533
+ def __init_subclass__(cls, **kwargs: ta.Any) -> None:
1534
+ super().__init_subclass__(**kwargs)
1535
+
1536
+ if Abstract in cls.__bases__:
1537
+ return
1538
+
1539
+ if 'record_attrs' in cls.__dict__:
1540
+ raise TypeError(cls)
1541
+ if not isinstance(ra := cls.__dict__['_record_attrs'], collections.abc.Mapping):
1542
+ raise TypeError(ra)
1543
+
1544
+ rd: ta.Dict[str, LoggingContextInfoRecordAdapters.Adapter.RecordAttr] = {}
1545
+ for n, v in ra.items():
1546
+ if not n or not isinstance(n, str) or n in rd:
1547
+ raise AttributeError(n)
1548
+ if isinstance(v, tuple):
1549
+ t, d = v
1550
+ else:
1551
+ t, d = v, cls.NOT_SET
1552
+ rd[n] = cls.RecordAttr(
1553
+ name=n,
1554
+ type=t,
1555
+ default=d,
1556
+ )
1557
+ cls.record_attrs = rd
1558
+
1559
+ class RequiredAdapter(Adapter[T], Abstract):
1560
+ @property
1561
+ @abc.abstractmethod
1562
+ def _record_attrs(self) -> ta.Mapping[str, ta.Any]:
1563
+ raise NotImplementedError
1564
+
1565
+ #
1566
+
1567
+ @ta.final
1568
+ def context_to_record(self, ctx: LoggingContext) -> ta.Mapping[str, ta.Any]:
1569
+ if (info := ctx.get_info(self.info_cls)) is not None:
1570
+ return self._info_to_record(info)
1571
+ else:
1572
+ raise TypeError # FIXME: fallback?
1573
+
1574
+ @abc.abstractmethod
1575
+ def _info_to_record(self, info: T) -> ta.Mapping[str, ta.Any]:
1576
+ raise NotImplementedError
1577
+
1578
+ #
1579
+
1580
+ @abc.abstractmethod
1581
+ def record_to_info(self, rec: logging.LogRecord) -> T:
1582
+ raise NotImplementedError
1583
+
1584
+ #
1585
+
1586
+ def __init_subclass__(cls, **kwargs: ta.Any) -> None:
1587
+ super().__init_subclass__(**kwargs)
1588
+
1589
+ if any(a.default is not cls.NOT_SET for a in cls.record_attrs.values()):
1590
+ raise TypeError(cls.record_attrs)
1591
+
1592
+ class OptionalAdapter(Adapter[T], Abstract, ta.Generic[T]):
1593
+ @property
1594
+ @abc.abstractmethod
1595
+ def _record_attrs(self) -> ta.Mapping[str, ta.Tuple[ta.Any, ta.Any]]:
1596
+ raise NotImplementedError
1597
+
1598
+ record_defaults: ta.ClassVar[ta.Mapping[str, ta.Any]]
1599
+
1600
+ #
1601
+
1602
+ @ta.final
1603
+ def context_to_record(self, ctx: LoggingContext) -> ta.Mapping[str, ta.Any]:
1604
+ if (info := ctx.get_info(self.info_cls)) is not None:
1605
+ return self._info_to_record(info)
1606
+ else:
1607
+ return self.record_defaults
1608
+
1609
+ @abc.abstractmethod
1610
+ def _info_to_record(self, info: T) -> ta.Mapping[str, ta.Any]:
1611
+ raise NotImplementedError
1612
+
1613
+ #
1614
+
1615
+ def __init_subclass__(cls, **kwargs: ta.Any) -> None:
1616
+ super().__init_subclass__(**kwargs)
1617
+
1618
+ dd: ta.Dict[str, ta.Any] = {a.name: a.default for a in cls.record_attrs.values()}
1619
+ if any(d is cls.NOT_SET for d in dd.values()):
1620
+ raise TypeError(cls.record_attrs)
1621
+ cls.record_defaults = dd
1622
+
1623
+ #
1624
+
1625
+ class Name(RequiredAdapter[LoggingContextInfos.Name]):
1626
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.Name]] = LoggingContextInfos.Name
1627
+
1628
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Any]] = dict(
1629
+ # Name of the logger used to log the call. Unmodified by ctor.
1630
+ name=str,
1631
+ )
1632
+
1633
+ def _info_to_record(self, info: LoggingContextInfos.Name) -> ta.Mapping[str, ta.Any]:
1634
+ return dict(
1635
+ name=info.name,
1636
+ )
1637
+
1638
+ def record_to_info(self, rec: logging.LogRecord) -> LoggingContextInfos.Name:
1639
+ return LoggingContextInfos.Name(
1640
+ name=rec.name,
1641
+ )
1642
+
1643
+ class Level(RequiredAdapter[LoggingContextInfos.Level]):
1644
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.Level]] = LoggingContextInfos.Level
1645
+
1646
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Any]] = dict(
1647
+ # Text logging level for the message ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'). Set to
1648
+ # `getLevelName(level)`.
1649
+ levelname=str,
1650
+
1651
+ # Numeric logging level for the message (DEBUG, INFO, WARNING, ERROR, CRITICAL). Unmodified by ctor.
1652
+ levelno=int,
1653
+ )
1654
+
1655
+ def _info_to_record(self, info: LoggingContextInfos.Level) -> ta.Mapping[str, ta.Any]:
1656
+ return dict(
1657
+ levelname=info.name,
1658
+ levelno=int(info.level),
1659
+ )
1660
+
1661
+ def record_to_info(self, rec: logging.LogRecord) -> LoggingContextInfos.Level:
1662
+ return LoggingContextInfos.Level.build(rec.levelno)
1663
+
1664
+ class Msg(RequiredAdapter[LoggingContextInfos.Msg]):
1665
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.Msg]] = LoggingContextInfos.Msg
1666
+
1667
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Any]] = dict(
1668
+ # The format string passed in the original logging call. Merged with args to produce message, or an
1669
+ # arbitrary object (see Using arbitrary objects as messages). Unmodified by ctor.
1670
+ msg=str,
1671
+
1672
+ # The tuple of arguments merged into msg to produce message, or a dict whose values are used for the merge
1673
+ # (when there is only one argument, and it is a dictionary). Ctor will transform a 1-tuple containing a
1674
+ # Mapping into just the mapping, but is otherwise unmodified.
1675
+ args=ta.Union[tuple, dict, None],
1676
+ )
1677
+
1678
+ def _info_to_record(self, info: LoggingContextInfos.Msg) -> ta.Mapping[str, ta.Any]:
1679
+ return dict(
1680
+ msg=info.msg,
1681
+ args=info.args,
1682
+ )
1683
+
1684
+ def record_to_info(self, rec: logging.LogRecord) -> LoggingContextInfos.Msg:
1685
+ return LoggingContextInfos.Msg(
1686
+ msg=rec.msg,
1687
+ args=rec.args,
1688
+ )
1689
+
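
The 1-tuple-of-Mapping collapse described in the comment above is stdlib LogRecord behavior; a small stdlib-only sketch (the msg/args values are made up for illustration):

    import logging

    # A 1-tuple containing a mapping is collapsed to the mapping itself by LogRecord.__init__,
    # and getMessage() then performs the msg % args merge:
    rec = logging.LogRecord('demo', logging.INFO, __file__, 1, 'a=%(a)s', ({'a': 1},), None)
    assert rec.args == {'a': 1}
    assert rec.getMessage() == 'a=1'
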
1690
+ class Time(RequiredAdapter[LoggingContextInfos.Time]):
1691
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.Time]] = LoggingContextInfos.Time
1692
+
1693
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Any]] = dict(
1694
+ # Time when the LogRecord was created. Set to `time.time_ns() / 1e9` for >=3.13.0b1, otherwise simply
1695
+ # `time.time()`.
1696
+ #
1697
+ # See:
1698
+ # - https://github.com/python/cpython/commit/1316692e8c7c1e1f3b6639e51804f9db5ed892ea
1699
+ # - https://github.com/python/cpython/commit/1500a23f33f5a6d052ff1ef6383d9839928b8ff1
1700
+ #
1701
+ created=float,
1702
+
1703
+ # Millisecond portion of the time when the LogRecord was created.
1704
+ msecs=float,
1705
+
1706
+ # Time in milliseconds when the LogRecord was created, relative to the time the logging module was loaded.
1707
+ relativeCreated=float,
1708
+ )
1709
+
1710
+ def _info_to_record(self, info: LoggingContextInfos.Time) -> ta.Mapping[str, ta.Any]:
1711
+ return dict(
1712
+ created=info.secs,
1713
+ msecs=info.msecs,
1714
+ relativeCreated=info.relative_secs,
1715
+ )
1716
+
1717
+ def record_to_info(self, rec: logging.LogRecord) -> LoggingContextInfos.Time:
1718
+ return LoggingContextInfos.Time.build(
1719
+ int(rec.created * 1e9),
1720
+ )
1721
+
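
A rough stdlib-only sketch of how these three record attributes relate on pre-3.13 CPython (3.13+ derives them from time.time_ns(), per the commits linked above); logging._startTime is a stdlib internal, shown only for illustration:

    import logging
    import time

    # Roughly what pre-3.13 LogRecord.__init__ does:
    ct = time.time()
    created = ct                                          # seconds since the epoch
    msecs = (ct - int(ct)) * 1000                         # fractional-second part, in ms
    relative_created = (ct - logging._startTime) * 1000   # ms since the logging module was loaded
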
1722
+ class Exc(OptionalAdapter[LoggingContextInfos.Exc]):
1723
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.Exc]] = LoggingContextInfos.Exc
1724
+
1725
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Tuple[ta.Any, ta.Any]]] = dict(
1726
+ # Exception tuple (à la sys.exc_info) or, if no exception has occurred, None. Unmodified by ctor.
1727
+ exc_info=(ta.Optional[LoggingExcInfoTuple], None),
1728
+
1729
+ # Used to cache the traceback text. Simply set to None by ctor, later set by Formatter.format.
1730
+ exc_text=(ta.Optional[str], None),
1731
+ )
1732
+
1733
+ def _info_to_record(self, info: LoggingContextInfos.Exc) -> ta.Mapping[str, ta.Any]:
1734
+ return dict(
1735
+ exc_info=info.info_tuple,
1736
+ exc_text=None,
1737
+ )
1738
+
1739
+ def record_to_info(self, rec: logging.LogRecord) -> ta.Optional[LoggingContextInfos.Exc]:
1740
+ # FIXME:
1741
+ # error: Argument 1 to "build" of "Exc" has incompatible type
1742
+ # "tuple[type[BaseException], BaseException, TracebackType | None] | tuple[None, None, None] | None"; expected # noqa
1743
+ # "BaseException | tuple[type[BaseException], BaseException, TracebackType | None] | bool | None" [arg-type] # noqa
1744
+ return LoggingContextInfos.Exc.build(rec.exc_info) # type: ignore[arg-type]
1745
+
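
A stdlib-only sketch of the exc_info tuple shape and of exc_text being filled lazily by Formatter.format, as the comments above describe (the exception and message are made up):

    import logging
    import sys

    try:
        raise ValueError('boom')
    except ValueError:
        exc_info = sys.exc_info()  # (type, value, traceback) triple

    rec = logging.LogRecord('demo', logging.ERROR, __file__, 1, 'failed', (), exc_info)
    assert rec.exc_text is None                  # ctor leaves the cache empty
    logging.Formatter().format(rec)
    assert rec.exc_text.startswith('Traceback')  # Formatter.format fills the cache
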
1746
+ class Caller(OptionalAdapter[LoggingContextInfos.Caller]):
1747
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.Caller]] = LoggingContextInfos.Caller
1748
+
1749
+ _UNKNOWN_PATH_NAME: ta.ClassVar[str] = '(unknown file)'
1750
+ _UNKNOWN_FUNC_NAME: ta.ClassVar[str] = '(unknown function)'
1751
+
1752
+ _STACK_INFO_PREFIX: ta.ClassVar[str] = 'Stack (most recent call last):\n'
1753
+
1754
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Tuple[ta.Any, ta.Any]]] = dict(
1755
+ # Full pathname of the source file where the logging call was issued (if available). Unmodified by ctor. May
1756
+ # default to "(unknown file)" by Logger.findCaller / Logger._log.
1757
+ pathname=(str, _UNKNOWN_PATH_NAME),
1758
+
1759
+ # Source line number where the logging call was issued (if available). Unmodified by ctor. May default to 0
1760
+ # by Logger.findCaller / Logger._log.
1761
+ lineno=(int, 0),
1762
+
1763
+ # Name of function containing the logging call. Set by ctor to `func` arg, unmodified. May default to
1764
+ # "(unknown function)" by Logger.findCaller / Logger._log.
1765
+ funcName=(str, _UNKNOWN_FUNC_NAME),
1766
+
1767
+ # Stack frame information (where available) from the bottom of the stack in the current thread, up to and
1768
+ # including the stack frame of the logging call which resulted in the creation of this record. Set by ctor
1769
+ # to `sinfo` arg, unmodified. Mostly set, if requested, by `Logger.findCaller`, to
1770
+ # `traceback.print_stack(f)`, but prepended with the literal "Stack (most recent call last):\n", and
1771
+ # stripped of exactly one trailing `\n` if present.
1772
+ stack_info=(ta.Optional[str], None),
1773
+ )
1774
+
1775
+ def _info_to_record(self, caller: LoggingContextInfos.Caller) -> ta.Mapping[str, ta.Any]:
1776
+ if (sinfo := caller.stack_info) is not None:
1777
+ stack_info: ta.Optional[str] = ''.join([
1778
+ self._STACK_INFO_PREFIX,
1779
+ sinfo[:-1] if sinfo.endswith('\n') else sinfo,
1780
+ ])
1781
+ else:
1782
+ stack_info = None
1783
+
1784
+ return dict(
1785
+ pathname=caller.file_path,
1786
+
1787
+ lineno=caller.line_no,
1788
+ funcName=caller.func_name,
1789
+
1790
+ stack_info=stack_info,
1791
+ )
1792
+
1793
+ def record_to_info(self, rec: logging.LogRecord) -> ta.Optional[LoggingContextInfos.Caller]:
1794
+ # FIXME: piecemeal?
1795
+ if (
1796
+ rec.pathname != self._UNKNOWN_PATH_NAME and
1797
+ rec.lineno != 0 and
1798
+ rec.funcName != self._UNKNOWN_FUNC_NAME
1799
+ ):
1800
+ if (sinfo := rec.stack_info) is not None and sinfo.startswith(self._STACK_INFO_PREFIX):
1801
+ sinfo = sinfo[len(self._STACK_INFO_PREFIX):]
1802
+ return LoggingContextInfos.Caller(
1803
+ file_path=rec.pathname,
1804
+
1805
+ line_no=rec.lineno,
1806
+ func_name=rec.funcName,
1807
+
1808
+ stack_info=sinfo,
1809
+ )
1810
+
1811
+ return None
1812
+
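
The prefix-and-strip convention this adapter reverses comes from the stdlib's Logger.findCaller; an approximate stdlib-only paraphrase of that construction (not the package's code):

    import io
    import traceback

    # Approximately how Logger.findCaller builds stack_info when stack_info=True is requested:
    sio = io.StringIO()
    sio.write('Stack (most recent call last):\n')
    traceback.print_stack(file=sio)   # findCaller passes the caller's frame here
    sinfo = sio.getvalue()
    sio.close()
    if sinfo[-1] == '\n':
        sinfo = sinfo[:-1]            # exactly one trailing newline is stripped
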
1813
+ class SourceFile(Adapter[LoggingContextInfos.SourceFile]):
1814
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.SourceFile]] = LoggingContextInfos.SourceFile
1815
+
1816
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Any]] = dict(
1817
+ # Filename portion of pathname. Set to `os.path.basename(pathname)` if successful, otherwise defaults to
1818
+ # pathname.
1819
+ filename=str,
1820
+
1821
+ # Module (name portion of filename). Set to `os.path.splitext(filename)[0]`, otherwise defaults to
1822
+ # "Unknown module".
1823
+ module=str,
1824
+ )
1825
+
1826
+ _UNKNOWN_MODULE: ta.ClassVar[str] = 'Unknown module'
1827
+
1828
+ def context_to_record(self, ctx: LoggingContext) -> ta.Mapping[str, ta.Any]:
1829
+ if (info := ctx.get_info(LoggingContextInfos.SourceFile)) is not None:
1830
+ return dict(
1831
+ filename=info.file_name,
1832
+ module=info.module,
1833
+ )
1834
+
1835
+ if (caller := ctx.get_info(LoggingContextInfos.Caller)) is not None:
1836
+ return dict(
1837
+ filename=caller.file_path,
1838
+ module=self._UNKNOWN_MODULE,
1839
+ )
1840
+
1841
+ return dict(
1842
+ filename=LoggingContextInfoRecordAdapters.Caller._UNKNOWN_PATH_NAME, # noqa
1843
+ module=self._UNKNOWN_MODULE,
1844
+ )
1845
+
1846
+ def record_to_info(self, rec: logging.LogRecord) -> ta.Optional[LoggingContextInfos.SourceFile]:
1847
+ if (
1848
+ rec.module is not None and
1849
+ rec.module != self._UNKNOWN_MODULE
1850
+ ):
1851
+ return LoggingContextInfos.SourceFile(
1852
+ file_name=rec.filename,
1853
+ module=rec.module, # FIXME: piecemeal?
1854
+ )
1855
+
1856
+ return None
1857
+
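
A minimal sketch of the stdlib derivation this adapter reconstructs, using only os.path (the pathname is made up):

    import os.path

    # The stdlib derivation: LogRecord.__init__ falls back to filename=pathname and
    # module='Unknown module' if this raises.
    pathname = '/tmp/example/pkg/mod.py'
    filename = os.path.basename(pathname)      # 'mod.py'
    module = os.path.splitext(filename)[0]     # 'mod'
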
1858
+ class Thread(OptionalAdapter[LoggingContextInfos.Thread]):
1859
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.Thread]] = LoggingContextInfos.Thread
1860
+
1861
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Tuple[ta.Any, ta.Any]]] = dict(
1862
+ # Thread ID if available, and `logging.logThreads` is truthy.
1863
+ thread=(ta.Optional[int], None),
1864
+
1865
+ # Thread name if available, and `logging.logThreads` is truthy.
1866
+ threadName=(ta.Optional[str], None),
1867
+ )
1868
+
1869
+ def _info_to_record(self, info: LoggingContextInfos.Thread) -> ta.Mapping[str, ta.Any]:
1870
+ if logging.logThreads:
1871
+ return dict(
1872
+ thread=info.ident,
1873
+ threadName=info.name,
1874
+ )
1875
+
1876
+ return self.record_defaults
1877
+
1878
+ def record_to_info(self, rec: logging.LogRecord) -> ta.Optional[LoggingContextInfos.Thread]:
1879
+ if (
1880
+ (ident := rec.thread) is not None and
1881
+ (name := rec.threadName) is not None
1882
+ ):
1883
+ return LoggingContextInfos.Thread(
1884
+ ident=ident,
1885
+ native_id=None,
1886
+ name=name,
1887
+ )
1888
+
1889
+ return None
1890
+
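
A stdlib-only sketch of what the record's thread/threadName attributes hold when logging.logThreads is left at its default:

    import logging
    import threading

    # What LogRecord.__init__ records when logging.logThreads is truthy (the default):
    if logging.logThreads:
        thread = threading.get_ident()
        thread_name = threading.current_thread().name
    else:
        thread = thread_name = None
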
1891
+ class Process(OptionalAdapter[LoggingContextInfos.Process]):
1892
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.Process]] = LoggingContextInfos.Process
1893
+
1894
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Tuple[ta.Any, ta.Any]]] = dict(
1895
+ # Process ID if available - that is, if `hasattr(os, 'getpid')` - and `logging.logProcesses` is truthy,
1896
+ # otherwise None.
1897
+ process=(ta.Optional[int], None),
1898
+ )
1899
+
1900
+ def _info_to_record(self, info: LoggingContextInfos.Process) -> ta.Mapping[str, ta.Any]:
1901
+ if logging.logProcesses:
1902
+ return dict(
1903
+ process=info.pid,
1904
+ )
1905
+
1906
+ return self.record_defaults
1907
+
1908
+ def record_to_info(self, rec: logging.LogRecord) -> ta.Optional[LoggingContextInfos.Process]:
1909
+ if (
1910
+ (pid := rec.process) is not None
1911
+ ):
1912
+ return LoggingContextInfos.Process(
1913
+ pid=pid,
1914
+ )
1915
+
1916
+ return None
1917
+
1918
+ class Multiprocessing(OptionalAdapter[LoggingContextInfos.Multiprocessing]):
1919
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.Multiprocessing]] = LoggingContextInfos.Multiprocessing
1920
+
1921
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Tuple[ta.Any, ta.Any]]] = dict(
1922
+ # Process name if available. Set to None if `logging.logMultiprocessing` is not truthy. Otherwise, set to
1923
+ # 'MainProcess', then `sys.modules.get('multiprocessing').current_process().name` if that works, otherwise
1924
+ # remains as 'MainProcess'.
1925
+ #
1926
+ # As noted by stdlib:
1927
+ #
1928
+ # Errors may occur if multiprocessing has not finished loading yet - e.g. if a custom import hook causes
1929
+ # third-party code to run when multiprocessing calls import. See issue 8200 for an example
1930
+ #
1931
+ processName=(ta.Optional[str], None),
1932
+ )
1933
+
1934
+ def _info_to_record(self, info: LoggingContextInfos.Multiprocessing) -> ta.Mapping[str, ta.Any]:
1935
+ if logging.logMultiprocessing:
1936
+ return dict(
1937
+ processName=info.process_name,
1938
+ )
1939
+
1940
+ return self.record_defaults
1941
+
1942
+ def record_to_info(self, rec: logging.LogRecord) -> ta.Optional[LoggingContextInfos.Multiprocessing]:
1943
+ if (
1944
+ (process_name := rec.processName) is not None
1945
+ ):
1946
+ return LoggingContextInfos.Multiprocessing(
1947
+ process_name=process_name,
1948
+ )
1949
+
1950
+ return None
1951
+
1952
+ class AsyncioTask(OptionalAdapter[LoggingContextInfos.AsyncioTask]):
1953
+ info_cls: ta.ClassVar[ta.Type[LoggingContextInfos.AsyncioTask]] = LoggingContextInfos.AsyncioTask
1954
+
1955
+ _record_attrs: ta.ClassVar[ta.Mapping[str, ta.Union[ta.Any, ta.Tuple[ta.Any, ta.Any]]]] = dict(
1956
+ # Absent <3.12, otherwise asyncio.Task name if available, and `logging.logAsyncioTasks` is truthy. Set to
1957
+ # `sys.modules.get('asyncio').current_task().get_name()`, otherwise None.
1958
+ taskName=(ta.Optional[str], None),
1959
+ )
1960
+
1961
+ def _info_to_record(self, info: LoggingContextInfos.AsyncioTask) -> ta.Mapping[str, ta.Any]:
1962
+ if getattr(logging, 'logAsyncioTasks', None): # Absent <3.12
1963
+ return dict(
1964
+ taskName=info.name,
1965
+ )
1966
+
1967
+ return self.record_defaults
1968
+
1969
+ def record_to_info(self, rec: logging.LogRecord) -> ta.Optional[LoggingContextInfos.AsyncioTask]:
1970
+ if (
1971
+ (name := getattr(rec, 'taskName', None)) is not None
1972
+ ):
1973
+ return LoggingContextInfos.AsyncioTask(
1974
+ name=name,
1975
+ )
1976
+
1977
+ return None
1978
+
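
A stdlib-only sketch of the 3.12+ behavior described above, assuming Python 3.12+ and a running event loop:

    import asyncio
    import logging

    async def main() -> None:
        # On 3.12+, LogRecord.__init__ does roughly this when logging.logAsyncioTasks is truthy:
        if getattr(logging, 'logAsyncioTasks', False):
            task = asyncio.current_task()
            task_name = task.get_name() if task is not None else None
            print(task_name)  # e.g. 'Task-1'

    asyncio.run(main())
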
1979
+
1980
+ _LOGGING_CONTEXT_INFO_RECORD_ADAPTERS_: ta.Sequence[LoggingContextInfoRecordAdapters.Adapter] = [ # noqa
1981
+ LoggingContextInfoRecordAdapters.Name(),
1982
+ LoggingContextInfoRecordAdapters.Level(),
1983
+ LoggingContextInfoRecordAdapters.Msg(),
1984
+ LoggingContextInfoRecordAdapters.Time(),
1985
+ LoggingContextInfoRecordAdapters.Exc(),
1986
+ LoggingContextInfoRecordAdapters.Caller(),
1987
+ LoggingContextInfoRecordAdapters.SourceFile(),
1988
+ LoggingContextInfoRecordAdapters.Thread(),
1989
+ LoggingContextInfoRecordAdapters.Process(),
1990
+ LoggingContextInfoRecordAdapters.Multiprocessing(),
1991
+ LoggingContextInfoRecordAdapters.AsyncioTask(),
1992
+ ]
1993
+
1994
+ _LOGGING_CONTEXT_INFO_RECORD_ADAPTERS: ta.Mapping[ta.Type[LoggingContextInfo], LoggingContextInfoRecordAdapters.Adapter] = { # noqa
1995
+ ad.info_cls: ad for ad in _LOGGING_CONTEXT_INFO_RECORD_ADAPTERS_
1996
+ }
1997
+
1998
+
1999
+ ##
2000
+
2001
+
2002
+ # Formatter:
2003
+ # - https://github.com/python/cpython/blob/39b2f82717a69dde7212bc39b673b0f55c99e6a3/Lib/logging/__init__.py#L514 (3.8)
2004
+ # - https://github.com/python/cpython/blob/f070f54c5f4a42c7c61d1d5d3b8f3b7203b4a0fb/Lib/logging/__init__.py#L554 (~3.14) # noqa
2005
+ #
2006
+ _KNOWN_STD_LOGGING_FORMATTER_RECORD_ATTRS: ta.Dict[str, ta.Any] = dict(
2007
+ # The logged message, computed as msg % args. Set to `record.getMessage()`.
2008
+ message=str,
2009
+
2010
+ # Human-readable time when the LogRecord was created. By default this is of the form '2003-07-08 16:49:45,896' (the
2011
+ # numbers after the comma are millisecond portion of the time). Set to `self.formatTime(record, self.datefmt)` if
2012
+ # `self.usesTime()`, otherwise unset.
2013
+ asctime=str,
2014
+
2015
+ # Used to cache the traceback text. If unset (falsey) on the record and `exc_info` is truthy, set to
2016
+ # `self.formatException(record.exc_info)` - otherwise unmodified.
2017
+ exc_text=ta.Optional[str],
2018
+ )
2019
+
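
A stdlib-only sketch showing that message is always computed by Formatter.format while asctime is only set when the format string needs it (usesTime()); the record contents are made up:

    import logging

    rec = logging.makeLogRecord({'msg': 'hello %s', 'args': ('world',)})

    f1 = logging.Formatter('%(asctime)s %(message)s')
    assert f1.usesTime()             # asctime will be set on the record
    f1.format(rec)
    assert rec.message == 'hello world'
    assert hasattr(rec, 'asctime')

    f2 = logging.Formatter('%(message)s')
    assert not f2.usesTime()         # asctime is not needed for this format
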
2020
+
2021
+ ##
2022
+
2023
+
2024
+ _KNOWN_STD_LOGGING_RECORD_ATTR_SET: ta.FrozenSet[str] = frozenset(
2025
+ a for ad in _LOGGING_CONTEXT_INFO_RECORD_ADAPTERS.values() for a in ad.record_attrs
2026
+ )
2027
+
2028
+ _KNOWN_STD_LOGGING_FORMATTER_RECORD_ATTR_SET: ta.FrozenSet[str] = frozenset(_KNOWN_STD_LOGGING_FORMATTER_RECORD_ATTRS)
2029
+
2030
+
2031
+ class UnknownStdLoggingRecordAttrsWarning(LoggingSetupWarning):
2032
+ pass
2033
+
2034
+
2035
+ def _check_std_logging_record_attrs() -> None:
2036
+ if (
2037
+ len([a for ad in _LOGGING_CONTEXT_INFO_RECORD_ADAPTERS.values() for a in ad.record_attrs]) !=
2038
+ len(_KNOWN_STD_LOGGING_RECORD_ATTR_SET)
2039
+ ):
2040
+ raise RuntimeError('Duplicate LoggingContextInfoRecordAdapter record attrs')
2041
+
2042
+ rec_dct = dict(logging.makeLogRecord({}).__dict__)
2043
+
2044
+ if (unk_rec_fields := frozenset(rec_dct) - _KNOWN_STD_LOGGING_RECORD_ATTR_SET):
2045
+ import warnings # noqa
2046
+
2047
+ warnings.warn(
2048
+ f'Unknown log record attrs detected: {sorted(unk_rec_fields)!r}',
2049
+ UnknownStdLoggingRecordAttrsWarning,
2050
+ )
2051
+
2052
+
2053
+ _check_std_logging_record_attrs()
2054
+
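
The check above keys off the attribute set of a default-constructed record; a stdlib-only sketch of where that set comes from:

    import logging

    # The attribute names compared against come from a default-constructed record:
    std_attrs = frozenset(logging.makeLogRecord({}).__dict__)
    # e.g. {'name', 'levelname', 'levelno', 'msg', 'args', 'created', 'msecs', 'relativeCreated',
    #       'pathname', 'lineno', 'funcName', 'stack_info', 'exc_info', 'exc_text', 'filename',
    #       'module', 'thread', 'threadName', 'process', 'processName', ...}
    print(sorted(std_attrs))
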
2055
+
2056
+ ##
2057
+
2058
+
2059
+ class LoggingContextLogRecord(logging.LogRecord):
2060
+ # LogRecord.__init__ args:
2061
+ # - name: str
2062
+ # - level: int
2063
+ # - pathname: str - Confusingly referred to as `fn` before the LogRecord ctor. May be empty or "(unknown file)".
2064
+ # - lineno: int - May be 0.
2065
+ # - msg: str
2066
+ # - args: tuple | dict | 1-tuple[dict]
2067
+ # - exc_info: LoggingExcInfoTuple | None
2068
+ # - func: str | None = None -> funcName
2069
+ # - sinfo: str | None = None -> stack_info
2070
+ #
2071
+
2072
+ def __init__(self, *, _logging_context: LoggingContext) -> None: # noqa
2073
+ self.__dict__.update(_logging_context=_logging_context)
2074
+
2075
+ for ad in _LOGGING_CONTEXT_INFO_RECORD_ADAPTERS_:
2076
+ self.__dict__.update(ad.context_to_record(_logging_context))
2077
+
2078
+ _logging_context: LoggingContext
2079
+
2080
+ # FIXME: track extra
2081
+ # def __setattr__(self, key, value):
2082
+ # super().__setattr__(key, value)
2083
+
2084
+
2085
+ ##
2086
+
2087
+
2088
+ @ta.final
2089
+ class LogRecordLoggingContext(LoggingContext):
2090
+ def __init__(self, rec: logging.LogRecord) -> None:
2091
+ if isinstance(rec, LoggingContextLogRecord):
2092
+ raise TypeError(rec)
2093
+
2094
+ self._rec = rec
2095
+
2096
+ infos: ta.List[LoggingContextInfo] = [
2097
+ info
2098
+ for ad in _LOGGING_CONTEXT_INFO_RECORD_ADAPTERS_
2099
+ if (info := ad.record_to_info(rec)) is not None
2100
+ ]
2101
+
2102
+ # FIXME:
2103
+ # if extra is not None:
2104
+ # for key in extra:
2105
+ # if (key in ["message", "asctime"]) or (key in rv.__dict__):
2106
+ # raise KeyError("Attempt to overwrite %r in LogRecord" % key)
2107
+ # rv.__dict__[key] = extra[key]
2108
+
2109
+ if (extra := {
2110
+ a: v
2111
+ for a, v in rec.__dict__.items()
2112
+ if a not in _KNOWN_STD_LOGGING_RECORD_ATTR_SET
2113
+ }):
2114
+ infos.append(LoggingContextInfos.Extra(extra))
2115
+
2116
+ self._infos: ta.Dict[ta.Type[LoggingContextInfo], LoggingContextInfo] = {
2117
+ type(info): info
2118
+ for info in infos
2119
+ }
2120
+
2121
+ def get_info(self, ty: ta.Type[LoggingContextInfoT]) -> ta.Optional[LoggingContextInfoT]:
2122
+ return self._infos.get(ty)
2123
+
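
The extra-attribute recovery above relies on the stdlib merging extra mappings straight into record.__dict__; a stdlib-only sketch (request_id is an arbitrary example key):

    import logging

    baseline = frozenset(logging.makeLogRecord({}).__dict__)

    # Keys passed via `extra` (or merged by makeLogRecord) become plain attributes on the record:
    rec = logging.makeLogRecord({'request_id': 'abc123'})
    assert frozenset(rec.__dict__) - baseline == {'request_id'}
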
2124
+
2125
+ ########################################
2126
+ # ../std/loggers.py
2127
+
2128
+
2129
+ ##
2130
+
2131
+
2132
+ class StdLogger(Logger):
2133
+ def __init__(self, std: logging.Logger) -> None:
2134
+ super().__init__()
2135
+
2136
+ self._std = std
2137
+
2138
+ @property
2139
+ def std(self) -> logging.Logger:
2140
+ return self._std
2141
+
2142
+ def is_enabled_for(self, level: LogLevel) -> bool:
2143
+ return self._std.isEnabledFor(level)
2144
+
2145
+ def get_effective_level(self) -> LogLevel:
2146
+ return self._std.getEffectiveLevel()
2147
+
2148
+ def _log(
2149
+ self,
2150
+ ctx: CaptureLoggingContext,
2151
+ msg: ta.Union[str, tuple, LoggingMsgFn],
2152
+ *args: ta.Any,
2153
+ ) -> None:
2154
+ if not self.is_enabled_for(ctx.must_get_info(LoggingContextInfos.Level).level):
2155
+ return
2156
+
2157
+ ctx.set_basic(
2158
+ name=self._std.name,
2159
+
2160
+ msg=msg,
2161
+ args=args,
2162
+ )
2163
+
2164
+ ctx.capture()
2165
+
2166
+ rec = LoggingContextLogRecord(_logging_context=ctx)
2167
+
2168
+ self._std.handle(rec)
2169
+
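
StdLogger's public logging methods come from the Logger base class defined earlier in this module (not shown here); the final hop is plain stdlib Logger.handle, which treats a hand-built record like one the logger created itself. A stdlib-only sketch of that hop (logger name, level, and message are made up):

    import logging

    logging.basicConfig(level=logging.INFO)

    std = logging.getLogger('demo')
    rec = logging.makeLogRecord({
        'name': 'demo',
        'levelno': logging.INFO,
        'levelname': 'INFO',
        'msg': 'hello from a pre-built record',
    })
    std.handle(rec)   # runs the logger's filters and handlers, same as StdLogger does
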
2170
+
2171
+ ########################################
2172
+ # _amalg.py
2173
+
2174
+
2175
+ ##