logxpy-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- logxpy/__init__.py +126 -0
- logxpy/_action.py +958 -0
- logxpy/_async.py +186 -0
- logxpy/_base.py +80 -0
- logxpy/_compat.py +71 -0
- logxpy/_config.py +45 -0
- logxpy/_dest.py +88 -0
- logxpy/_errors.py +58 -0
- logxpy/_fmt.py +68 -0
- logxpy/_generators.py +136 -0
- logxpy/_mask.py +23 -0
- logxpy/_message.py +195 -0
- logxpy/_output.py +517 -0
- logxpy/_pool.py +93 -0
- logxpy/_traceback.py +126 -0
- logxpy/_types.py +71 -0
- logxpy/_util.py +56 -0
- logxpy/_validation.py +486 -0
- logxpy/_version.py +21 -0
- logxpy/cli.py +61 -0
- logxpy/dask.py +172 -0
- logxpy/decorators.py +268 -0
- logxpy/filter.py +124 -0
- logxpy/journald.py +88 -0
- logxpy/json.py +149 -0
- logxpy/loggerx.py +253 -0
- logxpy/logwriter.py +84 -0
- logxpy/parse.py +191 -0
- logxpy/prettyprint.py +173 -0
- logxpy/serializers.py +36 -0
- logxpy/stdlib.py +23 -0
- logxpy/tai64n.py +45 -0
- logxpy/testing.py +472 -0
- logxpy/tests/__init__.py +9 -0
- logxpy/tests/common.py +36 -0
- logxpy/tests/strategies.py +231 -0
- logxpy/tests/test_action.py +1751 -0
- logxpy/tests/test_api.py +86 -0
- logxpy/tests/test_async.py +67 -0
- logxpy/tests/test_compat.py +13 -0
- logxpy/tests/test_config.py +21 -0
- logxpy/tests/test_coroutines.py +105 -0
- logxpy/tests/test_dask.py +211 -0
- logxpy/tests/test_decorators.py +54 -0
- logxpy/tests/test_filter.py +122 -0
- logxpy/tests/test_fmt.py +42 -0
- logxpy/tests/test_generators.py +292 -0
- logxpy/tests/test_journald.py +246 -0
- logxpy/tests/test_json.py +208 -0
- logxpy/tests/test_loggerx.py +44 -0
- logxpy/tests/test_logwriter.py +262 -0
- logxpy/tests/test_message.py +334 -0
- logxpy/tests/test_output.py +921 -0
- logxpy/tests/test_parse.py +309 -0
- logxpy/tests/test_pool.py +55 -0
- logxpy/tests/test_prettyprint.py +303 -0
- logxpy/tests/test_pyinstaller.py +35 -0
- logxpy/tests/test_serializers.py +36 -0
- logxpy/tests/test_stdlib.py +73 -0
- logxpy/tests/test_tai64n.py +66 -0
- logxpy/tests/test_testing.py +1051 -0
- logxpy/tests/test_traceback.py +251 -0
- logxpy/tests/test_twisted.py +814 -0
- logxpy/tests/test_util.py +45 -0
- logxpy/tests/test_validation.py +989 -0
- logxpy/twisted.py +265 -0
- logxpy-0.1.0.dist-info/METADATA +100 -0
- logxpy-0.1.0.dist-info/RECORD +72 -0
- logxpy-0.1.0.dist-info/WHEEL +5 -0
- logxpy-0.1.0.dist-info/entry_points.txt +2 -0
- logxpy-0.1.0.dist-info/licenses/LICENSE +201 -0
- logxpy-0.1.0.dist-info/top_level.txt +1 -0
logxpy/_output.py
ADDED
@@ -0,0 +1,517 @@
```python
"""
Implementation of hooks and APIs for outputting log messages.
"""

import traceback
import inspect
from threading import Lock
from functools import wraps
from io import IOBase
import warnings

from pyrsistent import PClass, field

from zope.interface import Interface, implementer

from ._traceback import write_traceback, TRACEBACK_MESSAGE
from ._message import EXCEPTION_FIELD, MESSAGE_TYPE_FIELD, REASON_FIELD
from ._util import saferepr, safeunicode
from .json import (
    json_default,
    _encoder_to_default_function,
    _dumps_bytes,
    _dumps_unicode,
)
from ._validation import ValidationError


# Action type for log messages due to a (hopefully temporarily) broken
# destination.
DESTINATION_FAILURE = "eliot:destination_failure"


class BufferingDestination(object):
    """
    Buffer messages in memory.
    """

    def __init__(self):
        self.messages = []

    def __call__(self, message):
        self.messages.append(message)
        while len(self.messages) > 1000:
            self.messages.pop(0)


class Destinations(object):
    """
    Manage a list of destinations for message dictionaries.

    The global instance of this class is where L{Logger} instances will
    send written messages.
    """

    def __init__(self):
        self._destinations = [BufferingDestination()]
        self._any_added = False
        self._globalFields = {}

    def addGlobalFields(self, **fields):
        """
        Add fields that will be included in all messages sent through this
        destination.

        @param fields: Keyword arguments mapping field names to values.
        """
        self._globalFields.update(fields)

    def send(self, message, logger=None):
        """
        Deliver a message to all destinations.

        The passed in message might be mutated.

        This should never raise an exception.

        @param message: A message dictionary that can be serialized to JSON.
        @type message: L{dict}

        @param logger: The ``ILogger`` that wrote the message, if any.
        """
        message.update(self._globalFields)
        errors = []
        is_destination_error_message = (
            message.get("message_type", None) == DESTINATION_FAILURE
        )
        for dest in self._destinations:
            try:
                dest(message)
            except Exception as e:
                # If the destination is broken not because of a specific
                # message, but rather continously, we will get a
                # "eliot:destination_failure" log message logged, and so we
                # want to ensure it doesn't do infinite recursion.
                if not is_destination_error_message:
                    errors.append(e)

        for exception in errors:
            from ._action import log_message

            try:
                new_msg = {
                    MESSAGE_TYPE_FIELD: DESTINATION_FAILURE,
                    REASON_FIELD: safeunicode(exception),
                    EXCEPTION_FIELD: exception.__class__.__module__
                    + "."
                    + exception.__class__.__name__,
                    "message": _safe_unicode_dictionary(message),
                }
                if logger is not None:
                    # This is really only useful for testing, should really
                    # figure out way to get rid of this mechanism...
                    new_msg["__eliot_logger__"] = logger
                log_message(**new_msg)
            except:
                # Nothing we can do here, raising exception to caller will
                # break business logic, better to have that continue to
                # work even if logging isn't.
                pass

    def add(self, *destinations):
        """
        Adds new destinations.

        A destination should never ever throw an exception. Seriously.
        A destination should not mutate the dictionary it is given.

        @param destinations: A list of callables that takes message
            dictionaries.
        """
        buffered_messages = None
        if not self._any_added:
            # These are first set of messages added, so we need to clear
            # BufferingDestination:
            self._any_added = True
            buffered_messages = self._destinations[0].messages
            self._destinations = []
        self._destinations.extend(destinations)
        if buffered_messages:
            # Re-deliver buffered messages:
            for message in buffered_messages:
                self.send(message)

    def remove(self, destination):
        """
        Remove an existing destination.

        @param destination: A destination previously added with C{self.add}.

        @raises ValueError: If the destination is unknown.
        """
        self._destinations.remove(destination)


class ILogger(Interface):
    """
    Write out message dictionaries to some destination.
    """

    def write(dictionary, serializer=None):
        """
        Write a dictionary to the appropriate destination.

        @note: This method is thread-safe.

        @param serializer: Either C{None}, or a
            L{eliot._validation._MessageSerializer} which can be used to
            validate this message.

        @param dictionary: The message to write out. The given dictionary
            will not be mutated.
        @type dictionary: C{dict}
        """


def _safe_unicode_dictionary(dictionary):
    """
    Serialize a dictionary to a unicode string no matter what it contains.

    The resulting dictionary will loosely follow Python syntax but it is
    not expected to actually be a lossless encoding in all cases.

    @param dictionary: A L{dict} to serialize.

    @return: A L{str} string representing the input dictionary as
        faithfully as can be done without putting in too much effort.
    """
    try:
        return str(
            dict(
                (saferepr(key), saferepr(value)) for (key, value) in dictionary.items()
            )
        )
    except:
        return saferepr(dictionary)


@implementer(ILogger)
class Logger(object):
    """
    Write out messages to the globally configured destination(s).

    You will typically want to create one of these for every chunk of code
    whose messages you want to unit test in isolation, e.g. a class. The tests
    can then replace a specific L{Logger} with a L{MemoryLogger}.
    """

    _destinations = Destinations()

    def write(self, dictionary, serializer=None):
        """
        Serialize the dictionary, and write it to C{self._destinations}.
        """
        dictionary = dictionary.copy()
        try:
            if serializer is not None:
                serializer.serialize(dictionary)
        except:
            write_traceback(self)
            from ._action import log_message

            log_message(
                "eliot:serialization_failure",
                message=_safe_unicode_dictionary(dictionary),
                __eliot_logger__=self,
            )
            return

        self._destinations.send(dictionary, self)


def exclusively(f):
    """
    Decorate a function to make it thread-safe by serializing invocations
    using a per-instance lock.
    """

    @wraps(f)
    def exclusively_f(self, *a, **kw):
        with self._lock:
            return f(self, *a, **kw)

    return exclusively_f


@implementer(ILogger)
class MemoryLogger(object):
    """
    Store written messages in memory.

    When unit testing you don't want to create this directly but rather use
    the L{eliot.testing.validateLogging} decorator on a test method, which
    will provide additional testing integration.

    @ivar messages: A C{list} of the dictionaries passed to
        L{MemoryLogger.write}. Do not mutate this list.

    @ivar serializers: A C{list} of the serializers passed to
        L{MemoryLogger.write}, each corresponding to a message
        L{MemoryLogger.messages}. Do not mutate this list.

    @ivar tracebackMessages: A C{list} of messages written to this logger for
        tracebacks using L{eliot.write_traceback} or L{eliot.writeFailure}. Do
        not mutate this list.
    """

    def __init__(self, encoder=None, json_default=json_default):
        """
        @param encoder: DEPRECATED. A JSONEncoder subclass to use when
            encoding JSON.

        @param json_default: A callable that handles objects the default JSON
            serializer can't handle.
        """
        json_default = _json_default_from_encoder_and_json_default(
            encoder, json_default
        )
        self._lock = Lock()
        self._json_default = json_default
        self.reset()

    @exclusively
    def flushTracebacks(self, exceptionType):
        """
        Flush all logged tracebacks whose exception is of the given type.

        This means they are expected tracebacks and should not cause the test
        to fail.

        @param exceptionType: A subclass of L{Exception}.

        @return: C{list} of flushed messages.
        """
        result = []
        remaining = []
        for message in self.tracebackMessages:
            if isinstance(message[REASON_FIELD], exceptionType):
                result.append(message)
            else:
                remaining.append(message)
        self.tracebackMessages = remaining
        return result

    # PEP 8 variant:
    flush_tracebacks = flushTracebacks

    @exclusively
    def write(self, dictionary, serializer=None):
        """
        Add the dictionary to list of messages.
        """
        # Validate copy of the dictionary, to ensure what we store isn't
        # mutated.
        try:
            self._validate_message(dictionary.copy(), serializer)
        except Exception as e:
            # Skip irrelevant frames that don't help pinpoint the problem:
            from . import _output, _message, _action

            skip_filenames = [_output.__file__, _message.__file__, _action.__file__]
            for frame in inspect.stack():
                if frame[1] not in skip_filenames:
                    break
            self._failed_validations.append(
                "{}: {}".format(e, "".join(traceback.format_stack(frame[0])))
            )
        self.messages.append(dictionary)
        self.serializers.append(serializer)
        if serializer is TRACEBACK_MESSAGE._serializer:
            self.tracebackMessages.append(dictionary)

    def _validate_message(self, dictionary, serializer):
        """Validate an individual message.

        As a side-effect, the message is replaced with its serialized contents.

        @param dictionary: A message C{dict} to be validated. Might be mutated
            by the serializer!

        @param serializer: C{None} or a serializer.

        @raises TypeError: If a field name is not unicode, or the dictionary
            fails to serialize to JSON.

        @raises eliot.ValidationError: If serializer was given and validation
            failed.
        """
        if serializer is not None:
            serializer.validate(dictionary)
        for key in dictionary:
            if not isinstance(key, str):
                if isinstance(key, bytes):
                    key.decode("utf-8")
                else:
                    raise TypeError(dictionary, "%r is not unicode" % (key,))
        if serializer is not None:
            serializer.serialize(dictionary)

        try:
            _dumps_unicode(dictionary, default=self._json_default)
        except Exception as e:
            raise TypeError("Message %s doesn't encode to JSON: %s" % (dictionary, e))

    @exclusively
    def validate(self):
        """
        Validate all written messages.

        Does minimal validation of types, and for messages with corresponding
        serializers use those to do additional validation.

        As a side-effect, the messages are replaced with their serialized
        contents.

        @raises TypeError: If a field name is not unicode, or the dictionary
            fails to serialize to JSON.

        @raises eliot.ValidationError: If serializer was given and validation
            failed.
        """
        for dictionary, serializer in zip(self.messages, self.serializers):
            try:
                self._validate_message(dictionary, serializer)
            except (TypeError, ValidationError) as e:
                # We already figured out which messages failed validation
                # earlier. This just lets us figure out which exception type to
                # raise.
                raise e.__class__("\n\n".join(self._failed_validations))

    @exclusively
    def serialize(self):
        """
        Serialize all written messages.

        This is the Field-based serialization, not JSON.

        @return: A C{list} of C{dict}, the serialized messages.
        """
        result = []
        for dictionary, serializer in zip(self.messages, self.serializers):
            dictionary = dictionary.copy()
            serializer.serialize(dictionary)
            result.append(dictionary)
        return result

    @exclusively
    def reset(self):
        """
        Clear all logged messages.

        Any logged tracebacks will also be cleared, and will therefore not
        cause a test failure.

        This is useful to ensure a logger is in a known state before testing
        logging of a specific code path.
        """
        self.messages = []
        self.serializers = []
        self.tracebackMessages = []
        self._failed_validations = []


def _json_default_from_encoder_and_json_default(encoder, json_default):
    if encoder is not None:
        warnings.warn(
            "Using a JSON encoder subclass is no longer supported, please switch to using a default function",
            DeprecationWarning,
            stacklevel=3,
        )
        from .json import json_default as default_json_default

        if json_default is not default_json_default:
            raise RuntimeError("Can't pass in both encoder and default function")

        json_default = _encoder_to_default_function(encoder())
    return json_default


class FileDestination(PClass):
    """
    Callable that writes JSON messages to a file that accepts either C{bytes}
    or C{str}.

    @ivar file: The file to which messages will be written.

    @ivar _dumps: Function that serializes an object to JSON.

    @ivar _linebreak: C{"\n"} as either bytes or unicode.
    """

    file = field(mandatory=True)
    _json_default = field(mandatory=True)
    _dumps = field(mandatory=True)
    _linebreak = field(mandatory=True)

    def __new__(cls, file, encoder=None, json_default=json_default):
        """
        Use ``json_default`` to pass in a default function for JSON dumping.

        The ``encoder`` parameter is deprecated.
        """
        if isinstance(file, IOBase) and not file.writable():
            raise RuntimeError("Given file {} is not writeable.".format(file))

        json_default = _json_default_from_encoder_and_json_default(
            encoder, json_default
        )

        unicodeFile = False
        try:
            file.write(b"")
        except TypeError:
            unicodeFile = True

        if unicodeFile:
            _dumps = _dumps_unicode
            _linebreak = "\n"
        else:
            _dumps = _dumps_bytes
            _linebreak = b"\n"
        return PClass.__new__(
            cls,
            file=file,
            _dumps=_dumps,
            _linebreak=_linebreak,
            _json_default=json_default,
        )

    def __call__(self, message):
        """
        @param message: A message dictionary.
        """
        self.file.write(
            self._dumps(message, default=self._json_default) + self._linebreak
        )
        self.file.flush()


def to_file(output_file, encoder=None, json_default=json_default):
    """
    Add a destination that writes a JSON message per line to the given file.

    @param output_file: A file-like object.

    @param encoder: DEPRECATED. A JSONEncoder subclass to use when encoding
        JSON.

    @param json_default: A callable that handles objects the default JSON
        serializer can't handle.
    """
    Logger._destinations.add(
        FileDestination(file=output_file, encoder=encoder, json_default=json_default)
    )


# The default Logger, used when none is specified:
_DEFAULT_LOGGER = Logger()
```
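For orientation (this sketch is not part of the wheel): the module above can be driven directly through its private `logxpy._output` path, since `Logger`, `MemoryLogger`, `FileDestination` and `to_file` are all defined there. The message keys used below are illustrative, not a schema the package defines, and the example assumes `logxpy` and its dependencies (`pyrsistent`, `zope.interface`) are installed.

```python
# Minimal usage sketch of logxpy/_output.py -- assumptions noted above.
import sys

from logxpy._output import Logger, MemoryLogger, to_file

# Register a destination: one JSON object per line, written to stdout.
to_file(sys.stdout)
Logger().write({"message_type": "example:started", "value": 42})  # illustrative keys

# MemoryLogger keeps messages in memory instead of sending them anywhere,
# which is what the testing helpers build on.
mem = MemoryLogger()
mem.write({"message_type": "example:finished", "value": 42})
mem.validate()  # raises TypeError / ValidationError for malformed messages
assert mem.messages[0]["value"] == 42
```

Note that `to_file` registers the destination on the class-level `Logger._destinations`, so every `Logger` instance, including the module's `_DEFAULT_LOGGER`, writes to it from then on.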
logxpy/_pool.py
ADDED
@@ -0,0 +1,93 @@
```python
"""Thread pool and async channels."""

from __future__ import annotations

import asyncio
import os
from collections.abc import AsyncIterator, Callable
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass
from typing import Any, Generic, TypeVar

T = TypeVar("T")


# === Thread Pool ===
class Pool:
    """Shared thread pools for CPU and I/O work."""

    _instance: Pool | None = None

    def __new__(cls) -> Pool:
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            n = os.cpu_count() or 4
            cls._instance._cpu = ThreadPoolExecutor(n, thread_name_prefix="lx-cpu-")
            cls._instance._io = ThreadPoolExecutor(n * 2, thread_name_prefix="lx-io-")
        return cls._instance

    async def cpu(self, fn: Callable[..., T], *args: Any, **kw: Any) -> T:
        return await asyncio.get_event_loop().run_in_executor(self._cpu, lambda: fn(*args, **kw))

    async def io(self, fn: Callable[..., T], *args: Any, **kw: Any) -> T:
        return await asyncio.get_event_loop().run_in_executor(self._io, lambda: fn(*args, **kw))

    def shutdown(self) -> None:
        self._cpu.shutdown(wait=False)
        self._io.shutdown(wait=False)


pool = Pool()


# === Channel ===
@dataclass
class ChannelStats:
    sent: int = 0
    recv: int = 0
    dropped: int = 0


class Channel(Generic[T]):
    """Bounded async channel with backpressure."""

    __slots__ = ("_closed", "_drop", "_q", "stats")

    def __init__(self, size: int = 1000, drop_oldest: bool = False):
        self._q: asyncio.Queue[T | None] = asyncio.Queue(maxsize=size)
        self._closed = False
        self._drop = drop_oldest
        self.stats = ChannelStats()

    async def send(self, item: T) -> bool:
        if self._closed:
            return False
        try:
            self._q.put_nowait(item)
        except asyncio.QueueFull:
            if self._drop:
                try:
                    self._q.get_nowait()
                    self.stats.dropped += 1
                except asyncio.QueueEmpty:
                    pass
            await self._q.put(item)
        self.stats.sent += 1
        return True

    async def recv(self) -> T | None:
        item = await self._q.get()
        if item is not None:
            self.stats.recv += 1
        return item

    def close(self) -> None:
        self._closed = True
        try:
            self._q.put_nowait(None)
        except asyncio.QueueFull:
            pass

    async def __aiter__(self) -> AsyncIterator[T]:
        while (item := await self.recv()) is not None:
            yield item
```
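A similar sketch (again not part of the package contents) for `logxpy/_pool.py`, using the module-level `pool` singleton and a `Channel`. The channel capacity of 64 and the item count of 20 are arbitrary; the capacity is kept larger than the number of items because `close()` silently drops its end-of-stream sentinel when the queue is full.

```python
# Minimal usage sketch of logxpy/_pool.py -- assumptions noted above.
import asyncio
import hashlib

from logxpy._pool import Channel, pool


async def main() -> None:
    # Offload a CPU-bound call to the shared thread pool.
    digest = await pool.cpu(hashlib.sha256, b"logxpy")
    print(digest.hexdigest())

    # Producer/consumer over a bounded channel; close() ends iteration.
    chan: Channel[int] = Channel(size=64)

    async def produce() -> None:
        for i in range(20):
            await chan.send(i)
        chan.close()

    async def consume() -> None:
        async for item in chan:
            print("got", item)

    await asyncio.gather(produce(), consume())
    print(chan.stats)  # e.g. ChannelStats(sent=20, recv=20, dropped=0)


asyncio.run(main())
```

With the default `drop_oldest=False`, a full queue makes `send()` await until a consumer catches up (backpressure); with `drop_oldest=True` the oldest queued item is discarded and counted in `stats.dropped` instead.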