mermaid-trace 0.4.1__py3-none-any.whl → 0.5.3.post0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1,105 +1,181 @@
  """
  Asynchronous Mermaid Handler Module
+ ===================================

- This module provides a non-blocking logging handler that uses a background thread
- for writing logs. It's designed to improve performance in high-throughput applications
- by decoupling the logging I/O from the main execution thread.
+ This module implements a non-blocking logging handler that leverages a background thread
+ and a queue mechanism to handle log records. This design pattern is crucial for
+ high-performance applications where I/O latency (like writing to disk) in the main
+ execution thread is unacceptable.
+
+ Key Components:
+ 1. **AsyncMermaidHandler**: The frontend handler that quickly pushes logs to a queue.
+ 2. **Queue**: A thread-safe FIFO buffer (producer-consumer pattern).
+ 3. **QueueListener**: A background worker that pulls logs from the queue and
+    writes them to the actual destination (e.g., a file).
+
+ By decoupling log generation from log persistence, we ensure that the application's
+ core logic remains responsive even during bursts of logging activity.
  """

  import logging
  import logging.handlers
  import queue
  import atexit
- from typing import List, Optional
+ from typing import List, Optional, cast


  class AsyncMermaidHandler(logging.handlers.QueueHandler):
      """
-     A non-blocking logging handler that uses a background thread to write logs.
-
-     This handler pushes log records to a queue, which are then picked up by a
-     QueueListener running in a separate thread and dispatched to the actual
-     handlers (e.g., MermaidFileHandler).
-
-     This architecture provides several benefits:
-     - Main thread doesn't block waiting for disk I/O
-     - Logs are processed in the background
-     - Better performance in high-throughput applications
-     - Smooth handling of burst traffic
+     A high-performance, non-blocking logging handler using the Producer-Consumer pattern.
+
+     This handler acts as the "Producer". It intercepts log records and immediately
+     pushes them into a thread-safe queue. A separate "Consumer" thread (managed by
+     QueueListener) asynchronously picks up these records and dispatches them to the
+     actual underlying handlers (like MermaidFileHandler).
+
+     **Architecture & Benefits:**
+     - **Non-Blocking I/O**: The main application thread never waits for disk writes.
+       It only waits for the (very fast) queue insertion operation.
+     - **Burst Handling**: The queue acts as a buffer, absorbing sudden spikes in
+       log volume without slowing down the application.
+     - **Thread Safety**: Uses Python's thread-safe `queue.Queue` for synchronization.
+     - **Graceful Shutdown**: Integrates with `atexit` to ensure pending logs are
+       flushed before the application terminates.
+
+     **Usage:**
+     Typically used to wrap a standard file handler:
+
+     ```python
+     file_handler = MermaidFileHandler("trace.mmd")
+     async_handler = AsyncMermaidHandler(handlers=[file_handler])
+     logger.addHandler(async_handler)
+     ```
      """

      def __init__(self, handlers: List[logging.Handler], queue_size: int = 1000):
          """
-         Initialize the async handler.
+         Initialize the asynchronous handler infrastructure.
+
+         This setup involves three main steps:
+         1. Creating a bounded queue to hold log records.
+         2. Initializing the parent QueueHandler.
+         3. Starting a background QueueListener thread to process the queue.

          Args:
-             handlers: A list of handlers that should receive the logs from the queue.
-                 These are typically MermaidFileHandler instances.
-             queue_size: The maximum size of the queue. Default is 1000.
-                 If the queue fills up, new log records may be dropped.
+             handlers (List[logging.Handler]): A list of "real" handlers that will
+                 eventually write the logs (e.g., to a file or stream). The background
+                 listener will pass records to these handlers.
+             queue_size (int): The maximum number of log records the queue can hold.
+                 Defaults to 1000.
+                 *Trade-off*: A larger queue consumes more memory but handles larger
+                 bursts. A smaller queue saves memory but increases the risk of
+                 dropped logs if the consumer falls behind.
          """
-         # Create a bounded queue with the specified size
+         # 1. Create a bounded queue (Producer-Consumer buffer).
+         # We use a bounded queue to prevent uncontrolled memory growth if the
+         # consumer (writer) cannot keep up with the producer (application).
          self._log_queue: queue.Queue[logging.LogRecord] = queue.Queue(queue_size)
          self._queue_size = queue_size

-         # Initialize parent QueueHandler with our queue
+         # 2. Initialize the parent QueueHandler.
+         # This configures self.queue, which is used by the emit() method.
          super().__init__(self._log_queue)

-         # Initialize QueueListener to process records from the queue
-         # It starts an internal thread to monitor the queue
-         # respect_handler_level=True ensures the target handlers' log levels are respected
+         # 3. Initialize and start the QueueListener (The Consumer).
+         # The QueueListener runs in a separate daemon thread. It continuously:
+         #   a. Blocks waiting for a record from the queue.
+         #   b. Retrieves the record.
+         #   c. Passes it to the provided 'handlers'.
+         #
+         # respect_handler_level=True ensures that if the underlying handler is set
+         # to ERROR but the logger is INFO, the underlying handler won't write INFO logs.
          self._listener: Optional[logging.handlers.QueueListener] = (
              logging.handlers.QueueListener(
                  self._log_queue, *handlers, respect_handler_level=True
              )
          )

-         # Start the listener thread
+         # Start the background worker thread.
          self._listener.start()

-         # Register stop method to be called on program exit
-         # This ensures all pending logs are written to disk before termination
+         # 4. Ensure Graceful Shutdown.
+         # Register the stop method to be called automatically when the Python
+         # interpreter exits. This is critical for flushing the queue so no logs are lost.
          atexit.register(self.stop)

      def emit(self, record: logging.LogRecord) -> None:
          """
-         Emit a log record to the queue with a timeout and drop policy.
+         Emit a log record to the queue (Producer action).
+
+         This method overrides the standard logging emit to implement a non-blocking
+         strategy with a fallback.

-         If the queue is full, this method will attempt to put the record with
-         a short timeout. If that fails, it will drop the record and print a warning.
+         **Logic Flow:**
+         1. Attempt to put the record into the queue.
+         2. Use a short timeout (0.1s) to avoid blocking the main application
+            indefinitely if the queue is full (backpressure).
+         3. If the queue remains full after the timeout, drop the record to preserve
+            application stability, but print a warning to stderr.

          Args:
-             record: The log record to emit
+             record (logging.LogRecord): The log event to be processed.
          """
-         from typing import cast
-
          try:
-             # Try to put the record in the queue with a short timeout (0.1 seconds)
-             # This prevents the main thread from blocking indefinitely if the queue is full
-             # Use cast to tell Mypy this is a queue.Queue instance
+             # We explicitly cast self.queue because QueueHandler.queue is typed
+             # generically in some stubs, but we know it's a queue.Queue.
              queue_instance = cast(queue.Queue[logging.LogRecord], self.queue)
+
+             # Attempt to enqueue the record.
+             # block=True, timeout=0.1: We wait briefly for a slot to open up.
+             # This balances "trying to save the log" vs "not freezing the app".
              queue_instance.put(record, block=True, timeout=0.1)
+
          except queue.Full:
-             # If queue is full, log a warning and drop the record
+             # **Queue Overflow Handling**
+             # If we reach here, the consumer (writer) is too slow or the burst
+             # is too large. We must drop data to keep the application running.
+
+             # Only warn for important logs to avoid spamming stderr.
              if record.levelno >= logging.WARNING:
-                 # Avoid infinite recursion by not using self.logger
+                 # We use print() instead of logging to avoid infinite recursion
+                 # (logging about a logging failure).
                  print(
-                     f"WARNING: AsyncMermaidHandler queue is full (size: {self._queue_size}), dropping log record: {record.msg}"
+                     f"WARNING: AsyncMermaidHandler queue is full (size: {self._queue_size}), "
+                     f"dropping log record: {record.msg}"
                  )

      def stop(self) -> None:
          """
-         Stops the listener and flushes all pending logs from the queue.
+         Clean up resources and flush pending logs.
+
+         This method is called automatically via atexit or can be called manually.

-         This method is registered with `atexit` to ensure that all pending logs
-         are written to disk before the application terminates.
+         **Shutdown Sequence:**
+         1. Check if the listener is active.
+         2. Call `listener.stop()`. This sends a special "sentinel" (None) to the queue.
+         3. The background thread sees the sentinel, stops waiting for new logs,
+            processes any remaining items in the queue, and then terminates.
+         4. Explicitly flush all underlying handlers to ensure stateful formatters
+            write their final buffered events.
          """
          if self._listener:
+             # We keep a reference to handlers to flush them after listener stops
+             handlers = self._listener.handlers
              try:
-                 # Stop the listener - this will process all remaining records in the queue
+                 # Stop the listener. This blocks until the listener thread joins,
+                 # ensuring all records currently in the queue are processed.
                  self._listener.stop()
                  self._listener = None
-             except queue.Full:
-                 # Handle case where queue is full when trying to put sentinel value
-                 # The listener thread may still be processing, but we can safely exit
+
+                 # Crucial step for stateful formatters:
+                 # After the listener has finished emitting all records from the queue,
+                 # we must tell the handlers to flush their internal buffers.
+                 for handler in handlers:
+                     try:
+                         handler.flush()
+                     except Exception:
+                         pass
+             except Exception:
+                 # We catch generic exceptions here because during interpreter shutdown,
+                 # some modules (like queue) might already be partially unloaded.
                  pass
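
Read together, the new `__init__`, `emit`, and `stop` form a bounded producer-consumer pipeline: the constructor builds the queue and starts the listener, `emit` enqueues with a 0.1 s timeout and a drop-on-overflow policy, and `stop` drains and flushes at shutdown. The sketch below wires this up end to end. It assumes only the constructor signatures visible in this diff; the `mermaid_trace.*` import paths and the `flow_event` payload are illustrative guesses, since neither the module layout nor the FlowEvent type appears here.

```python
import logging

# Hypothetical import paths: the diff does not show the package layout.
from mermaid_trace.handlers import MermaidFileHandler
from mermaid_trace.async_handler import AsyncMermaidHandler

logger = logging.getLogger("checkout")
logger.setLevel(logging.INFO)

# The "real" handler performs the disk I/O; the async handler wraps it so the
# application thread only ever pays for a queue.put().
file_handler = MermaidFileHandler("trace.mmd", title="Checkout Flow")
async_handler = AsyncMermaidHandler(handlers=[file_handler], queue_size=5000)
logger.addHandler(async_handler)

# Records carrying a `flow_event` attribute are the ones the file handler keeps.
logger.info("order received", extra={"flow_event": ...})  # placeholder payload

# atexit already registers stop(), but calling it manually (e.g., in tests)
# drains the queue deterministically before any assertions run.
async_handler.stop()
```

One consequence of the drop policy is worth noting: under sustained overload the handler sheds records rather than applying back-pressure, so the 0.1 s `put` timeout bounds the worst-case stall any single log call can add to the application.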
@@ -7,22 +7,118 @@ ensures thread-safe file writing.
  """

  import logging
+ import logging.handlers
  import os
+ from typing import Any, Optional, TYPE_CHECKING

+ if TYPE_CHECKING:
+     pass

- class MermaidFileHandler(logging.FileHandler):
+
+ class MermaidHandlerMixin:
      """
-     A custom logging handler that writes `FlowEvent` objects to a Mermaid (.mmd) file.
+     Mixin to provide Mermaid-specific logic to logging handlers.
+     """
+
+     title: str
+     terminator: str
+     # These are provided by logging.Handler or its subclasses
+     formatter: Optional[logging.Formatter]
+     stream: Any

-     Strategy & Optimization:
-     1. **Inheritance**: Inherits from `logging.FileHandler` to leverage robust,
-        thread-safe file writing capabilities (locking, buffering) provided by the stdlib.
-     2. **Header Management**: Automatically handles the Mermaid file header
-        (`sequenceDiagram`, `title`, `autonumber`) to ensure the output file
-        is a valid Mermaid document. It smartly detects if the file is new or
-        being appended to.
-     3. **Deferred Formatting**: The actual string conversion happens in the `emit`
-        method (via the formatter), keeping the handler focused on I/O.
+     def _write_header(self) -> None:
+         """
+         Writes the initial Mermaid syntax lines to the file.
+         """
+         # Default header if no formatter is available
+         header = f"sequenceDiagram\n title {self.title}\n autonumber\n\n"
+
+         if self.formatter and hasattr(self.formatter, "get_header"):
+             try:
+                 # Use formatter's header if it provides one
+                 header = getattr(self.formatter, "get_header")(self.title)
+                 # Ensure it ends with at least one newline for safety
+                 if not header.endswith("\n"):
+                     header += "\n"
+             except Exception:
+                 # Fallback if formatter fails
+                 pass
+
+         if self.stream:
+             self.stream.write(header)
+             # Ensure it's physically on disk before any logs follow
+             self.stream.flush()
+
+     def emit(self, record: logging.LogRecord) -> None:
+         """
+         Process a log record and write it to the Mermaid file.
+         Handles rotation if the parent class supports it.
+         """
+         # Only process records that contain our structured FlowEvent data
+         if not hasattr(record, "flow_event"):
+             return
+
+         try:
+             # 1. Handle Rotation (for RotatingFileHandler and TimedRotatingFileHandler)
+             if hasattr(self, "shouldRollover") and getattr(self, "shouldRollover")(
+                 record
+             ):
+                 getattr(self, "doRollover")()
+
+             # 2. Ensure stream is open (handles delay=True)
+             if self.stream is None:
+                 if hasattr(self, "_open"):
+                     self.stream = getattr(self, "_open")()
+
+             # 3. Check if we need to write the header.
+             # If the file is empty (position 0), it's either a new file,
+             # an empty existing file, or a freshly rotated file.
+             if self.stream and hasattr(self.stream, "tell") and self.stream.tell() == 0:
+                 self._write_header()
+
+             # 4. Format the record.
+             # Our custom MermaidFormatter might return an empty string
+             # if it's currently collapsing/buffering repetitive calls.
+             if hasattr(self, "format"):
+                 msg = getattr(self, "format")(record)
+                 if msg and self.stream:
+                     self.stream.write(msg + self.terminator)
+                     # Note: We do NOT call self.flush() here to allow
+                     # the formatter's collapsing buffer to work correctly.
+         except Exception:
+             if hasattr(self, "handleError"):
+                 getattr(self, "handleError")(record)
+
+     def flush(self) -> None:
+         """
+         Flushes both the underlying file stream and any buffered events in the formatter.
+         """
+         if self.formatter and hasattr(self.formatter, "flush"):
+             try:
+                 msg = getattr(self.formatter, "flush")()
+                 if msg and self.stream:
+                     self.stream.write(msg + self.terminator)
+             except Exception:
+                 pass
+
+         # Use hasattr to check if super() has flush, to avoid Mypy errors with mixins
+         super_flush = getattr(super(), "flush", None)
+         if callable(super_flush):
+             super_flush()
+
+     def close(self) -> None:
+         """
+         Ensures all buffered events are written before closing the file.
+         """
+         self.flush()
+         super_close = getattr(super(), "close", None)
+         if callable(super_close):
+             super_close()
+
+
+ class MermaidFileHandler(MermaidHandlerMixin, logging.FileHandler):
+     """
+     A custom logging handler that writes `FlowEvent` objects to a Mermaid (.mmd) file.
      """

      def __init__(
@@ -33,87 +129,57 @@ class MermaidFileHandler(logging.FileHandler):
          encoding: str = "utf-8",
          delay: bool = False,
      ):
-         """
-         Initialize the Mermaid file handler.
-
-         Args:
-             filename (str): The path to the output .mmd file.
-             title (str, optional): The title of the Mermaid diagram. Defaults to "Log Flow".
-             mode (str, optional): File open mode. 'w' (overwrite) or 'a' (append). Defaults to "a".
-             encoding (str, optional): File encoding. Defaults to "utf-8".
-             delay (bool, optional): If True, file opening is deferred until the first call to emit.
-                 Useful to avoid creating empty files if no logs occur. Defaults to False.
-         """
-         # Ensure the directory exists to prevent FileNotFoundError when opening the file
          os.makedirs(os.path.dirname(os.path.abspath(filename)) or ".", exist_ok=True)
-
-         # Determine if we need to write a header
-         # Header is written only if:
-         # 1. We are overwriting the file (mode='w'), or
-         # 2. We are appending (mode='a') but the file doesn't exist or is empty
-         should_write_header = False
-         if mode == "w":
-             should_write_header = True
-         elif mode == "a":
-             if not os.path.exists(filename) or os.path.getsize(filename) == 0:
-                 should_write_header = True
-
-         # Initialize the parent FileHandler (opens the file unless delay=True)
          super().__init__(filename, mode, encoding, delay)
          self.title = title
+         self.terminator = "\n"

-         # Write the header immediately if needed
-         if should_write_header:
-             self._write_header()
-
-     def _write_header(self) -> None:
-         """
-         Writes the initial Mermaid syntax lines to the file.

-         This setup is required for Mermaid JS or Live Editor to render the diagram correctly.
-         It defines:
-         - Diagram type (sequenceDiagram)
-         - Title of the diagram
-         - Autonumbering of steps
-
-         Thread Safety: Uses the handler's internal lock to prevent concurrent writes
-         when delay=True, ensuring the header is written only once.
-         """
-         # Use the handler's internal lock to ensure thread safety
-         assert self.lock is not None, "Handler lock should always be initialized"
-         with self.lock:
-             # Write to the existing stream if available, otherwise open temporarily
-             if self.stream:
-                 # Stream is already open (delay=False or emit() has been called)
-                 self.stream.write("sequenceDiagram\n")
-                 self.stream.write(f" title {self.title}\n")
-                 self.stream.write(" autonumber\n")
-                 # Flush ensures the header is written to disk immediately
-                 self.flush()
-             else:
-                 # Handle delay=True case: file not yet opened
-                 # Temporarily open the file just to write the header
-                 with open(self.baseFilename, self.mode, encoding=self.encoding) as f:
-                     f.write("sequenceDiagram\n")
-                     f.write(f" title {self.title}\n")
-                     f.write(" autonumber\n")
+ class RotatingMermaidFileHandler(
+     MermaidHandlerMixin, logging.handlers.RotatingFileHandler
+ ):
+     """
+     Rotating version of the MermaidFileHandler.
+     """

-     def emit(self, record: logging.LogRecord) -> None:
-         """
-         Process a log record and write it to the Mermaid file.
+     def __init__(
+         self,
+         filename: str,
+         title: str = "Log Flow",
+         mode: str = "a",
+         maxBytes: int = 0,
+         backupCount: int = 0,
+         encoding: str = "utf-8",
+         delay: bool = False,
+     ):  # noqa: PLR0913
+         os.makedirs(os.path.dirname(os.path.abspath(filename)) or ".", exist_ok=True)
+         super().__init__(filename, mode, maxBytes, backupCount, encoding, delay)
+         self.title = title
+         self.terminator = "\n"

-         This method overrides the parent's emit method to filter out non-FlowEvent records.

-         Optimization:
-         - Checks for `flow_event` attribute first, acting as a high-performance filter
-         - Only processes records containing structured FlowEvent data
-         - Delegates actual writing to parent's emit() method, which handles locking and flushing
+ class TimedRotatingMermaidFileHandler(
+     MermaidHandlerMixin, logging.handlers.TimedRotatingFileHandler
+ ):
+     """
+     Timed rotating version of the MermaidFileHandler.
+     """

-         Args:
-             record (logging.LogRecord): The log record to process
-         """
-         # Only process records that contain our structured FlowEvent data
-         # This allows the handler to be attached to the root logger without processing
-         # irrelevant system logs
-         if hasattr(record, "flow_event"):
-             super().emit(record)
+     def __init__(
+         self,
+         filename: str,
+         title: str = "Log Flow",
+         when: str = "h",
+         interval: int = 1,
+         backupCount: int = 0,
+         encoding: str = "utf-8",
+         delay: bool = False,
+         utc: bool = False,
+         atTime: Any = None,
+     ):  # noqa: PLR0913
+         os.makedirs(os.path.dirname(os.path.abspath(filename)) or ".", exist_ok=True)
+         super().__init__(
+             filename, when, interval, backupCount, encoding, delay, utc, atTime
+         )
+         self.title = title
+         self.terminator = "\n"
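
The core trick in this refactor is cooperative multiple inheritance: `MermaidHandlerMixin` is listed before the stdlib handler in each base list, so Python's MRO resolves `emit`, `flush`, and `close` to the mixin, which then reaches the concrete handler's machinery (`shouldRollover`, `_open`, `format`) through duck-typed `hasattr`/`getattr` checks. Writing the header whenever `stream.tell() == 0`, rather than deciding once in the constructor as 0.4.1 did, is what lets a single code path cover plain, size-rotated, and time-rotated files: a rollover reopens the stream at position zero, so the next emit re-writes the header into the fresh file. Below is a stdlib-only sketch of the same pattern; `HeaderMixin` and `HeaderedRotatingFileHandler` are illustrative names, not part of mermaid-trace.

```python
import logging
import logging.handlers
from typing import Any


class HeaderMixin:
    """Writes a header whenever the file is at position 0 (new or just rotated)."""

    header: str  # supplied by the concrete subclass
    # Provided at runtime by the logging.Handler subclass this is mixed into.
    stream: Any
    terminator: str

    def emit(self, record: logging.LogRecord) -> None:
        try:
            # Rotation is duck-typed: only rotating parents define shouldRollover.
            if hasattr(self, "shouldRollover") and self.shouldRollover(record):
                self.doRollover()
            if self.stream is None:  # delay=True: file not opened yet
                self.stream = self._open()
            if self.stream.tell() == 0:  # brand-new or freshly rotated file
                self.stream.write(self.header)
            self.stream.write(self.format(record) + self.terminator)
        except Exception:
            self.handleError(record)


class HeaderedRotatingFileHandler(HeaderMixin, logging.handlers.RotatingFileHandler):
    """MRO puts HeaderMixin.emit first; RotatingFileHandler supplies the I/O."""

    header = "# structured log v1\n"


handler = HeaderedRotatingFileHandler("app.log", maxBytes=4096, backupCount=2)
logging.getLogger("demo").addHandler(handler)
logging.getLogger("demo").warning("each fresh file starts with the header")
```

The package routes the rollover and error calls through `getattr(self, ...)` to satisfy Mypy on a bare mixin; the sketch calls them directly for brevity, which is equivalent at runtime.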
@@ -0,0 +1,4 @@
+ """
+ MermaidTrace integrations package.
+ Contains middleware and adapters for third-party frameworks.
+ """