djlogq 1.0.6__py3-none-any.whl → 1.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: djlogq
- Version: 1.0.6
+ Version: 1.0.7
  Summary: A reusable Django app for asynchronous, thread-safe logging with rich metadata, admin interface, and API support.
  Author-email: mess <mesnavunawa@gmail.com>
  License: MIT
@@ -29,6 +29,16 @@ A reusable Django app that provides asynchronous logging functionality using a s
  - **Decorators**: Utility decorators for function logging and performance monitoring
  - **Context Managers**: Easy-to-use context managers for operation logging
  - **Configurable**: Customizable queue size, flush intervals, and cleanup policies
+ - **Extensible**: Easily add your own custom handlers to process logs in different ways.
+
+ **Useful built-in and example handlers include:**
+ - **File Handler**: Write logs to a file.
+ - **Email Handler**: Send error logs via email.
+ - **Webhook Handler**: Forward logs to external services (e.g., Slack, Discord, custom endpoints).
+ - **Database Handler**: Store logs in custom database tables.
+ - **Console Handler**: Output logs to the console for development.
+ - **Third-Party Integrations**: Integrate with services like Sentry or Logstash.
+ - You can implement your own handler by subclassing the provided base handler class, as sketched below.
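A minimal sketch of such a handler (illustrative only: the class name and file path are made up; it assumes the `LogHandler` base class added in this release is importable from `logq.async_logger`):

```python
# Illustrative sketch, not part of the package: a handler that appends
# each flushed entry to a local file. Assumes LogHandler lives in
# logq.async_logger and that LogEntry exposes .level and .message.
from logq.async_logger import LogHandler


class SimpleFileHandler(LogHandler):
    def __init__(self, path="app_logs.txt"):
        self.path = path

    def handle(self, log_entry) -> None:
        # Called once per flushed LogEntry; keep this fast so the
        # logging thread is not blocked.
        with open(self.path, "a") as f:
            f.write(f"[{log_entry.level}] {log_entry.message}\n")
```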
 
  ## Installation
 
@@ -62,6 +72,7 @@ ASYNC_LOGGING_CONFIG = {
      'AUTO_CLEANUP_DAYS': 30,
      'ENABLE_REQUEST_LOGGING': True,
      'IGNORE_PATHS': ['/admin/'],  # paths to ignore for request logging
+     'DEFAULT_HANDLERS': [],  # list of handler class paths, e.g. ['logq.handlers.FileHandler']
  }
  ```
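For example, to attach a handler automatically, `DEFAULT_HANDLERS` can list dotted import paths; judging by the loader added in this release, each string is imported and instantiated with no arguments when the logger is created. The path below is a hypothetical handler in your own project:

```python
# settings.py (sketch; 'myapp.logging_handlers.SimpleFileHandler' is a
# hypothetical class in your own code, not shipped with djlogq)
ASYNC_LOGGING_CONFIG = {
    # ... other settings ...
    'DEFAULT_HANDLERS': ['myapp.logging_handlers.SimpleFileHandler'],
}
```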
@@ -146,6 +157,11 @@ response = requests.get('http://your-domain/logq/api/logs/?limit=10')
  logs = response.json()['logs']
  ```
 
+ ### Custom Handlers
+ You can define custom log handlers by subclassing `LogHandler` and passing them to `AsyncLogger`, or list them under `DEFAULT_HANDLERS` in the config. This lets you process or forward log entries in any way you need (e.g., send them to an external service, write them to a file).
+
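A sketch of both registration styles (the handler class is the hypothetical one from the earlier sketch; `AsyncLogger`, `get_async_logger`, and `add_handler` come from `logq.async_logger`):

```python
# Illustrative usage only; SimpleFileHandler is a hypothetical custom handler.
from logq.async_logger import AsyncLogger, get_async_logger
from myapp.logging_handlers import SimpleFileHandler

# Option 1: pass handlers when constructing your own logger instance
logger = AsyncLogger(handlers=[SimpleFileHandler()])
logger.start()
logger.info("Order created")
logger.stop()

# Option 2: attach a handler to the global logger at runtime
get_async_logger().add_handler(SimpleFileHandler())
```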
  ### Admin Interface
 
  Access the admin interface at `/admin/` to view and manage logs. Features include:
@@ -177,7 +193,6 @@ python manage.py clean_logs --dry-run
  |---------|---------|-------------|
  | `MAX_QUEUE_SIZE` | 1000 | Maximum number of log entries in the queue |
  | `FLUSH_INTERVAL` | 1.0 | How often to flush logs to database (seconds) |
- | `AUTO_CLEANUP_DAYS` | 30 | Days to keep logs before auto-cleanup |
  | `ENABLE_REQUEST_LOGGING` | True | Whether to log all HTTP requests |
 
  ## Model Fields
@@ -1,10 +1,10 @@
  logq/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  logq/admin.py,sha256=nC8TVXc64G41Mh05ijUSCJmvIDo-kgZHKyyOlmqH-AE,320
  logq/apps.py,sha256=LNBTCEKiwAU2wT3CTjQ-EfXumbBF6Izez5J7mTK3i-U,330
- logq/async_logger.py,sha256=fdMqtkAXFVAHt1yYPpxC5CK8Nzryebj4PFXsijcsG-I,7495
+ logq/async_logger.py,sha256=kNAmyVV7GWzsd_aZ4baSfNz5H7AwZV5YC4oV-Xy_k5M,10382
  logq/middleware.py,sha256=Y9EAnxrmavyDgVklEW893Wh0eeQPKdCvizcxGzPDLBo,3642
  logq/models.py,sha256=Gu8KLNMn7I4XtEUHQPavSSlcESQ-2Pu5JLKsL97t9Gg,1616
- logq/tests.py,sha256=ILuZHtFDi_ushbGEwFDrch_EBwR1BkKhcjpGVFHVSwY,8584
+ logq/tests.py,sha256=180_XHLQ3I1am5qlaaf-36n8E-CaRI4ZT3uGZ-OyYJU,10704
  logq/urls.py,sha256=oGwVM9HXsVY_P86sVPuz5xnFofYfmkL8ZSZDhExhJQk,216
  logq/utils.py,sha256=lNm2TfbpWdKbAeoy4ny8-TFaNFyQGsBL_Wxq0VoxkO4,5235
  logq/views.py,sha256=WJpwiPyfItBbceM1862NcXp_ui4U6WyNBhw2P27mlJ4,3695
@@ -14,7 +14,7 @@ logq/management/commands/clean_logs.py,sha256=Cc33EEqGGwsNcvQjwnLbM6kIb0lBJsyDQl
  logq/migrations/0001_initial.py,sha256=l4f-lUcO7OsABGYiSBp7fdWDt2rLHaIhR78pCKIAAdQ,2007
  logq/migrations/0002_alter_logentry_function_alter_logentry_line_number_and_more.py,sha256=SNBngZmhk9BgcOe8eAojX47V5JKC2V7oW9QtLHWIkFc,750
  logq/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- djlogq-1.0.6.dist-info/METADATA,sha256=IEboB0sstO5gMVslUUId9LgfH--Aj66MTZuqiIso27I,6447
- djlogq-1.0.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- djlogq-1.0.6.dist-info/top_level.txt,sha256=VWj_EO93x0ix2Y2qm6obWT22L7VPFiZ7lQf0yIaI8do,5
- djlogq-1.0.6.dist-info/RECORD,,
+ djlogq-1.0.7.dist-info/METADATA,sha256=3EhYc2CraOPQMB61EeB-Q_kA5yyETvN1_WfEggsjqqE,7426
+ djlogq-1.0.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ djlogq-1.0.7.dist-info/top_level.txt,sha256=VWj_EO93x0ix2Y2qm6obWT22L7VPFiZ7lQf0yIaI8do,5
+ djlogq-1.0.7.dist-info/RECORD,,
logq/async_logger.py CHANGED
@@ -9,6 +9,19 @@ from django.utils import timezone
  from django.db import transaction
  from django.conf import settings
  from .models import LogEntry, LogLevel
+ from typing import List
+
+
+ class LogHandler:
+     """Base class for custom log handlers."""
+
+     def handle(self, log_entry: LogEntry) -> None:
+         """Handle a log entry. Override this method to implement custom logging behavior."""
+         pass
+
+     def flush(self) -> None:
+         """Flush any buffered log entries. Override this method to implement custom flushing behavior."""
+         pass
 
 
  class AsyncLogger:
@@ -16,7 +29,7 @@ class AsyncLogger:
      Asynchronous logger that runs in a separate thread to avoid blocking the main application.
      """
 
-     def __init__(self, max_queue_size: int = None, flush_interval: float = None):
+     def __init__(self, max_queue_size: int = None, flush_interval: float = None, handlers: List[LogHandler] = None):
          # Get configuration from settings
          config = getattr(settings, 'ASYNC_LOGGING_CONFIG', {})
          self.max_queue_size = max_queue_size or config.get('MAX_QUEUE_SIZE', 1000)
@@ -28,7 +41,41 @@ class AsyncLogger:
          self._lock = threading.Lock()
          self.dropped_count = 0
          self.dropped_levels = {}  # track most serious dropped level
-         self._dropped_lock = threading.Lock() #
+         self._dropped_lock = threading.Lock()
+
+         # initialize custom handlers
+         self.handlers = handlers or []
+         self._add_default_handlers()  # add default handlers to the logger
+
+     def _add_default_handlers(self):
+         """Add default handlers from settings if configured."""
+         config = getattr(settings, 'ASYNC_LOGGING_CONFIG', {})
+         default_handlers = config.get('DEFAULT_HANDLERS', [])
+         for handler_class in default_handlers:
+             try:
+                 if isinstance(handler_class, str):
+                     # import handler class from string
+                     module_path, class_name = handler_class.rsplit('.', 1)
+                     module = __import__(module_path, fromlist=[class_name])  # import the module
+                     handler_class = getattr(module, class_name)
+                 handler = handler_class()
+                 self.handlers.append(handler)
+             except Exception as e:
+                 print(f"Error initializing default handler {handler_class}: {e}")
+
+     def add_handler(self, handler: LogHandler):
+         """Add a custom handler to the logger."""
+         if not isinstance(handler, LogHandler):
+             raise ValueError("Handler must be an instance of LogHandler")
+         self.handlers.append(handler)
+
+     def remove_handler(self, handler: LogHandler):
+         """Remove a custom handler from the logger."""
+         if handler in self.handlers:
+             self.handlers.remove(handler)
+
+     def clear_handlers(self):
+         """Remove all custom handlers from the logger."""
+         self.handlers.clear()
 
      def start(self):
          """Start the logging thread."""
@@ -84,6 +131,9 @@ class AsyncLogger:
              with transaction.atomic():
                  LogEntry.objects.bulk_create(batch, ignore_conflicts=True)
 
+             # send log entries to custom handlers
+             self._send_to_handlers(batch)
+
              # Log dropped messages if any
              with self._dropped_lock:
                  if self.dropped_count > 0:
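The flush cycle above hands each flushed batch to the registered handlers; the following hunk adds the `_send_to_handlers()` and `_flush_handlers()` helpers it relies on. Since `handle()` is called once per entry and `flush()` is a separate hook, a handler can buffer entries and emit them in groups. A minimal illustrative sketch (the class, buffer size, and output are made up, and the `LogEntry` field names are assumptions):

```python
# Sketch only, not shipped with djlogq: a handler that buffers entries
# in handle() and writes them out in flush().
from logq.async_logger import LogHandler


class BufferedPrintHandler(LogHandler):
    def __init__(self, max_buffer=50):
        self.max_buffer = max_buffer
        self.buffer = []

    def handle(self, log_entry) -> None:
        # Collect entries as they are flushed from the queue.
        self.buffer.append(log_entry)
        if len(self.buffer) >= self.max_buffer:
            self.flush()

    def flush(self) -> None:
        # Emit everything collected so far (level/message are assumed fields).
        for entry in self.buffer:
            print(f"{entry.level}: {entry.message}")
        self.buffer.clear()
```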
@@ -115,6 +165,27 @@ class AsyncLogger:
 
          except Exception as e:
              print(f"Error flushing log batch: {e}")
+
+     def _send_to_handlers(self, batch: List[LogEntry]):
+         """Send log entries to all registered handlers.
+
+         Args:
+             batch: List[LogEntry] - the batch of log entries to send to handlers.
+         """
+         for handler in self.handlers:
+             try:
+                 for entry in batch:
+                     handler.handle(entry)
+             except Exception as e:
+                 # Don't let an error in a handler crash the logger
+                 print(f"Error sending log entries to handler {handler.__class__.__name__}: {e}")
+
+     def _flush_handlers(self):
+         """Flush all registered handlers."""
+         for handler in self.handlers:
+             try:
+                 handler.flush()
+             except Exception as e:
+                 print(f"Error flushing handler {handler.__class__.__name__}: {e}")
 
      def log(self, level: str, message: str, **kwargs):
          """Add a log entry to the queue."""
logq/tests.py CHANGED
@@ -7,9 +7,9 @@ import json
  import time
  import threading
  from .models import LogEntry, LogLevel
- from .async_logger import AsyncLogger, get_async_logger, stop_async_logger
+ from .async_logger import AsyncLogger, get_async_logger, stop_async_logger, LogHandler
  from .utils import log_performance, log_function_call
- import requests
+
 
  class AsyncLoggerTestCase(TransactionTestCase):
      def setUp(self):
@@ -240,4 +240,70 @@ class UtilsTestCase(TransactionTestCase):
          entries = LogEntry.objects.all()
 
          self.assertGreater(entries.count(), 0)
+
+
+ class LogHandlerTestCase(TransactionTestCase):
+     def setUp(self):
+         super().setUp()
+         # Stop the global logger to avoid interference
+         stop_async_logger()
+
+         # Clear all existing logs
+         with connection.cursor() as cursor:
+             cursor.execute("DELETE FROM logq_logentry")
+
+         # Create a properly configured global logger
+         from .async_logger import _async_logger
+         from . import async_logger as async_logger_module
+
+         # Create a test logger with fast flush interval
+         test_logger = AsyncLogger(max_queue_size=100, flush_interval=0.1)
+         test_logger.start()
+
+         # Replace the global logger
+         async_logger_module._async_logger = test_logger
+
+         time.sleep(0.2)  # Wait for thread to start
+
+     def tearDown(self):
+         # Stop the global logger
+         stop_async_logger()
+         time.sleep(0.2)  # Wait for thread to stop
+
+         # Clear logs after test
+         with connection.cursor() as cursor:
+             cursor.execute("DELETE FROM logq_logentry")
+         super().tearDown()
+
+     def test_log_handler(self):
+         """Test log handler functionality."""
+         # Verify we start with no logs
+         self.assertEqual(LogEntry.objects.count(), 0)
+
+         # Create a test handler
+         class TestHandler(LogHandler):
+             def handle(self, log_entry: LogEntry) -> None:
+                 pass
+
+             def flush(self) -> None:
+                 pass
+
+         # Get the global logger and register the test handler
+         logger = get_async_logger()
+         logger.add_handler(TestHandler())
+         logger.start()
+
+         logger.info("Test message")
+         time.sleep(0.5)
+
+         # Verify we have exactly one log entry
+         self.assertEqual(LogEntry.objects.count(), 1)
+
+         # Verify the content of the stored log entry
+         log_entry = LogEntry.objects.first()
+         self.assertEqual(log_entry.message, "Test message")
+
+         # Stop the logger
+         logger.stop()
+         time.sleep(0.2)  # Wait for thread to stop
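If a test also needs to assert on what the handler itself received (the test above only checks the database), a simple recording handler is enough; an illustrative sketch, not part of the shipped test suite:

```python
# Sketch only: a handler that records entries so tests can inspect them.
from logq.async_logger import LogHandler


class RecordingHandler(LogHandler):
    def __init__(self):
        self.entries = []

    def handle(self, log_entry) -> None:
        self.entries.append(log_entry)

# In a test: recorder = RecordingHandler(); logger.add_handler(recorder)
# then, after logging and a short sleep for the flush interval:
#     self.assertEqual(recorder.entries[-1].message, "Test message")
```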
File without changes