djlogq-1.0.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
djlogq-1.0.0/MANIFEST.in ADDED
@@ -0,0 +1,6 @@
+ include README.md
+ include LICENSE
+ recursive-include src/logq/migrations *.py
+ recursive-include src/logq/management *.py
+ recursive-include src/logq/templates *.html
+ recursive-include src/logq/static *
djlogq-1.0.0/PKG-INFO ADDED
@@ -0,0 +1,242 @@
+ Metadata-Version: 2.4
+ Name: djlogq
+ Version: 1.0.0
+ Summary: A reusable Django app for asynchronous, thread-safe logging with rich metadata, admin interface, and API support.
+ Author-email: mesake <mesnavunawa@gmail.com>
+ License: MIT
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ Requires-Dist: Django
+ Provides-Extra: dev
+ Requires-Dist: build==1.2.1; extra == "dev"
+ Requires-Dist: nox==2024.4.15; extra == "dev"
+ Requires-Dist: twine==5.1.1; extra == "dev"
+
+ # Django Async Logger
+
+ A reusable Django app that provides asynchronous logging functionality using a separate thread to avoid blocking the main application.
+
+ ## Features
+
+ - **Asynchronous Logging**: All log operations run in a separate thread
+ - **Thread-Safe**: Uses a queue system for thread-safe logging
+ - **Rich Metadata**: Captures module, function, line number, user ID, request ID, and extra data
+ - **Admin Interface**: Django admin interface for viewing and managing logs
+ - **API Endpoints**: REST API for external logging
+ - **Middleware**: Automatic request logging with unique request IDs
+ - **Decorators**: Utility decorators for function logging and performance monitoring
+ - **Context Managers**: Easy-to-use context managers for operation logging
+ - **Configurable**: Customizable queue size, flush intervals, and cleanup policies
+
+ ## Installation
+
+ 1. Add the app to your Django project:
+ ```python
+ INSTALLED_APPS = [
+     # ...
+     'logq',
+ ]
+ ```
+
+ 2. Add the middleware to your settings:
+ ```python
+ MIDDLEWARE = [
+     # ...
+     'logq.middleware.AsyncLoggingMiddleware',
+ ]
+ ```
+
+ 3. Run migrations:
+ ```bash
+ python manage.py makemigrations logq
+ python manage.py migrate
+ ```
+
+ 4. (Optional) Configure logging settings:
+ ```python
+ ASYNC_LOGGING_CONFIG = {
+     'MAX_QUEUE_SIZE': 1000,
+     'FLUSH_INTERVAL': 1.0,  # seconds
+     'AUTO_CLEANUP_DAYS': 30,
+     'ENABLE_REQUEST_LOGGING': True,
+     'IGNORE_PATHS': ['/admin/'],  # paths to ignore for request logging
+ }
+ ```
+
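These keys are read from Django settings at runtime. The exact lookup is internal to logq, but a minimal sketch of the usual pattern — with defaults assumed to match the Configuration Options table further down, and an empty `IGNORE_PATHS` default assumed — looks like this:

```python
# Sketch only: merging ASYNC_LOGGING_CONFIG over assumed defaults.
# The real logq internals may read these values differently.
from django.conf import settings

DEFAULTS = {
    'MAX_QUEUE_SIZE': 1000,
    'FLUSH_INTERVAL': 1.0,
    'AUTO_CLEANUP_DAYS': 30,
    'ENABLE_REQUEST_LOGGING': True,
    'IGNORE_PATHS': [],  # assumed default, not documented
}

def get_logging_config():
    """Return the defaults overridden by the user's ASYNC_LOGGING_CONFIG."""
    config = dict(DEFAULTS)
    config.update(getattr(settings, 'ASYNC_LOGGING_CONFIG', {}))
    return config

max_queue_size = get_logging_config()['MAX_QUEUE_SIZE']
```
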
+ ## Usage
+
+ ### Basic Logging
+
+ ```python
+ from logq.async_logger import get_async_logger
+
+ logger = get_async_logger()
+
+ # Different log levels
+ logger.debug("Debug message")
+ logger.info("Info message")
+ logger.warning("Warning message")
+ logger.error("Error message")
+ logger.critical("Critical message")
+
+ # With extra data
+ logger.info("User action", extra_data={'action': 'login', 'ip': '192.168.1.1'})
+
+ # Log exceptions
+ try:
+     # some code that might fail
+     pass
+ except Exception as e:
+     logger.exception("An error occurred", exc_info=str(e))
+ ```
+
+ ### Function Decorators
+
+ ```python
+ import time
+
+ from logq.utils import log_function_call, log_performance
+
+ @log_function_call
+ def my_function():
+     return "result"
+
+ @log_function_call(level='DEBUG')
+ def debug_function():
+     return "debug result"
+
+ @log_performance(threshold_seconds=0.5)
+ def slow_function():
+     time.sleep(1)
+     return "slow result"
+ ```
+
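For intuition about how a threshold-based timing decorator like `log_performance` can work, here is a hedged sketch built on the public logger API shown above; it is not the package's actual implementation:

```python
# Illustrative sketch of a performance-logging decorator; logq's real
# log_performance may differ in signature and behavior.
import functools
import time

from logq.async_logger import get_async_logger

def log_performance(threshold_seconds=1.0):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            start = time.monotonic()
            try:
                return func(*args, **kwargs)
            finally:
                elapsed = time.monotonic() - start
                if elapsed >= threshold_seconds:
                    # Only slow calls are logged, with the threshold attached.
                    get_async_logger().warning(
                        f"{func.__name__} took {elapsed:.3f}s",
                        extra_data={'threshold': threshold_seconds},
                    )
        return wrapper
    return decorator
```
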
+ ### Context Managers
+
+ ```python
+ import time
+
+ from logq.utils import LogContext
+
+ with LogContext("Processing data", level='INFO'):
+     # do some work
+     time.sleep(0.1)
+     # automatically logs start and completion with timing
+ ```
+
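`LogContext` behaves like a timing context manager. A rough equivalent — a sketch rather than logq's source — could look like this:

```python
# Rough equivalent of a timing context manager; the shipped LogContext
# may record additional metadata.
import time

from logq.async_logger import get_async_logger

class LogContext:
    def __init__(self, message, level='INFO'):
        self.message = message
        self.level = level
        self.logger = get_async_logger()

    def __enter__(self):
        self.start = time.monotonic()
        self._log(f"Started: {self.message}")
        return self

    def __exit__(self, exc_type, exc, tb):
        elapsed = time.monotonic() - self.start
        if exc_type:
            self.logger.error(f"Failed: {self.message} ({elapsed:.3f}s)")
        else:
            self._log(f"Finished: {self.message} ({elapsed:.3f}s)")
        return False  # never suppress exceptions

    def _log(self, message):
        # Dispatch to the logger method matching the requested level.
        getattr(self.logger, self.level.lower())(message)
```
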
+ ### API Logging
+
+ ```python
+ import requests
+ import json
+
+ # Log via API
+ data = {
+     'level': 'INFO',
+     'message': 'External log message',
+     'extra_data': {'source': 'external_service'}
+ }
+
+ response = requests.post(
+     'http://your-domain/logq/api/log/',
+     data=json.dumps(data),
+     headers={'Content-Type': 'application/json'}
+ )
+
+ # Retrieve logs via API
+ response = requests.get('http://your-domain/logq/api/logs/?limit=10')
+ logs = response.json()['logs']
+ ```
+
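On the server side, the `/logq/api/log/` endpoint presumably validates the JSON payload and hands it to the async logger. A simplified sketch of such a view follows; it is not the packaged implementation, which may add authentication and stricter validation:

```python
# Simplified sketch of a log-ingestion view; illustration only.
import json

from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST

from logq.async_logger import get_async_logger

@csrf_exempt
@require_POST
def log_view(request):
    try:
        payload = json.loads(request.body)
    except json.JSONDecodeError:
        return JsonResponse({'error': 'invalid JSON'}, status=400)

    logger = get_async_logger()
    level = payload.get('level', 'INFO').lower()
    log = getattr(logger, level, logger.info)  # fall back to info()
    log(payload.get('message', ''), extra_data=payload.get('extra_data'))
    return JsonResponse({'status': 'queued'})
```
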
+ ### Admin Interface
+
+ Access the admin interface at `/admin/` to view and manage logs. Features include (a comparable `ModelAdmin` registration is sketched after this list):
+
+ - Filter by level, module, timestamp, user ID
+ - Search by message, module, function, request ID
+ - View extra data in formatted JSON
+ - Delete old logs
+ - Export functionality
+
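The app ships its own admin integration, so nothing needs to be added to use it. Purely to illustrate the features listed above, a comparable registration for the `LogEntry` model — with the import path `logq.models` assumed — might look like:

```python
# Illustration only: logq already registers its own admin for LogEntry.
from django.contrib import admin

from logq.models import LogEntry  # assumed import path

@admin.register(LogEntry)
class LogEntryAdmin(admin.ModelAdmin):
    list_display = ('timestamp', 'level', 'module', 'function', 'message')
    list_filter = ('level', 'module', 'timestamp', 'user_id')
    search_fields = ('message', 'module', 'function', 'request_id')
    date_hierarchy = 'timestamp'
    ordering = ('-timestamp',)
```
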
+ ### Management Commands
+
+ Clean old logs:
+ ```bash
+ # Delete logs older than 30 days
+ python manage.py clean_logs
+
+ # Delete logs older than 7 days
+ python manage.py clean_logs --days 7
+
+ # Delete only DEBUG and INFO logs older than 30 days
+ python manage.py clean_logs --level INFO
+
+ # Dry run to see what would be deleted
+ python manage.py clean_logs --dry-run
+ ```
+
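The command can also be invoked from Python, for example from a scheduled task. The keyword arguments below assume the CLI flags above map to the usual argparse destinations (`days`, `level`, `dry_run`):

```python
# Invoke the cleanup command programmatically, e.g. from a periodic task.
from django.core.management import call_command

# Equivalent to: python manage.py clean_logs --days 7 --dry-run
call_command('clean_logs', days=7, dry_run=True)
```
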
+ ## Configuration Options
+
+ | Setting | Default | Description |
+ |---------|---------|-------------|
+ | `MAX_QUEUE_SIZE` | 1000 | Maximum number of log entries in the queue |
+ | `FLUSH_INTERVAL` | 1.0 | How often to flush logs to the database (seconds) |
+ | `AUTO_CLEANUP_DAYS` | 30 | Days to keep logs before auto-cleanup |
+ | `ENABLE_REQUEST_LOGGING` | True | Whether to log all HTTP requests |
+
+ ## Model Fields
+
+ The `LogEntry` model includes the following fields (a rough model sketch follows this list):
+
+ - `timestamp`: When the log was created
+ - `level`: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
+ - `message`: The log message
+ - `module`: Python module where the log originated
+ - `function`: Function name where the log originated
+ - `line_number`: Line number where the log originated
+ - `user_id`: ID of the user (if authenticated)
+ - `request_id`: Unique request identifier
+ - `extra_data`: Additional JSON data
+ - `created_at`: When the entry was saved to the database
+
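The README does not spell out field types. A rough sketch of what such a model typically looks like — types, lengths, and options are assumptions, not copied from djlogq's source — is:

```python
# Rough sketch of the LogEntry model; actual field types, max_lengths and
# indexes in djlogq may differ.
from django.db import models

class LogEntry(models.Model):
    LEVEL_CHOICES = [(lvl, lvl) for lvl in
                     ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')]

    timestamp = models.DateTimeField(db_index=True)
    level = models.CharField(max_length=10, choices=LEVEL_CHOICES)
    message = models.TextField()
    module = models.CharField(max_length=255, blank=True)
    function = models.CharField(max_length=255, blank=True)
    line_number = models.PositiveIntegerField(null=True, blank=True)
    user_id = models.IntegerField(null=True, blank=True)
    request_id = models.CharField(max_length=64, blank=True)
    extra_data = models.JSONField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        ordering = ('-timestamp',)
```
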
+ ## Performance Considerations
+
+ - The logger runs in a separate thread and won't block your main application
+ - Log entries are batched and written to the database periodically (see the worker-loop sketch after this list)
+ - If the queue is full, new entries are dropped (with a console fallback)
+ - Consider setting up database indexes for better query performance
+ - Use the cleanup command regularly to prevent database bloat
+
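The batching described above is the standard queue-plus-worker-thread pattern. A condensed, illustrative sketch of such a flush loop — not logq's actual implementation — looks like this:

```python
# Condensed sketch of a queue-draining worker; the real logger adds
# shutdown handling, error handling and ORM mapping.
import queue
import threading
import time

class AsyncLogWorker:
    def __init__(self, max_queue_size=1000, flush_interval=1.0):
        self.queue = queue.Queue(maxsize=max_queue_size)
        self.flush_interval = flush_interval
        threading.Thread(target=self._run, daemon=True).start()

    def enqueue(self, entry):
        try:
            self.queue.put_nowait(entry)
        except queue.Full:
            # Console fallback when the queue is saturated.
            print(f"[logq fallback] dropped: {entry}")

    def _run(self):
        while True:
            time.sleep(self.flush_interval)
            batch = []
            while not self.queue.empty():
                batch.append(self.queue.get_nowait())
            if batch:
                self._write(batch)

    def _write(self, batch):
        # Database write happens here, e.g. a bulk_create inside a transaction.
        pass
```
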
+ ## Thread Safety
+
+ The logger is thread-safe (a multi-threaded usage example follows this list):
+ - Uses a thread-safe queue for communication
+ - Database operations are wrapped in transactions
+ - Multiple threads can safely call the logger simultaneously
+
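Because a shared queue handles synchronization, no extra locking is needed when logging from several threads. A small usage example:

```python
# Log from multiple threads concurrently; the shared queue serializes writes.
import threading

from logq.async_logger import get_async_logger

logger = get_async_logger()

def worker(worker_id):
    for i in range(100):
        logger.info("tick", extra_data={'worker': worker_id, 'i': i})

threads = [threading.Thread(target=worker, args=(n,)) for n in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
```
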
+ ## Customization
+
+ You can extend the logger by:
+
+ 1. Creating custom log levels
+ 2. Adding new fields to the `LogEntry` model
+ 3. Customizing the admin interface
+ 4. Adding new API endpoints
+ 5. Creating custom middleware
+
+ ## Troubleshooting
+
+ ### Logs not appearing
+ - Check that the async logger thread is running
+ - Verify database migrations are applied
+ - Check for any database connection issues
+
+ ### Performance issues
+ - Reduce `FLUSH_INTERVAL` for more frequent writes
+ - Increase `MAX_QUEUE_SIZE` for higher throughput
+ - Add database indexes for frequently queried fields (an example migration follows this list)
+
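Indexes for your most common queries can be added with a raw-SQL migration in one of your own apps. The table name `logq_logentry` and the column names below are assumptions based on Django's default naming and the Model Fields section above; the `IF NOT EXISTS` syntax applies to PostgreSQL and SQLite:

```python
# Example migration adding an index to the log table; adjust the dependency
# and table/column names to match your database.
from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ('myapp', '0001_initial'),  # one of your own apps; adjust as needed
    ]

    operations = [
        migrations.RunSQL(
            sql="CREATE INDEX IF NOT EXISTS logq_level_ts_idx "
                "ON logq_logentry (level, timestamp);",
            reverse_sql="DROP INDEX IF EXISTS logq_level_ts_idx;",
        ),
    ]
```
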
+ ### Memory usage
+ - Reduce `MAX_QUEUE_SIZE` if memory is a concern
+ - Run cleanup commands more frequently
+ - Monitor database size and clean old logs
+
+ ## License
+
+ This project is open source and available under the MIT License.
djlogq-1.0.0/README.md ADDED
djlogq-1.0.0/pyproject.toml ADDED
@@ -0,0 +1,37 @@
+ [build-system]
+ requires = ["setuptools >= 40.9.0", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "djlogq"
+ version = "1.0.0"
+ description = "A reusable Django app for asynchronous, thread-safe logging with rich metadata, admin interface, and API support."
+ readme = "README.md"
+ authors = [
+     {name = "mesake", email = "mesnavunawa@gmail.com"}
+ ]
+ license = {text = "MIT"}
+ requires-python = ">=3.8"
+ dependencies = [
+     "Django"
+ ]
+
+ [project.optional-dependencies]
+ dev = [
+     "build==1.2.1",
+     "nox==2024.4.15",
+     "twine==5.1.1",
+ ]
+
+ # Package discovery configuration
+ [tool.setuptools.packages.find]
+ where = ["src"]
+
+ # Package data for the Django app
+ [tool.setuptools.package-data]
+ logq = [
+     "migrations/*.py",
+     "management/commands/*.py",
+     "templates/*.html",
+     "static/*",
+ ]
djlogq-1.0.0/setup.cfg ADDED
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+