django-log-formatter-asim 1.2.0a0__tar.gz → 1.3.0__tar.gz
This diff compares the contents of two publicly released versions of the package. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registry.
- {django_log_formatter_asim-1.2.0a0 → django_log_formatter_asim-1.3.0}/PKG-INFO +197 -34
- {django_log_formatter_asim-1.2.0a0 → django_log_formatter_asim-1.3.0}/README.md +190 -27
- django_log_formatter_asim-1.3.0/django_log_formatter_asim/events/__init__.py +7 -0
- django_log_formatter_asim-1.3.0/django_log_formatter_asim/events/account_management.py +133 -0
- django_log_formatter_asim-1.3.0/django_log_formatter_asim/events/authentication.py +190 -0
- django_log_formatter_asim-1.3.0/django_log_formatter_asim/events/common.py +131 -0
- django_log_formatter_asim-1.3.0/django_log_formatter_asim/events/file_activity.py +214 -0
- {django_log_formatter_asim-1.2.0a0 → django_log_formatter_asim-1.3.0}/pyproject.toml +4 -7
- django_log_formatter_asim-1.2.0a0/django_log_formatter_asim/events/__init__.py +0 -2
- django_log_formatter_asim-1.2.0a0/django_log_formatter_asim/events/authentication.py +0 -225
- django_log_formatter_asim-1.2.0a0/django_log_formatter_asim/events/common.py +0 -63
- django_log_formatter_asim-1.2.0a0/django_log_formatter_asim/events/file_activity.py +0 -256
- {django_log_formatter_asim-1.2.0a0 → django_log_formatter_asim-1.3.0}/LICENSE +0 -0
- {django_log_formatter_asim-1.2.0a0 → django_log_formatter_asim-1.3.0}/django_log_formatter_asim/__init__.py +0 -0
- {django_log_formatter_asim-1.2.0a0 → django_log_formatter_asim-1.3.0}/django_log_formatter_asim/ecs.py +0 -0
PKG-INFO:

````diff
@@ -1,21 +1,21 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: django-log-formatter-asim
-Version: 1.
+Version: 1.3.0
 Summary: Formats Django logs in ASIM format.
 License: MIT
+License-File: LICENSE
 Author: Department for Business and Trade Platform Team
 Author-email: sre-team@digital.trade.gov.uk
-Requires-Python: >=3.
+Requires-Python: >=3.10,<4
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
-
-Requires-Dist:
-Requires-Dist: django (>=
+Classifier: Programming Language :: Python :: 3.14
+Requires-Dist: ddtrace (>=3.2.1,<5)
+Requires-Dist: django (>=4.2,<7)
 Requires-Dist: django-ipware (>=7.0.1,<8.0.0)
 Description-Content-Type: text/markdown

````
````diff
@@ -77,6 +77,101 @@ LOGGING = {
 In this example we assign the ASIM formatter to a `handler` and ensure both `root` and `django` loggers use this `handler`.
 We then set `propagate` to `False` on the `django` logger, to avoid duplicating logs at the root level.

+### Settings
+
+`DLFA_LOG_PERSONALLY_IDENTIFIABLE_INFORMATION` - the formatter checks this setting to see if personally identifiable information should be logged. If this is not set to true, only the user's id is logged.
+
+`DLFA_TRACE_HEADERS` - used for defining custom zipkin headers, the defaults is `("X-Amzn-Trace-Id")`, but for applications hosted in GOV.UK PaaS you should use `("X-B3-TraceId", "X-B3-SpanId")`. If you are running your application in both places side by side during migration, the following should work in your Django settings:
+
+```python
+from dbt_copilot_python.utility import is_copilot
+
+if is_copilot():
+    DLFA_TRACE_HEADERS = ("X-B3-TraceId", "X-B3-SpanId")
+```
+
+`DLFA_INCLUDE_RAW_LOG` - By default the original unformatted log is not included in the ASIM formatted log. You can enable that by setting this to `True` and it will be included in `AddidtionalFields.RawLog`.
+
+> [!WARNING]
+> Setting `DLFA_INCLUDE_RAW_LOG` to `True` will cause additional private fields to be output to your logs.
+> This could include secrets, such as AWS Access Keys, private HTTP Request data, or personally identifiable information.
+> This setting is not recommended for a production environment.
+
+### Serialisation behaviour
+
+The package provides one `logging.Formatter` class, `ASIMFormatter` which routes log messages to a serialiser
+which generates a python dict which the formatter converts to a JSON string and prints to standard output.
+
+It has a generic serialiser called `ASIMRootFormatter` and a custom serlializer for log messages where the
+logger is `django.request`.
+
+``` python
+ASIM_FORMATTERS = {
+    "root": ASIMRootFormatter,
+    "django.request": ASIMRequestFormatter,
+}
+```
+
+#### ASIMRootFormatter
+
+This serialiser outputs the following ASIM fields.
+
+- `EventSchema` = `ProcessEvent`
+- `ActingAppType` = `Django`
+- `AdditionalFields[DjangoLogFormatterAsimVersion]`
+- `EventSchemaVersion`
+- `EventMessage`
+- `EventCount`
+- `EventStartTime`
+- `EventEndTime`
+- `EventType`
+- `EventResult`
+- `EventSeverity`
+- `EventOriginalSeverity`
+
+Additionally, the following DataDog fields where available:
+
+- `dd.trace_id`
+- `dd.span_id`
+- `env`
+- `service`
+- `version`
+
+
+#### ASIMRequestFormatter
+
+This serialiser outputs the following ASIM fields in addition to the ones from ASIMRootFormatter.
+It is coupled to the datastructure provided by the `django.request` logger.
+The `django.request` logger only outputs requests where the response code is 4xx/5xx.
+
+- `SrcIpAddr` and `IpAddr`
+- `SrcPortNumber`
+- `SrcUserId` and `SrcUsername`
+- `HttpUserAgent`
+- `AdditionalFields["TraceHeaders"][trace_header_name]` - See `DLFA_TRACE_HEADERS` setting for more information.
+
+#### Creating a custom serialiser
+
+If you wish to create your own ASIM serialiser, you can inherit from `ASIMRootFormatter` and call
+`super().get_log_dict()` to get the base level logging data for augmentation:
+
+``` python
+class MyASIMFormatter(ASIMRootFormatter):
+    def get_log_dict(self):
+        log_dict = super().get_log_dict()
+
+        # Customise logger event
+
+        return log_dict
+```
+
+This serialiser can then be added to `ASIM_FORMATTERS`...
+
+```python
+ASIM_FORMATTERS["my_logger"] = MyASIMFormatter
+```
+
+
 ### ASIM Events

 The events mostly follow the Microsoft schema but have been tailored to Department of Business and Trade needs.
````
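The `DLFA_*` settings documented in the hunk above are ordinary Django settings, so they sit alongside the `LOGGING` configuration. Below is a minimal sketch of a `settings.py` fragment, not taken from the package; only the three `DLFA_*` names come from the README text above, and the environment-based toggle is an invented example.

```python
# settings.py - illustrative sketch; only the DLFA_* names come from the README above.
import os

# Hypothetical toggle: anything that is not production is treated as safe for verbose logging.
IS_PRODUCTION = os.environ.get("ENVIRONMENT", "production") == "production"

# Log full personally identifiable information only outside production;
# otherwise only the user's id is logged.
DLFA_LOG_PERSONALLY_IDENTIFIABLE_INFORMATION = not IS_PRODUCTION

# Include the original, unformatted log record in the ASIM output.
# The README warns this can leak secrets, so keep it off in production.
DLFA_INCLUDE_RAW_LOG = not IS_PRODUCTION

# Override the default trace header with the zipkin-style B3 headers.
DLFA_TRACE_HEADERS = ("X-B3-TraceId", "X-B3-SpanId")
```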
````diff
@@ -127,48 +222,114 @@ log_authentication(
 }
 ```

-
+#### File Activity event

-
+Following the [ASIM File Event Schema](https://learn.microsoft.com/en-us/azure/sentinel/normalization-schema-file-event).

-
+```python
+# Example usage
+from django_log_formatter_asim.events import log_file_activity

-
+log_file_activity(
+    request,
+    event=log_file_activity.Event.FileCopied,
+    result=log_file_activity.Result.Success,
+    file={
+        "path": "/tmp/copied.txt",
+        "content_type": "text/plain",
+        "extension": "txt",
+        "name": "copied.txt",
+        "sha256": "6798b7a132f37a0474002dec538ec52bdcd5f7b76e49e52c8a3d2016ca8d1d18",
+        "size": 14,
+    },
+    # source_file is only necessary if the event is one of FileRenamed, FileMoved, FileCopied, FolderMoved
+    source_file={
+        "path": "/tmp/original.txt",
+        "content_type": "text/plain",
+        "extension": "txt",
+        "name": "original.txt",
+        "sha256": "6798b7a132f37a0474002dec538ec52bdcd5f7b76e49e52c8a3d2016ca8d1d18",
+        "size": 14,
+    },
+)

-
-
+# Example JSON printed to standard output
+{
+    # Values provided as arguments
+    "EventType": "FileCopied",
+    "EventResult": "Success",

-
-
-
+    "TargetFilePath": "/tmp/copied.txt",
+    "TargetFileName": "copied.txt",
+    "TargetFileExtension": "txt",
+    "TargetFileMimeType": "text/plain",
+    "TargetFileSHA256": "6798b7a132f37a0474002dec538ec52bdcd5f7b76e49e52c8a3d2016ca8d1d18",
+    "TargetFileSize": 14,

-
+    "SrcFilePath": "/tmp/original.txt",
+    "SrcFileName": "original.txt",
+    "SrcFileExtension": "txt",
+    "SrcFileMimeType": "text/plain",
+    "SrcFileSHA256": "6798b7a132f37a0474002dec538ec52bdcd5f7b76e49e52c8a3d2016ca8d1d18",
+    "SrcFileSize": 14,

-
-
-
-
-
-```
+    # Calculated / Hard coded fields
+    "EventStartTime": "2025-07-30T11:05:09.406460+00:00",
+    "EventSchema": "FileEvent",
+    "EventSchemaVersion": "0.2.1",
+    "EventSeverity": "Informational",

-
+    # Taken from Django HttpRequest object
+    "HttpHost": "WebServer.local",
+    "SrcIpAddr": "192.168.1.101",
+    "TargetUrl": "https://WebServer.local/steel",
+    "TargetUsername": "Adrian"

-
-
+    # Taken from DBT Platform environment variables
+    "TargetAppName": "export-analytics-frontend",
+}
 ```

-
+#### Account Management event

-
+Following the [ASIM User Management Schema](https://learn.microsoft.com/en-us/azure/sentinel/normalization-schema-user-management).

-``` python
-class ASIMSystemFormatter(ASIMFormatterBase):
-    def get_event(self):
-        logger_event = self._get_event_base()

-
+```python
+# Example usage
+from django_log_formatter_asim.events import log_account_management
+
+log_account_management(
+    request,
+    event=log_account_management.Event.UserCreated,
+    result=log_account_management.Result.Success,
+    account={
+        "username": "Roger",
+    },
+)
+
+# Example JSON printed to standard output
+{
+    # Values provided as arguments
+    "EventType": "UserCreated",
+    "EventResult": "Success",
+    "TargetUsername": "Roger",

-
+    # Calculated / Hard coded fields
+    "EventStartTime": "2025-07-30T11:05:09.406460+00:00",
+    "EventSchema": "UserManagement",
+    "EventSchemaVersion": "0.1.1",
+    "EventSeverity": "Informational",
+
+    # Taken from Django HttpRequest object
+    "HttpHost": "WebServer.local",
+    "SrcIpAddr": "192.168.1.101",
+    "TargetUrl": "https://WebServer.local/admin/create-user",
+    "ActorUsername": "Adrian"
+
+    # Taken from DBT Platform environment variables
+    "TargetAppName": "export-analytics-frontend",
+}
 ```

 ## Dependencies
````
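For orientation, the helpers documented in this hunk are meant to be called from ordinary view code. The sketch below is illustrative only: the view and the user-creation logic are invented, and only the `log_account_management` call itself follows the signature shown above.

```python
# views.py - illustrative sketch; the view and the user-creation logic are hypothetical.
from django.contrib.auth import get_user_model
from django.http import HttpRequest, HttpResponse

from django_log_formatter_asim.events import log_account_management


def create_user(request: HttpRequest) -> HttpResponse:
    username = request.POST["username"]
    get_user_model().objects.create_user(username=username)

    # Emit an ASIM UserManagement event for the newly created account.
    log_account_management(
        request,
        event=log_account_management.Event.UserCreated,
        result=log_account_management.Result.Success,
        account={"username": username},
    )
    return HttpResponse(status=201)
```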
````diff
@@ -203,6 +364,8 @@ Or, run `poetry run tox` in the root directory to run all tests for multiple Pyt

 ### Publishing

+Create a pull request to update the [CHANGELOG.md](./CHANGELOG.md) and also create a [create a release in GitHub](https://docs.github.com/en/repositories/releasing-projects-on-github/managing-releases-in-a-repository#creating-a-release) for your new version.
+
 1. Acquire API token from [Passman](https://passman.ci.uktrade.digital/secret/cc82a3f7-ddfa-4312-ab56-1ff8528dadc8/).
    - Request access from the SRE team.
    - _Note: You will need access to the `platform` group in Passman._
````
README.md: because PKG-INFO embeds the README as the package's long description, the three README.md hunks (@@ -56,6 +56,101 @@, @@ -106,48 +201,114 @@ and @@ -182,6 +343,8 @@) contain exactly the same removed and added lines as the three description hunks shown above, only at lower line offsets.
django_log_formatter_asim/events/__init__.py (new file):

````diff
@@ -0,0 +1,7 @@
+from .account_management import LogAccountManagement
+from .authentication import LogAuthentication
+from .file_activity import LogFileActivity
+
+log_account_management = LogAccountManagement()
+log_authentication = LogAuthentication()
+log_file_activity = LogFileActivity()
````
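This module exposes one ready-made, callable instance per event type. Because the enums are class attributes on the `Log*` classes, they are reachable directly from those instances, which is what the README examples rely on. A small illustrative snippet:

```python
from django_log_formatter_asim.events import log_account_management, log_file_activity

# Each helper is a callable instance of its Log* class; the enums used for its
# arguments are class attributes, so they can be reached from the instance itself.
print(log_account_management.Event.UserCreated)   # account-management event enum
print(log_account_management.Result.Success)      # shared Result enum from events.common
print(log_file_activity.Event.FileCopied)         # file-activity event enum
```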
django_log_formatter_asim/events/account_management.py (new file):

````diff
@@ -0,0 +1,133 @@
+import datetime
+import json
+from enum import Enum
+from typing import Optional
+from typing import TypedDict
+
+from django.http import HttpRequest
+
+from .common import Activity
+from .common import Client
+from .common import LoggedInUser
+from .common import Result
+from .common import Server
+from .common import Severity
+
+
+class FileActivityEvent(str, Enum):
+    UserCreated = "UserCreated"
+    UserDeleted = "UserDeleted"
+    UserModified = "UserModified"
+    UserLocked = "UserLocked"
+    UserUnlocked = "UserUnlocked"
+    UserDisabled = "UserDisabled"
+    UserEnabled = "UserEnabled"
+    PasswordChanged = "PasswordChanged"
+    PasswordReset = "PasswordReset"
+    GroupCreated = "GroupCreated"
+    GroupDeleted = "GroupDeleted"
+    GroupModified = "GroupModified"
+    UserAddedToGroup = "UserAddedToGroup"
+    UserRemovedFromGroup = "UserRemovedFromGroup"
+    GroupEnumerated = "GroupEnumerated"
+    UserRead = "UserRead"
+    GroupRead = "GroupRead"
+
+
+class Account(TypedDict, total=False):
+    """Dictionary to represent details of the account management event."""
+
+    """
+    If a user was managed, the username of that user
+    """
+    username: Optional[str]
+    """If a group was managed, the name of the group."""
+    group: Optional[str]
+    """
+    If the Account Management event is one of the following.
+
+    - UserModified
+    - GroupModified
+
+    Details of the property which was changed, in the form:
+    ("propertyName", "oldValue", "newValue")
+    """
+    changed: tuple[str, str, str]
+
+
+class LogAccountManagement(Activity):
+    Event = FileActivityEvent
+    Result = Result
+    Severity = Severity
+
+    def __call__(
+        self,
+        request: HttpRequest,
+        event: Event,
+        account: Account,
+        result: Result,
+        user: Optional[LoggedInUser] = None,
+        server: Optional[Server] = None,
+        client: Optional[Client] = None,
+        severity: Optional[Severity] = None,
+        time_generated: Optional[datetime.datetime] = None,
+        result_details: Optional[str] = None,
+        message: Optional[str] = None,
+    ):
+        self._log_account_management(
+            request,
+            event,
+            account,
+            result,
+            {} if user == None else user,
+            {} if server == None else server,
+            {} if client == None else client,
+            time_generated or datetime.datetime.now(tz=datetime.timezone.utc),
+            severity,
+            result_details,
+            message,
+        )
+
+    def _log_account_management(
+        self,
+        request: HttpRequest,
+        event: Event,
+        account: Account,
+        result: Result,
+        user: LoggedInUser,
+        server: Server,
+        client: Client,
+        event_created: datetime.datetime,
+        severity: Optional[Severity] = None,
+        result_details: Optional[str] = None,
+        message: Optional[str] = None,
+    ):
+        log = {
+            "EventSchema": "UserManagement",
+            "EventSchemaVersion": "0.1.1",
+            "EventType": event,
+        }
+        log.update(
+            self._activity_fields(
+                request, event_created, result, server, client, severity, result_details, message
+            )
+        )
+
+        if "username" in user:
+            log["ActorUsername"] = user["username"]
+        elif hasattr(request, "user") and request.user.username:
+            log["ActorUsername"] = request.user.username
+
+        if "username" in account:
+            log["TargetUsername"] = account["username"]
+
+        if "group" in account:
+            log["GroupName"] = account["group"]
+
+        if "changed" in account:
+            (propertyName, previousPropertyValue, newPropertyName) = account["changed"]
+            log["UpdatedPropertyName"] = propertyName
+            log["PreviousPropertyValue"] = previousPropertyValue
+            log["NewPropertyValue"] = newPropertyName
+
+        print(json.dumps(log), flush=True)
````