django-restit 4.2.83__py3-none-any.whl → 4.2.85__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- account/models/notify.py +3 -2
- auditlog/cloudwatch.py +139 -19
- auditlog/rpc.py +97 -0
- {django_restit-4.2.83.dist-info → django_restit-4.2.85.dist-info}/METADATA +1 -1
- {django_restit-4.2.83.dist-info → django_restit-4.2.85.dist-info}/RECORD +10 -10
- incident/models/incident.py +12 -6
- incident/rpc.py +3 -3
- rest/datem.py +74 -0
- {django_restit-4.2.83.dist-info → django_restit-4.2.85.dist-info}/LICENSE.md +0 -0
- {django_restit-4.2.83.dist-info → django_restit-4.2.85.dist-info}/WHEEL +0 -0
account/models/notify.py
CHANGED
@@ -228,10 +228,11 @@ class NotificationRecord(models.Model, RestModel):
                 body=message)
             try:
                 email_record.save()
-
+                if attachments:
+                    email_record.addAttachments(attachments)
                 email_record.send([nr])
             except Exception as err:
-                rh.log_exception("email send failed",
+                rh.log_exception("email send failed", member.username, subject)
 
     @classmethod
     def notifyLegacy(cls, notify_users, subject, message=None,
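The method surrounding this hunk is not shown, so the caller's signature is not visible here; purely as an illustration, the new guarded attach-then-send flow behaves like the standalone sketch below (the names mirror the diff, everything else is hypothetical).

def save_and_send(email_record, nr, attachments=None):
    # Hypothetical wrapper mirroring the new try-block: persist the record,
    # attach any files before sending, and report failures with context.
    try:
        email_record.save()
        if attachments:
            email_record.addAttachments(attachments)
        email_record.send([nr])
    except Exception:
        # the released code logs member.username and subject via rh.log_exception
        pass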
auditlog/cloudwatch.py
CHANGED
@@ -1,9 +1,12 @@
 import boto3
-import datetime
+from datetime import datetime, timedelta
+import re
 import time
 import json
 from objict import objict
 from rest import settings
+from rest import datem
+from rest import helpers as rh
 from concurrent.futures import ThreadPoolExecutor
 
 
@@ -26,13 +29,35 @@ def log(data, log_group, log_stream):
     return True
 
 
-def
-
+def get(log_group, log_stream, size=20):
+    client = getClient()
+    if log_stream is None:
+        log_stream = DEFAULT_LOG_STREAMS
+    if isinstance(log_stream, list):
+        log_events = client.get_log_events(
+            logGroupName=log_group,
+            logStreamNames=log_stream,
+            limit=size,
+            startFromHead=False
+        )
+    else:
+        log_events = client.get_log_events(
+            logGroupName=log_group,
+            logStreamName=log_stream,
+            limit=size,
+            startFromHead=False
+        )
+
+    return log_events
+
+
+
+def logToCloudWatch(message, log_group, log_stream):
     if isinstance(message, dict):
         message = json.dumps(message)
     return logBatchToCloudWatch([
         dict(
-            timestamp=int(datetime.
+            timestamp=int(datetime.utcnow().timestamp() * 1000),
             message=message)
     ], log_group, log_stream)
 
@@ -45,9 +70,19 @@ def logBatchToCloudWatch(batch, log_group, log_stream):
     )
 
 
-def getLogGroups():
+def getLogGroups(names_only=True):
     response = getClient().describe_log_groups()
-
+    groups = response.get('logGroups', [])
+    if names_only:
+        return [item["logGroupName"] for item in groups]
+    return groups
+
+
+def getLogStreams(log_group):
+    log_streams = client.describe_log_streams(
+        logGroupName=log_group_name
+    )
+    return log_streams['logStreams']
 
 
 def createLogStream(log_group, log_stream):
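Two details in these helpers are worth flagging. The new getLogStreams references client and log_group_name, neither of which is defined in its scope, so calling it as released would raise a NameError; and the list branch of get passes logStreamNames to get_log_events, which in boto3 accepts only a single logStreamName (multi-stream reads normally go through filter_log_events). A self-contained sketch of what getLogStreams presumably intends, not taken from the package:

import boto3

def get_log_streams(log_group):
    # Hypothetical corrected helper: build the client locally and use the
    # argument that was actually passed in, instead of the undefined names
    # referenced in the released function.
    client = boto3.client("logs")
    response = client.describe_log_streams(logGroupName=log_group)
    return response["logStreams"]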
@@ -57,7 +92,68 @@ def createLogStream(log_group, log_stream):
         pass  # Log stream already exists, no need to create it
 
 
-def
+def filter(log_group, log_streams, pattern, start_time="1h", end_time=None, resp_format=None):
+    if log_streams is None:
+        log_streams = DEFAULT_LOG_STREAMS
+
+    start_time, end_time = datem.convert_to_epoch_range(start_time, end_time)
+    rh.debug("filter", start_time, end_time, pattern, log_group, log_streams)
+    client = getClient()
+    response =client.filter_log_events(
+        logGroupName=log_group,
+        logStreamNames=log_streams,
+        filterPattern=f'"{pattern}"',
+        startTime=start_time,
+        endTime=end_time,
+        limit=1000
+    )
+    rh.debug("filter_log_events", response)
+    out = []
+    out.extend(response["events"])
+    prev_next = None
+    while "nextToken" in response and response["nextToken"]:
+        if prev_next == response["nextToken"]:
+            break
+        prev_next = response["nextToken"]
+        response = client.filter_log_events(
+            logGroupName=log_group,
+            logStreamNames=log_streams,
+            filterPattern=f'"{pattern}"',
+            startTime=start_time,
+            endTime=end_time,
+            nextToken=response["nextToken"]
+        )
+        rh.debug("filter_log_events", response)
+        out.extend(response["events"])
+    if resp_format == "nginx":
+        return dict(events=parseNginxEvents(out))
+    return dict(events=out)
+
+
+QUERY_BY_IP = """fields @message
+    | filter @logStream in {log_streams}
+    | filter @message like /^{ip}/"""
+
+QUERY_BY_TEXT = """fields @message
+    | filter @logStream in {log_streams}
+    | filter @message like "{text}" """
+
+DEFAULT_LOG_STREAMS = ["access.log", "ui_access.log"]
+
+def startSearch(log_groups, start_time, end_time=None,
+                text=None, ip=None, query_string=None,
+                log_streams=DEFAULT_LOG_STREAMS):
+    if log_streams is None:
+        log_streams = DEFAULT_LOG_STREAMS
+    # 173\.196\.133\.90
+    if ip is not None and "." in ip:
+        query_string = QUERY_BY_IP.format(ip=ip.replace('.', '\.'), log_streams=log_streams)
+    elif text is not None:
+        query_string = QUERY_BY_TEXT.format(text=text, log_streams=log_streams)
+    return startInsights(log_groups, start_time, end_time, query_string)
+
+
+def startInsights(log_groups, start_time, end_time, query_string):
     """
     Executes a CloudWatch Logs Insights query and returns the results.
 
@@ -70,24 +166,48 @@ def getInsights(log_group, start_time, end_time, query_string):
     """
     # Create a CloudWatch Logs client
     client = getClient()
+    start_time, end_time = datem.convert_to_epoch_range(start_time, end_time)
+
+    if log_groups is None:
+        log_groups = getLogGroups()
 
     # Start the query
     start_query_response = client.start_query(
-
+        logGroupNames=log_groups,
         startTime=start_time,
         endTime=end_time,
         queryString=query_string,
     )
+    return dict(query_id=start_query_response['queryId'], query=query_string)
 
-
-
+
+def getInsightResults(query_id, resp_format=None):
     # Wait for the query to complete
-
-
-
-
-
-
-
-
-
+    resp = getClient().get_query_results(queryId=query_id)
+    status = resp["status"].lower()
+    results = insightsToMessage(resp["results"])
+    stats = resp["statistics"]
+    if status == "complete" and resp_format == "nginx":
+        results = parseNginxEvents(results)
+    return dict(status=True, state=status, stats=stats, data=results, count=len(results))
+
+
+def insightsToMessage(events):
+    out = []
+    for fields in events:
+        obj = {}
+        for field in fields:
+            if "field" in field and field["field"][0] == "@":
+                obj[field["field"][1:]] = field["value"]
+        if obj:
+            out.append(obj)
+    return out
+
+def parseNginxEvents(events):
+    pattern = re.compile(
+        r'(?P<ip>\d+\.\d+\.\d+\.\d+) - - \[(?P<time>.+?)\] '
+        r'"(?P<method>\w+) (?P<url>.+?) (?P<protocol>[\w/.]+)" '
+        r'(?P<status>\d+) (?P<bytes>\d+) "(?P<referer>.+?)" '
+        r'"(?P<user_agent>.+?)" (?P<request_time>\S+) (?P<server_port>\d+)'
+    )
+    return [pattern.match(line["message"]).groupdict() for line in events]
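Taken together, the new helpers expose two search paths: a synchronous filter_log_events scan (filter) and an asynchronous Logs Insights query (startSearch/startInsights followed by getInsightResults polling). A minimal usage sketch, assuming the module is importable as auditlog.cloudwatch, AWS credentials are configured, and the log group name below exists (it is illustrative only):

import time
from auditlog import cloudwatch

# Synchronous scan: literal pattern over the last hour of the access logs,
# with each hit parsed by the nginx regex in parseNginxEvents.
hits = cloudwatch.filter(
    "web",                           # hypothetical log group name
    ["access.log", "ui_access.log"],
    "192.168.1.10",
    start_time="1h",
    resp_format="nginx")
print(len(hits["events"]), "matching requests")

# Asynchronous Logs Insights query: start it, then poll for completion.
started = cloudwatch.startSearch(["web"], "6h", ip="192.168.1.10")
results = cloudwatch.getInsightResults(started["query_id"], resp_format="nginx")
while results["state"] != "complete":
    time.sleep(1)
    results = cloudwatch.getInsightResults(started["query_id"], resp_format="nginx")
print(results["count"], "matching lines")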
auditlog/rpc.py
CHANGED
@@ -1,7 +1,9 @@
 from . import models as auditlog
+from . import cloudwatch
 
 from rest import views as rv
 from rest import decorators as rd
+from datetime import datetime
 
 
 @rd.url(r'^plog$')
@@ -57,3 +59,98 @@ def plogList(request):
         qset = qset.filter(message__icontains=term)
 
     return rv.restList(request, qset.order_by('-when'), **auditlog.PersistentLog.getGraph(graph))
+
+
+@rd.url('server/logs')
+@rd.perm_required('view_logs')
+@rd.requires_params(["log_group", "log_streams"])
+def cloudwatch_logs(request):
+    log_group = request.DATA.get("log_group")
+    log_streams = request.DATA.get("log_streams").split(',')
+    pattern = request.DATA.get("search")
+    if not pattern:
+        data = dict(status=True, data=[], count=0)
+        return rv.restResult(request, data)
+    start = request.DATA.get("dr_start", "1d")
+    end = request.DATA.get("dr_end", field_type=datetime)
+    resp = cloudwatch.filter(log_group, log_streams, pattern,
+                             start_time=start, end_time=end,
+                             resp_format="nginx")
+    # else:
+    #     resp = cloudwatch.get(log_group, log_streams, resp_format="nginx")
+    lst = resp["events"]
+    count = len(lst)
+    data = dict(status=True, data=lst, count=len(lst), size=count)
+    if "nextBackwardToken" in resp:
+        data["backward"] = resp["nextBackwardToken"]
+        data["forword"] = resp["nextForwardToken"]
+    return rv.restResult(request, data)
+
+@rd.urlPOST('cloudwatch/log')
+@rd.perm_required('view_logs')
+@rd.requires_params(["log_group", "log_stream", "event"])
+def cloudwatch_log_groups(request):
+    log_group = request.DATA.get("log_group")
+    log_stream = request.DATA.get("log_stream")
+    event = request.DATA.get("event")
+    resp = cloudwatch.logToCloudWatch(event, log_group, log_stream)
+    return rv.restResult(request, dict(status=True, data=resp))
+
+
+@rd.urlGET('cloudwatch/log')
+@rd.perm_required('cloudwatch')
+@rd.requires_params(["log_group", "log_stream"])
+def cloudwatch_log_groups(request):
+    log_group = request.DATA.get("log_group")
+    log_stream = request.DATA.get("log_stream")
+    pattern = request.DATA.get("pattern")
+    if pattern:
+        start = request.DATA.get("dr_start", "6d")
+        end = request.DATA.get("dr_end", field_type=datetime)
+        resp = cloudwatch.filter(log_group, [log_stream], pattern,
+                                 start_time=start, end_time=end,
+                                 resp_format=request.DATA.get("format"))
+    else:
+        resp = cloudwatch.get(log_group, log_stream)
+    lst = resp["events"]
+    data = dict(status=True, data=lst, count=len(lst))
+    if "nextBackwardToken" in resp:
+        data["backward"] = resp["nextBackwardToken"]
+        data["forword"] = resp["nextForwardToken"]
+    data = dict(status=True, data=lst, count=len(lst))
+    return rv.restResult(request, data)
+
+
+@rd.urlGET('cloudwatch/groups')
+@rd.perm_required('cloudwatch')
+def cloudwatch_log_groups(request):
+    return rv.restList(request, cloudwatch.getLogGroups(request.DATA.get("graph", None) is None))
+
+
+@rd.urlPOST('cloudwatch/insights/start')
+@rd.perm_required('cloudwatch')
+@rd.requires_params(["log_groups"])
+def cloudwatch_insights_start(request):
+    log_groups = request.DATA.get("log_groups")
+    log_streams = request.DATA.get("log_streams")
+    start = request.DATA.get("dr_start", "6h")
+    end = request.DATA.get("dr_end", field_type=datetime)
+    ip = request.DATA.get("ip")
+    text = request.DATA.get("text")
+    query = request.DATA.get("query")
+    if ip is None and text is None and query is None:
+        return rv.restPermissionDenied(request)
+    resp = cloudwatch.startSearch(log_groups, start, end,
+                                  ip=ip, text=text, query_string=query,
+                                  log_streams=log_streams)
+    return rv.restResult(request, dict(status=True, data=resp))
+
+
+@rd.urlGET(r'cloudwatch/insights/results')
+@rd.perm_required('cloudwatch')
+@rd.requires_params(["query_id"])
+def cloudwatch_insights_results(request):
+    query_id = request.DATA.get("query_id")
+    results = cloudwatch.getInsightResults(query_id, request.DATA.get("format"))
+    return rv.restResult(request, results)
+
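One thing to note: three of the new views (the POST and GET handlers for cloudwatch/log and the cloudwatch/groups handler) all share the Python name cloudwatch_log_groups, which is harmless only if the rd.url* decorators register each handler at decoration time; at module level the later definitions shadow the earlier ones. A hypothetical client-side call against the insights endpoints, assuming the auditlog URLs are mounted under /rpc/auditlog and a bearer token is accepted (both assumptions, not from the package):

import requests

BASE = "https://example.com/rpc/auditlog"      # hypothetical mount point
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical auth scheme

# Kick off an Insights query over the last 6 hours for a single IP.
started = requests.post(
    f"{BASE}/cloudwatch/insights/start", headers=HEADERS,
    json={"log_groups": ["web"], "ip": "192.168.1.10"}).json()

# Fetch results with the returned query_id (shape assumes restResult
# serializes the dict(status=True, data=...) payload as-is).
query_id = started["data"]["query_id"]
results = requests.get(
    f"{BASE}/cloudwatch/insights/results", headers=HEADERS,
    params={"query_id": query_id, "format": "nginx"}).json()
print(results.get("state"), results.get("count"))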
{django_restit-4.2.83.dist-info → django_restit-4.2.85.dist-info}/RECORD
CHANGED
@@ -30,7 +30,7 @@ account/models/group.py,sha256=iDD_oSgswKV_t_gXZuVK80MvICrZZqdANm2jtGtOFy8,21985
 account/models/legacy.py,sha256=zYdtv4LC0ooxPVqWM-uToPwV-lYWQLorSE6p6yn1xDw,2720
 account/models/member.py,sha256=fzSVVAdbUa1knp1O4JTnYZFYRas7-zDZaOPjZAMCC1Q,52992
 account/models/membership.py,sha256=90EpAhOsGaqphDAkONP6j_qQ0OWSRaQsI8H7E7fgMkE,9249
-account/models/notify.py,sha256=
+account/models/notify.py,sha256=TOkuVBLAsbzT58FOxII_G3Cj_IDQx16vyehyEsNrDcY,15306
 account/models/passkeys.py,sha256=TJxITUi4DT4_1tW2K7ZlOcRjJuMVl2NtKz7pKQU8-Tw,1516
 account/models/session.py,sha256=ELkWjB_2KXQvPtRPrvuGJpJsqrxCQX_4J53SbqGz_2U,3737
 account/models/settings.py,sha256=gOyRWBVd3BQpjfj_hJPtqX3H46ztyRAFxBrPbv11lQg,2137
@@ -61,7 +61,7 @@ account/templates/unsubscribed.html,sha256=UVp2eM3yqmy6GYzWz1FStO2I7YpWnGuXFJcTt
 auditlog/README,sha256=q4DXhdz5CuMyuxYISHXzhlHnIkRJlojwOMchLzW2qOI,520
 auditlog/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 auditlog/admin.py,sha256=-q7fstdFjNeDFfbwdrxVqy0WGKxMpBwrsM7AyG1p80g,1006
-auditlog/cloudwatch.py,sha256
+auditlog/cloudwatch.py,sha256=-u_mbs60bqWMXUNh28S41qqPE5flJZXB_ZdtWc-2YVA,6788
 auditlog/decorators.py,sha256=ZoIv0fhZjxtMEV15NcKijW4xPF5UEScPna60zB3TxZo,6553
 auditlog/middleware.py,sha256=Q4bXg8rnm8y2fMnAsN6ha3Fz6TW8jIzLnvpu4H9SpWE,1537
 auditlog/migrations/0001_initial.py,sha256=X171gKQZIaTO9FGNG1yKTjGSZS0ZjZj5gvimF9-_kks,3309
@@ -69,7 +69,7 @@ auditlog/migrations/0002_alter_persistentlog_session.py,sha256=DkkcIobbHdbniKg5b
 auditlog/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 auditlog/models.py,sha256=skDAiuzR4chC-WNIaH2nm_VVcbnDD6ZtUxBwhk7UY8U,16517
 auditlog/periodic.py,sha256=AUhDeVsZtC47BJ-lklvYEegHoxAzj1RpIvRFSsM7g5E,363
-auditlog/rpc.py,sha256=
+auditlog/rpc.py,sha256=gJgj3Wiar5pVsw8tuhy0jXLkqFkOr3Z-oI2DKelMRAQ,5592
 auditlog/tq.py,sha256=OgzJVspWI6FL92GEhDPtabYoP_Hd3zGNh0E297abz3Y,2415
 auditlog/urls.py,sha256=GNqpN74EpYlMND2UFUdPt5rOkTYYrdbTV0W3fg4zLfQ,163
 inbox/README.md,sha256=jsklDrzD5d94r7cwgaU6Gi1HCjBDfWq7jd92qB0JYPU,2169
@@ -112,14 +112,14 @@ incident/migrations/0015_rule_title_template_alter_incident_state.py,sha256=FPUD
 incident/migrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 incident/models/__init__.py,sha256=NMphuhb0RTMf7Ov4QkNv7iv6_I8Wtr3xQ54yjX_a31M,209
 incident/models/event.py,sha256=Dw6fUi2tbLeA_ZRDcvGQNFkCkMGMBdtNeaLikXdAyE8,7769
-incident/models/incident.py,sha256=
+incident/models/incident.py,sha256=XB7FgyV26sgxSOHu69UTlLmLs1vljf0O6KyN114Rf2I,19585
 incident/models/ossec.py,sha256=eUDRGawzuLWobKEVGKfdZisDnyjS_Hlxi0T_GCSLCCI,2252
 incident/models/rules.py,sha256=aRkJ0ZnTv87nAUC1sHVkPExfb3OJ8fgHQIhnCIpIbhQ,7001
 incident/models/ticket.py,sha256=S3kqGQpYLE6Y4M9IKu_60sgW-f592xNr8uufqHnvDoU,2302
 incident/parsers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 incident/parsers/ossec.py,sha256=jyJmNBwnQS1tjZMwYhslnCpZviCHXnozv88BPT-ytCw,11592
 incident/periodic.py,sha256=eX1rQK6v65A9ugofTvJPSmAWei6C-3EYgzCMuGZ03jM,381
-incident/rpc.py,sha256=
+incident/rpc.py,sha256=6JVWTTAr4CN2tAjjIUcXug1z3RhU_ar5CDLzedkduA4,8187
 incident/templates/email/incident_change.html,sha256=tQYphypwLukkVdwH0TB2Szz2VEJ7GnsfRS3_ZJ-MYeE,13895
 incident/templates/email/incident_msg.html,sha256=MZdKhTddUF2MpiH8Z3RTQEmW_ko1n3ajeZ11KLtiLlU,13780
 incident/templates/email/incident_new.html,sha256=W6nwFQROnyDfMlXub8s02ws4hGnJp16pfgp9xTm_aEc,15185
@@ -379,7 +379,7 @@ rest/crypto/__init__.py,sha256=Tl0U11rgj1eBYqd6OXJ2_XSdNLumW_JkBZnaJqI6Ldw,72
 rest/crypto/aes.py,sha256=NOVRBRSHCV-om68YpGySWWG-4kako3iEVjq8hxZWPUU,4372
 rest/crypto/privpub.py,sha256=_FioylVcbMmDP80yPYjURmafEiDmEAMkskbc7WF10ac,4082
 rest/crypto/util.py,sha256=agFN2OCPHC70tHNGWrMkkZX4Tt_Ty6imoKEMdTkZpKA,4514
-rest/datem.py,sha256=
+rest/datem.py,sha256=JHMvWG8A-n4g915wrZiCtfuhgcLMgNYMXuzXIEtgaPg,12335
 rest/decorators.py,sha256=ig0LATc3-2mhEJPAWHRbIRM-ZOFyjm6e_F9RhpRWidE,15082
 rest/encryption.py,sha256=x6Kiez0tVqfxK26MSsRL3k8OS05ni1gEX2aj3I0S9V0,788
 rest/errors.py,sha256=uKwG9OkLme36etabqK54DMjMQc1fgEoUIAUxXa7WFQw,612
@@ -506,7 +506,7 @@ ws4redis/servers/uwsgi.py,sha256=VyhoCI1DnVFqBiJYHoxqn5Idlf6uJPHvfBKgkjs34mo,172
 ws4redis/settings.py,sha256=K0yBiLUuY81iDM4Yr-k8hbvjn5VVHu5zQhmMK8Dtz0s,1536
 ws4redis/utf8validator.py,sha256=S0OlfjeGRP75aO6CzZsF4oTjRQAgR17OWE9rgZdMBZA,5122
 ws4redis/websocket.py,sha256=R0TUyPsoVRD7Y_oU7w2I6NL4fPwiz5Vl94-fUkZgLHA,14848
-django_restit-4.2.
-django_restit-4.2.
-django_restit-4.2.
-django_restit-4.2.
+django_restit-4.2.85.dist-info/LICENSE.md,sha256=VHN4hhEeVOoFjtG-5fVv4jesA4SWi0Z-KgOzzN6a1ps,1068
+django_restit-4.2.85.dist-info/METADATA,sha256=c0_tpMC4ULCMJwzo_QfuVOwW3MRdLMhy6k1gqDHUaec,7645
+django_restit-4.2.85.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+django_restit-4.2.85.dist-info/RECORD,,
incident/models/incident.py
CHANGED
@@ -4,7 +4,7 @@ from rest import settings
 from rest import models as rm
 from rest import helpers as rh
 from taskqueue.models import Task
-from account.models import Member
+from account.models import Member, Group
 from objict import objict
 from datetime import datetime, timedelta
 from rest import log
@@ -255,10 +255,16 @@ class Incident(models.Model, rm.RestModel, rm.MetaDataModel):
             rh.log_exception("triggerSMS")
 
     def triggerGroup(self):
+        if self.rule.action.count(":") == 2:
+            action, gid, perm = self.rule.action.split(":")
+            self.group = Group.objects.filter(pk=int(gid)).last()
+            self.save()
+        else:
+            action, perm = self.rule.action.split(":")
+
         if not self.group:
             self.notifyWith("notify.unknown_incidents")
             return
-        action, perm = self.rule.action.split(":")
         self.action_sent = datetime.now()
         self.save()
 
@@ -472,13 +478,13 @@ class IncidentHistory(models.Model, rm.RestModel):
     SEARCH_FIELDS = ["to__username", "note"]
     GRAPHS = {
         "default": {
-            "extra":[
+            "extra": [
                 ("get_state_display", "state_display"),
                 ("get_priority_display", "priority_display"),
             ],
-            "graphs":{
-                "by":"basic",
-                "to":"basic",
+            "graphs": {
+                "by": "basic",
+                "to": "basic",
                 "media": "basic"
             }
         },
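triggerGroup now accepts a second rule-action format: alongside the original "action:permission" form, a rule can embed an explicit group id as "action:group_id:permission", in which case the incident's group is looked up and saved before the notification path runs. A small illustration of the two shapes (the action and permission names below are made up; only the colon layout matches the diff):

def parse_rule_action(action_str):
    # "group:42:notify_incidents" style: an explicit group id is embedded.
    if action_str.count(":") == 2:
        action, gid, perm = action_str.split(":")
        return action, int(gid), perm
    # "group:notify_incidents" style: no group id, fall back to the
    # incident's existing group (or the unknown-incidents notification).
    action, perm = action_str.split(":")
    return action, None, perm

print(parse_rule_action("group:42:notify_incidents"))  # ('group', 42, 'notify_incidents')
print(parse_rule_action("group:notify_incidents"))     # ('group', None, 'notify_incidents')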
incident/rpc.py
CHANGED
@@ -18,7 +18,7 @@ def patched_restPermissionDenied(request, error="permission denied",
                                  component=None, component_id=None):
 
     description = f"permission denied: {error_code} '{error}' for {request.user} {request.method}:{request.path}"
-    rh.log_error(description)
+    # rh.log_error(description)
     if error_code == 404:
         if not request.path.startswith(LOG_REST_PREFIX) and not request.path.startswith("/rpc"):
             # just ignore these
@@ -61,7 +61,7 @@ def ossec_alert_creat_from_request(request):
     if payload:
         try:
             # TODO make this a task (background it)
-            rh.log_error("parsing payload", payload)
+            # rh.log_error("parsing payload", payload)
             od = ossec.parseAlert(request, payload)
             # lets now create a local event
             if od is not None:
@@ -118,7 +118,7 @@ def ossec_alert_creat_from_request(request):
             "category": "ossec_error",
             "metadata": metadata
         })
-        rh.log_error("ossec alert", request.DATA.asDict())
+        # rh.log_error("ossec alert", request.DATA.asDict())
     return rv.restStatus(request, False, error="no alert data")
 
 
rest/datem.py
CHANGED
@@ -312,3 +312,77 @@ def getDateRange(start, end=None, kind=None, zone=None, hour=0, eod=None, end_eo
     if offset:
         end = end + timedelta(hours=offset)
     return start, end
+
+
+def convert_to_epoch_range(start, end=None):
+    """
+    Convert start and end times to epoch timestamps in milliseconds.
+
+    Parameters:
+    start (int, datetime, or str): The start time, which can be:
+        - An int representing the epoch time in milliseconds.
+        - A datetime object representing the start time.
+        - A string representing a timedelta relative to the end time. The string can end with:
+            - 'm' for minutes (e.g., '30m' for 30 minutes ago)
+            - 'h' for hours (e.g., '5h' for 5 hours ago)
+            - 'd' for days (e.g., '2d' for 2 days ago)
+    end (datetime or None): The end time, which can be:
+        - A datetime object representing the end time.
+        - None, in which case the current time (UTC) is used.
+
+    Returns:
+    tuple: A tuple containing two integers:
+        - start_epoch: The epoch time of the start parameter in milliseconds.
+        - end_epoch: The epoch time of the end parameter in milliseconds.
+
+    Raises:
+    ValueError: If the end parameter is not a datetime object or None.
+    ValueError: If the start parameter is not an int, datetime object, or a valid timedelta string.
+    ValueError: If the start string does not end with 'm', 'h', or 'd'.
+
+    Examples:
+    >>> start = "30m"
+    >>> end = datetime(2024, 5, 13, 12, 30, tzinfo=timezone.utc)
+    >>> convert_to_epoch(start, end)
+    (1715676600000, 1715681400000)
+
+    >>> start = "1d"
+    >>> end = None
+    >>> convert_to_epoch(start, end)
+    (1715595000000, 1715681400000)
+
+    >>> start = "5h"
+    >>> end = datetime(2024, 5, 13, 12, 30, tzinfo=timezone.utc)
+    >>> convert_to_epoch(start, end)
+    (1715661000000, 1715681400000)
+    """
+    if end is None:
+        end = datetime.utcnow()
+
+    if isinstance(end, datetime):
+        end_epoch = int(end.timestamp() * 1000)
+    else:
+        raise ValueError("End parameter must be a datetime object or None.")
+
+    if isinstance(start, int):
+        start_epoch = start
+    elif isinstance(start, datetime):
+        start_epoch = int(start.timestamp() * 1000)
+    elif isinstance(start, str):
+        if start.endswith('m'):
+            delta_minutes = int(start[:-1])
+            start_datetime = end - timedelta(minutes=delta_minutes)
+        elif start.endswith('h'):
+            delta_hours = int(start[:-1])
+            start_datetime = end - timedelta(hours=delta_hours)
+        elif start.endswith('d'):
+            delta_days = int(start[:-1])
+            start_datetime = end - timedelta(days=delta_days)
+        else:
+            raise ValueError("Start string must end with 'm' for minutes, 'h' for hours, or 'd' for days.")
+
+        start_epoch = int(start_datetime.timestamp() * 1000)
+    else:
+        raise ValueError("Start parameter must be an int, datetime object, or a string representing a timedelta.")
+
+    return start_epoch, end_epoch
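convert_to_epoch_range is what the new cloudwatch helpers use to turn "1h"/"6d"-style ranges into the millisecond timestamps the AWS APIs expect (its docstring examples call it convert_to_epoch, but the defined name is convert_to_epoch_range). A short usage sketch; note that the utcnow() default yields a naive datetime, which .timestamp() interprets in local time:

from datetime import datetime
from rest import datem  # assumes django-restit's rest package is importable

# Relative start: "2h" means two hours before the end, which defaults to now.
start_ms, end_ms = datem.convert_to_epoch_range("2h")
print(end_ms - start_ms)  # roughly 7200000 milliseconds

# Explicit datetimes on both ends.
start_ms, end_ms = datem.convert_to_epoch_range(
    datetime(2024, 5, 13, 10, 0), datetime(2024, 5, 13, 12, 0))
print(start_ms, end_ms)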
{django_restit-4.2.83.dist-info → django_restit-4.2.85.dist-info}/LICENSE.md
File without changes
{django_restit-4.2.83.dist-info → django_restit-4.2.85.dist-info}/WHEEL
File without changes