xbase-util 0.8.4__tar.gz → 0.8.6__tar.gz
- {xbase_util-0.8.4 → xbase_util-0.8.6}/PKG-INFO +1 -1
- {xbase_util-0.8.4 → xbase_util-0.8.6}/setup.py +1 -1
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/common_util.py +17 -18
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/pcap_util.py +7 -8
- xbase_util-0.8.6/xbase_util/test.py +15 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util.egg-info/PKG-INFO +1 -1
- xbase_util-0.8.4/xbase_util/test.py +0 -40
- {xbase_util-0.8.4 → xbase_util-0.8.6}/README.md +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/setup.cfg +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/__init__.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/add_column_util.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/dangerous_util.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/__init__.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/bean/ConfigBean.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/bean/CurrentConfigBean.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/bean/FlowBean.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/bean/TaskTemplateBean.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/bean/__init__.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/dao/ConfigDao.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/dao/CurrentConfigDao.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/dao/FlowDao.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/dao/TaskTemplateDao.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/dao/__init__.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/db/initsqlite3.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/es_db_util.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/esreq.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/geo_util.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/handle_features_util.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/packet_util.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/xbase_constant.py +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util.egg-info/SOURCES.txt +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util.egg-info/dependency_links.txt +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util.egg-info/not-zip-safe +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util.egg-info/top_level.txt +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util_assets/GeoLite2-City.mmdb +0 -0
- {xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util_assets/arkimeparse.js +0 -0
{xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/common_util.py

@@ -3,6 +3,7 @@ import json
 import logging
 import os
 import re
+from collections import Counter
 from datetime import datetime
 from logging.handlers import TimedRotatingFileHandler
 from urllib.parse import urlparse, parse_qs
@@ -68,27 +69,25 @@ def parse_expression(expression):
    return None


-def get_cookie_end_with_semicolon_count(
-
-
-
-    if
-
-
-        return -1
-    return count
+def get_cookie_end_with_semicolon_count(all_packets):
+    headers = [item['req_header'] + item['res_header'] for item in all_packets]
+    c = 0
+    for header in headers:
+        lines = [item for item in header.split('\r\n') if 'Cookie:' in item and item.strip().endswith(';')]
+        c += len(lines)
+    return c


-def get_ua_duplicate_count(
+def get_ua_duplicate_count(all_packets):
+    headers = [item['req_header'] + item['res_header'] for item in all_packets]
     ua_list = []
-    for
-
-
-
-
-
-
-    return sum(count)
+    for header in headers:
+        lines = [item for item in header.split('\r\n') if 'User-Agent:' in item]
+        ua_list.extend(lines)
+    counter = Counter(ua_list)
+    pairs = sum(count // 2 for count in counter.values())
+    print(pairs)
+    return pairs


 def get_res_status_code_list(all_packets):
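As a sanity check on the rewritten helpers, a minimal self-contained sketch (the two packet dicts below are hypothetical; only the 'req_header'/'res_header' layout is taken from the diff above) could look like this:

```python
from xbase_util.common_util import get_cookie_end_with_semicolon_count, get_ua_duplicate_count

# Hypothetical sample data: two HTTP exchanges with CRLF-joined headers,
# mirroring the req_header/res_header strings the helpers expect.
sample_packets = [
    {
        'req_header': "GET / HTTP/1.1\r\nUser-Agent: curl/8.0\r\nCookie: sid=abc;\r\n",
        'res_header': "HTTP/1.1 200 OK\r\nServer: nginx\r\n",
    },
    {
        'req_header': "GET /a HTTP/1.1\r\nUser-Agent: curl/8.0\r\n",
        'res_header': "HTTP/1.1 404 Not Found\r\n",
    },
]

print(get_cookie_end_with_semicolon_count(sample_packets))  # 1 -> one Cookie header ends with ';'
print(get_ua_duplicate_count(sample_packets))               # 1 -> 'User-Agent: curl/8.0' seen twice, i.e. one pair
```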
{xbase_util-0.8.4 → xbase_util-0.8.6}/xbase_util/pcap_util.py

@@ -366,8 +366,8 @@ def reassemble_tcp_pcap(p):
             info[key]['max'] = seq
         packets2.append(packet)
     if len(keys) == 1:
-        key = f"{
-        ack =
+        key = f"{packets2[0]['pkt'][IP].dst}:{packets2[0]['pkt'][IP].dport}"
+        ack = packets2[0]['pkt'][TCP].ack
         info[key] = {
             "min": ack,
             "max": ack,
@@ -375,8 +375,7 @@ def reassemble_tcp_pcap(p):
             "wrapack": False,
         }
         keys.append(key)
-
-    if len(packets) == 0:
+    if len(packets2) == 0:
         return []
     needwrap = False
     if info[keys[0]] and info[keys[0]]['max'] - info[keys[0]]['min'] > 0x7fffffff:
@@ -388,13 +387,13 @@ def reassemble_tcp_pcap(p):
         info[keys[0]]['wrapack'] = True
         needwrap = True
     if needwrap:
-        for packet in
+        for packet in packets2:
             key = f"{packet['ip']['addr1']}:{packet['tcp']['sport']}"
             if info[key]['wrapseq'] and packet['tcp']['seq'] < 0x7fffffff:
                 packet['tcp']['seq'] += 0xffffffff
             if info[key]['wrapack'] and packet['tcp']['ack'] < 0x7fffffff:
                 packet['tcp']['ack'] += 0xffffffff
-    clientKey = f"{
+    clientKey = f"{packets2[0]['pkt'][IP].src}:{packets2[0]['pkt'][IP].sport}"

     def compare_packets(a, b):
         a_seq = a['pkt'][TCP].seq
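The 0x7fffffff / 0xffffffff constants in this hunk implement 32-bit sequence-number unwrapping: once a flow's observed span exceeds half the sequence space, values that have wrapped past 2**32 are shifted up so ordering stays monotonic. A standalone sketch of the idea (the unwrap helper is illustrative, not part of pcap_util):

```python
def unwrap(values, wrapped):
    # After a detected wrap, shift "small" post-wrap values above 2**32 so they sort last.
    return [v + 0xffffffff if wrapped and v < 0x7fffffff else v for v in values]

seqs = [0xfffffff0, 0xfffffffa, 0x00000005]      # the last value wrapped past 2**32
wrapped = (max(seqs) - min(seqs)) > 0x7fffffff   # same span test as in reassemble_tcp_pcap
print(sorted(unwrap(seqs, wrapped)))             # the post-wrap value now orders after the others
```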
@@ -413,14 +412,14 @@ def reassemble_tcp_pcap(p):
             return (a_seq + len(a_data) - 1) - b_ack
         return a_ack - (b_seq + len(b_data) - 1)

-
+    packets2.sort(key=cmp_to_key(compare_packets))
     # del packets[num_packets:]
     # Now divide up conversation
     clientSeq = 0
     hostSeq = 0
     previous = 0
     results = []
-    for i, item in enumerate(
+    for i, item in enumerate(packets2):
         sip = item['pkt'][IP].src
         sport = item['pkt'][IP].sport
         seq = item['pkt'][TCP].seq
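The restored sort uses functools.cmp_to_key so packets are ordered by the pairwise compare_packets comparator rather than a single sort key. A tiny illustration with made-up packet dicts:

```python
from functools import cmp_to_key

def by_seq(a, b):
    # Negative -> a before b, positive -> b before a, as compare_packets does with seq/ack.
    return a['seq'] - b['seq']

pkts = [{'seq': 30}, {'seq': 10}, {'seq': 20}]
pkts.sort(key=cmp_to_key(by_seq))
print([p['seq'] for p in pkts])  # [10, 20, 30]
```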
xbase_util-0.8.6/xbase_util/test.py (new file)

@@ -0,0 +1,15 @@
+from scapy.packet import Raw
+from scapy.utils import rdpcap
+
+from xbase_util.pcap_util import reassemble_tcp_pcap, reassemble_session_pcap
+
+if __name__ == '__main__':
+    packets_scapy = reassemble_tcp_pcap(rdpcap("gzip2.pcap"))
+    skey = '10.28.7.16:54398'
+    streams = b""
+    for pkt in packets_scapy:
+        if Raw in pkt:
+            streams += pkt[Raw].load
+    text_data = streams.decode('ascii', errors='ignore')
+    all_packets = reassemble_session_pcap(packets_scapy, skey=skey)
+
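A hypothetical follow-on to the new test script: once the payloads have been concatenated and decoded, request lines can be pulled out of text_data with a simple regular expression (this step is illustrative and not part of test.py):

```python
import re

# Stand-in sample; in test.py text_data comes from the reassembled pcap stream.
text_data = "GET /index HTTP/1.1\r\nHost: example\r\n\r\nHTTP/1.1 200 OK\r\n"
request_line = re.compile(r"^(?:GET|POST|PUT|DELETE|HEAD|OPTIONS) \S+ HTTP/1\.[01]", re.M)
print(request_line.findall(text_data))  # ['GET /index HTTP/1.1']
```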
xbase_util-0.8.4/xbase_util/test.py (removed)

@@ -1,40 +0,0 @@
-import re
-
-import numpy as np
-from scapy.packet import Raw
-from scapy.utils import rdpcap
-
-from xbase_util.common_util import get_res_status_code_list
-from xbase_util.pcap_util import reassemble_tcp_pcap, reassemble_session_pcap
-from xbase_util.xbase_constant import res_status_code_pattern
-
-if __name__ == '__main__':
-    packets_scapy = reassemble_tcp_pcap(rdpcap("gzip2.pcap"))
-    skey = '10.28.7.16:54398'
-    streams = b""
-    for pkt in packets_scapy:
-        if Raw in pkt:
-            streams += pkt[Raw].load
-    text_data = streams.decode('ascii', errors='ignore')
-    all_packets = reassemble_session_pcap(packets_scapy, skey=skey)
-    if len(all_packets) != 0:
-        all_req_size = [item['req_size'] for item in all_packets if item['key'] == skey]
-        all_res_size = [item['res_size'] for item in all_packets if item['key'] != skey]
-        num_1, num_2, num_3, num_4, num_5 = get_res_status_code_list(all_packets)
-        # Number of request-header parameters
-        req_header_count_list = [req['req_header'].count(":") for req in all_packets]
-        # Intervals between requests
-        request_flattened_time = [item['req_time'] for item in all_packets]
-        request_time_diffs = [request_flattened_time[i + 1] - request_flattened_time[i] for i in
-                              range(len(request_flattened_time) - 1)]
-        request_mean_diff = round(np.nanmean(request_time_diffs), 5) or 0
-        request_variance_diff = round(np.nanvar(request_time_diffs), 5) or 0
-        # Intervals between responses
-        response_flattened_time = [item['res_time'] for item in all_packets]
-        response_time_diffs = [response_flattened_time[i + 1] - response_flattened_time[i] for i in
-                               range(len(response_flattened_time) - 1)]
-        response_mean_diff = round(np.nanmean(response_time_diffs), 5) or 0
-        response_variance_diff = round(np.nanvar(response_time_diffs), 5) or 0
-
-        time_period = [(abs(item['res_time'] - item['req_time'])) for item in
-                       all_packets if item['res_time'] != 0 and item['req_time'] != 0]