xbase-util 0.8.5__tar.gz → 0.8.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {xbase_util-0.8.5 → xbase_util-0.8.7}/PKG-INFO +1 -1
- {xbase_util-0.8.5 → xbase_util-0.8.7}/setup.py +1 -1
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/common_util.py +26 -22
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/pcap_util.py +11 -12
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/test.py +1 -7
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util.egg-info/PKG-INFO +1 -1
- {xbase_util-0.8.5 → xbase_util-0.8.7}/README.md +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/setup.cfg +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/__init__.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/add_column_util.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/dangerous_util.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/__init__.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/bean/ConfigBean.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/bean/CurrentConfigBean.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/bean/FlowBean.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/bean/TaskTemplateBean.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/bean/__init__.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/dao/ConfigDao.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/dao/CurrentConfigDao.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/dao/FlowDao.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/dao/TaskTemplateDao.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/dao/__init__.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/db/initsqlite3.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/es_db_util.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/esreq.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/geo_util.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/handle_features_util.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/packet_util.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/xbase_constant.py +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util.egg-info/SOURCES.txt +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util.egg-info/dependency_links.txt +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util.egg-info/not-zip-safe +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util.egg-info/top_level.txt +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util_assets/GeoLite2-City.mmdb +0 -0
- {xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util_assets/arkimeparse.js +0 -0
{xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/common_util.py

@@ -25,28 +25,33 @@ def filter_visible_chars(data):
     return ''.join(chr(b) for b in data if 32 <= b <= 126 or b in (9, 10, 13))


-def parse_chunked_body(data: bytes, need_un_gzip=False) -> bytes:
-    … (previous body, old lines 29-46, not rendered in the diff view)
+def parse_chunked_body(data: bytes, need_un_gzip=False,session_id="none") -> bytes:
+    try:
+        body = b''
+        while True:
+            chunk_size_end = data.find(b"\r\n")
+            if chunk_size_end == -1:
+                break
+            chunk_size_hex = data[:chunk_size_end]
+            print(f"chunk_size_hex:{chunk_size_hex}")
+            chunk_size = int(chunk_size_hex, 16)
+            if chunk_size == 0:
+                break
+            chunk_start = chunk_size_end + 2
+            chunk_end = chunk_start + chunk_size
+            body += data[chunk_start:chunk_end]
+            data = data[chunk_end + 2:]
+        if need_un_gzip:
+            try:
+                return gzip.decompress(body)
+            except gzip.BadGzipFile:
+                print("解压错误")
+                return body
+        else:
             return body
-
-
+    except Exception as e:
+        print(f"parse_chunked_body失败,session:{session_id}")
+        return b''


 def process_origin_pos(originPos):

@@ -86,7 +91,6 @@ def get_ua_duplicate_count(all_packets)
         ua_list.extend(lines)
     counter = Counter(ua_list)
     pairs = sum(count // 2 for count in counter.values())
-    print(pairs)
     return pairs

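The rewritten parse_chunked_body walks an HTTP chunked body: it reads each hex size line, copies that many bytes, skips the trailing CRLF, stops at the zero-size chunk, and optionally gunzips the result; any failure is caught and logged with the new session_id. A minimal usage sketch follows (the sample payloads are invented, and the import path xbase_util.common_util is assumed from this diff):

```python
import gzip

from xbase_util.common_util import parse_chunked_body  # import path assumed from this diff

# Two chunks ("Wiki" + "pedia") followed by the terminating zero-size chunk.
chunked = b"4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n"
print(parse_chunked_body(chunked))  # b'Wikipedia'

# A gzip-compressed payload sent as a single chunk; need_un_gzip requests decompression,
# and session_id is only used to label the error log if parsing fails.
payload = gzip.compress(b"hello")
chunked_gz = hex(len(payload))[2:].encode() + b"\r\n" + payload + b"\r\n0\r\n\r\n"
print(parse_chunked_body(chunked_gz, need_un_gzip=True, session_id="demo"))  # b'hello'
```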
{xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/pcap_util.py

@@ -274,7 +274,7 @@ def process_session_id_disk_simple(id, node, packet_pos, esdb, pcap_path_prefix)
     return get_file_and_read_pos(id, file, pos_list)


-def parse_body(data):
+def parse_body(data,session_id='none'):
     if data.find(b"\r\n\r\n") != -1:
         res = data.split(b"\r\n\r\n", 1)
         header = res[0]

@@ -286,7 +286,7 @@ def parse_body(data):
         gzip_pattern = pattern_gzip.search(header)
         need_gzip = gzip_pattern and b'gzip' in gzip_pattern.group()
         if chunked_pattern and b'chunked' in chunked_pattern.group():
-            body = parse_chunked_body(body, need_un_gzip=need_gzip)
+            body = parse_chunked_body(body, need_un_gzip=need_gzip,session_id=session_id)
         elif need_gzip:
             try:
                 body = gzip.decompress(body)

@@ -297,7 +297,7 @@ def parse_body(data):
     return filter_visible_chars(header), result_body_str


-def reassemble_session_pcap(reassemble_tcp_res, skey):
+def reassemble_session_pcap(reassemble_tcp_res, skey,session_id='none'):
     my_map = {
         'key': '',
         'req_header': '',

@@ -311,7 +311,7 @@ def reassemble_session_pcap(reassemble_tcp_res, skey):
     }
     packet_list = []
     for index, packet in enumerate(reassemble_tcp_res):
-        header, body = parse_body(packet['data'])
+        header, body = parse_body(packet['data'], session_id=session_id)
         if index == len(reassemble_tcp_res) - 1:
             packet_list.append(copy.deepcopy(my_map))
         if packet['key'] == skey:
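In parse_body and reassemble_session_pcap the new session_id is purely diagnostic: it is forwarded down to parse_chunked_body so a parse failure can be attributed to a specific session. A usage sketch with an invented HTTP response (parse_body's module is inferred from the per-file line counts of this diff):

```python
from xbase_util.pcap_util import parse_body  # module inferred from this diff's hunk counts

# A tiny chunked HTTP response; in the package this data comes from reassembled TCP payloads.
raw = (b"HTTP/1.1 200 OK\r\n"
       b"Content-Type: text/plain\r\n"
       b"Transfer-Encoding: chunked\r\n"
       b"\r\n"
       b"5\r\nhello\r\n0\r\n\r\n")

header, body = parse_body(raw, session_id="sess-123")  # session_id only labels failure logs
print(header)
print(body)
```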
@@ -366,8 +366,8 @@ def reassemble_tcp_pcap(p):
             info[key]['max'] = seq
         packets2.append(packet)
     if len(keys) == 1:
-        key = f"{…
-        ack = …
+        key = f"{packets2[0]['pkt'][IP].dst}:{packets2[0]['pkt'][IP].dport}"
+        ack = packets2[0]['pkt'][TCP].ack
         info[key] = {
             "min": ack,
             "max": ack,

@@ -375,8 +375,7 @@ def reassemble_tcp_pcap(p):
             "wrapack": False,
         }
         keys.append(key)
-
-    if len(packets) == 0:
+    if len(packets2) == 0:
         return []
     needwrap = False
     if info[keys[0]] and info[keys[0]]['max'] - info[keys[0]]['min'] > 0x7fffffff:

@@ -388,13 +387,13 @@ def reassemble_tcp_pcap(p):
         info[keys[0]]['wrapack'] = True
         needwrap = True
     if needwrap:
-        for packet in …
+        for packet in packets2:
             key = f"{packet['ip']['addr1']}:{packet['tcp']['sport']}"
             if info[key]['wrapseq'] and packet['tcp']['seq'] < 0x7fffffff:
                 packet['tcp']['seq'] += 0xffffffff
             if info[key]['wrapack'] and packet['tcp']['ack'] < 0x7fffffff:
                 packet['tcp']['ack'] += 0xffffffff
-    clientKey = f"{…
+    clientKey = f"{packets2[0]['pkt'][IP].src}:{packets2[0]['pkt'][IP].sport}"

     def compare_packets(a, b):
         a_seq = a['pkt'][TCP].seq

@@ -413,14 +412,14 @@ def reassemble_tcp_pcap(p):
             return (a_seq + len(a_data) - 1) - b_ack
         return a_ack - (b_seq + len(b_data) - 1)

-
+    packets2.sort(key=cmp_to_key(compare_packets))
     # del packets[num_packets:]
     # Now divide up conversation
     clientSeq = 0
     hostSeq = 0
     previous = 0
     results = []
-    for i, item in enumerate(…
+    for i, item in enumerate(packets2):
         sip = item['pkt'][IP].src
         sport = item['pkt'][IP].sport
         seq = item['pkt'][TCP].seq
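The context lines above also show how reassemble_tcp_pcap handles 32-bit TCP sequence wraparound: when the observed min/max span in one direction exceeds 0x7fffffff, the stream is flagged as wrapped and seq/ack values still below 0x7fffffff are lifted by 0xffffffff before sorting with compare_packets. A standalone illustration of that normalization with made-up numbers (not part of the package API):

```python
# Hypothetical sequence numbers from one direction of a stream that wrapped past 2**32.
seqs = [0xFFFFFF00, 0xFFFFFFF0, 0x00000010, 0x00000060]

# Same wrap test the code above uses: a span wider than half the sequence space means wraparound.
if max(seqs) - min(seqs) > 0x7FFFFFFF:
    # Mirror the diff: values still below 0x7fffffff are post-wrap and get 0xffffffff added.
    seqs = [s + 0xFFFFFFFF if s < 0x7FFFFFFF else s for s in seqs]

print([hex(s) for s in sorted(seqs)])
# The post-wrap values (0x10, 0x60) now sort after the pre-wrap ones.
```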
{xbase_util-0.8.5 → xbase_util-0.8.7}/xbase_util/test.py

@@ -1,13 +1,7 @@
-import re
-from collections import Counter
-
-import numpy as np
 from scapy.packet import Raw
 from scapy.utils import rdpcap

-from xbase_util.common_util import get_res_status_code_list
 from xbase_util.pcap_util import reassemble_tcp_pcap, reassemble_session_pcap
-from xbase_util.xbase_constant import res_status_code_pattern

 if __name__ == '__main__':
     packets_scapy = reassemble_tcp_pcap(rdpcap("gzip2.pcap"))

@@ -17,5 +11,5 @@ if __name__ == '__main__':
         if Raw in pkt:
             streams += pkt[Raw].load
     text_data = streams.decode('ascii', errors='ignore')
-    all_packets = reassemble_session_pcap(packets_scapy, skey=skey)
+    all_packets = reassemble_session_pcap(packets_scapy, skey=skey,session_id='enn')
