FlowAnalyzer-0.3.9-py3-none-any.whl → FlowAnalyzer-0.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- FlowAnalyzer/FlowAnalyzer.py +37 -28
- {FlowAnalyzer-0.3.9.dist-info → FlowAnalyzer-0.4.0.dist-info}/METADATA +1 -1
- FlowAnalyzer-0.4.0.dist-info/RECORD +9 -0
- {FlowAnalyzer-0.3.9.dist-info → FlowAnalyzer-0.4.0.dist-info}/WHEEL +1 -1
- FlowAnalyzer-0.3.9.dist-info/RECORD +0 -9
- {FlowAnalyzer-0.3.9.dist-info → FlowAnalyzer-0.4.0.dist-info}/LICENSE +0 -0
- {FlowAnalyzer-0.3.9.dist-info → FlowAnalyzer-0.4.0.dist-info}/top_level.txt +0 -0
FlowAnalyzer/FlowAnalyzer.py
CHANGED
@@ -4,6 +4,7 @@ import hashlib
 import json
 import os
 import subprocess
+from dataclasses import dataclass
 from typing import Dict, Iterable, NamedTuple, Optional, Tuple
 from urllib import parse
 
@@ -11,7 +12,8 @@ from .logging_config import logger
 from .Path import get_default_tshark_path
 
 
-class Request(NamedTuple):
+@dataclass
+class Request:
     frame_num: int
     header: bytes
     file_data: bytes
@@ -19,12 +21,13 @@ class Request(NamedTuple):
     time_epoch: float
 
 
-class Response(NamedTuple):
+@dataclass
+class Response:
     frame_num: int
     header: bytes
     file_data: bytes
-    request_in: int
     time_epoch: float
+    _request_in: Optional[int]
 
 
 class HttpPair(NamedTuple):
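
The two hunks above move Request and Response from typing.NamedTuple to @dataclass, and the response's request_in field becomes an Optional _request_in. The practical effect is mutability: a dataclass field can be cleared in place, while a NamedTuple has to be rebuilt with _replace. A minimal sketch of that difference (illustrative only; OldResponse and NewResponse are placeholder names, not the package's classes):

    from dataclasses import dataclass
    from typing import NamedTuple, Optional

    class OldResponse(NamedTuple):      # 0.3.9-style: immutable tuple
        frame_num: int
        request_in: Optional[int]

    @dataclass
    class NewResponse:                  # 0.4.0-style: mutable dataclass
        frame_num: int
        _request_in: Optional[int]

    old = OldResponse(frame_num=1, request_in=7)
    old = old._replace(request_in=None)   # a replacement tuple must be built
    new = NewResponse(frame_num=1, _request_in=7)
    new._request_in = None                # the field is simply reassigned

HttpPair, by contrast, is never modified after construction and stays a NamedTuple.
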
@@ -111,7 +114,7 @@ class FlowAnalyzer:
             if packet.get("http.response.code"):
                 responses[frame_num] = Response(
                     frame_num=frame_num,
-                    request_in=request_in,
+                    _request_in=request_in,
                     header=header,
                     file_data=file_data,
                     time_epoch=time_epoch,
@@ -130,20 +133,20 @@ class FlowAnalyzer:
             包含请求和响应信息的字典迭代器
         """
         requests, responses = self.parse_http_json()
-        response_map = {r.request_in: r for r in responses.values()}
+        response_map = {r._request_in: r for r in responses.values()}
         yielded_resps = []
         for req_id, req in requests.items():
             resp = response_map.get(req_id)
             if resp:
                 yielded_resps.append(resp)
-                resp = resp._replace(request_in=None)
+                resp._request_in = None
                 yield HttpPair(request=req, response=resp)
             else:
                 yield HttpPair(request=req, response=None)
 
         for resp in response_map.values():
             if resp not in yielded_resps:
-                resp = resp._replace(request_in=None)
+                resp._request_in = None
                 yield HttpPair(request=None, response=resp)
 
     @staticmethod
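
The hunk above pairs requests and responses through tshark's http.request_in field: each response carries the frame number of the request it answers, so requests keyed by frame number can be joined against responses keyed by _request_in, and anything left over is yielded unpaired. A small self-contained sketch of the same join, using plain dicts and placeholder types rather than the package's code:

    from typing import NamedTuple, Optional

    class Req(NamedTuple):
        frame_num: int

    class Resp(NamedTuple):
        frame_num: int
        request_in: Optional[int]   # frame number of the matching request

    requests = {1: Req(1), 5: Req(5)}
    responses = {2: Resp(2, 1), 9: Resp(9, None)}   # frame 9 answers no known request

    response_map = {r.request_in: r for r in responses.values()}
    pairs = [(req, response_map.get(frame_num)) for frame_num, req in requests.items()]
    pairs += [(None, r) for r in responses.values() if r.request_in not in requests]
    print(pairs)   # one matched pair, one request without a response, one orphan response
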
@@ -155,27 +158,33 @@ class FlowAnalyzer:
     def extract_json_file(file_name: str, display_filter: str, tshark_path: str, tshark_work_dir: str, json_work_path: str) -> None:
         command = [
             tshark_path,
-            "-r",
-
-            "-
-            "
-            "-
-            "
-            "-e",
-            "
-            "-e",
-            "
-            "-e",
+            "-r",
+            file_name,
+            "-Y",
+            f"({display_filter})",
+            "-T",
+            "json",
+            "-e",
+            "http.response.code",
+            "-e",
+            "http.request_in",
+            "-e",
+            "tcp.reassembled.data",
+            "-e",
+            "frame.number",
+            "-e",
+            "tcp.payload",
+            "-e",
+            "frame.time_epoch",
+            "-e",
+            "exported_pdu.exported_pdu",
+            "-e",
+            "http.request.full_uri",
         ]
         logger.debug(f"导出Json命令: {command}")
-
+
         with open(json_work_path, "wb") as output_file:
-            process = subprocess.Popen(
-                command,
-                stdout=output_file,
-                stderr=subprocess.PIPE,
-                cwd=tshark_work_dir
-            )
+            process = subprocess.Popen(command, stdout=output_file, stderr=subprocess.PIPE, cwd=tshark_work_dir)
             _, stderr = process.communicate()
             logger.debug(f"导出Json文件路径: {json_work_path}")
 
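
The rewritten command above is a standard tshark field export: -r reads the capture, -Y applies a display filter, -T json selects JSON output, each -e adds a field to the records, and stdout is streamed straight into the JSON file. A standalone sketch of the same pattern, with placeholder file names and filter rather than the package's values:

    import subprocess

    pcap_path = "capture.pcapng"      # placeholder capture file
    display_filter = "http"           # placeholder display filter

    command = [
        "tshark",
        "-r", pcap_path,              # read packets from the capture
        "-Y", f"({display_filter})",  # keep only packets matching the filter
        "-T", "json",                 # emit JSON records
        "-e", "frame.number",         # each -e names a field to include
        "-e", "http.request.full_uri",
    ]
    with open("output.json", "wb") as output_file:
        process = subprocess.Popen(command, stdout=output_file, stderr=subprocess.PIPE)
        _, stderr = process.communicate()
        if process.returncode != 0:
            print(stderr.decode(errors="replace"))
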
@@ -216,7 +225,7 @@ class FlowAnalyzer:
 
         md5_sum = FlowAnalyzer.get_hash(file_path, display_filter)
         logger.debug(f"md5校验值: {md5_sum}")
-
+
         work_dir = os.getcwd()
         tshark_command_work_dir = os.path.dirname(os.path.abspath(file_path))
         json_work_path = os.path.join(work_dir, "output.json")
@@ -231,7 +240,7 @@ class FlowAnalyzer:
             return json_work_path
         except Exception:
             logger.debug("默认的Json文件无法被正常解析, 正在重新生成Json文件中")
-
+
         tshark_path = FlowAnalyzer.get_tshark_path(tshark_path)
         FlowAnalyzer.extract_json_file(file_name, display_filter, tshark_path, tshark_command_work_dir, json_work_path)
         FlowAnalyzer.add_md5sum(json_work_path, md5_sum)
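
The two hunks above only touch blank lines, but their context suggests the caching strategy around output.json: an md5 checksum derived from the capture file and display filter validates the previously exported JSON, and the tshark export is re-run only when that validation fails. A rough sketch of such a check-then-regenerate flow; the helper names and the way the checksum is stored are assumptions for illustration, not the package's actual implementation:

    import hashlib

    def cache_checksum(file_path: str, display_filter: str) -> str:
        # Hash the capture bytes together with the filter, so a change to
        # either one invalidates the cached export.
        digest = hashlib.md5(display_filter.encode("utf-8"))
        with open(file_path, "rb") as f:
            digest.update(f.read())
        return digest.hexdigest()

    def ensure_json(file_path: str, display_filter: str, json_path: str, regenerate) -> str:
        expected = cache_checksum(file_path, display_filter)
        try:
            with open(json_path, "r", encoding="utf-8") as f:
                if expected in f.read():   # assumed: checksum stamped into the cached file
                    return json_path
        except OSError:
            pass
        regenerate(json_path)              # e.g. re-run the tshark export, then re-stamp
        return json_path
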
@@ -320,6 +329,6 @@ class FlowAnalyzer:
             file_data = self.dechunck_http_response(file_data)
 
         with contextlib.suppress(Exception):
-            if file_data.startswith(b"\
+            if file_data.startswith(b"\x1f\x8b"):
                 file_data = gzip.decompress(file_data)
         return header, file_data
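
The last code hunk pins the check to the two magic bytes 0x1f 0x8b that begin every gzip stream, so only bodies that actually look gzip-compressed are handed to gzip.decompress. A quick standalone illustration:

    import gzip

    body = gzip.compress(b"hello world")
    if body.startswith(b"\x1f\x8b"):      # gzip magic bytes
        body = gzip.decompress(body)
    print(body)                           # b'hello world'
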
FlowAnalyzer-0.4.0.dist-info/RECORD
ADDED
@@ -0,0 +1,9 @@
+FlowAnalyzer/FlowAnalyzer.py,sha256=kMjeMp8tylou_0wk-jC_9yYRFdYlFM9wYyb15jy9blA,12259
+FlowAnalyzer/Path.py,sha256=E5VvucTftp8VTQUffFzFWHotQEYtZL-j7IQPOaleiug,130
+FlowAnalyzer/__init__.py,sha256=vfiHONPTrvjUU3MwhjFOEo3sWfzlhkA6gOLn_4UJ7sg,70
+FlowAnalyzer/logging_config.py,sha256=-RntNJhrBiW7ToXIP1WJjZ4Yf9jmZQ1PTX_er3tDxhw,730
+FlowAnalyzer-0.4.0.dist-info/LICENSE,sha256=ybAV0ECduYBZCpjkHyNALVWRRmT_eM0BDgqUszhwEFU,1080
+FlowAnalyzer-0.4.0.dist-info/METADATA,sha256=iS4ByUDWmq8kyqu5sgbLc1AXXgKOg4qA7jBo-wna8_0,1956
+FlowAnalyzer-0.4.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+FlowAnalyzer-0.4.0.dist-info/top_level.txt,sha256=2MtvAF6dEe_eHipw_6G5pFLb2uOCbGnlH0bC4iBtm5A,13
+FlowAnalyzer-0.4.0.dist-info/RECORD,,
FlowAnalyzer-0.3.9.dist-info/RECORD
DELETED
@@ -1,9 +0,0 @@
-FlowAnalyzer/FlowAnalyzer.py,sha256=ErHea4wQEeGmCgAmWr4xmEuKSSYfXE0kFe7It0xD6Is,12203
-FlowAnalyzer/Path.py,sha256=E5VvucTftp8VTQUffFzFWHotQEYtZL-j7IQPOaleiug,130
-FlowAnalyzer/__init__.py,sha256=vfiHONPTrvjUU3MwhjFOEo3sWfzlhkA6gOLn_4UJ7sg,70
-FlowAnalyzer/logging_config.py,sha256=-RntNJhrBiW7ToXIP1WJjZ4Yf9jmZQ1PTX_er3tDxhw,730
-FlowAnalyzer-0.3.9.dist-info/LICENSE,sha256=ybAV0ECduYBZCpjkHyNALVWRRmT_eM0BDgqUszhwEFU,1080
-FlowAnalyzer-0.3.9.dist-info/METADATA,sha256=OcwMs0sqeUmUv1Y-9NWDaGFswMupCLf-FuJYr68DQX8,1956
-FlowAnalyzer-0.3.9.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
-FlowAnalyzer-0.3.9.dist-info/top_level.txt,sha256=2MtvAF6dEe_eHipw_6G5pFLb2uOCbGnlH0bC4iBtm5A,13
-FlowAnalyzer-0.3.9.dist-info/RECORD,,

{FlowAnalyzer-0.3.9.dist-info → FlowAnalyzer-0.4.0.dist-info}/LICENSE
File without changes

{FlowAnalyzer-0.3.9.dist-info → FlowAnalyzer-0.4.0.dist-info}/top_level.txt
File without changes