FlowAnalyzer 0.4.6__py3-none-any.whl → 0.4.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- FlowAnalyzer/FlowAnalyzer.py +22 -39
- FlowAnalyzer/Models.py +3 -2
- FlowAnalyzer/PacketParser.py +65 -78
- FlowAnalyzer/tshark.lua +196 -0
- {flowanalyzer-0.4.6.dist-info → flowanalyzer-0.4.7.dist-info}/METADATA +1 -1
- flowanalyzer-0.4.7.dist-info/RECORD +13 -0
- flowanalyzer-0.4.6.dist-info/RECORD +0 -12
- {flowanalyzer-0.4.6.dist-info → flowanalyzer-0.4.7.dist-info}/WHEEL +0 -0
- {flowanalyzer-0.4.6.dist-info → flowanalyzer-0.4.7.dist-info}/licenses/LICENSE +0 -0
- {flowanalyzer-0.4.6.dist-info → flowanalyzer-0.4.7.dist-info}/top_level.txt +0 -0
FlowAnalyzer/FlowAnalyzer.py
CHANGED

@@ -56,7 +56,7 @@ class FlowAnalyzer:
         sql_pair = """
             SELECT
                 req.frame_num, req.header, req.file_data, req.full_uri, req.time_epoch, -- 0-4 (Request)
-                resp.frame_num, resp.header, resp.file_data, resp.time_epoch, resp.request_in -- 5-9 (Response)
+                resp.frame_num, resp.header, resp.file_data, resp.time_epoch, resp.request_in, resp.status_code -- 5-10 (Response)
             FROM requests req
             LEFT JOIN responses resp ON req.frame_num = resp.request_in
             ORDER BY req.frame_num ASC
@@ -70,20 +70,20 @@ class FlowAnalyzer:

             resp = None
             if row[5] is not None:
-                resp = Response(frame_num=row[5], header=row[6] or b"", file_data=row[7] or b"", time_epoch=row[8], _request_in=row[9])
+                resp = Response(frame_num=row[5], header=row[6] or b"", file_data=row[7] or b"", time_epoch=row[8], _request_in=row[9], status_code=row[10] or 0)

             yield HttpPair(request=req, response=resp)

         # === Step 2: orphan-response query ===
         sql_orphan = """
-            SELECT frame_num, header, file_data, time_epoch, request_in
+            SELECT frame_num, header, file_data, time_epoch, request_in, status_code
             FROM responses
             WHERE request_in NOT IN (SELECT frame_num FROM requests)
         """
         cursor.execute(sql_orphan)

         for row in cursor:
-            resp = Response(frame_num=row[0], header=row[1] or b"", file_data=row[2] or b"", time_epoch=row[3], _request_in=row[4])
+            resp = Response(frame_num=row[0], header=row[1] or b"", file_data=row[2] or b"", time_epoch=row[3], _request_in=row[4], status_code=row[5] or 0)
             yield HttpPair(request=None, response=resp)

         # =========================================================================
@@ -161,7 +161,7 @@ class FlowAnalyzer:
         cursor.execute("PRAGMA journal_mode = MEMORY")

         cursor.execute("CREATE TABLE requests (frame_num INTEGER PRIMARY KEY, header BLOB, file_data BLOB, full_uri TEXT, time_epoch REAL)")
-        cursor.execute("CREATE TABLE responses (frame_num INTEGER PRIMARY KEY, header BLOB, file_data BLOB, time_epoch REAL, request_in INTEGER)")
+        cursor.execute("CREATE TABLE responses (frame_num INTEGER PRIMARY KEY, header BLOB, file_data BLOB, time_epoch REAL, request_in INTEGER, status_code INTEGER)")

         cursor.execute("""
             CREATE TABLE meta_info (
@@ -174,50 +174,29 @@ class FlowAnalyzer:
         """)
         conn.commit()

+        lua_script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "tshark.lua")
+
+        # Pass filter via environment variable
+        env = os.environ.copy()
+        env["flowanalyzer_filter"] = display_filter
+
         command = [
             tshark_path,
             "-r",
             pcap_path,
-            "-Y",
-            display_filter,
-            "-T",
-            "fields",
-            "-e",
-            "http.response.code",  # 0
-            "-e",
-            "http.request_in",  # 1
-            "-e",
-            "tcp.reassembled.data",  # 2
-            "-e",
-            "frame.number",  # 3
-            "-e",
-            "tcp.payload",  # 4
-            "-e",
-            "frame.time_epoch",  # 5
-            "-e",
-            "exported_pdu.exported_pdu",  # 6
-            "-e",
-            "http.request.full_uri",  # 7
-            "-e",
-            "tcp.segment.count",  # 8
-            "-E",
-            "header=n",
-            "-E",
-            "separator=/t",
-            "-E",
-            "quote=n",
-            "-E",
-            "occurrence=f",
+            "-q",
+            "-X",
+            f"lua_script:{lua_script_path}",
         ]

-        logger.debug(f"Running Tshark: {command}")
+        logger.debug(f"Running Tshark: {' '.join(command)}")
         BATCH_SIZE = 2000
         MAX_PENDING_BATCHES = 20  # Limits how many unprocessed batches may sit in memory (backpressure)

         # Process the data in parallel with a ThreadPoolExecutor
         max_workers = min(32, (os.cpu_count() or 1) + 4)

-        process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=os.path.dirname(os.path.abspath(pcap_path)))
+        process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=os.path.dirname(os.path.abspath(pcap_path)), env=env, encoding="utf-8", errors="replace")
         try:
             with sqlite3.connect(db_path) as conn:
                 cursor = conn.cursor()
@@ -236,14 +215,14 @@ class FlowAnalyzer:

                 for item in results:
                     if item["type"] == "response":
-                        db_resp_rows.append((item["frame_num"], item["header"], item["file_data"], item["time_epoch"], item["request_in"]))
+                        db_resp_rows.append((item["frame_num"], item["header"], item["file_data"], item["time_epoch"], item["request_in"], item.get("status_code", 0)))
                     else:
                         db_req_rows.append((item["frame_num"], item["header"], item["file_data"], item["full_uri"], item["time_epoch"]))

                 if db_req_rows:
                     cursor.executemany("INSERT OR REPLACE INTO requests VALUES (?,?,?,?,?)", db_req_rows)
                 if db_resp_rows:
-                    cursor.executemany("INSERT OR REPLACE INTO responses VALUES (?,?,?,?,?)", db_resp_rows)
+                    cursor.executemany("INSERT OR REPLACE INTO responses VALUES (?,?,?,?,?,?)", db_resp_rows)

                 def submit_batch():
                     """Submit the current batch to the thread pool"""
@@ -259,6 +238,10 @@ class FlowAnalyzer:
                 # --- Main Pipeline Loop ---
                 if process.stdout:
                     for line in process.stdout:
+                        # Strip newline
+                        line = line.strip()
+                        if not line:
+                            continue
                         current_batch.append(line)

                         if len(current_batch) >= BATCH_SIZE:
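The net effect of these FlowAnalyzer.py changes is that the HTTP status code now travels from the capture into the responses table and back out of both queries. Below is a minimal standalone sketch of the 0.4.7 schema and query shape; the table and column definitions are copied from the diff, while the sample frames are invented for illustration.

import sqlite3

# Same table shapes as the CREATE TABLE statements in the diff above.
conn = sqlite3.connect(":memory:")
cursor = conn.cursor()
cursor.execute("CREATE TABLE requests (frame_num INTEGER PRIMARY KEY, header BLOB, file_data BLOB, full_uri TEXT, time_epoch REAL)")
cursor.execute("CREATE TABLE responses (frame_num INTEGER PRIMARY KEY, header BLOB, file_data BLOB, time_epoch REAL, request_in INTEGER, status_code INTEGER)")

# Invented frames: frame 1 is answered by frame 2; frame 9 answers a request
# that was never captured, so it only surfaces through the orphan query.
cursor.execute("INSERT INTO requests VALUES (?,?,?,?,?)", (1, b"GET / HTTP/1.1", b"", "http://example.com/", 1.0))
cursor.executemany("INSERT OR REPLACE INTO responses VALUES (?,?,?,?,?,?)", [
    (2, b"HTTP/1.1 200 OK", b"hello", 1.1, 1, 200),
    (9, b"HTTP/1.1 404 Not Found", b"", 2.0, 7, 404),
])

# sql_pair from the diff: index 10 of each row is now resp.status_code.
for row in cursor.execute("""
    SELECT req.frame_num, req.header, req.file_data, req.full_uri, req.time_epoch,
           resp.frame_num, resp.header, resp.file_data, resp.time_epoch, resp.request_in, resp.status_code
    FROM requests req LEFT JOIN responses resp ON req.frame_num = resp.request_in
    ORDER BY req.frame_num ASC"""):
    print("pair:", row[0], "->", row[5], "status:", row[10])   # pair: 1 -> 2 status: 200

# sql_orphan from the diff: responses whose request was never seen.
for row in cursor.execute("SELECT frame_num, status_code FROM responses WHERE request_in NOT IN (SELECT frame_num FROM requests)"):
    print("orphan:", row)                                      # orphan: (9, 404)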
FlowAnalyzer/Models.py
CHANGED

@@ -8,17 +8,18 @@ class Request:
     frame_num: int
     header: bytes
     file_data: bytes
-    full_uri: str
     time_epoch: float
+    full_uri: str


 @dataclass
 class Response:
-    __slots__ = ("frame_num", "header", "file_data", "time_epoch", "_request_in")
+    __slots__ = ("frame_num", "header", "file_data", "time_epoch", "status_code", "_request_in")
     frame_num: int
     header: bytes
     file_data: bytes
     time_epoch: float
+    status_code: int
     _request_in: Optional[int]

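Models.py pairs a hand-written __slots__ tuple with @dataclass, so the tuple must be updated in lock step with the field list — exactly what this change does for status_code. A small sketch, re-stating only the Response model shown above, of what that combination buys: instances carry no __dict__, which keeps per-instance memory down when many Response objects are materialized from a large capture.

from dataclasses import dataclass
from typing import Optional

@dataclass
class Response:
    # Every slot name must match a declared field; no field may have a class-level default.
    __slots__ = ("frame_num", "header", "file_data", "time_epoch", "status_code", "_request_in")
    frame_num: int
    header: bytes
    file_data: bytes
    time_epoch: float
    status_code: int
    _request_in: Optional[int]

r = Response(frame_num=2, header=b"HTTP/1.1 200 OK", file_data=b"", time_epoch=1.1, status_code=200, _request_in=1)
print(r.status_code)      # 200
try:
    r.extra = "nope"      # __slots__ rejects undeclared attributes
except AttributeError as e:
    print("rejected:", e)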
FlowAnalyzer/PacketParser.py
CHANGED

@@ -2,46 +2,79 @@ import binascii
 import contextlib
 import gzip
 from typing import List, Optional, Tuple
-from urllib import parse

 from .logging_config import logger


 class PacketParser:
     @staticmethod
-    def parse_packet_data(row):
+    def process_batch(lines: List[str]) -> List[dict]:
         """
-
-        row definition (all bytes):
-        0: http.response.code
-        1: http.request_in
-        2: tcp.reassembled.data
-        3: frame.number
-        4: tcp.payload
-        5: frame.time_epoch
-        6: exported_pdu.exported_pdu
-        7: http.request.full_uri
-        8: tcp.segment.count
+        Process lines in batches
         """
-        ... (19 lines of the old helper body, not rendered in this diff view)
+        results = []
+        for line in lines:
+            res = PacketParser.process_row(line)
+            if res:
+                results.append(res)
+        return results
+
+    @staticmethod
+    def process_row(line: str) -> Optional[dict]:
+        """
+        Parse one line of output from the tshark Lua script
+        Columns:
+        0: type ("req" / "rep" / "data")
+        1: frame.number
+        2: time_epoch
+        3: header_hex
+        4: file_data_hex (Body)
+        5: uri_or_code
+        6: request_in
+        """
+        try:
+            parts = line.split("\t")
+            if len(parts) < 6:
+                return None
+
+            p_type = parts[0]
+            frame_num = int(parts[1])
+            time_epoch = float(parts[2])
+
+            # Hex string -> Bytes
+            # parts[3] might be empty string
+            header = binascii.unhexlify(parts[3]) if parts[3] else b""
+            file_data = binascii.unhexlify(parts[4]) if parts[4] else b""
+
+            uri_or_code = parts[5]
+            request_in_str = parts[6] if len(parts) > 6 else ""
+
+            if p_type == "req":
+                return {"type": "request", "frame_num": frame_num, "header": header, "file_data": file_data, "time_epoch": time_epoch, "full_uri": uri_or_code, "request_in": None}
+            elif p_type == "rep":
+                request_in = int(request_in_str) if request_in_str else 0
+                try:
+                    status_code = int(uri_or_code)
+                except (ValueError, TypeError):
+                    status_code = 0
+
+                return {
+                    "type": "response",
+                    "frame_num": frame_num,
+                    "header": header,
+                    "file_data": file_data,
+                    "time_epoch": time_epoch,
+                    "request_in": request_in,
+                    "status_code": status_code,
+                    "full_uri": "",
+                }
+            else:
+                # 'data' or unknown, ignore for now based on current logic
+                return None
+
+        except Exception as e:
+            logger.debug(f"Packet parse error: {e} | Line: {line[:100]}...")
+            return None

     @staticmethod
     def split_http_headers(file_data: bytes) -> Tuple[bytes, bytes]:
@@ -141,49 +174,3 @@ class PacketParser:
         except Exception as e:
             logger.error(f"Unknown error while parsing HTTP data: {e}")
             return b"", b""
-
-    @staticmethod
-    def process_row(line: bytes) -> Optional[dict]:
-        """
-        Process a single line and return a structured result for the main thread to write out
-        """
-        line = line.rstrip(b"\r\n")
-        if not line:
-            return None
-
-        row = line.split(b"\t")
-        try:
-            frame_num, request_in, time_epoch, full_uri, full_request = PacketParser.parse_packet_data(row)
-
-            if not full_request:
-                return None
-
-            header, file_data = PacketParser.extract_http_file_data(full_request)
-
-            # row[0] is http.response.code (bytes)
-            is_response = bool(row[0])
-
-            return {
-                "type": "response" if is_response else "request",
-                "frame_num": frame_num,
-                "header": header,
-                "file_data": file_data,
-                "time_epoch": time_epoch,
-                "request_in": request_in,  # Only useful for Response
-                "full_uri": full_uri,  # Only useful for Request
-            }
-
-        except Exception:
-            return None
-
-    @staticmethod
-    def process_batch(lines: List[bytes]) -> List[dict]:
-        """
-        Process lines in batches to reduce function-call overhead
-        """
-        results = []
-        for line in lines:
-            res = PacketParser.process_row(line)
-            if res:
-                results.append(res)
-        return results
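The rewritten PacketParser no longer splits raw tshark -T fields output as bytes; it consumes the tab-separated, hex-encoded lines printed by the new Lua tap. A fabricated round trip through that 7-column layout (all sample values here are invented) shows the decoding steps process_row performs:

import binascii

# Build one line the way the Lua script would print a response record:
# type, frame number, epoch time, header hex, body hex, status code, request_in.
header_hex = binascii.hexlify(b"HTTP/1.1 200 OK").decode().upper()
body_hex = binascii.hexlify(b"hello").decode().upper()
line = "\t".join(["rep", "42", "1700000000.123", header_hex, body_hex, "200", "41"])

# Mirror the decoding steps of PacketParser.process_row.
parts = line.split("\t")
print(parts[0])                          # 'rep' -> becomes a "response" dict
print(int(parts[1]), float(parts[2]))    # 42 1700000000.123
print(binascii.unhexlify(parts[3]))      # b'HTTP/1.1 200 OK'
print(binascii.unhexlify(parts[4]))      # b'hello'
print(int(parts[5]), int(parts[6]))      # status_code=200, request_in=41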
FlowAnalyzer/tshark.lua
ADDED

@@ -0,0 +1,196 @@
+-- =========================================================================
+-- 1. Field definitions
+-- =========================================================================
+local f_resp_code = Field.new("http.response.code")
+local f_full_uri = Field.new("http.request.full_uri")
+local f_frame_num = Field.new("frame.number")
+local f_time_epoch = Field.new("frame.time_epoch")
+local f_reassembled = Field.new("tcp.reassembled.data")
+local f_payload = Field.new("tcp.payload")
+local f_file_data = Field.new("http.file_data")
+local f_seg_count = Field.new("tcp.segment.count")
+local f_retrans = Field.new("tcp.analysis.retransmission")
+local f_request_in = Field.new("http.request_in")
+-- [New] Alternative source field for the header
+local f_exported_pdu = Field.new("exported_pdu.exported_pdu")
+
+-- =========================================================================
+-- 2. Fetch the filter
+-- =========================================================================
+local user_filter = os.getenv("flowanalyzer_filter")
+if not user_filter or user_filter == "" then
+    user_filter = "http"
+end
+
+-- =========================================================================
+-- 3. Initialize the listener
+-- =========================================================================
+local tap = Listener.new("frame", user_filter)
+
+-- =========================================================================
+-- 4. Helper functions
+-- =========================================================================
+
+local function val_to_str(val)
+    if val == nil then
+        return ""
+    end
+    return tostring(val)
+end
+
+-- Locate the end of the header
+local function find_header_split_pos(hex_str)
+    if not hex_str then
+        return nil
+    end
+
+    -- 1. Look for 0D0A0D0A (CRLF CRLF)
+    local start_idx = 1
+    while true do
+        local s, e = string.find(hex_str, "0D0A0D0A", start_idx, true)
+        if not s then
+            break
+        end
+        if s % 2 == 1 then
+            return s
+        end -- ensure the match is byte-aligned
+        start_idx = s + 1
+    end
+
+    -- 2. Look for 0A0A (LF LF)
+    start_idx = 1
+    while true do
+        local s, e = string.find(hex_str, "0A0A", start_idx, true)
+        if not s then
+            break
+        end
+        if s % 2 == 1 then
+            return s
+        end
+        start_idx = s + 1
+    end
+    return nil
+end
+
+-- [Core performance optimization] Smart header-hex extraction
+-- Even with no size cap on the body, only the first 2 KB is scanned for the header, since headers never get that long
+local function extract_header_smart(field_info)
+    if not field_info then
+        return ""
+    end
+
+    local range = field_info.range
+    local total_len = range:len()
+
+    -- Preview the first 2 KB
+    local cap_len = 2048
+    if total_len < cap_len then
+        cap_len = total_len
+    end
+
+    -- [Key] Convert to hex and force upper case
+    local preview_hex = string.upper(range(0, cap_len):bytes():tohex())
+
+    -- Find the separator
+    local pos = find_header_split_pos(preview_hex)
+
+    if pos then
+        return string.sub(preview_hex, 1, pos - 1)
+    else
+        return preview_hex
+    end
+end
+
+-- Fetch the complete hex directly
+local function get_full_hex(field_info)
+    if not field_info then
+        return ""
+    end
+    -- Force upper case to keep the format consistent
+    return string.upper(field_info.range:bytes():tohex())
+end
+
+-- =========================================================================
+-- 5. Main processing logic
+-- =========================================================================
+function tap.packet(pinfo, tvb)
+    -- Skip TCP retransmissions
+    if f_retrans() then
+        return
+    end
+
+    local frame_num = f_frame_num()
+    if not frame_num then
+        return
+    end
+
+    -- === 1. Determine the type (req/rep) and info (URI/code) ===
+    local col_type = "data"
+    local col_uri_or_code = ""
+
+    local code = f_resp_code()
+    local uri = f_full_uri()
+
+    if code then
+        col_type = "rep"
+        col_uri_or_code = tostring(code)
+    elseif uri then
+        col_type = "req"
+        col_uri_or_code = tostring(uri)
+    end
+
+    -- === 2. Basic info ===
+    local col_frame = tostring(frame_num)
+    local col_time = val_to_str(f_time_epoch())
+
+    -- === 3. Header hex ===
+    -- Priority: Exported PDU > TCP reassembled > TCP payload
+    local col_header_hex = ""
+
+    local exp_pdu = f_exported_pdu()
+
+    if exp_pdu then
+        col_header_hex = extract_header_smart(exp_pdu)
+    else
+        local seq_count = f_seg_count()
+        local reass = nil
+        if seq_count then
+            reass = f_reassembled()
+        end
+
+        if reass then
+            col_header_hex = extract_header_smart(reass)
+        else
+            local pay = f_payload()
+            if pay then
+                col_header_hex = extract_header_smart(pay)
+            end
+        end
+    end
+
+    -- === 4. File data (body hex) ===
+    -- [Changed] Size check removed; every body is converted unconditionally
+    local col_file_data = ""
+    local fd = f_file_data()
+
+    if fd then
+        col_file_data = get_full_hex(fd)
+    end
+
+    -- === 5. Request In (responses only) ===
+    local col_req_in = ""
+    local req_in = f_request_in()
+    if req_in then
+        col_req_in = tostring(req_in)
+    end
+
+    -- === Output (tab-separated) ===
+    print(table.concat({col_type,        -- 1. req / rep
+                        col_frame,       -- 2. Frame Number
+                        col_time,        -- 3. Time Epoch
+                        col_header_hex,  -- 4. Header Bytes (Hex)
+                        col_file_data,   -- 5. File Data (Hex) [full data, nothing skipped]
+                        col_uri_or_code, -- 6. URI / Code
+                        col_req_in       -- 7. Request In
+                       }, "\t"))
+end
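For reference, this is roughly how FlowAnalyzer.py drives the script (mirroring the command built in the diff above; the pcap name and filter value are placeholders): tshark runs quietly under -q, -X lua_script: loads the tap, and the display filter reaches the script through the flowanalyzer_filter environment variable that os.getenv reads.

import os
import subprocess

env = os.environ.copy()
env["flowanalyzer_filter"] = "http"   # the script falls back to "http" if unset

command = ["tshark", "-r", "capture.pcap", "-q", "-X", "lua_script:tshark.lua"]
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                           env=env, encoding="utf-8", errors="replace")
if process.stdout:
    for line in process.stdout:
        line = line.strip()
        if line:
            print(line.split("\t")[:3])   # e.g. ['req', '5', '1700000000.100']
process.wait()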
flowanalyzer-0.4.7.dist-info/RECORD
ADDED

@@ -0,0 +1,13 @@
+FlowAnalyzer/FlowAnalyzer.py,sha256=5e4fCTvKcAb5wyloNy8pUOEZQflJ3ePfzIfkYnbbiuA,12549
+FlowAnalyzer/Models.py,sha256=onh3HaSpdKZPJOmXvKSIwGC19y9dIILKXm-FbKpBkCM,659
+FlowAnalyzer/PacketParser.py,sha256=N45Szx6E8RPuxvdtWZjveAYQm1jRvauRh-7epOqMv7s,5979
+FlowAnalyzer/Path.py,sha256=E5VvucTftp8VTQUffFzFWHotQEYtZL-j7IQPOaleiug,130
+FlowAnalyzer/PcapSplitter.py,sha256=0E_vmLYYsE_gD34XTwG1XPx5kBg8ZchJspQEnkBoIdY,4855
+FlowAnalyzer/__init__.py,sha256=vfiHONPTrvjUU3MwhjFOEo3sWfzlhkA6gOLn_4UJ7sg,70
+FlowAnalyzer/logging_config.py,sha256=fnBlvoimteQ38IBlQBV9fdLQvfAlRgGhcvLpUC3YunA,732
+FlowAnalyzer/tshark.lua,sha256=OrfGdiodlJo3AkI27sUr-0fvfSnbsIOO3kKqAhO7BWY,5745
+flowanalyzer-0.4.7.dist-info/licenses/LICENSE,sha256=ybAV0ECduYBZCpjkHyNALVWRRmT_eM0BDgqUszhwEFU,1080
+flowanalyzer-0.4.7.dist-info/METADATA,sha256=y5DqyrvLjb9akcbdf52tp3TDb2sudxliB4nUACbHBlM,6099
+flowanalyzer-0.4.7.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+flowanalyzer-0.4.7.dist-info/top_level.txt,sha256=2MtvAF6dEe_eHipw_6G5pFLb2uOCbGnlH0bC4iBtm5A,13
+flowanalyzer-0.4.7.dist-info/RECORD,,

flowanalyzer-0.4.6.dist-info/RECORD
DELETED

@@ -1,12 +0,0 @@
-FlowAnalyzer/FlowAnalyzer.py,sha256=9SshWk5wf0XATI7W4eBiIpzEqeGFyQJs3on5ox-zrNQ,12666
-FlowAnalyzer/Models.py,sha256=2x7nPJIAyLTC1oiGFlW4mELDPgthk2IsmuyearT-MSQ,622
-FlowAnalyzer/PacketParser.py,sha256=So3iD2ykkWpT0e3aLjBdx_ohoNscD-oAt4bfr_oRqgo,6331
-FlowAnalyzer/Path.py,sha256=E5VvucTftp8VTQUffFzFWHotQEYtZL-j7IQPOaleiug,130
-FlowAnalyzer/PcapSplitter.py,sha256=0E_vmLYYsE_gD34XTwG1XPx5kBg8ZchJspQEnkBoIdY,4855
-FlowAnalyzer/__init__.py,sha256=vfiHONPTrvjUU3MwhjFOEo3sWfzlhkA6gOLn_4UJ7sg,70
-FlowAnalyzer/logging_config.py,sha256=fnBlvoimteQ38IBlQBV9fdLQvfAlRgGhcvLpUC3YunA,732
-flowanalyzer-0.4.6.dist-info/licenses/LICENSE,sha256=ybAV0ECduYBZCpjkHyNALVWRRmT_eM0BDgqUszhwEFU,1080
-flowanalyzer-0.4.6.dist-info/METADATA,sha256=j9Bw-2Sr1dx_DatRtxo56WE0BB1-WMOoIhfoSoSYk-Y,6099
-flowanalyzer-0.4.6.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-flowanalyzer-0.4.6.dist-info/top_level.txt,sha256=2MtvAF6dEe_eHipw_6G5pFLb2uOCbGnlH0bC4iBtm5A,13
-flowanalyzer-0.4.6.dist-info/RECORD,,
{flowanalyzer-0.4.6.dist-info → flowanalyzer-0.4.7.dist-info}/WHEEL
File without changes

{flowanalyzer-0.4.6.dist-info → flowanalyzer-0.4.7.dist-info}/licenses/LICENSE
File without changes

{flowanalyzer-0.4.6.dist-info → flowanalyzer-0.4.7.dist-info}/top_level.txt
File without changes