traffic-taffy 0.5.8__py3-none-any.whl → 0.6.1__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
traffic_taffy/__init__.py CHANGED
@@ -1 +1 @@
- __VERSION__ = "0.5.8"
+ __VERSION__ = "0.6.1"
traffic_taffy/compare.py CHANGED
@@ -1,16 +1,35 @@
+ """The primary statistical packet comparison engine."""
+
+ from __future__ import annotations
  from logging import debug, error
- from typing import List
+ from typing import List, TYPE_CHECKING
  import datetime as dt
  from datetime import datetime

+ if TYPE_CHECKING:
+ from argparse import ArgumentParser, Namespace
+
  from traffic_taffy.comparison import Comparison
  from traffic_taffy.dissectmany import PCAPDissectMany
  from traffic_taffy.dissector import PCAPDissectorLevel
  from traffic_taffy.dissection import Dissection

+ from dataclasses import dataclass
+
+
+ @dataclass
+ class Report:
+ delta_percentage: float
+ delta_absolute: int
+ total: int
+ left_count: int
+ right_count: int
+ left_percentage: float
+ right_percentage: float
+

  class PcapCompare:
- "Takes a set of PCAPs to then perform various comparisons upon"
+ """Take a set of PCAPs to then perform various comparisons upon."""

  REPORT_VERSION: int = 2

@@ -25,11 +44,12 @@ class PcapCompare:
  bin_size: int | None = None,
  dissection_level: PCAPDissectorLevel = PCAPDissectorLevel.COUNT_ONLY,
  between_times: List[int] | None = None,
- ignore_list: List[str] = [],
+ ignore_list: List[str] | None = None,
  layers: List[str] | None = None,
  force_load: bool = False,
  force_overwrite: bool = False,
  ) -> None:
+ """Create a compare object."""
  self.pcap_files = pcap_files
  self.deep = deep
  self.maximum_count = maximum_count
@@ -39,30 +59,31 @@ class PcapCompare:
  self.between_times = between_times
  self.bin_size = bin_size
  self.cache_file_suffix = cache_file_suffix
- self.ignore_list = ignore_list
+ self.ignore_list = ignore_list or []
  self.layers = layers
  self.force_overwrite = force_overwrite
  self.force_load = force_load

  @property
- def pcap_files(self):
+ def pcap_files(self) -> List[str]:
+ """List of pcap files being compared."""
  return self._pcap_files

  @pcap_files.setter
- def pcap_files(self, new_pcap_files):
+ def pcap_files(self, new_pcap_files: List[str]) -> None:
  self._pcap_files = new_pcap_files

  @property
- def reports(self):
+ def reports(self) -> List[dict]:
+ """List of reports generated by the comparison."""
  return self._reports

  @reports.setter
- def reports(self, newvalue):
+ def reports(self, newvalue: List[dict]) -> None:
  self._reports = newvalue

  def compare_dissections(self, left_side: dict, right_side: dict) -> dict:
- "compares the results from two reports"
-
+ """Compare two dissections."""
  report = {}

  keys = set(left_side.keys())
@@ -79,7 +100,7 @@ class PcapCompare:
  right_side_total = sum(right_side[key].values())

  new_left_count = 0
- for subkey in left_side[key].keys():
+ for subkey in left_side[key]:
  delta_percentage = 0.0
  total = 0
  if subkey in right_side[key]:
@@ -99,18 +120,18 @@ class PcapCompare:
  new_left_count += 1

  delta_absolute = right_count - left_count
- report[key][subkey] = {
- "delta_percentage": delta_percentage,
- "delta_absolute": delta_absolute,
- "total": total,
- "left_count": left_count,
- "right_count": right_count,
- "left_percentage": left_percentage,
- "right_percentage": right_percentage,
- }
+ report[key][subkey] = Report(
+ delta_percentage=delta_percentage,
+ delta_absolute=delta_absolute,
+ total=total,
+ left_count=left_count,
+ right_count=right_count,
+ left_percentage=left_percentage,
+ right_percentage=right_percentage,
+ )

  new_right_count = 0
- for subkey in right_side[key].keys():
+ for subkey in right_side[key]:
  if subkey not in report[key]:
  delta_percentage = 1.0
  total = right_side[key][subkey]
@@ -120,15 +141,15 @@ class PcapCompare:
  right_percentage = right_side[key][subkey] / right_side_total
  new_right_count += 1 # this value wasn't in the left

- report[key][subkey] = {
- "delta_percentage": delta_percentage,
- "delta_absolute": right_count,
- "total": total,
- "left_count": left_count,
- "right_count": right_count,
- "left_percentage": left_percentage,
- "right_percentage": right_percentage,
- }
+ report[key][subkey] = Report(
+ delta_percentage=delta_percentage,
+ delta_absolute=right_count,
+ total=total,
+ left_count=left_count,
+ right_count=right_count,
+ left_percentage=left_percentage,
+ right_percentage=right_percentage,
+ )

  if right_side_total == 0:
  right_percent = 100
@@ -140,19 +161,20 @@ class PcapCompare:
  else:
  left_percent = new_left_count / left_side_total

- report[key][Dissection.NEW_RIGHT_SUBKEY] = {
- "delta_absolute": new_right_count - new_left_count,
- "total": new_left_count + new_right_count,
- "left_count": new_left_count,
- "right_count": new_right_count,
- "left_percentage": left_percent,
- "right_percentage": right_percent,
- "delta_percentage": right_percent - left_percent,
- }
+ report[key][Dissection.NEW_RIGHT_SUBKEY] = Report(
+ delta_absolute=new_right_count - new_left_count,
+ total=new_left_count + new_right_count,
+ left_count=new_left_count,
+ right_count=new_right_count,
+ left_percentage=left_percent,
+ right_percentage=right_percent,
+ delta_percentage=right_percent - left_percent,
+ )

  return Comparison(report)

  def load_pcaps(self) -> None:
+ """Load all pcaps into memory and dissect them."""
  # load the first as a reference pcap
  pdm = PCAPDissectMany(
  self.pcap_files,
@@ -167,17 +189,16 @@ class PcapCompare:
  force_load=self.force_load,
  force_overwrite=self.force_overwrite,
  )
- results = pdm.load_all()
- return results
+ return pdm.load_all()

  def compare(self) -> List[Comparison]:
- "Compares each pcap against the original source"
-
+ """Compare each pcap as requested."""
  dissections = self.load_pcaps()
  self.compare_all(dissections)
  return self.reports

- def compare_all(self, dissections) -> List[Comparison]:
+ def compare_all(self, dissections: List[Dissection]) -> List[Comparison]:
+ """Compare all loaded pcaps."""
  reports = []
  if len(self.pcap_files) > 1:
  # multiple file comparison
@@ -197,9 +218,8 @@ class PcapCompare:
  error(
  "the requested pcap data was not long enough to compare against itself"
  )
- raise ValueError(
- "not enough of a single capture file to time-bin the results"
- )
+ errorstr: str = "not large enough pcap file"
+ raise ValueError(errorstr)
  debug(
  f"found {len(timestamps)} timestamps from {timestamps[2]} to {timestamps[-1]}"
  )
@@ -249,7 +269,10 @@ class PcapCompare:
  return reports


- def compare_add_parseargs(compare_parser, add_subgroup: bool = True):
+ def compare_add_parseargs(
+ compare_parser: ArgumentParser, add_subgroup: bool = True
+ ) -> ArgumentParser:
+ """Add common comparison arguments."""
  if add_subgroup:
  compare_parser = compare_parser.add_argument_group("Comparison result options")

@@ -285,17 +308,26 @@ def compare_add_parseargs(compare_parser, add_subgroup: bool = True):
  )

  compare_parser.add_argument(
- "-T",
- "--between-times",
- nargs=2,
- type=int,
- help="For single files, only display results between these timestamps",
+ "-s",
+ "--sort-by",
+ default="delta%",
+ type=str,
+ help="Sort report entries by this column",
  )

+ # compare_parser.add_argument(
+ # "-T",
+ # "--between-times",
+ # nargs=2,
+ # type=int,
+ # help="For single files, only display results between these timestamps",
+ # )
+
  return compare_parser


- def get_comparison_args(args):
+ def get_comparison_args(args: Namespace) -> dict:
+ """Return a dict of comparison parameters from arguments."""
  return {
  "maximum_count": args.packet_count or 0,
  "print_threshold": float(args.print_threshold) / 100.0,
traffic_taffy/comparison.py CHANGED
@@ -1,26 +1,34 @@
- from typing import Dict
+ """A simple data storage module to hold comparison data."""
+
+ from __future__ import annotations
+ from typing import Dict, Any


  class Comparison:
+ """A simple data storage class to hold comparison data."""
+
  def __init__(self, contents: list, title: str = ""):
+ """Create a Comparison class from contents."""
  self.contents = contents
  self.title: str = title
- self.printing_arguments: Dict[str] = {}
+ self.printing_arguments: Dict[str, Any] = {}

  # title
  @property
  def title(self) -> str:
+ """The title of this comparison."""
  return self._title

  @title.setter
- def title(self, new_title):
+ def title(self, new_title: str) -> None:
  self._title = new_title

  # report contents -- actual data
  @property
- def contents(self):
+ def contents(self) -> None:
+ """The contents of this comparison."""
  return self._contents

  @contents.setter
- def contents(self, new_contents):
+ def contents(self, new_contents: str) -> None:
  self._contents = new_contents
traffic_taffy/dissection.py CHANGED
@@ -1,15 +1,16 @@
  """A Dissection class stores the results of a PCAP enumeration."""

  from __future__ import annotations
- import os
  from collections import defaultdict, Counter
- from typing import Any
+ from typing import Any, Dict, ClassVar
  from logging import debug, info, error, warning
  from enum import Enum
  import msgpack
  import ipaddress
  from typing import List
  from copy import deepcopy
+ from pathlib import Path
+ from traffic_taffy import __VERSION__ as VERSION


  class PCAPDissectorLevel(Enum):
@@ -17,6 +18,7 @@ class PCAPDissectorLevel(Enum):

  COUNT_ONLY = 1
  THROUGH_IP = 2
+ COMMON_LAYERS = 3
  DETAILED = 10


@@ -24,13 +26,15 @@ class Dissection:
  """Class to store the data from an enumerated pcap."""

  DISSECTION_KEY: str = "PCAP_DISSECTION_VERSION"
- DISSECTION_VERSION: int = 7
+ DISSECTION_VERSION: int = 8

  TOTAL_COUNT: str = "__TOTAL__"
  TOTAL_SUBKEY: str = "packet"
  WIDTH_SUBKEY: str = "__WIDTH__"
  NEW_RIGHT_SUBKEY: str = "__NEW_VALUES__"

+ PRINTABLE_LENGTH: int = 40
+
  def __init__(
  self: Dissection,
  pcap_file: str,
@@ -171,11 +175,13 @@ class Dissection:
  """Load the dissection data from a cache."""
  if not self.pcap_file or not isinstance(self.pcap_file, str):
  return None
- if not os.path.exists(self.pcap_file + self.cache_file_suffix):
+ if not Path(self.pcap_file + self.cache_file_suffix).exists():
  return None

  cached_file = self.pcap_file + self.cache_file_suffix
- cached_contents = self.load_saved(cached_file, dont_overwrite=True)
+ cached_contents = self.load_saved(
+ cached_file, dont_overwrite=True, force_load=force_load
+ )

  ok_to_load = True

@@ -208,9 +214,10 @@ class Dissection:
  # loading a more detailed cache is ok
  continue

- if parameter == "pcap_file" and os.path.basename(
- specified
- ) == os.path.basename(cached):
+ if (
+ parameter == "pcap_file"
+ and Path(specified).name == Path(cached).name
+ ):
  # as long as the basename is ok, we'll assume it's a different path
  continue

@@ -248,13 +255,14 @@ class Dissection:
  self.save(where)

  def save(self: Dissection, where: str) -> None:
- """Saves a generated dissection to a msgpack file."""
+ """Save a generated dissection to a msgpack file."""
  # wrap the report in a version header
  versioned_cache = {
  self.DISSECTION_KEY: self.DISSECTION_VERSION,
  "file": self.pcap_file,
  "parameters": {},
  "dissection": self.data,
+ "created_by": "traffic-taffy " + VERSION,
  }

  for parameter in self.parameters:
@@ -303,10 +311,10 @@ class Dissection:
  ] = versioned_cache["dissection"][timestamp][key][subkey]
  del versioned_cache["dissection"][timestamp][key][subkey]

- with open(where, "wb") as saveto:
+ with Path(where).open("wb") as saveto:
  msgpack.dump(versioned_cache, saveto)

- def load_saved_contents(self: Dissection, versioned_cache: dict):
+ def load_saved_contents(self: Dissection, versioned_cache: dict) -> None:
  """Set parameters from the cache."""
  # set the local parameters from the cache
  for parameter in self.parameters:
@@ -315,9 +323,14 @@ class Dissection:
  # load the data
  self.data = versioned_cache["dissection"]

- def load_saved(self: Dissection, where: str, dont_overwrite: bool = False) -> dict:
+ def load_saved(
+ self: Dissection,
+ where: str,
+ dont_overwrite: bool = False,
+ force_load: bool = False,
+ ) -> dict:
  """Load a saved report from a cache file."""
- with open(where, "rb") as cache_file:
+ with Path(where).open("rb") as cache_file:
  contents = msgpack.load(cache_file, strict_map_key=False)

  # convert the ignore list to a set (msgpack doesn't do sets)
@@ -326,7 +339,7 @@ class Dissection:
  )

  # check that the version header matches something we understand
- if contents[self.DISSECTION_KEY] != self.DISSECTION_VERSION:
+ if not force_load and contents[self.DISSECTION_KEY] != self.DISSECTION_VERSION:
  raise ValueError(
  "improper saved dissection version: report version = "
  + str(contents[self.DISSECTION_KEY])
@@ -413,8 +426,9 @@ class Dissection:
  value = "0x" + value.hex()
  else:
  value = "[unprintable]"
- if len(value) > 40:
- value = value[0:40] + "..." # truncate to reasonable
+ if len(value) > Dissection.PRINTABLE_LENGTH:
+ # truncate to reasonable
+ value = value[0 : Dissection.PRINTABLE_LENGTH] + "..."
  return value

  @staticmethod
@@ -428,7 +442,7 @@ class Dissection:
  return ":".join(map(two_hex, value))

  # has to go at the end to pick up the above function names
- DISPLAY_TRANSFORMERS: dict = {
+ DISPLAY_TRANSFORMERS: ClassVar[Dict[str, callable]] = {
  "Ethernet.IP.src": ipaddress.ip_address,
  "Ethernet.IP.dst": ipaddress.ip_address,
  "Ethernet.IP6.src": ipaddress.ip_address,
traffic_taffy/dissectmany.py CHANGED
@@ -1,13 +1,25 @@
- from traffic_taffy.dissector import PCAPDissector
- from pcap_parallel import PCAPParallel
+ """A module for dissecting a number of PCAP files."""
+
+ from __future__ import annotations
  from concurrent.futures import ProcessPoolExecutor
  from logging import info
  import copy
  import multiprocessing
+ from pcap_parallel import PCAPParallel
+ from typing import List, TYPE_CHECKING
+
+ from traffic_taffy.dissector import PCAPDissector
+
+ if TYPE_CHECKING:
+ from io import BufferedIOBase
+ from traffic_taffy.dissection import Dissection


  class PCAPDissectMany:
- def __init__(self, pcap_files, *args, **kwargs):
+ """A class for dissecting a number of PCAP files."""
+
+ def __init__(self, pcap_files: List[str], *args: list, **kwargs: dict):
+ """Create a PCAPDissectMany instance."""
  self.pcap_files = pcap_files
  self.args = args
  self.kwargs = kwargs
@@ -20,7 +32,8 @@ class PCAPDissectMany:
  # Note: this may undercount due to int flooring()
  self.maximum_cores = int(multiprocessing.cpu_count() / len(self.pcap_files))

- def load_pcap_piece(self, pcap_io_buffer):
+ def load_pcap_piece(self, pcap_io_buffer: BufferedIOBase) -> Dissection:
+ """Load one piece of a pcap from a buffer."""
  kwargs = copy.copy(self.kwargs)
  # force false for actually loading
  kwargs["cache_results"] = False
@@ -36,12 +49,10 @@ class PCAPDissectMany:

  def load_pcap(
  self,
- pcap_file,
- split_size=None,
- maximum_count: int = 0,
- force_overwrite: bool = False,
- force_load: bool = False,
- ):
+ pcap_file: str,
+ split_size: int | None = None,
+ ) -> Dissection:
+ """Load one pcap file."""
  pd = PCAPDissector(
  pcap_file,
  *self.args,
@@ -55,23 +66,34 @@ class PCAPDissectMany:
  return dissection

  info(f"processing {pcap_file}")
- ps = PCAPParallel(
- pcap_file,
- split_size=split_size,
- callback=self.load_pcap_piece,
- maximum_count=self.kwargs.get("maximum_count", 0),
- maximum_cores=self.maximum_cores,
- )
- results = ps.split()
+ if isinstance(pcap_file, str) and (
+ pcap_file.endswith(".dnstap") or pcap_file.endswith(".tap")
+ ):
+ # deal with dnstap files
+
+ # the Dissector already handles loading a dnstap engine
+ # TODO(hardaker): see if we can use a splitter here with the framing chunks
+ dissection = pd.load()
+
+ else: # assume pcap
+ ps = PCAPParallel(
+ pcap_file,
+ split_size=split_size,
+ callback=self.load_pcap_piece,
+ maximum_count=self.kwargs.get("maximum_count", 0),
+ maximum_cores=self.maximum_cores,
+ )
+ results = ps.split()

- # the data is coming back in (likely overlapping) chunks, and
- # we need to merge them together
- dissection = results.pop(0).result()
- dissection.pcap_file = pcap_file # splitting has the wrong name
- for result in results:
- dissection.merge(result.result())
+ # the data is coming back in (likely overlapping) chunks, and
+ # we need to merge them together
+ dissection = results.pop(0).result()
+ dissection.pcap_file = pcap_file # splitting has the wrong name
+ for result in results:
+ dissection.merge(result.result())

- dissection.calculate_metadata()
+ # recalculate metadata now that merges have happened
+ dissection.calculate_metadata()

  if self.kwargs.get("cache_results"):
  # create a dissector just to save the cache
@@ -83,7 +105,10 @@ class PCAPDissectMany:

  return dissection

- def load_all(self, return_as_list: bool = False, dont_fork: bool = False):
+ def load_all(
+ self, return_as_list: bool = False, dont_fork: bool = False
+ ) -> List[Dissection]:
+ """Load all PCAPs in parallel."""
  if dont_fork:
  # handle each one individually -- typically for inserting debugging stops
  dissections = []
@@ -96,5 +121,5 @@ class PCAPDissectMany:
  with ProcessPoolExecutor() as executor:
  dissections = executor.map(self.load_pcap, self.pcap_files)
  if return_as_list: # convert from generator
- dissections = [x for x in dissections]
+ dissections = list(dissections)
  return dissections
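
Note (illustrative sketch, not part of the packaged code): load_pcap() now branches on the file suffix, loading .dnstap/.tap files whole through the dissector while still splitting regular pcaps via PCAPParallel. A hedged usage sketch follows; the file names are invented, and only keyword arguments referenced in this diff (cache_results, maximum_count) are shown.

from traffic_taffy.dissectmany import PCAPDissectMany

pdm = PCAPDissectMany(
    ["capture.pcap", "queries.dnstap"],  # hypothetical input files
    cache_results=False,
    maximum_count=0,
)
dissections = pdm.load_all(return_as_list=True)
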
traffic_taffy/dissector.py CHANGED
@@ -97,7 +97,16 @@ class PCAPDissector:

  engine = None
  args = self.dissection_args()
- if (
+
+ if isinstance(self.pcap_file, str) and (
+ self.pcap_file.endswith(".dnstap") or self.pcap_file.endswith(".tap")
+ ):
+ # we delay loading until the module and its requirements are needed
+ from traffic_taffy.dissector_engine.dnstap import DissectionEngineDNStap
+
+ engine = DissectionEngineDNStap(*args)
+
+ elif (
  self.dissector_level == PCAPDissectorLevel.DETAILED
  or self.dissector_level == PCAPDissectorLevel.DETAILED.value
  ):
@@ -181,7 +190,13 @@ def dissector_add_parseargs(parser, add_subgroup: bool = True):
  "Ethernet.IP.TCP.ack",
  "Ethernet.IPv6.TCP.seq",
  "Ethernet.IPv6.TCP.ack",
+ "Ethernet.IPv6.TCP.Raw.load",
+ "Ethernet.IP.UDP.Raw.load",
  "Ethernet.IP.UDP.DNS.id",
+ "Ethernet.IP.ICMP.IP in ICMP.UDP in ICMP.chksum",
+ "Ethernet.IP.ICMP.IP in ICMP.UDP in ICMP.Raw.load",
+ "Ethernet.IP.ICMP.IP in ICMP.chksum",
+ "Ethernet.IP.ICMP.IP in ICMP.id",
  "Ethernet.IP.TCP.DNS.id",
  "Ethernet.IPv6.UDP.DNS.id",
  "Ethernet.IPv6.TCP.DNS.id",
@@ -189,6 +204,9 @@ def dissector_add_parseargs(parser, add_subgroup: bool = True):
  "Ethernet.IP.chksum",
  "Ethernet.IP.UDP.chksum",
  "Ethernet.IP.TCP.chksum",
+ "Ethernet.IP.TCP.window",
+ "Ethernet.IP.TCP.Raw.load",
+ "Ethernet.IP.UDP.Raw.load",
  "Ethernet.IPv6.UDP.chksum",
  "Ethernet.IPv6.fl",
  "Ethernet.IP.ICMP.chksum",
@@ -197,6 +215,8 @@ def dissector_add_parseargs(parser, add_subgroup: bool = True):
  "Ethernet.IP.TCP.Padding.load",
  "Ethernet.IPv6.TCP.chksum",
  "Ethernet.IPv6.plen",
+ "Ethernet.IP.TCP.Encrypted Content.load",
+ "Ethernet.IP.TCP.TLS.TLS.Raw.load",
  ],
  nargs="*",
  type=str,
@@ -302,6 +322,7 @@ def check_dissector_level(level: int):
  current_dissection_levels = [
  PCAPDissectorLevel.COUNT_ONLY.value,
  PCAPDissectorLevel.THROUGH_IP.value,
+ PCAPDissectorLevel.COMMON_LAYERS.value,
  PCAPDissectorLevel.DETAILED.value,
  ]
  if level not in current_dissection_levels:
traffic_taffy/dissector_engine/__init__.py CHANGED
@@ -11,7 +11,7 @@ class DissectionEngine:
  pcap_filter: str = "",
  maximum_count: int = 0,
  bin_size: int = 0,
- dissector_level: PCAPDissectorLevel = PCAPDissectorLevel.DETAILED,
+ dissector_level: PCAPDissectorLevel = PCAPDissectorLevel.COMMON_LAYERS,
  cache_file_suffix: str = "pkl",
  ignore_list: list = [],
  layers: List[str] | None = None,
@@ -25,6 +25,22 @@ class DissectionEngine:
  self.ignore_list = set(ignore_list)
  self.layers = layers

+ def start_packet(
+ self, timestamp: int, dissection: Dissection | None = None
+ ) -> None:
+ if not dissection:
+ dissection = self.dissection
+
+ # set and bin-ize the timestamp
+ dissection.timestamp = int(timestamp)
+ if dissection.bin_size:
+ dissection.timestamp = (
+ dissection.timestamp - dissection.timestamp % dissection.bin_size
+ )
+
+ # increment the base counter for all packets
+ dissection.incr(Dissection.TOTAL_COUNT, dissection.TOTAL_SUBKEY)
+
  def init_dissection(self) -> Dissection:
  self.dissection = Dissection(
  pcap_file=self.pcap_file,
@@ -36,3 +52,10 @@ class DissectionEngine:
  ignore_list=self.ignore_list,
  )
  return self.dissection
+
+ def load(self) -> Dissection:
+ """Load the capture file into memory."""
+ self.init_dissection()
+ self.load_data()
+ self.dissection.calculate_metadata()
+ return self.dissection
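
Note (illustrative sketch, not part of the packaged code): the new load() and start_packet() helpers give the engine base class a template-method flow — load() calls init_dissection(), then the subclass's load_data(), then calculate_metadata(), while load_data() implementations call start_packet() once per packet to bin the timestamp and bump the total counter. The minimal engine below is hypothetical: the import path is assumed, and the class name, packet format, and constructor arguments are invented.

from traffic_taffy.dissector_engine import DissectionEngine  # assumed import path


class ToyEngine(DissectionEngine):
    """Hypothetical engine that only counts packets given as (timestamp, payload) pairs."""

    def __init__(self, packets, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._packets = packets

    def load_data(self) -> None:
        # load() has already called init_dissection(), so self.dissection exists
        for timestamp, _payload in self._packets:
            # bins the timestamp and increments Dissection.TOTAL_COUNT
            self.start_packet(timestamp)


# dissection = ToyEngine(packets, "example.pcap").load()  # hypothetical call
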