traffic-taffy 0.3.6__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. traffic_taffy/cache_info.py +0 -6
  2. traffic_taffy/compare.py +154 -250
  3. traffic_taffy/comparison.py +26 -0
  4. traffic_taffy/dissection.py +383 -0
  5. traffic_taffy/dissectmany.py +20 -18
  6. traffic_taffy/dissector.py +128 -476
  7. traffic_taffy/dissector_engine/__init__.py +35 -0
  8. traffic_taffy/dissector_engine/dpkt.py +98 -0
  9. traffic_taffy/dissector_engine/scapy.py +98 -0
  10. traffic_taffy/graph.py +23 -90
  11. traffic_taffy/graphdata.py +35 -20
  12. traffic_taffy/output/__init__.py +118 -0
  13. traffic_taffy/output/console.py +72 -0
  14. traffic_taffy/output/fsdb.py +50 -0
  15. traffic_taffy/output/memory.py +51 -0
  16. traffic_taffy/pcap_splitter.py +17 -36
  17. traffic_taffy/tools/cache_info.py +65 -0
  18. traffic_taffy/tools/compare.py +110 -0
  19. traffic_taffy/tools/dissect.py +77 -0
  20. traffic_taffy/tools/explore.py +686 -0
  21. traffic_taffy/tools/graph.py +85 -0
  22. {traffic_taffy-0.3.6.dist-info → traffic_taffy-0.4.1.dist-info}/METADATA +1 -1
  23. traffic_taffy-0.4.1.dist-info/RECORD +29 -0
  24. traffic_taffy-0.4.1.dist-info/entry_points.txt +6 -0
  25. pcap_compare/cache_info.py +0 -46
  26. pcap_compare/compare.py +0 -288
  27. pcap_compare/dissectmany.py +0 -21
  28. pcap_compare/dissector.py +0 -512
  29. pcap_compare/dissectorresults.py +0 -21
  30. pcap_compare/graph.py +0 -210
  31. traffic_taffy/explore.py +0 -221
  32. traffic_taffy-0.3.6.dist-info/RECORD +0 -22
  33. traffic_taffy-0.3.6.dist-info/entry_points.txt +0 -5
  34. {pcap_compare → traffic_taffy/tools}/__init__.py +0 -0
  35. {traffic_taffy-0.3.6.dist-info → traffic_taffy-0.4.1.dist-info}/WHEEL +0 -0
  36. {traffic_taffy-0.3.6.dist-info → traffic_taffy-0.4.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,50 @@
1
+ import sys
2
+ import pyfsdb
3
+
4
+ from traffic_taffy.output import Output
5
+ from traffic_taffy.dissection import Dissection
6
+
7
+
8
class Fsdb(Output):
    """Output backend that streams comparison records to stdout as FSDB rows."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.console = None
        self.have_done_header = False
        self.in_report = None

        # One writer for the whole run; a fixed nine-column schema with
        # three string, three integer and three float columns.
        self.fsdb = pyfsdb.Fsdb(out_file_handle=sys.stdout)
        self.fsdb.out_column_names = [
            "report",
            "Key",
            "subkey",
            "left",
            "right",
            "delta",
            "left_fraction",
            "right_fraction",
            "delta_fraction",
        ]
        self.fsdb.converters = [str] * 3 + [int] * 3 + [float] * 3

    def output_start(self, report):
        """Remember which report the subsequent records belong to."""
        self.in_report = report.title

    def output_record(self, key, subkey, data) -> None:
        """Append one comparison record as a single FSDB row."""
        printable_subkey = Dissection.make_printable(key, subkey)
        row = [
            self.in_report,
            key,
            printable_subkey,
            data["left_count"],
            data["right_count"],
            data["delta_absolute"],
            data["left_percentage"],
            data["right_percentage"],
            data["delta_percentage"],
        ]
        self.fsdb.append(row)
@@ -0,0 +1,51 @@
1
+ from collections import defaultdict
2
+
3
+ from traffic_taffy.output import Output
4
+ from traffic_taffy.dissection import Dissection
5
+
6
+
7
class Memory(Output):
    """Output backend that accumulates comparison records in memory.

    Records are grouped by key into ``self.memory`` (a ``defaultdict(list)``)
    so callers can examine results programmatically instead of printing them.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # NOTE(review): console/have_done_header look like leftovers from the
        # console backend; kept for attribute parity with sibling Outputs.
        self.console = None
        self.have_done_header = False
        # Plain attributes: the previous trivial @property getter/setter
        # pairs added no validation or computation.
        self.title = kwargs.get("title", "")
        self.memory = None  # created lazily in output_start()

    def output_start(self, report):
        """Begin collecting for *report*: record its title and reset storage."""
        self.title = report.title
        self.memory = defaultdict(list)

    def output_record(self, key, subkey, data) -> None:
        """Store a single comparison record in memory under *key*."""
        subkey = Dissection.make_printable(key, subkey)
        self.memory[key].append(
            {
                "subkey": subkey,
                "left_count": data["left_count"],
                "right_count": data["right_count"],
                "delta_absolute": data["delta_absolute"],
                "left_percentage": data["left_percentage"],
                "right_percentage": data["right_percentage"],
                "delta_percentage": data["delta_percentage"],
            }
        )
@@ -3,11 +3,10 @@
3
3
  import io
4
4
  import os
5
5
  import multiprocessing
6
- from traffic_taffy.dissector import PCAPDissector
7
6
  from typing import List
8
7
  import dpkt
9
8
  from concurrent.futures import ProcessPoolExecutor, Future
10
- from logging import debug, info
9
+ from logging import debug
11
10
 
12
11
 
13
12
  class PCAPSplitter:
@@ -27,7 +26,6 @@ class PCAPSplitter:
27
26
  self.split_size: int = split_size
28
27
  self.maximum_count: int = maximum_count
29
28
  self.pcap_filter: str | None = pcap_filter
30
- self.maximum_cores = maximum_cores
31
29
 
32
30
  self.header: bytes = None
33
31
  self.buffer: bytes = None
@@ -40,46 +38,29 @@ class PCAPSplitter:
40
38
  if not os.path.exists(self.pcap_file):
41
39
  raise ValueError(f"failed to find pcap file '{self.pcap_file}'")
42
40
 
43
- def set_split_size(self):
44
- "Attempt to calculate a reasonable split size"
45
- if self.split_size:
46
- info(f"split size already set to {self.split_size}")
47
- return self.split_size
41
+ if not self.split_size:
42
+ cores = multiprocessing.cpu_count()
43
+ if maximum_cores and cores > maximum_cores:
44
+ cores = maximum_cores
48
45
 
49
- cores = multiprocessing.cpu_count()
50
- if self.maximum_cores and cores > self.maximum_cores:
51
- cores = self.maximum_cores
52
-
53
- if self.maximum_count and self.maximum_count > 0:
54
- # not ideal math, but better than nothing
55
- self.split_size = int(self.maximum_count / cores)
56
- else:
57
- if isinstance(self.our_data, io.BufferedReader):
58
- # raw uncompressed file
59
- divide_size = 1200
46
+ if self.maximum_count:
47
+ # not ideal math, but better than nothing
48
+ self.split_size = int(self.maximum_count / cores)
60
49
  else:
61
- # likely a compressed file
62
- divide_size = 5000
63
-
64
- # even worse math and assumes generally large packets
65
- stats = os.stat(self.pcap_file)
66
- file_size = stats.st_size
67
- self.split_size = int(file_size / divide_size / cores)
68
- debug(
69
- f"split info: {file_size=}, {divide_size=}, {cores=}, {self.split_size=}"
70
- )
50
+ # even worse math and assumes generally large packets
51
+ stats = os.stat(self.pcap_file)
52
+ file_size = stats.st_size
53
+ self.split_size = int(file_size / 1200 / cores)
71
54
 
72
- # even 1000 is kinda silly to split, but is better than nothing
73
- self.split_size = max(self.split_size, 1000)
74
- debug(f"setting PCAPSplitter split size to {self.split_size} for {cores} cores")
55
+ # even 1000 is kinda silly to split, but is better than nothing
56
+ self.split_size = min(self.split_size, 1000)
57
+ debug(f"setting PCAPSplitter split size to {self.split_size} for {cores}")
75
58
 
76
59
  def split(self) -> List[io.BytesIO] | List[Future]:
77
60
  "Does the actual reading and splitting"
78
61
  # open one for the dpkt reader and one for us independently
79
- self.our_data = PCAPDissector.open_maybe_compressed(self.pcap_file)
80
- self.dpkt_data = PCAPDissector.open_maybe_compressed(self.pcap_file)
81
-
82
- self.set_split_size()
62
+ self.our_data = open(self.pcap_file, "rb")
63
+ self.dpkt_data = open(self.pcap_file, "rb")
83
64
 
84
65
  # read the first 24 bytes which is the pcap header
85
66
  self.header = self.our_data.read(24)
@@ -0,0 +1,65 @@
1
+ """Loads the cached data for a file to display the results about it"""
2
+
3
+ from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
4
+ from rich import print
5
+ import logging
6
+ import msgpack
7
+
8
+
9
def parse_args():
    """Parse the command line arguments.

    Returns the parsed argparse namespace and configures logging as a
    side effect.
    """
    parser = ArgumentParser(
        formatter_class=ArgumentDefaultsHelpFormatter,
        description=__doc__,
        epilog="Example Usage: ",
    )

    parser.add_argument(
        "--log-level",
        "--ll",
        default="info",
        help="Define the logging verbosity level (debug, info, warning, error, fatal, critical).",
    )

    parser.add_argument(
        "cache_file",
        type=str,
        nargs="+",
        help="The cache file (or pcap file) to load and display information about",
    )

    args = parser.parse_args()
    # configure logging once, based on the requested verbosity
    log_level = args.log_level.upper()
    logging.basicConfig(level=log_level, format="%(levelname)-10s:\t%(message)s")
    return args
35
+
36
+
37
def main():
    """Load each cache file and print its metadata and timestamp summary."""
    args = parse_args()

    for cache_file in args.cache_file:
        print(f"===== {cache_file} ======")
        # strict_map_key=False because timestamp keys are integers
        with open(cache_file, "rb") as cache_handle:
            contents = msgpack.load(cache_handle, strict_map_key=False)

        # play the major keys
        for key in contents:
            if key not in ("dissection", "parameters"):
                print(f"{key:<20} {contents[key]}")

        # then the minors
        print("parameters:")
        for key in contents["parameters"]:
            print(f" {key:<16} {contents['parameters'][key]}")

        print("data info:")
        timestamps = list(contents["dissection"].keys())
        print(f" timestamps: {len(timestamps)}")
        if len(timestamps) > 1:
            print(f" first: {timestamps[1]}")  # skips 0 = global
            print(f" last: {timestamps[-1]}")
        else:
            print(" (only the entire summary timestamp)")
62
+
63
+
64
+ if __name__ == "__main__":
65
+ main()
@@ -0,0 +1,110 @@
1
+ """Takes a set of pcap files to compare and creates a report"""
2
+
3
+ from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
4
+ import logging
5
+ from traffic_taffy.output.console import Console
6
+ from traffic_taffy.output.fsdb import Fsdb
7
+
8
+ from traffic_taffy.compare import compare_add_parseargs, get_comparison_args
9
+ from traffic_taffy.dissector import (
10
+ dissector_add_parseargs,
11
+ limitor_add_parseargs,
12
+ check_dissector_level,
13
+ )
14
+ from traffic_taffy.compare import PcapCompare
15
+
16
+
17
def parse_args():
    """Parse the command line arguments.

    Also configures logging and validates the requested dissection level
    before returning the parsed namespace.
    """
    parser = ArgumentParser(
        formatter_class=ArgumentDefaultsHelpFormatter,
        description=__doc__,
        epilog="Example Usage: ",
    )

    output_options = parser.add_argument_group("Output format")
    output_options.add_argument(
        "-f",
        "--fsdb",
        action="store_true",
        help="Print results in an FSDB formatted output",
    )

    # shared option groups used by the other taffy tools as well
    limitor_parser = limitor_add_parseargs(parser)
    compare_add_parseargs(limitor_parser, False)
    dissector_add_parseargs(parser)

    debugging_group = parser.add_argument_group("Debugging options")

    debugging_group.add_argument(
        "--log-level",
        "--ll",
        default="info",
        help="Define the logging verbosity level (debug, info, warning, error, ...).",
    )

    parser.add_argument("pcap_files", type=str, nargs="*", help="PCAP files to analyze")

    args = parser.parse_args()
    log_level = args.log_level.upper()
    logging.basicConfig(level=log_level, format="%(levelname)-10s:\t%(message)s")

    # fail early if the requested dissection level is unsupported
    check_dissector_level(args.dissection_level)

    return args
55
+
56
+
57
def main():
    """Compare consecutive pairs of pcap files and print a report per pair.

    Files are consumed pairwise from the command line: (a, b), (b, c), ...
    A single file is compared against itself over time (right stays None).
    """
    args = parse_args()

    # setup output options
    printing_arguments = get_comparison_args(args)

    # get our files to compare (maybe just one)
    left = args.pcap_files.pop(0)
    right = None
    more_than_one = False

    if len(args.pcap_files) > 0:
        right = args.pcap_files.pop(0)
        more_than_one = True

    while left:
        # compare one or two files per iteration
        files = [left]
        if right:
            files.append(right)

        pc = PcapCompare(
            files,
            cache_results=args.cache_pcap_results,
            cache_file_suffix=args.cache_file_suffix,
            maximum_count=printing_arguments["maximum_count"],
            dissection_level=args.dissection_level,
            between_times=args.between_times,
            bin_size=args.bin_size,
            ignore_list=args.ignore_list,
        )

        # compare the pcaps
        reports = pc.compare()

        # a fresh output backend per pair, selected by --fsdb
        if args.fsdb:
            output = Fsdb(None, printing_arguments)
        else:
            output = Console(None, printing_arguments)

        for report in reports:
            # output results to the console
            output.output(report)

        # slide the window: the old right becomes the new left
        left = right
        right = None
        if len(args.pcap_files) > 0:
            right = args.pcap_files.pop(0)

        # stop once the multi-file list is exhausted (the single-file case
        # keeps left set and loops once with right == None)
        if left and not right and more_than_one:
            left = None
107
+
108
+
109
+ if __name__ == "__main__":
110
+ main()
@@ -0,0 +1,77 @@
1
+ from traffic_taffy.dissector import (
2
+ PCAPDissector,
3
+ dissector_add_parseargs,
4
+ limitor_add_parseargs,
5
+ check_dissector_level,
6
+ )
7
+
8
+
9
def main():
    """Dissect a single pcap file and print counted results.

    Parses CLI arguments, runs the dissector (with optional caching), and
    prints the whole-capture summary either as FSDB or plain console output.
    """
    from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
    import logging

    def parse_args():
        "Parse the command line arguments."
        parser = ArgumentParser(
            formatter_class=ArgumentDefaultsHelpFormatter,
            description=__doc__,
            epilog="Example Usage: ",
        )

        parser.add_argument(
            "--log-level",
            "--ll",
            default="info",
            help="Define the logging verbosity level (debug, info, warning, error, fatal, critical).",
        )

        parser.add_argument(
            "-f",
            "--fsdb",
            action="store_true",
            help="Print results in an FSDB formatted output",
        )

        dissector_add_parseargs(parser)
        limitor_add_parseargs(parser)

        parser.add_argument("input_file", type=str, help="input pcap file")

        args = parser.parse_args()
        log_level = args.log_level.upper()
        logging.basicConfig(level=log_level, format="%(levelname)-10s:\t%(message)s")
        return args

    args = parse_args()

    # fail early if the requested dissection level is unsupported
    check_dissector_level(args.dissection_level)

    pd = PCAPDissector(
        args.input_file,
        bin_size=args.bin_size,
        dissector_level=args.dissection_level,
        maximum_count=args.packet_count,
        cache_results=args.cache_pcap_results,
        cache_file_suffix=args.cache_file_suffix,
        ignore_list=args.ignore_list,
    )
    pd.load(force=args.force)

    # timestamps=[0] selects the whole-capture (global) summary bucket
    if args.fsdb:
        pd.print_to_fsdb(
            timestamps=[0],
            match_string=args.match_string,
            match_value=args.match_value,
            minimum_count=args.minimum_count,
        )
    else:
        pd.print(
            timestamps=[0],
            match_string=args.match_string,
            match_value=args.match_value,
            minimum_count=args.minimum_count,
        )
+ )
74
+
75
+
76
+ if __name__ == "__main__":
77
+ main()