traffic-taffy 0.5.4__py3-none-any.whl → 0.5.6__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
traffic_taffy/__init__.py CHANGED
@@ -1 +1 @@
- __VERSION__ = "0.5.4"
+ __VERSION__ = "0.5.6"
traffic_taffy/dissection.py CHANGED
@@ -39,7 +39,7 @@ class Dissection:
  bin_size: int = 0,
  dissector_level: PCAPDissectorLevel = PCAPDissectorLevel.DETAILED,
  cache_file_suffix: str = "taffy",
- ignore_list: list = [],
+ ignore_list: list | None = None,
  *args: list,
  **kwargs: dict,
  ) -> Dissection:
@@ -52,7 +52,7 @@ class Dissection:
  self.dissector_level = dissector_level
  self.maximum_count = maximum_count
  self.pcap_filter = pcap_filter
- self.ignore_list = ignore_list
+ self.ignore_list = ignore_list or []

  self.parameters = [
  "pcap_file",
@@ -81,10 +81,11 @@ class Dissection:

  @property
  def timestamp(self) -> int:
+ """Timestamp currently being worked on."""
  return self._timestamp

  @timestamp.setter
- def timestamp(self: Dissection, newval):
+ def timestamp(self: Dissection, newval: int) -> None:
  self._timestamp = newval

  @property
@@ -93,19 +94,19 @@ class Dissection:
  return self._data

  @data.setter
- def data(self: Dissection, newval):
+ def data(self: Dissection, newval: dict) -> None:
  self._data = newval

  @property
- def pcap_file(self: Dissection):
- """The PCAP file name of this dissection"""
+ def pcap_file(self: Dissection) -> str:
+ """The PCAP file name of this dissection."""
  return self._pcap_file

  @pcap_file.setter
- def pcap_file(self: Dissection, newval):
+ def pcap_file(self: Dissection, newval: str) -> None:
  self._pcap_file = newval

- def incr(self: Dissection, key: str, value: Any, count: int = 1):
+ def incr(self: Dissection, key: str, value: Any, count: int = 1) -> None:
  """Increase one field within the counter."""
  # always save a total count at the zero bin
  # note: there should be no recorded tcpdump files from 1970 Jan 01 :-)
@@ -117,8 +118,8 @@ class Dissection:

  def calculate_metadata(self: Dissection) -> None:
  """Calculate thing like the number of value entries within each key/subkey."""
- # TODO: do we do this with or without key and value matches?
- for timestamp in self.data.keys():
+ # TODO(hardaker): do we do this with or without key and value matches?
+ for timestamp in self.data:
  for key in self.data[timestamp]:
  if self.WIDTH_SUBKEY in self.data[timestamp][key]:
  # make sure to avoid counting itself
@@ -131,12 +132,12 @@ class Dissection:
  # don't count the NEW subkey either
  self.data[timestamp][key] -= 1

- def merge(self: Dissection, other_dissection) -> None:
- "merges counters in two dissections into self -- note destructive to self"
+ def merge(self: Dissection, other_dissection: Dissection) -> None:
+ """Merge counters from another dissection into self."""
  for timestamp in other_dissection.data:
  for key in other_dissection.data[timestamp]:
  for subkey in other_dissection.data[timestamp][key]:
- # TODO: this is horribly inefficient
+ # TODO(hardaker): this is horribly inefficient
  if timestamp not in self.data:
  self.data[timestamp] = defaultdict(Counter)
  elif key not in self.data[timestamp]:
@@ -151,11 +152,13 @@ class Dissection:
  ][key][subkey]

  def merge_all(self: Dissection, other_dissections: List[Dissection]) -> None:
+ """Merge multiple dissection contents into this one."""
  for dissection in other_dissections:
  self.merge(dissection)

  @staticmethod
- def subdict_producer():
+ def subdict_producer() -> defaultdict:
+ """Create a factory for creating a producer."""
  return defaultdict(Counter)

  #
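
Note: the merge() change above only adds annotations and a docstring; the counting logic is unchanged. A minimal sketch of that counter-merge pattern on the nested {timestamp: {key: Counter}} layout the class uses (sample values hypothetical):

    from collections import Counter, defaultdict

    a = {0: defaultdict(Counter, {"Ethernet.IP.src": Counter({"10.0.0.1": 3})})}
    b = {0: defaultdict(Counter, {"Ethernet.IP.src": Counter({"10.0.0.1": 2, "10.0.0.2": 1})})}

    for timestamp in b:
        for key in b[timestamp]:
            for subkey in b[timestamp][key]:
                # create missing levels on demand, then accumulate the counts
                a.setdefault(timestamp, defaultdict(Counter))[key][subkey] += b[timestamp][key][subkey]

    assert a[0]["Ethernet.IP.src"] == Counter({"10.0.0.1": 5, "10.0.0.2": 1})
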
@@ -165,6 +168,7 @@ class Dissection:
  def load_from_cache(
  self: Dissection, force_overwrite: bool = False, force_load: bool = True
  ) -> dict | None:
+ """Load the dissection data from a cache."""
  if not self.pcap_file or not isinstance(self.pcap_file, str):
  return None
  if not os.path.exists(self.pcap_file + self.cache_file_suffix):
@@ -233,19 +237,18 @@ class Dissection:

  error(f"Failed to load cached data for {self.pcap_file} due to differences")
  error("refusing to continue -- remove the cache to recreate it")
- raise ValueError(
- "INCOMPATIBLE CACHE: remove the cache or don't use it to continue"
- )
+ msg = "INCOMPATIBLE CACHE: remove the cache or don't use it to continue"
+ raise ValueError(msg)

  def save_to_cache(self: Dissection, where: str | None = None) -> None:
+ """Save the dissection contents to a cache."""
  if not where and self.pcap_file and isinstance(self.pcap_file, str):
  where = self.pcap_file + self.cache_file_suffix
  if where:
  self.save(where)

  def save(self: Dissection, where: str) -> None:
- "Saves a generated dissection to a msgpack file"
-
+ """Saves a generated dissection to a msgpack file."""
  # wrap the report in a version header
  versioned_cache = {
  self.DISSECTION_KEY: self.DISSECTION_VERSION,
@@ -256,7 +259,7 @@ class Dissection:

  for parameter in self.parameters:
  versioned_cache["parameters"][parameter] = getattr(self, parameter)
- # TODO: fix this hack
+ # TODO(hardaker): fix this hack

  # basically, bin_size of 0 is 1... but it may be faster
  # to leave it at zero to avoid the bin_size math of 1,
@@ -280,9 +283,31 @@ class Dissection:

  # save it
  info(f"caching PCAP data to '{where}'")
- msgpack.dump(versioned_cache, open(where, "wb"))

- def load_saved_contents(self: Dissection, versioned_cache):
+ # convert int keys that are too large
+ for timestamp in versioned_cache["dissection"]:
+ for key in versioned_cache["dissection"][timestamp]:
+ versioned_cache["dissection"][timestamp][key] = dict(
+ versioned_cache["dissection"][timestamp][key]
+ )
+ # sigh -- msgpack can't handle large int based dictionary keys
+ fix_list = []
+ for subkey in versioned_cache["dissection"][timestamp][key]:
+ if isinstance(subkey, int) and subkey > 2**32 - 1:
+ info(f"converting {key} {subkey}")
+ fix_list.append(subkey)
+
+ for subkey in fix_list:
+ versioned_cache["dissection"][timestamp][key][
+ str(subkey)
+ ] = versioned_cache["dissection"][timestamp][key][subkey]
+ del versioned_cache["dissection"][timestamp][key][subkey]
+
+ with open(where, "wb") as saveto:
+ msgpack.dump(versioned_cache, saveto)
+
+ def load_saved_contents(self: Dissection, versioned_cache: dict):
+ """Set parameters from the cache."""
  # set the local parameters from the cache
  for parameter in self.parameters:
  setattr(self, parameter, versioned_cache["parameters"][parameter])
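
Note: the new save() body works around what the in-diff comment describes as msgpack trouble with large integer map keys, by stringifying any key above 2**32 - 1 before dumping. A standalone sketch of that transform (data layout mirrors the hunk; values hypothetical):

    def stringify_large_keys(counts: dict) -> dict:
        # keys above 2**32 - 1 become strings, mirroring the fix_list loop above
        return {
            (str(k) if isinstance(k, int) and k > 2**32 - 1 else k): v
            for k, v in counts.items()
        }

    stringify_large_keys({2**48: 7, 80: 3})  # {"281474976710656": 7, 80: 3}
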
@@ -291,8 +316,9 @@ class Dissection:
  self.data = versioned_cache["dissection"]

  def load_saved(self: Dissection, where: str, dont_overwrite: bool = False) -> dict:
- "Loads a previous saved report from a file instead of re-parsing pcaps"
- contents = msgpack.load(open(where, "rb"), strict_map_key=False)
+ """Load a saved report from a cache file."""
+ with open(where, "rb") as cache_file:
+ contents = msgpack.load(cache_file, strict_map_key=False)

  # convert the ignore list to a set (msgpack doesn't do sets)
  contents["parameters"]["ignore_list"] = set(
@@ -320,14 +346,15 @@ class Dissection:
  match_value: str | None = None,
  minimum_count: int | None = None,
  make_printable: bool = False,
- ):
+ ) -> list:
+ """Search through data for appropriate records."""
  data = self.data

  if not timestamps:
  timestamps = data.keys()

  # find timestamps/key values with at least one item above count
- # TODO: we should really use pandas for this
+ # TODO(hardaker): we should really use pandas for this
  usable = defaultdict(set)
  for timestamp in timestamps:
  for key in data[timestamp]:
@@ -345,7 +372,7 @@ class Dissection:
  ):
  usable[key].add(subkey)

- # TODO: move the timestamp inside the other fors for faster
+ # TODO(hardaker): move the timestamp inside the other fors for faster
  # processing of skipped key/subkeys
  for timestamp in timestamps:
  for key in sorted(data[timestamp]):
@@ -370,10 +397,11 @@ class Dissection:

  @staticmethod
  def make_printable(value_type: str, value: Any) -> str:
+ """Turn a value into a printable version if needed."""
  try:
  if isinstance(value, bytes):
- if value_type in Dissection.display_transformers:
- value = str(Dissection.display_transformers[value_type](value))
+ if value_type in Dissection.DISPLAY_TRANSFORMERS:
+ value = str(Dissection.DISPLAY_TRANSFORMERS[value_type](value))
  else:
  value = "0x" + value.hex()
  else:
@@ -388,16 +416,17 @@ class Dissection:
  return value

  @staticmethod
- def print_mac_address(value):
- "Converts bytes to ethernet mac style address"
+ def print_mac_address(value: bytes) -> str:
+ """Convert bytes to ethernet mac style address."""

- # TODO: certainly inefficient
- def two_hex(value):
+ # TODO(hardaker): certainly inefficient
+ def two_hex(value: bytes) -> str:
  return f"{value:02x}"

  return ":".join(map(two_hex, value))

- display_transformers = {
+ # has to go at the end to pick up the above function names
+ DISPLAY_TRANSFORMERS: dict = {
  "Ethernet.IP.src": ipaddress.ip_address,
  "Ethernet.IP.dst": ipaddress.ip_address,
  "Ethernet.IP6.src": ipaddress.ip_address,
traffic_taffy/dissectmany.py CHANGED
@@ -83,7 +83,16 @@ class PCAPDissectMany:

  return dissection

- def load_all(self, return_as_list: bool = False):
+ def load_all(self, return_as_list: bool = False, dont_fork: bool = False):
+ if dont_fork:
+ # handle each one individually -- typically for inserting debugging stops
+ dissections = []
+ for pcap_file in self.pcap_files:
+ dissection = self.load_pcap(pcap_file)
+ dissections.append(dissection)
+ return dissections
+
+ # use all available resources
  with ProcessPoolExecutor() as executor:
  dissections = executor.map(self.load_pcap, self.pcap_files)
  if return_as_list: # convert from generator
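
Note: the new dont_fork path makes interactive debugging practical, since a breakpoint inside load_pcap() would otherwise fire inside a ProcessPoolExecutor worker process where a debugger cannot easily attach. A hypothetical invocation (the minimal constructor call is an assumption, not from the diff):

    pdm = PCAPDissectMany(["a.pcap", "b.pcap"])  # hypothetical minimal construction

    # sequential: pdb.set_trace() inside load_pcap() stops in the main process
    dissections = pdm.load_all(return_as_list=True, dont_fork=True)

    # parallel: load_pcap() runs in one worker process per file
    dissections = pdm.load_all(return_as_list=True)
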
traffic_taffy/graphdata.py CHANGED
@@ -1,29 +1,36 @@
+ """A module for storing/transforming data (frequently to be graphed)."""
+
  import os
  from pandas import DataFrame, to_datetime, concat
+ from traffic_taffy.dissection import Dissection


  class PcapGraphData:
+ """A base class for storing/transforming data (frequently to be graphed)."""
+
  def __init__(self):
+ """Create an instance of a PcapGraphData."""
  self.dissections = []
- pass

  @property
- def dissections(self):
+ def dissections(self) -> list:
+ """Dissections stored within the PcapGraphData instance."""
  return self._dissections

  @dissections.setter
- def dissections(self, newvalue):
+ def dissections(self, newvalue: list) -> None:
  self._dissections = newvalue

- def normalize_bins(self, dissection):
- results = {}
- time_keys = list(dissection.data.keys())
+ def normalize_bins(self, dissection: Dissection) -> dict:
+ """Transform a dissection's list of data into a dictionary."""
+ results: dict = {}
+ time_keys: list = list(dissection.data.keys())
  if time_keys[0] == 0: # likely always
  time_keys.pop(0)

- results = {"time": [], "count": [], "index": [], "key": [], "subkey": []}
+ results: dict = {"time": [], "count": [], "index": [], "key": [], "subkey": []}

- # TODO: this could likely be made much more efficient and needs hole-filling
+ # TODO(hardaker): this could likely be made much more efficient and needs hole-filling
  for timestamp, key, subkey, value in dissection.find_data(
  timestamps=time_keys,
  match_string=self.match_string,
@@ -40,7 +47,10 @@ class PcapGraphData:

  return results

- def get_dataframe(self, merge=False, calculate_load_fraction=False):
+ def get_dataframe(
+ self, merge: bool = False, calculate_load_fraction: bool = False
+ ) -> DataFrame:
+ """Create a pandas dataframe from stored dissections."""
  datasets = []
  if merge:
  dissection = next(self.dissections).clone()
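
Note: per the new docstrings, normalize_bins() flattens a dissection into parallel column lists that get_dataframe() turns into a pandas DataFrame. A minimal sketch of that long-format layout feeding pandas (row values, and the "index" format in particular, are hypothetical):

    from pandas import DataFrame

    results = {"time": [], "count": [], "index": [], "key": [], "subkey": []}
    results["time"].append(1700000000)
    results["count"].append(42)
    results["index"].append("Ethernet.IP.src=10.0.0.1")  # hypothetical index format
    results["key"].append("Ethernet.IP.src")
    results["subkey"].append("10.0.0.1")

    df = DataFrame(results)  # one row per (time, key, subkey) observation
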
traffic_taffy/tools/cache_info.py CHANGED
@@ -11,7 +11,7 @@ def parse_args():
  parser = ArgumentParser(
  formatter_class=ArgumentDefaultsHelpFormatter,
  description=__doc__,
- epilog="Exmaple Usage: ",
+ epilog="Example Usage: taffy-cache-info something.taffy",
  )

  parser.add_argument(
traffic_taffy/tools/compare.py CHANGED
@@ -19,7 +19,7 @@ def parse_args():
  parser = ArgumentParser(
  formatter_class=ArgumentDefaultsHelpFormatter,
  description=__doc__,
- epilog="Exmaple Usage: ",
+ epilog="Example Usage: taffy-compare -C file1.pcap file2.pcap",
  )

  output_options = parser.add_argument_group("Output format")
@@ -93,6 +93,8 @@ def main():
  ignore_list=args.ignore_list,
  pcap_filter=args.filter,
  layers=args.layers,
+ force_load=args.force_load,
+ force_overwrite=args.force_overwrite,
  )

  # compare the pcaps
traffic_taffy/tools/dissect.py CHANGED
@@ -16,7 +16,7 @@ def main():
  parser = ArgumentParser(
  formatter_class=ArgumentDefaultsHelpFormatter,
  description=__doc__,
- epilog="Exmaple Usage: ",
+ epilog="Example Usage: taffy-dissect -C -d 10 -n 10000 file.pcap",
  )

  parser.add_argument(
@@ -33,10 +33,16 @@ def main():
  help="Print results in an FSDB formatted output",
  )

+ parser.add_argument(
+ "--dont-fork",
+ action="store_true",
+ help="Do not fork into multiple processes per file (still fork per file)",
+ )
+
  dissector_add_parseargs(parser)
  limitor_add_parseargs(parser)

- parser.add_argument("input_file", type=str, help="input pcap file")
+ parser.add_argument("input_pcaps", type=str, help="input pcap file", nargs="*")

  args = parser.parse_args()
  log_level = args.log_level.upper()
@@ -47,8 +53,9 @@ def main():

  check_dissector_level(args.dissection_level)

+ # load all the files
  pdm = PCAPDissectMany(
- args.input_file,
+ args.input_pcaps,
  bin_size=args.bin_size,
  dissector_level=args.dissection_level,
  maximum_count=args.packet_count,
@@ -60,14 +67,15 @@ def main():
  force_overwrite=args.force_overwrite,
  force_load=args.force_load,
  )
- dissection = pdm.load_pcap(
- args.input_file,
- maximum_count=args.packet_count,
- force_overwrite=args.force_overwrite,
- force_load=args.force_load,
- )
+ dissections = pdm.load_all(True, dont_fork=args.dont_fork)
+
+ # merge them into a single dissection
+ dissection = dissections.pop(0)
+ dissection.merge_all(dissections)
+
+ # put the dissection into a dissector for reporting
  pd = PCAPDissector(
- args.input_file,
+ args.input_pcaps[0],
  bin_size=args.bin_size,
  dissector_level=args.dissection_level,
  maximum_count=args.packet_count,
@@ -81,6 +89,7 @@ def main():
  )
  pd.dissection = dissection

+ # output as requested
  if args.fsdb:
  pd.print_to_fsdb(
  timestamps=[0],
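
Note: taken together, the dissect.py hunks turn taffy-dissect from a single-file into a multi-file tool: every pcap is dissected (in parallel unless --dont-fork), the results are merged destructively into the first dissection via merge_all(), and that merged dissection is handed to a PCAPDissector for reporting. Condensed from the hunks above:

    dissections = pdm.load_all(True, dont_fork=args.dont_fork)  # one Dissection per pcap
    dissection = dissections.pop(0)    # take the first dissection...
    dissection.merge_all(dissections)  # ...and fold the rest into it, in place
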
traffic_taffy/tools/explore.py CHANGED
@@ -641,7 +641,7 @@ def parse_args():
  parser = ArgumentParser(
  formatter_class=ArgumentDefaultsHelpFormatter,
  description=__doc__,
- epilog="Exmaple Usage: ",
+ epilog="Example Usage: taffy-explore -C file1.pcap file2.pcap",
  )

  limitor_parser = limitor_add_parseargs(parser)
traffic_taffy/tools/export.py CHANGED
@@ -19,7 +19,7 @@ def parse_args() -> Namespace:
  parser = ArgumentParser(
  formatter_class=ArgumentDefaultsHelpFormatter,
  description=__doc__,
- epilog="Exmaple Usage: ",
+ epilog="Example Usage: taffy-export -C -m IP.UDP.sport file.pcap",
  )

  parser.add_argument(
@@ -40,7 +40,7 @@ def parse_args() -> Namespace:
  help="Where to store output data",
  )

- parser.add_argument("input_files", nargs="*", type=str, help="input pcap file")
+ parser.add_argument("input_pcaps", nargs="*", type=str, help="input pcap file")

  args = parser.parse_args()
  log_level = args.log_level.upper()
@@ -55,7 +55,7 @@ def main() -> None:
  check_dissector_level(args.dissection_level)

  pdm = PCAPDissectMany(
- args.input_files,
+ args.input_pcaps,
  bin_size=args.bin_size,
  dissector_level=args.dissection_level,
  maximum_count=args.packet_count,
@@ -64,6 +64,8 @@ def main() -> None:
  ignore_list=args.ignore_list,
  pcap_filter=args.filter,
  layers=args.layers,
+ force_load=args.force_load,
+ force_overwrite=args.force_overwrite,
  )

  dissections = pdm.load_all(return_as_list=True)
traffic_taffy/tools/graph.py CHANGED
@@ -15,7 +15,7 @@ def parse_args():
  parser = ArgumentParser(
  formatter_class=ArgumentDefaultsHelpFormatter,
  description=__doc__,
- epilog="Exmaple Usage: ",
+ epilog="Example Usage: taffy-graph -C -m TOTAL -M packet -o graph.png file.pcap",
  )

  parser.add_argument(
@@ -50,7 +50,7 @@ def parse_args():
  dissector_add_parseargs(parser)
  limitor_add_parseargs(parser)

- parser.add_argument("input_file", type=str, help="PCAP file to graph", nargs="+")
+ parser.add_argument("input_pcaps", type=str, help="PCAP file to graph", nargs="+")

  args = parser.parse_args()
  log_level = args.log_level.upper()
@@ -65,7 +65,7 @@ def main():
  check_dissector_level(args.dissection_level)

  pc = PcapGraph(
- args.input_file,
+ args.input_pcaps,
  args.output_file,
  maximum_count=args.packet_count,
  minimum_count=args.minimum_count,
traffic_taffy-0.5.4.dist-info/METADATA → traffic_taffy-0.5.6.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: traffic-taffy
- Version: 0.5.4
+ Version: 0.5.6
  Summary: A tool for doing differential analysis of pcap files
  Project-URL: Homepage, https://github.com/hardaker/traffic-taffy
  Author-email: Wes Hardaker <opensource@hardakers.net>
traffic_taffy-0.5.4.dist-info/RECORD → traffic_taffy-0.5.6.dist-info/RECORD RENAMED
@@ -1,12 +1,12 @@
- traffic_taffy/__init__.py,sha256=Eqa119rXm2IVBEOgIXMds2jQMj9VX8II5NmWLd62Teg,22
+ traffic_taffy/__init__.py,sha256=bzOhFUIQlCdC61joh43BrAkK-ns_1TJAXxkswf9jr6Q,22
  traffic_taffy/compare.py,sha256=wgClLO5qHQirv4fKQ20gI9H2PD7noMIGS98LUdqRx8Q,10643
  traffic_taffy/comparison.py,sha256=goGJbJsr8mzMqRvrJOXxW9QJaLZUg4W85Ji7Bjl-UKA,583
- traffic_taffy/dissection.py,sha256=KZn0GrYEmw9DDmD29gDMSuPROSkqs0PI1e2moBJHayc,14844
- traffic_taffy/dissectmany.py,sha256=UGKCrrH7VOzIR8TL_JMdW8NrbiHF-b-rDMI9jiGK5fI,3023
+ traffic_taffy/dissection.py,sha256=FHVi4AXAqXrSzXz-d9SKkbPBD03EDDvKYWE2Opi9r6U,16575
+ traffic_taffy/dissectmany.py,sha256=hbCX-AEMTquTaqTpb05D8ek5dS5z9nr28BC5e27mzo8,3403
  traffic_taffy/dissector.py,sha256=4jgwK45fDju86wzU5uzbo4r4Rr5IopqjIyb92il8A3A,9617
  traffic_taffy/dissectorresults.py,sha256=LKoyX04Qjc6B7RnqtJgIWsyVnselJ9CygLkMAp3lhw0,647
  traffic_taffy/graph.py,sha256=UxRQRnv8WuwjFt_hesk4B31-I2fUnAJlLzM5pHvrNus,3996
- traffic_taffy/graphdata.py,sha256=PWbVFcdjq3iK3piEKXhALtPwMA8oSaaqoNaGUif4HDQ,2410
+ traffic_taffy/graphdata.py,sha256=lHpLuzzypEPuz3QPfkvs0IAbplTLzKcqHaLyoRidmiE,2971
  traffic_taffy/dissector_engine/__init__.py,sha256=vEMf589m4JutbfyX8HEGi1O50tNqCBqASAqYyyYg2sI,1284
  traffic_taffy/dissector_engine/dpkt.py,sha256=uby0j6LFCjnxQvyR2APhKKtydKJmHV6ANn1oODRyIPo,4256
  traffic_taffy/dissector_engine/scapy.py,sha256=BxKdU2mptXh4iIRgxznNsl1eIJve_b76Tq8XkunBKR4,3909
@@ -22,13 +22,13 @@ traffic_taffy/tests/test_pcap_splitter.py,sha256=0RxIgH9vWMeFoyC2A6sIIGFcq_Jcl_R
  traffic_taffy/tests/test_result_storage.py,sha256=gxKO7Wc0klgSQSk3bw8L3sW_kdxoRltLzmcXqycFdoU,360
  traffic_taffy/tests/test_value_printing.py,sha256=3k-d1bpa6tD8qcrgNVoZhhG7kwylI2AMVNFPbRE2vds,266
  traffic_taffy/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- traffic_taffy/tools/cache_info.py,sha256=4SZVdcwA_36YLyIYN8F5AbKaM-xCLj8P089wE2fKXCA,1933
- traffic_taffy/tools/compare.py,sha256=xUZz6v_phWVd4RDVkj9Wc99qTcnFda_kGgBzDa-l6Ag,3310
- traffic_taffy/tools/dissect.py,sha256=lsekO8rashBxiPtwblqTjc5fsXfoz2VUk8q7yzl83ZM,2929
- traffic_taffy/tools/explore.py,sha256=ouEUojtphVvRqqLds-QxeURcpzAD-qrqv_zXudgGak0,24930
- traffic_taffy/tools/export.py,sha256=eDiBBB_axZxDympimiCi_r6tVrgh_qdmHQnCa8ElQQ8,2559
- traffic_taffy/tools/graph.py,sha256=j4ezQ-SKvbvhxDtjFfW2A4S7ory_XPflHxvgdrkBHxc,2408
- traffic_taffy-0.5.4.dist-info/METADATA,sha256=UgLNJrLtiqzjiO6srnKcXZGJGQEXB9GFn1MUzJmBk_A,1526
- traffic_taffy-0.5.4.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87
- traffic_taffy-0.5.4.dist-info/entry_points.txt,sha256=ySz30b1Cu03CtCGMRqqg0NZJpzrVI91T5HjbEaTNnpQ,314
- traffic_taffy-0.5.4.dist-info/RECORD,,
+ traffic_taffy/tools/cache_info.py,sha256=QC-BiaJ_uHwH-B05a29qdW7oTHfKmO1PT31d_FvIJlU,1965
+ traffic_taffy/tools/compare.py,sha256=tJW5PZHpbiFN5YlIu3IIz6vkFowYuxhgmWVO9JAJ4Co,3438
+ traffic_taffy/tools/dissect.py,sha256=QST4NlH1HbzepJLAHFlghm47MDBd0CHBjquRnqqs_SY,3276
+ traffic_taffy/tools/explore.py,sha256=HWyRWc0GpK0jpoVyGqkeFyqSwVE9NLxGh4r4iW7HLYM,24968
+ traffic_taffy/tools/export.py,sha256=jk5ck-K9m9_Rno72fDWgfS67WKUumDL1KQpMG_j0-G8,2682
+ traffic_taffy/tools/graph.py,sha256=gcyI0C8E21Uuon3j_n58tV-kKSn8wXjov-JSCKjPukI,2466
+ traffic_taffy-0.5.6.dist-info/METADATA,sha256=wFX9sA44cLQKAqqJkDOHZ8b6pVibn7iEkM1Yy54-9_U,1526
+ traffic_taffy-0.5.6.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87
+ traffic_taffy-0.5.6.dist-info/entry_points.txt,sha256=ySz30b1Cu03CtCGMRqqg0NZJpzrVI91T5HjbEaTNnpQ,314
+ traffic_taffy-0.5.6.dist-info/RECORD,,