insightconnect-plugin-runtime 6.2.0__py3-none-any.whl → 6.2.2__py3-none-any.whl
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- insightconnect_plugin_runtime/helper.py +86 -47
- {insightconnect_plugin_runtime-6.2.0.dist-info → insightconnect_plugin_runtime-6.2.2.dist-info}/METADATA +4 -2
- {insightconnect_plugin_runtime-6.2.0.dist-info → insightconnect_plugin_runtime-6.2.2.dist-info}/RECORD +6 -6
- {insightconnect_plugin_runtime-6.2.0.dist-info → insightconnect_plugin_runtime-6.2.2.dist-info}/WHEEL +1 -1
- tests/unit/test_helpers.py +43 -17
- {insightconnect_plugin_runtime-6.2.0.dist-info → insightconnect_plugin_runtime-6.2.2.dist-info}/top_level.txt +0 -0
insightconnect_plugin_runtime/helper.py
CHANGED

@@ -10,7 +10,7 @@ import subprocess
 import time
 from datetime import datetime, timedelta
 from io import IOBase
-from typing import Any, Callable, Dict, List, Union, Tuple
+from typing import Any, Callable, Dict, List, Union, Tuple, Optional
 from urllib import request
 from hashlib import sha1
 from json import JSONDecodeError
@@ -30,18 +30,48 @@ ENCODE_TYPE = "utf-8"
 
 DEFAULTS_HOURS_AGO = 24
 
+logger = logging.getLogger()
 
-def hash_sha1(log: Dict[str, Any]) -> str:
+
+def hash_sha1(log: Dict[str, Any], keys: Optional[List[str]] = None) -> str:
     """
     Iterate through a dictionary and hash each value.
+    Optionally only hash certain keys in the dictionary.
+
     :param log: Dictionary to be hashed.
-    :
+    :param keys: Optional list of keys to hash on if provided
+
     :return: Hex digest of hash.
-    :rtype: str
     """
+
     hash_ = sha1()  # nosec B303
-    for key, value in log.items():
+
+    # Leaving no room for developer error and ensuring they know exactly where it went wrong
+    # if they provide a key not in list format
+    if keys is not None and not isinstance(keys, list):
+        raise TypeError(
+            f"The 'keys' parameter must be a list or None in the 'hash_sha1' function, not {type(keys).__name__}"
+        )
+
+    # Hash all key-value pairs if no keys provided
+    if keys is None:
+        items_to_hash = log.items()
+
+    # Otherwise, only include specified keys
+    else:
+        items_to_hash = []
+        for key in keys:
+            if key in log:
+                items_to_hash.append((key, log[key]))
+
+            # Alert if the key is not found in the log
+            else:
+                raise KeyError(f"Key '{key}' not found in the provided log.")
+
+    # Iterate through items to hash and hash
+    for key, value in items_to_hash:
         hash_.update(f"{key}{value}".encode(ENCODE_TYPE))
+
     return hash_.hexdigest()
 
 
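For orientation, a minimal usage sketch of the new `keys` parameter. The import path follows the package layout in the RECORD section, and the expected digests come from the tests added at the end of this diff; anything else here is an assumption, not SDK documentation:

```python
from insightconnect_plugin_runtime.helper import hash_sha1

log = {"example": "value", "sample": "value"}

# Default behaviour (unchanged): every key-value pair is hashed.
print(hash_sha1(log))  # 2e1ccc1a95e9b2044f13546c25fe380bbd039293

# New in 6.2.2: hash only the listed keys.
print(hash_sha1(log, keys=["example"]))  # 61c908e52d66a763ceed0798b8e5f4b7f0328a21

# Guard rails added with the new parameter:
try:
    hash_sha1(log, keys="example")  # keys must be a list or None
except TypeError as error:
    print(error)

try:
    hash_sha1(log, keys=["missing"])  # every requested key must exist in the log
except KeyError as error:
    print(error)
```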
@@ -65,7 +95,7 @@ def compare_and_dedupe_hashes(
         if hash_ not in previous_logs_hashes:
             new_logs_hashes.append(hash_)
             logs_to_return.append(log)
-
+    logger.info(
         f"Original number of logs:{len(new_logs)}. Number of logs after de-duplication:{len(logs_to_return)}"
     )
     return logs_to_return, new_logs_hashes
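`compare_and_dedupe_hashes` is only touched for logging here, but it is the natural consumer of `hash_sha1`. A sketch of the round trip, with the argument order and return shape inferred from the test added at the end of this diff:

```python
from insightconnect_plugin_runtime.helper import compare_and_dedupe_hashes, hash_sha1

previous_hashes = [hash_sha1({"example": "value", "sample": "value"})]
new_logs = [
    {"example": "value", "sample": "value"},  # already seen: dropped
    {"specimen": "new_value"},                # unseen: kept
]

deduped_logs, new_hashes = compare_and_dedupe_hashes(previous_hashes, new_logs)
print(deduped_logs)  # [{'specimen': 'new_value'}]
print(new_hashes)    # hashes of only the logs that survived de-duplication
```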
@@ -135,7 +165,12 @@ def make_request(
         raise PluginException(
             preset=PluginException.Preset.UNKNOWN, data=str(exception)
         )
-    response_handler(
+    response_handler(
+        response,
+        exception_custom_configs,
+        exception_data_location,
+        allowed_status_codes,
+    )
     return response
 
 
@@ -190,7 +225,7 @@ def request_error_handling(
             exception.response,
             data_location=exception_data_location,
             custom_configs=custom_configs,
-            allowed_status_codes=allowed_status_codes
+            allowed_status_codes=allowed_status_codes,
         )
     else:
         raise PluginException(
@@ -246,7 +281,7 @@ def response_handler(
     }
     status_code_preset = status_code_presets.get(status_code)
     exception = PluginException(preset=PluginException.Preset.UNKNOWN, data=data)
-
+    logger.info(f"Request to {response.url} failed. Status code: {status_code}")
     if status_code in custom_configs.keys():
         exception = custom_configs.get(status_code)
         if hasattr(exception, "data") and data is not None:
@@ -441,15 +476,19 @@ def convert_dict_to_snake_case(
 
     if isinstance(input_dict, list):
         return [
-            convert_dict_to_snake_case(element)
-            if isinstance(element, (dict, list))
-            else element
+            (
+                convert_dict_to_snake_case(element)
+                if isinstance(element, (dict, list))
+                else element
+            )
             for element in input_dict
         ]
     return {
-        convert_to_snake_case(key): convert_dict_to_snake_case(value)
-        if isinstance(value, (dict, list))
-        else value
+        convert_to_snake_case(key): (
+            convert_dict_to_snake_case(value)
+            if isinstance(value, (dict, list))
+            else value
+        )
         for key, value in input_dict.items()
     }
 
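This hunk is pure reformatting, consistent with Black 24.x's style of parenthesizing multi-line conditional expressions; behaviour should be unchanged. Assuming `convert_to_snake_case` (not shown in this diff) does the obvious camelCase-to-snake_case key conversion, usage looks like:

```python
from insightconnect_plugin_runtime.helper import convert_dict_to_snake_case

# Nested dicts and lists are converted recursively; leaf values are untouched.
nested = {"outerKey": {"innerKey": 1}, "listKey": [{"anotherKey": 2}]}
print(convert_dict_to_snake_case(nested))
# {'outer_key': {'inner_key': 1}, 'list_key': [{'another_key': 2}]}
```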
@@ -561,7 +600,7 @@ def rate_limiting(
                     error.cause
                     == PluginException.causes[PluginException.Preset.RATE_LIMIT]
                 ):
-
+                    logger.info(
                         f"Rate limiting error occurred. Retrying in {delay:.1f} seconds ({attempts_counter}/{max_tries})"
                     )
                     retry = True
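The rest of `rate_limiting` is not visible in this diff; the fragment suggests a retry loop with a computed delay. For readers unfamiliar with the technique, a generic sketch of retry with exponential backoff and jitter — not the SDK's actual implementation:

```python
import random
import time

def retry_with_backoff(func, max_tries: int = 5, base_delay: float = 1.0):
    """Call func, retrying with exponential backoff plus jitter on failure."""
    for attempt in range(1, max_tries + 1):
        try:
            return func()
        except Exception:
            if attempt == max_tries:
                raise  # out of retries: let the last error propagate
            delay = base_delay * 2 ** (attempt - 1) + random.uniform(0, 1)
            print(f"Retrying in {delay:.1f} seconds ({attempt}/{max_tries})")
            time.sleep(delay)
```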
@@ -603,12 +642,12 @@ def check_hashes(src, checksum):
     if type(src) is str:
         hashes = get_hashes_string(src)
     else:
-
+        logger.error("CheckHashes: Argument must be a string")
         raise Exception("CheckHashes")
     for alg in hashes:
         if hashes[alg] == checksum:
             return True
-
+    logger.info("CheckHashes: No checksum match")
     return False
 
 
@@ -620,9 +659,9 @@ def check_cachefile(cache_file):
     cache_file = cache_dir + "/" + cache_file
     if os.path.isdir(cache_dir):
         if os.path.isfile(cache_file):
-
+            logger.info("CheckCacheFile: File %s exists", cache_file)
             return True
-
+    logger.info("CheckCacheFile: File %s did not exist", cache_file)
     return False
 
 
@@ -638,9 +677,9 @@ def open_file(file_path):
                 return f
             return None
         else:
-
+            logger.info("OpenFile: File %s is not a file or does not exist ", filename)
     else:
-
+        logger.error(
             "OpenFile: Directory %s is not a directory or does not exist", dirname
         )
 
@@ -654,16 +693,16 @@ def open_cachefile(cache_file, append=False):
     if os.path.isdir(cache_dir):
         if os.path.isfile(cache_file):
             f = open(cache_file, "a+" if append else "r+")
-
+            logger.info("OpenCacheFile: %s exists, returning it", cache_file)
         else:
             if not os.path.isdir(os.path.dirname(cache_file)):
                 os.makedirs(os.path.dirname(cache_file))
             f = open(cache_file, "w+")  # Open once to create the cache file
             f.close()
-
+            logger.info("OpenCacheFile: %s created", cache_file)
             f = open(cache_file, "a+" if append else "r+")
         return f
-
+    logger.error("OpenCacheFile: %s directory or does not exist", cache_dir)
 
 
 def remove_cachefile(cache_file):
@@ -676,7 +715,7 @@ def remove_cachefile(cache_file):
     if os.path.isfile(cache_file):
         os.remove(cache_file)
         return True
-
+    logger.info("RemoveCacheFile: Cache file %s did not exist", cache_file)
     return False
 
 
@@ -695,9 +734,9 @@ def lock_cache(lock_file):
             os.makedirs(os.path.dirname(lock_file))
         f = open(lock_file, "w")
         f.close()
-
+        logger.info("Cache lock %s created", lock_file)
         return True
-
+    logger.info("Cache lock %s failed, lock not created", lock_file)
     return False
 
 
@@ -716,7 +755,7 @@ def unlock_cache(lock_file, wait_time):
         time.sleep(wait_time)
         os.remove(lock_file)
         return True
-
+    logger.info("Cache unlock %s failed, lock not released", lock_file)
     return False
 
 
@@ -746,11 +785,11 @@ def open_url(url, timeout=None, verify=True, **kwargs):
         urlobj = request.urlopen(req, timeout=timeout, context=ctx)
         return urlobj
     except request.HTTPError as e:
-
+        logger.error("HTTPError: %s for %s", str(e.code), url)
         if e.code == 304:
             return None
     except request.URLError as e:
-
+        logger.error("URLError: %s for %s", str(e.reason), url)
     raise Exception("GetURL Failed")
 
 
@@ -778,17 +817,17 @@ def check_url(url):
             return True
 
     except requests.exceptions.HTTPError:
-
+        logger.error(
             "Requests: HTTPError: status code %s for %s",
             str(resp.status_code) if resp else None,
             url,
         )
     except requests.exceptions.Timeout:
-
+        logger.error("Requests: Timeout for %s", url)
     except requests.exceptions.TooManyRedirects:
-
+        logger.error("Requests: TooManyRedirects for %s", url)
     except requests.ConnectionError:
-
+        logger.error("Requests: ConnectionError for %s", url)
     return False
 
 
@@ -808,7 +847,7 @@ def exec_command(command):
         rcode = p.poll()
         return {"stdout": stdout, "stderr": stderr, "rcode": rcode}
     except OSError as e:
-
+        logger.error(
             "SubprocessError: %s %s: %s", str(e.filename), str(e.strerror), str(e.errno)
         )
         raise Exception("ExecCommand")
@@ -834,7 +873,7 @@ def encode_file(file_path):
             return efile
         return None
     except (IOError, OSError) as e:
-
+        logger.error("EncodeFile: Failed to open file: %s", e.strerror)
         raise Exception("EncodeFile")
     finally:
         if isinstance(f, IOBase):
@@ -857,17 +896,17 @@ def check_url_modified(url):
         if resp.status_code == 200:
             return True
     except requests.exceptions.HTTPError:
-
+        logger.error(
             "Requests: HTTPError: status code %s for %s",
             str(resp.status_code) if resp else None,
             url,
         )
     except requests.exceptions.Timeout:
-
+        logger.error("Requests: Timeout for %s", url)
     except requests.exceptions.TooManyRedirects:
-
+        logger.error("Requests: TooManyRedirects for %s", url)
     except requests.ConnectionError:
-
+        logger.error("Requests: ConnectionError for %s", url)
     return False
 
 
@@ -893,7 +932,7 @@ def get_url_path_filename(url):
             if name[n].endswith("."):
                 return name
         except IndexError:
-
+            logger.error("Range: IndexError: URL basename is short: %s of %s", name, url)
             return None
     return None
 
@@ -913,16 +952,16 @@ def get_url_filename(url):
             return name
         return None
     except requests.exceptions.MissingSchema:
-
+        logger.error("Requests: MissingSchema: Requires ftp|http(s):// for %s", url)
     except requests.exceptions.HTTPError:
-
+        logger.error(
             "Requests: HTTPError: status code %s for %s",
             str(resp.status_code) if resp else None,
             url,
         )
     except requests.exceptions.Timeout:
-
+        logger.error("Requests: Timeout for %s", url)
     except requests.exceptions.TooManyRedirects:
-
+        logger.error("Requests: TooManyRedirects for %s", url)
     except requests.ConnectionError:
-
+        logger.error("Requests: ConnectionError for %s", url)
{insightconnect_plugin_runtime-6.2.0.dist-info → insightconnect_plugin_runtime-6.2.2.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: insightconnect-plugin-runtime
-Version: 6.2.0
+Version: 6.2.2
 Summary: InsightConnect Plugin Runtime
 Home-page: https://github.com/rapid7/komand-plugin-sdk-python
 Author: Rapid7 Integrations Alliance
@@ -13,7 +13,7 @@ Classifier: Natural Language :: English
 Classifier: Topic :: Software Development :: Build Tools
 Description-Content-Type: text/markdown
 Requires-Dist: requests==2.32.2
-Requires-Dist:
+Requires-Dist: python_jsonschema_objects==0.5.2
 Requires-Dist: jsonschema==4.21.1
 Requires-Dist: certifi==2024.07.04
 Requires-Dist: Flask==3.0.2
@@ -211,6 +211,8 @@ contributed. Black is installed as a test dependency and the hook can be initial
 after cloning this repository.
 
 ## Changelog
+* 6.2.2 - Fix instances where logging errors would lead to duplicate entries being output | Add option to hash only on provided keys for `hash_sha1` function
+* 6.2.1 - Fix instances where logging would lead to duplicate entries being output
 * 6.2.0 - Update base images to pull Python 3.11.10 | changed the pep-8 check in tox to `pycodestyle`
 * 6.1.4 - Address vulnerabilities within local development requirements.txt and vulnerabilities in slim image.
 * 6.1.3 - Addressing failing Python Slim package (bump packages).
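The 6.2.1 and 6.2.2 entries above appear to correspond to the module-level `logger = logging.getLogger()` change in helper.py. The original logging calls were not preserved by this diff view, but the classic cause of duplicated log entries is a record being emitted once by a handler on a child logger and again by a handler on the root logger during propagation; a small reproduction:

```python
import logging

child = logging.getLogger("plugin")
child.addHandler(logging.StreamHandler())  # handler on the child logger
logging.basicConfig()                      # handler on the root logger

child.warning("emitted twice: once by each handler")

# Routing everything through one shared logger with a single handler,
# as helper.py now does with a module-level logging.getLogger(), avoids this.
logging.getLogger().warning("emitted once")
```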
{insightconnect_plugin_runtime-6.2.0.dist-info → insightconnect_plugin_runtime-6.2.2.dist-info}/RECORD
CHANGED

@@ -4,7 +4,7 @@ insightconnect_plugin_runtime/cli.py,sha256=Pb-Janu-XfRlSXxPHh30OIquljWptrhhS51C
 insightconnect_plugin_runtime/connection.py,sha256=4bHHV2B0UFGsAtvLu1fiYQRwx7fissUakHPUyjLQO0E,2340
 insightconnect_plugin_runtime/dispatcher.py,sha256=ru7njnyyWE1-oD-VbZJ-Z8tELwvDf69rM7Iezs4rbnw,1774
 insightconnect_plugin_runtime/exceptions.py,sha256=Pvcdkx81o6qC2qU661x-DzNjuIMP82x52nPMSEqEo4s,8491
-insightconnect_plugin_runtime/helper.py,sha256=
+insightconnect_plugin_runtime/helper.py,sha256=WiCFu4S33BL8wugBmyFH-06inkcXPmdC5cv0UGlSHfA,32646
 insightconnect_plugin_runtime/metrics.py,sha256=hf_Aoufip_s4k4o8Gtzz90ymZthkaT2e5sXh5B4LcF0,3186
 insightconnect_plugin_runtime/plugin.py,sha256=Yf4LNczykDVc31F9G8uuJ9gxEsgmxmAr0n4pcZzichM,26393
 insightconnect_plugin_runtime/schema.py,sha256=jTNc6KAMqFpaDVWrAYhkVC6e8I63P3X7uVlJkAr1hiY,583
@@ -68,7 +68,7 @@ tests/unit/test_aws_action.py,sha256=pBE23Qn4aXKJqPmwiHMcEU5zPdyvbKO-eK-6jUlrsQw
 tests/unit/test_custom_encoder.py,sha256=KLYyVOTq9MEkZXyhVHqjm5LVSW6uJS4Davgghsw9DGk,2207
 tests/unit/test_endpoints.py,sha256=LuXOfLBu47rDjGa5YEsOwTZBEdvQdl_C6-r46oxWZA8,6401
 tests/unit/test_exceptions.py,sha256=Y4F-ij8WkEJkUU3mPvxlEchqE9NCdxDvR8bJzPVVNao,5328
-tests/unit/test_helpers.py,sha256=
+tests/unit/test_helpers.py,sha256=9Y5N5cUBtesfr289oOZFekMJb84VYuFjucqQ9VEk3WQ,17431
 tests/unit/test_metrics.py,sha256=PjjTrB9w7uQ2Q5UN-893-SsH3EGJuBseOMHSD1I004s,7979
 tests/unit/test_oauth.py,sha256=nbFG0JH1x04ExXqSe-b5BGdt_hJs7DP17eUa6bQzcYI,2093
 tests/unit/test_plugin.py,sha256=ZTNAZWwZhDIAbxkVuWhnz9FzmojbijgMmsLWM2mXQI0,4160
@@ -78,7 +78,7 @@ tests/unit/test_server_spec.py,sha256=je97BaktgK0Fiz3AwFPkcmHzYtOJJNqJV_Fw5hrvqX
 tests/unit/test_trigger.py,sha256=E53mAUoVyponWu_4IQZ0IC1gQ9lakBnTn_9vKN2IZfg,1692
 tests/unit/test_variables.py,sha256=OUEOqGYZA3Nd5oKk5GVY3hcrWKHpZpxysBJcO_v5gzs,291
 tests/unit/utils.py,sha256=VooVmfpIgxmglNdtmT32AkEDFxHxyRHLK8RsCWjjYRY,2153
-insightconnect_plugin_runtime-6.2.
-insightconnect_plugin_runtime-6.2.
-insightconnect_plugin_runtime-6.2.
-insightconnect_plugin_runtime-6.2.
+insightconnect_plugin_runtime-6.2.2.dist-info/METADATA,sha256=aQsntlkFksgMSHJvZyCnixCTdVSbzypDrq81UQVzBFU,15275
+insightconnect_plugin_runtime-6.2.2.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+insightconnect_plugin_runtime-6.2.2.dist-info/top_level.txt,sha256=AJtyJOpiFzHxsbHUICTcUKXyrGQ3tZxhrEHsPjJBvEA,36
+insightconnect_plugin_runtime-6.2.2.dist-info/RECORD,,
tests/unit/test_helpers.py
CHANGED
@@ -510,20 +510,46 @@ class TestRequestsHelpers(TestCase):
         )
 
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+class TestHashing(TestCase):
+    def setUp(self) -> None:
+        self.log = {"example": "value", "sample": "value"}
+
+    def test_hash_sha1_no_keys(self):
+        # Test hash with no keys provided
+        expected_hash = "2e1ccc1a95e9b2044f13546c25fe380bbd039293"
+        self.assertEqual(helper.hash_sha1(self.log), expected_hash)
+
+    def test_hash_sha1_keys(self):
+        # Test hash with valid key provided
+        expected_hash = "61c908e52d66a763ceed0798b8e5f4b7f0328a21"
+        self.assertEqual(helper.hash_sha1(self.log, keys=["example"]), expected_hash)
+
+    def test_hash_sha1_keys_wrong_type(self):
+        # Test hash with wrong type for keys
+        with self.assertRaises(TypeError) as context:
+            helper.hash_sha1(self.log, keys="test")
+
+        self.assertEqual(
+            str(context.exception),
+            "The 'keys' parameter must be a list or None in the 'hash_sha1' function, not str"
+        )
+
+    def test_hash_sha1_keys_not_found(self):
+        # Test hash with key not found
+        with self.assertRaises(KeyError) as context:
+            helper.hash_sha1(self.log, keys=["example", "test"])
+
+        self.assertEqual(str(context.exception), "\"Key 'test' not found in the provided log.\"")
+
+    def test_compare_and_dedupe_hashes(self):
+        hashes = ["2e1ccc1a95e9b2044f13546c25fe380bbd039293"]
+        logs = [
+            {
+                "example": "value",
+                "sample": "value",
+            },
+            {"specimen": "new_value"},
+        ]
+        assert [{"specimen": "new_value"}], [
+            "ad6ae80c0356e02b1561cb58408ee678eb1070bb"
+        ] == helper.compare_and_dedupe_hashes(hashes, logs)
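To run just the new hashing tests from a source checkout of rapid7/komand-plugin-sdk-python (the test path is taken from the RECORD section above; the runner invocation itself is only a suggestion):

```python
import unittest

# TestHashing is the class added in this diff; the import assumes the
# repository root is on sys.path.
from tests.unit.test_helpers import TestHashing

suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestHashing)
unittest.TextTestRunner(verbosity=2).run(suite)
```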
{insightconnect_plugin_runtime-6.2.0.dist-info → insightconnect_plugin_runtime-6.2.2.dist-info}/top_level.txt
File without changes