atomicshop 2.17.2__py3-none-any.whl → 2.18.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of atomicshop might be problematic.
- atomicshop/__init__.py +1 -1
- atomicshop/basics/ansi_escape_codes.py +3 -1
- atomicshop/file_io/docxs.py +26 -17
- atomicshop/http_parse.py +118 -77
- atomicshop/mitm/config_static.py +1 -0
- atomicshop/mitm/connection_thread_worker.py +350 -260
- atomicshop/mitm/engines/__parent/parser___parent.py +2 -2
- atomicshop/mitm/engines/__parent/recorder___parent.py +83 -24
- atomicshop/mitm/engines/__reference_general/recorder___reference_general.py +2 -2
- atomicshop/mitm/message.py +40 -15
- atomicshop/mitm/mitm_main.py +3 -2
- atomicshop/mitm/recs_files.py +2 -1
- atomicshop/system_resource_monitor.py +16 -3
- atomicshop/wrappers/socketw/base.py +17 -0
- atomicshop/wrappers/socketw/creator.py +32 -4
- atomicshop/wrappers/socketw/receiver.py +90 -100
- atomicshop/wrappers/socketw/sender.py +5 -8
- atomicshop/wrappers/socketw/socket_client.py +15 -4
- atomicshop/wrappers/socketw/statistics_csv.py +12 -6
- {atomicshop-2.17.2.dist-info → atomicshop-2.18.0.dist-info}/METADATA +1 -1
- {atomicshop-2.17.2.dist-info → atomicshop-2.18.0.dist-info}/RECORD +24 -24
- {atomicshop-2.17.2.dist-info → atomicshop-2.18.0.dist-info}/LICENSE.txt +0 -0
- {atomicshop-2.17.2.dist-info → atomicshop-2.18.0.dist-info}/WHEEL +0 -0
- {atomicshop-2.17.2.dist-info → atomicshop-2.18.0.dist-info}/top_level.txt +0 -0
atomicshop/mitm/engines/__parent/parser___parent.py CHANGED

@@ -10,6 +10,6 @@ class ParserParent:
 
     def parse(self):
         # This is general parser, so we don't parse anything and 'request_body_parsed' gets empty byte string.
-        self.class_client_message.
+        self.class_client_message.request_custom_parsed = b''
 
-        self.logger.info(f"Parsed: {self.class_client_message.
+        self.logger.info(f"Parsed: {self.class_client_message.request_custom_parsed[0: 100]}...")
atomicshop/mitm/engines/__parent/recorder___parent.py CHANGED

@@ -1,35 +1,45 @@
 import os
 from datetime import datetime
 import json
+import queue
+import threading
 
 from ...shared_functions import build_module_names, create_custom_logger
 from ... import message, recs_files
 from .... import filesystem
-from ....file_io import
+from ....file_io import jsons
+from ....print_api import print_api
 
 
 # The class that is responsible for Recording Requests / Responses.
 class RecorderParent:
 
-
-
+    # noinspection PyTypeChecker
+    def __init__(self, record_path: str):
         self.record_path: str = record_path
+
         self.file_extension: str = ".json"
         self.engine_name = None
         self.module_name = None
         self.engine_record_path: str = str()
         self.record_file_path: str = str()
+        self.class_client_message: message.ClientMessage = None
 
         self.logger = create_custom_logger()
 
         # Get engine name and module name
         self.get_engine_module()
-
-
+
+        # Build the record path with file name
+        self.build_record_path_to_engine()
 
         # Create folder.
         filesystem.create_directory(self.engine_record_path)
 
+        # Initialize a queue to hold messages
+        self.message_queue: queue.Queue = queue.Queue()
+        self.recorder_worker_thread = None
+
     # "self.__module__" is fully qualified module name: classes.engines.ENGINE-NAME.MODULE-NAME
     def get_engine_module(self):
         _, self.engine_name, self.module_name = build_module_names(self.__module__)
@@ -43,12 +53,9 @@ class RecorderParent:
         # Formatting the date and time and converting it to string object
         day_time_format: str = now.strftime(recs_files.REC_FILE_DATE_TIME_FORMAT)
 
-        # Build the record path with file name
-        self.build_record_path_to_engine()
-
         # If HTTP Path is not defined, 'http_path' will be empty, and it will not interfere with file name.
         self.record_file_path: str = (
-            f"{self.engine_record_path}{os.sep}{day_time_format}_"
+            f"{self.engine_record_path}{os.sep}{day_time_format}_th{self.class_client_message.thread_id}_"
             f"{self.class_client_message.server_name}{self.file_extension}")
 
     def convert_messages(self):
@@ -59,27 +66,79 @@
         # We need to check that the values that we want to convert aren't empty or 'None'.
         if self.class_client_message.request_raw_bytes:
             self.class_client_message.request_raw_hex = self.class_client_message.request_raw_bytes.hex()
-        if self.class_client_message.
-
-
-
-
-
-
-
+        if self.class_client_message.response_raw_bytes:
+            self.class_client_message.response_raw_hex = self.class_client_message.response_raw_bytes.hex()
+
+    def record(self, class_client_message: message.ClientMessage):
+        self.class_client_message = class_client_message
+
+        # Build full file path if it is not already built.
+        if not self.record_file_path:
+            self.build_record_full_file_path()
+
+        # Start the worker thread if it is not already running
+        if not self.recorder_worker_thread:
+            self.recorder_worker_thread = threading.Thread(
+                target=save_message_worker,
+                args=(self.record_file_path, self.message_queue, self.logger),
+                name=f"Thread-{self.class_client_message.thread_id}_Recorder",
+                daemon=True
+            )
+            self.recorder_worker_thread.start()
+
         self.logger.info("Recording Message...")
 
         # Convert the requests and responses to hex.
         self.convert_messages()
         # Get the message in dict / JSON format
         record_message_dict: dict = dict(self.class_client_message)
-        recorded_message_json_string = json.dumps(record_message_dict)
 
-        #
-
-            recorded_message_json_string, self.record_file_path, enable_long_file_path=True, **{'logger': self.logger})
-
-        self.logger.info(f"Recorded to file: {self.record_file_path}")
+        # Put the message in the queue to be processed by the worker thread
+        self.message_queue.put(record_message_dict)
 
         return self.record_file_path
+
+
+def save_message_worker(
+        record_file_path: str,
+        message_queue: queue.Queue,
+        logger
+):
+    """Worker function to process messages from the queue and write them to the file."""
+    while True:
+        # Get a message from the queue
+        record_message_dict = message_queue.get()
+
+        # Check for the "stop" signal
+        if record_message_dict is None:
+            break
+
+        # Read existing data from the file
+        try:
+            with open(record_file_path, 'r') as f:
+                current_json_file = json.load(f)
+        except FileNotFoundError:
+            current_json_file: list = []
+
+        # Append the new message to the existing data
+        final_json_list_of_dicts: list[dict] = []
+        if isinstance(current_json_file, list):
+            current_json_file.append(record_message_dict)
+            final_json_list_of_dicts = current_json_file
+        elif isinstance(current_json_file, dict):
+            final_json_list_of_dicts.append(current_json_file)
+            final_json_list_of_dicts.append(record_message_dict)
+        else:
+            error_message = "The current file is neither a list nor a dictionary."
+            print_api(error_message, logger_method="critical", logger=logger)
+            raise TypeError(error_message)
+
+        # Write the data back to the file
+        jsons.write_json_file(
+            final_json_list_of_dicts, record_file_path, indent=2,
+            enable_long_file_path=True, **{'logger': logger})
+
+        logger.info(f"Recorded to file: {record_file_path}")
+
+        # Indicate task completion
+        message_queue.task_done()
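The new record() no longer writes JSON inline; it only enqueues the message dict, and save_message_worker drains the queue in a background thread, merging each message into the JSON file on disk. A minimal standalone sketch of the same producer/consumer idea, using only the standard library (the file name and field names are illustrative, and the plain json.dump writer stands in for the package's jsons.write_json_file; the None item as stop signal mirrors the check in the worker above):

import json
import queue
import threading

def writer_worker(file_path: str, message_queue: queue.Queue):
    # Drain the queue until a None sentinel arrives, rewriting the JSON list each time.
    while True:
        item = message_queue.get()
        if item is None:          # stop signal, same convention as save_message_worker
            break
        try:
            with open(file_path, 'r') as f:
                records = json.load(f)
        except FileNotFoundError:
            records = []
        records.append(item)
        with open(file_path, 'w') as f:
            json.dump(records, f, indent=2)
        message_queue.task_done()

q: queue.Queue = queue.Queue()
worker = threading.Thread(target=writer_worker, args=("session.json", q), daemon=True)
worker.start()
q.put({"server_name": "example.com", "request_raw_hex": "474554"})
q.put(None)        # ask the worker to exit
worker.join()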
atomicshop/mitm/engines/__reference_general/recorder___reference_general.py CHANGED

@@ -7,7 +7,7 @@ from atomicshop.mitm.message import ClientMessage
 class RecorderGeneral(RecorderParent):
     """The class that is responsible for Recording Requests / Responses"""
     # When initializing main classes through "super" you need to pass parameters to init
-    def __init__(self,
-        super().__init__(
+    def __init__(self, record_path):
+        super().__init__(record_path)
 
         self.logger = create_custom_logger()
atomicshop/mitm/message.py CHANGED

@@ -8,15 +8,18 @@ from ..basics import dicts
 class ClientMessage:
     """ A class that will store all the message details from the client """
     def __init__(self):
-        self.request_raw_bytes: bytearray = bytearray()
         # noinspection PyTypeChecker
-        self.
-
-        self.
+        self.timestamp: datetime = None
+        # noinspection PyTypeChecker
+        self.request_raw_bytes: bytes = None
+        self.request_auto_parsed: Union[http_parse.HTTPRequestParse, any] = None
+        self.request_custom_parsed: any = None
         self.request_raw_hex: hex = None
-
-        self.
-        self.
+        # noinspection PyTypeChecker
+        self.response_raw_bytes: bytes = None
+        self.response_auto_parsed: any = None
+        self.response_custom_parsed: any = None
+        self.response_raw_hex: hex = None
         self.server_name: str = str()
         self.server_ip: str = str()
         self.client_ip: str = str()
@@ -25,26 +28,48 @@ class ClientMessage:
         self.process_name: str = str()
         self.thread_id = None
         self.info: str = str()
-        self.
+        self.errors: list = list()
         self.protocol: str = str()
+        self.protocol2: str = str()
         self.recorded_file_path: str = str()
+        self.action: str = str()
+
+    def reinitialize_dynamic_vars(self):
+        """
+        Reinitialize the dynamic variables of the class for the new cycle.
+        """
+        self.request_raw_bytes = None
+        self.timestamp = None
+        self.request_auto_parsed = None
+        self.request_custom_parsed = None
+        self.request_raw_hex = None
+        self.response_raw_bytes = None
+        self.response_auto_parsed = None
+        self.response_custom_parsed = None
+        self.response_raw_hex = None
+        self.action = None
+        self.info = str()
+        self.errors = list()
+        self.protocol = str()
+        self.protocol2 = str()
+        self.recorded_file_path = str()
 
     def __iter__(self):
         # __dict__ returns a dictionary containing the instance's attributes
         for key, value in self.__dict__.items():
             if key == 'request_raw_bytes':
                 value = str(value)
-            elif key == '
+            elif key == 'timestamp':
                 value = value.strftime('%Y-%m-%d-%H:%M:%S.%f')
-            elif key == '
+            elif key == 'request_auto_parsed':
                 if isinstance(value, http_parse.HTTPRequestParse):
                     value = dicts.convert_complex_object_to_dict(value)
                 else:
                     value = str(value)
-            elif key == '
+            elif key == 'request_custom_parsed':
+                value = dicts.convert_complex_object_to_dict(value)
+            elif key == 'response_raw_bytes':
+                value = str(value)
+            elif key == 'response_auto_parsed':
                 value = dicts.convert_complex_object_to_dict(value)
-            elif key == 'response_list_of_raw_bytes':
-                value = [str(bytes_response) for bytes_response in value]
-            elif key == 'response_list_of_raw_decoded':
-                value = [dicts.convert_complex_object_to_dict(complex_response) for complex_response in value]
             yield key, value
atomicshop/mitm/mitm_main.py CHANGED

@@ -272,7 +272,7 @@ def mitm_server(config_file_path: str):
             time.sleep(1)
             return 1
 
-    dns_thread = threading.Thread(target=dns_server_instance.start)
+    dns_thread = threading.Thread(target=dns_server_instance.start, name="dns_server")
     dns_thread.daemon = True
     dns_thread.start()
 
@@ -376,7 +376,8 @@ def mitm_server(config_file_path: str):
         kwargs={
            'reference_function_name': thread_worker_main,
            'reference_function_args': (network_logger, statistics_writer, engines_list, reference_module,)
-        }
+        },
+        name="accepting_loop"
     )
 
     socket_thread.daemon = True
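Both threads now get explicit name= values ("dns_server" and "accepting_loop"), which makes them easy to tell apart in thread dumps and log records. A tiny illustration of what the name buys you (the serve() target is a placeholder):

import threading
import time

def serve():
    time.sleep(0.1)

t = threading.Thread(target=serve, name="dns_server", daemon=True)
t.start()
print([th.name for th in threading.enumerate()])   # [..., 'dns_server']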
atomicshop/mitm/recs_files.py CHANGED

@@ -7,7 +7,8 @@ from .. import filesystem
 from .. wrappers.loggingw import consts
 
 
-
+REC_FILE_DATE_TIME_MILLISECONDS_FORMAT: str = f'{consts.DEFAULT_ROTATING_SUFFIXES_FROM_WHEN["S"]}_%f'
+REC_FILE_DATE_TIME_FORMAT: str = f'{consts.DEFAULT_ROTATING_SUFFIXES_FROM_WHEN["S"]}'
 REC_FILE_DATE_FORMAT: str = REC_FILE_DATE_TIME_FORMAT.split('_')[0]
 
 
atomicshop/system_resource_monitor.py CHANGED

@@ -111,10 +111,18 @@ class SystemResourceMonitor:
         # The shared results' dictionary.
         self.results: dict = {}
 
-    def start(
+    def start(
+            self,
+            print_kwargs: dict = None,
+            thread_as_daemon: bool = True
+    ):
         """
         Start the monitoring process.
         :param print_kwargs:
+        :param thread_as_daemon: bool, set the thread as daemon. If you're running the monitoring process in the main
+            process, set it to True. If you're running the monitoring process in a separate process, set it to False.
+            In child processes created by multiprocessing.Process, the thread works differently. You might not
+            get the desired result.
         :return:
         """
 
@@ -127,7 +135,7 @@ class SystemResourceMonitor:
                 self.interval, self.get_cpu, self.get_memory, self.get_disk_io_bytes, self.get_disk_files_count,
                 self.get_disk_busy_time, self.get_disk_used_percent, self.calculate_maximum_changed_disk_io,
                 self.maximum_disk_io, self.queue_list, self.manager_dict))
-            self.thread.daemon =
+            self.thread.daemon = thread_as_daemon
             self.thread.start()
         else:
            print_api.print_api("Monitoring is already running.", color='yellow', **print_kwargs)
 
@@ -205,6 +213,7 @@ def start_monitoring(
         calculate_maximum_changed_disk_io: bool = False,
         queue_list: list = None,
         manager_dict: multiprocessing.managers.DictProxy = None,  # multiprocessing.Manager().dict()
+        get_results_thread_as_daemon: bool = True,
         print_kwargs: dict = None
 ):
     """
 
@@ -235,6 +244,10 @@ def start_monitoring(
        multiprocessing.Process(
            target=system_resource_monitor.start_monitoring, kwargs={'manager_dict': shared_dict}).start()
 
+    :param get_results_thread_as_daemon: bool, set the thread as daemon. If you're running the monitoring process in the
+        main process, set it to True. If you're running the monitoring process in a separate process, set it to False.
+        In child processes created by multiprocessing.Process, the thread works differently.
+        You might not get the desired result.
     :param print_kwargs: dict, print kwargs.
     :return:
     """
 
@@ -257,7 +270,7 @@ def start_monitoring(
            queue_list=queue_list,
            manager_dict=manager_dict
        )
-        SYSTEM_RESOURCES_MONITOR.start()
+        SYSTEM_RESOURCES_MONITOR.start(thread_as_daemon=get_results_thread_as_daemon)
    else:
        print_api.print_api("System resources monitoring is already running.", color='yellow', **(print_kwargs or {}))
 
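The new thread_as_daemon / get_results_thread_as_daemon flags exist because a daemon results thread dies together with its parent, which is fine in the main process but can cut monitoring short when start_monitoring runs inside a child multiprocessing.Process. A sketch that follows the docstring's own multiprocessing example, with the new flag switched off for the child process (assuming the module is importable as atomicshop.system_resource_monitor):

import multiprocessing
from atomicshop import system_resource_monitor

if __name__ == "__main__":
    shared_dict = multiprocessing.Manager().dict()
    # In a child process the results thread should not be a daemon,
    # otherwise it may stop as soon as the child's main thread returns.
    multiprocessing.Process(
        target=system_resource_monitor.start_monitoring,
        kwargs={
            'manager_dict': shared_dict,
            'get_results_thread_as_daemon': False,
        },
    ).start()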
atomicshop/wrappers/socketw/base.py CHANGED

@@ -70,3 +70,20 @@ def get_default_ip_address() -> str:
     :return: string.
     """
     return socket.gethostbyname(socket.gethostname())
+
+
+def is_socket_closed(socket_object) -> bool:
+    """
+    Check if the socket is closed.
+    :param socket_object: socket object or ssl socket object.
+    :return: bool.
+    """
+    try:
+        # If the socket is closed, the fileno() method will raise an exception or return -1.
+
+        if socket_object.fileno() == -1:
+            return True
+        else:
+            return False
+    except socket.error:
+        return False
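is_socket_closed() leans on the fact that a closed socket's fileno() returns -1 (or raises). A quick standard-library check of that behaviour, independent of the wrapper module:

import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print(s.fileno() == -1)   # False while the socket is open
s.close()
print(s.fileno() == -1)   # True after close(), which is what is_socket_closed() detects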
atomicshop/wrappers/socketw/creator.py CHANGED

@@ -1,3 +1,4 @@
+import os
 import socket
 import ssl
 
@@ -33,8 +34,31 @@ def create_ssl_context_for_server():
     # return ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
 
 
-def create_ssl_context_for_client(
-
+def create_ssl_context_for_client(
+        enable_sslkeylogfile_env_to_client_ssl_context: bool = False
+) -> ssl.SSLContext:
+    """
+    This function creates the SSL context for the client.
+    The SSL context is created with the "PROTOCOL_TLS_CLIENT" protocol.
+
+    :param enable_sslkeylogfile_env_to_client_ssl_context: boolean, enables the SSLKEYLOGFILE environment variable
+        to the SSL context. Default is False.
+        if True, SSLKEYLOGFILE will be added to SSL context with:
+            ssl_context.keylog_filename = os.environ.get('SSLKEYLOGFILE')
+        This is useful for debugging SSL/TLS connections with WireShark.
+        Since WireShark also uses this environment variable to read the key log file and apply to the SSL/TLS
+        connections, so you can see the decrypted traffic.
+
+    :return: ssl.SSLContext
+    """
+    ssl_context: ssl.SSLContext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+
+    if enable_sslkeylogfile_env_to_client_ssl_context:
+        ssl_key_logfile = os.environ.get('SSLKEYLOGFILE')
+        if ssl_key_logfile:
+            ssl_context.keylog_filename = ssl_key_logfile
+
+    return ssl_context
 
 
 def set_client_ssl_context_ca_default_certs(ssl_context):
@@ -204,7 +228,8 @@ def set_listen_on_socket(socket_object, **kwargs):
 def wrap_socket_with_ssl_context_client___default_certs___ignore_verification(
         socket_object,
         server_hostname: str = None,
-        custom_pem_client_certificate_file_path: str = None
+        custom_pem_client_certificate_file_path: str = None,
+        enable_sslkeylogfile_env_to_client_ssl_context: bool = False
 ):
     """
     This function is a preset for wrapping the socket with SSL context for the client.
 
@@ -214,8 +239,11 @@ def wrap_socket_with_ssl_context_client___default_certs___ignore_verification(
     :param server_hostname: string, hostname of the server. Default is None.
     :param custom_pem_client_certificate_file_path: string, full file path for the client certificate PWM file.
         Default is None.
+    :param enable_sslkeylogfile_env_to_client_ssl_context: boolean, enables the SSLKEYLOGFILE environment variable
+        to the SSL context. Default is False.
     """
-    ssl_context: ssl.SSLContext = create_ssl_context_for_client(
+    ssl_context: ssl.SSLContext = create_ssl_context_for_client(
+        enable_sslkeylogfile_env_to_client_ssl_context=enable_sslkeylogfile_env_to_client_ssl_context)
     set_client_ssl_context_ca_default_certs(ssl_context)
     set_client_ssl_context_certificate_verification_ignore(ssl_context)
 
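With enable_sslkeylogfile_env_to_client_ssl_context=True the client context honours the same SSLKEYLOGFILE environment variable that Wireshark reads, so captured TLS sessions can be decrypted. A short sketch of the underlying mechanism using only the ssl module (the key log path is an example; point Wireshark at the same file under its TLS protocol settings):

import os
import ssl

# The variable must be set before the context is created; Wireshark reads the same file.
os.environ.setdefault('SSLKEYLOGFILE', '/tmp/sslkeys.log')

ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ssl_key_logfile = os.environ.get('SSLKEYLOGFILE')
if ssl_key_logfile:
    # Every handshake made with this context appends its session secrets to the file
    # (requires Python 3.8+ built against OpenSSL 1.1.1 or newer).
    ssl_context.keylog_filename = ssl_key_logfile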