pyproxytools 0.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,197 @@
1
+ """
2
+ pyproxy.handlers.http.py
3
+
4
+ This module defines the HttpHandler class used by the proxy server to process
5
+ HTTP client connections. It handles request forwarding, blocking, and custom headers.
6
+ """
7
+
8
+ import socket
9
+ import os
10
+ import threading
11
+
12
+ from pyproxy.utils.http_req import extract_headers, parse_url
13
+
14
+
15
class HttpHandler:
    """
    HttpHandler manages client HTTP connections for a proxy server,
    handling request forwarding, filtering, blocking, and custom header modification
    based on configuration settings.
    """

    def __init__(
        self,
        html_403,
        logger_config,
        filter_config,
        filter_queue,
        filter_result_queue,
        shortcuts_queue,
        shortcuts_result_queue,
        custom_header_queue,
        custom_header_result_queue,
        console_logger,
        shortcuts,
        custom_header,
        active_connections,
        proxy_enable,
        proxy_host,
        proxy_port,
    ):
        # Path to the HTML page served on blocked (403) requests.
        self.html_403 = html_403
        self.logger_config = logger_config
        self.filter_config = filter_config
        # Queues used to talk to the filter / shortcuts / custom-header
        # worker processes (request goes in, verdict comes back).
        self.filter_queue = filter_queue
        self.filter_result_queue = filter_result_queue
        self.shortcuts_queue = shortcuts_queue
        self.shortcuts_result_queue = shortcuts_result_queue
        self.custom_header_queue = custom_header_queue
        self.custom_header_result_queue = custom_header_result_queue
        self.console_logger = console_logger
        # File paths of the shortcuts / custom-header config files (may be falsy).
        self.config_shortcuts = shortcuts
        self.config_custom_header = custom_header
        self.proxy_enable = proxy_enable
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        # Shared mapping of thread id -> per-connection stats dict.
        self.active_connections = active_connections

    def handle_http_request(self, client_socket, request):
        """
        Processes an HTTP request, checks for URL filtering, applies shortcuts,
        and forwards the request to the target server if not blocked.

        Args:
            client_socket (socket): The socket object for the client connection.
            request (bytes): The raw HTTP request sent by the client.
        """
        # Decode once up front instead of re-decoding the raw bytes in every branch.
        request_text = request.decode(errors="ignore")
        first_line = request_text.split("\n")[0]
        request_parts = first_line.split(" ")
        if len(request_parts) < 2:
            # Malformed request line (no URL token): nothing sensible to
            # forward, and indexing [1] would raise IndexError. Drop it.
            client_socket.close()
            self.active_connections.pop(threading.get_ident(), None)
            return
        url = request_parts[1]

        if self.config_custom_header and os.path.isfile(self.config_custom_header):
            # Ask the custom-header worker for extra headers for this URL and
            # merge them over the client's original headers.
            headers = extract_headers(request_text)
            self.custom_header_queue.put(url)
            new_headers = self.custom_header_result_queue.get(timeout=5)
            headers.update(new_headers)

        if self.config_shortcuts and os.path.isfile(self.config_shortcuts):
            # Shortcuts map a domain to a redirect target; answer with a 302
            # instead of forwarding when one matches.
            domain, _ = parse_url(url)
            self.shortcuts_queue.put(domain)
            shortcut_url = self.shortcuts_result_queue.get(timeout=5)
            if shortcut_url:
                response = (
                    f"HTTP/1.1 302 Found\r\n"
                    f"Location: {shortcut_url}\r\n"
                    f"Content-Length: 0\r\n"
                    "\r\n"
                )
                client_socket.sendall(response.encode())
                client_socket.close()
                self.active_connections.pop(threading.get_ident(), None)
                return

        if not self.filter_config.no_filter:
            # Ask the filter worker whether this URL is blocked.
            self.filter_queue.put(url)
            result = self.filter_result_queue.get(timeout=5)
            if result[1] == "Blocked":
                if not self.logger_config.no_logging_block:
                    self.logger_config.block_logger.info(
                        "%s - %s - %s", client_socket.getpeername()[0], url, first_line
                    )
                with open(self.html_403, "r", encoding="utf-8") as f:
                    custom_403_page = f.read()
                # Content-Length must be the encoded byte length, not the
                # character count, or non-ASCII 403 pages are truncated.
                body_bytes = custom_403_page.encode()
                head = (
                    f"HTTP/1.1 403 Forbidden\r\n"
                    f"Content-Length: {len(body_bytes)}\r\n"
                    f"\r\n"
                )
                client_socket.sendall(head.encode() + body_bytes)
                client_socket.close()
                self.active_connections.pop(threading.get_ident(), None)
                return

        server_host, _ = parse_url(url)
        if not self.logger_config.no_logging_access:
            self.logger_config.access_logger.info(
                "%s - %s - %s",
                client_socket.getpeername()[0],
                f"http://{server_host}",
                first_line,
            )

        if self.config_custom_header and os.path.isfile(self.config_custom_header):
            # Rebuild the request with the merged header set, preserving the
            # original request line and body.
            request_lines = request_text.split("\r\n")
            request_line = request_lines[0]  # e.g. "GET / HTTP/1.1"

            header_lines = [f"{key}: {value}" for key, value in headers.items()]
            reconstructed_headers = "\r\n".join(header_lines)

            if "\r\n\r\n" in request_text:
                body = request_text.split("\r\n\r\n", 1)[1]
            else:
                body = ""

            modified_request = (
                f"{request_line}\r\n{reconstructed_headers}\r\n\r\n{body}".encode()
            )

            self.forward_request_to_server(client_socket, modified_request, url)
        else:
            self.forward_request_to_server(client_socket, request, url)

    def forward_request_to_server(self, client_socket, request, url):
        """
        Forwards the HTTP request to the target server and sends the response back to the client.

        Args:
            client_socket (socket): The socket object for the client connection.
            request (bytes): The raw HTTP request sent by the client.
            url (str): The target URL from the HTTP request.
        """
        if self.proxy_enable:
            # Chain through the configured upstream proxy instead of the origin.
            server_host, server_port = self.proxy_host, self.proxy_port
        else:
            server_host, server_port = parse_url(url)
        thread_id = threading.get_ident()

        if thread_id in self.active_connections:
            self.active_connections[thread_id]["target_ip"] = server_host
            self.active_connections[thread_id]["target_port"] = server_port

        # Pre-bind so the finally block cannot hit an unbound name if socket
        # creation itself raises.
        server_socket = None
        # The thread may not be registered; use .get so byte counters never
        # raise an uncaught KeyError mid-transfer.
        conn = self.active_connections.get(thread_id)
        try:
            server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            # Set the timeout before connect so a dead host bounds connect()
            # as well as the recv loop below.
            server_socket.settimeout(5)
            server_socket.connect((server_host, server_port))
            server_socket.sendall(request)
            if conn is not None:
                conn["bytes_sent"] += len(request)

            # Relay the response until the server closes or goes quiet for 5s.
            while True:
                try:
                    response = server_socket.recv(4096)
                    if not response:
                        break
                    # sendall, not send: send() may write only part of the chunk.
                    client_socket.sendall(response)
                    if conn is not None:
                        conn["bytes_received"] += len(response)
                except socket.timeout:
                    break
        except (socket.timeout, socket.gaierror, ConnectionRefusedError, OSError) as e:
            self.console_logger.error(
                "Error connecting to the server %s : %s", server_host, e
            )
            response = (
                f"HTTP/1.1 502 Bad Gateway\r\n"
                f"Content-Length: {len('Bad Gateway')} \r\n"
                "\r\n"
                f"Bad Gateway"
            )
            client_socket.sendall(response.encode())
        finally:
            # Single cleanup path for both success and failure.
            client_socket.close()
            if server_socket is not None:
                server_socket.close()
            self.active_connections.pop(thread_id, None)
@@ -0,0 +1,308 @@
1
+ """
2
+ pyproxy.handlers.https.py
3
+
4
+ This class handles HTTPS CONNECT requests, applies filtering rules, supports SSL inspection,
5
+ generates certificates dynamically, and logs access and blocked attempts. It can also
6
+ relay raw data when SSL inspection is disabled.
7
+ """
8
+
9
+ import socket
10
+ import select
11
+ import os
12
+ import ssl
13
+ import threading
14
+
15
+ from pyproxy.utils.crypto import generate_certificate
16
+
17
+
18
class HttpsHandler:
    """
    Handles HTTPS client connections for a proxy server.

    Supports SSL interception, filtering of targets, and custom logging. This handler
    processes HTTPS `CONNECT` requests and either tunnels them directly to the destination
    or performs SSL interception for inspection and filtering.
    """

    def __init__(
        self,
        html_403,
        logger_config,
        filter_config,
        ssl_config,
        filter_queue,
        filter_result_queue,
        shortcuts_queue,
        shortcuts_result_queue,
        cancel_inspect_queue,
        cancel_inspect_result_queue,
        custom_header_queue,
        custom_header_result_queue,
        console_logger,
        shortcuts,
        custom_header,
        active_connections,
        proxy_enable,
        proxy_host,
        proxy_port,
    ):
        # Path to the HTML page served on blocked (403) requests.
        self.html_403 = html_403
        self.logger_config = logger_config
        self.filter_config = filter_config
        # SSL inspection settings: ssl_inspect flag, CA cert/key, cert cache
        # folder and the optional cancel-inspect exclusion file.
        self.ssl_config = ssl_config
        # Queues used to talk to the filter / shortcuts / cancel-inspect /
        # custom-header worker processes.
        self.filter_queue = filter_queue
        self.filter_result_queue = filter_result_queue
        self.shortcuts_queue = shortcuts_queue
        self.shortcuts_result_queue = shortcuts_result_queue
        self.cancel_inspect_queue = cancel_inspect_queue
        self.cancel_inspect_result_queue = cancel_inspect_result_queue
        self.custom_header_queue = custom_header_queue
        self.custom_header_result_queue = custom_header_result_queue
        self.console_logger = console_logger
        # File paths of the shortcuts / custom-header config files (may be falsy).
        self.config_shortcuts = shortcuts
        self.config_custom_header = custom_header
        self.proxy_enable = proxy_enable
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        # Shared mapping of thread id -> per-connection stats dict.
        self.active_connections = active_connections

    def handle_https_connection(self, client_socket, first_line):
        """
        Handles HTTPS connections by establishing a connection with the target server
        and relaying data between the client and server.

        Args:
            client_socket (socket): The socket object for the client connection.
            first_line (str): The first line of the CONNECT request from the client.
        """
        # CONNECT request line looks like "CONNECT host:port HTTP/1.1".
        target = first_line.split(" ")[1]
        server_host, server_port = target.split(":")
        server_port = int(server_port)

        if not self.filter_config.no_filter:
            # Pre-handshake filter on the raw host:port target.
            self.filter_queue.put(target)
            result = self.filter_result_queue.get(timeout=5)
            if result[1] == "Blocked":
                if not self.logger_config.no_logging_block:
                    self.logger_config.block_logger.info(
                        "%s - %s - %s",
                        client_socket.getpeername()[0],
                        target,
                        first_line,
                    )
                with open(self.html_403, "r", encoding="utf-8") as f:
                    custom_403_page = f.read()
                # NOTE(review): Content-Length uses the character count; for a
                # non-ASCII 403 page the byte length would differ — confirm.
                response = (
                    f"HTTP/1.1 403 Forbidden\r\n"
                    f"Content-Length: {len(custom_403_page)}\r\n"
                    f"\r\n"
                    f"{custom_403_page}"
                )
                client_socket.sendall(response.encode())
                client_socket.close()
                self.active_connections.pop(threading.get_ident(), None)
                return

        # Ask the cancel-inspect worker whether this host is excluded from
        # SSL interception (only when inspection is enabled and the exclusion
        # file exists).
        not_inspect = False
        if (
            self.ssl_config.ssl_inspect
            and self.ssl_config.cancel_inspect
            and os.path.isfile(self.ssl_config.cancel_inspect)
        ):
            self.cancel_inspect_queue.put(server_host)
            not_inspect = self.cancel_inspect_result_queue.get(timeout=5)

        if self.ssl_config.ssl_inspect and not not_inspect:
            # --- SSL interception path: terminate TLS on both sides. ---
            # Mint (or reuse) a per-host certificate signed by the proxy CA.
            cert_path, key_path = generate_certificate(
                server_host,
                self.ssl_config.inspect_certs_folder,
                self.ssl_config.inspect_ca_cert,
                self.ssl_config.inspect_ca_key,
            )
            client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
            client_context.load_cert_chain(certfile=cert_path, keyfile=key_path)
            # Disable legacy protocols; only TLS 1.2+ is accepted from clients.
            client_context.options |= (
                ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1
            )
            client_context.load_verify_locations(self.ssl_config.inspect_ca_cert)

            try:
                # Tell the client the tunnel is up, then immediately start the
                # TLS handshake on that same socket using the forged cert.
                client_socket.sendall(b"HTTP/1.1 200 Connection Established\r\n\r\n")
                ssl_client_socket = client_context.wrap_socket(
                    client_socket, server_side=True, do_handshake_on_connect=False
                )
                ssl_client_socket.do_handshake()

                if self.proxy_enable:
                    # Chain through the upstream proxy: open a plain TCP
                    # connection and issue our own CONNECT for the real target.
                    next_proxy_socket = socket.create_connection(
                        (self.proxy_host, self.proxy_port)
                    )
                    connect_command = (
                        f"CONNECT {server_host}:{server_port} HTTP/1.1\r\n"
                        f"Host: {server_host}:{server_port}\r\n\r\n"
                    )
                    next_proxy_socket.sendall(connect_command.encode())

                    # Read until the end of the upstream proxy's response headers.
                    response = b""
                    while b"\r\n\r\n" not in response:
                        chunk = next_proxy_socket.recv(4096)
                        if not chunk:
                            raise ConnectionError("Connection to next proxy failed")
                        response += chunk

                    if b"200 Connection Established" not in response:
                        raise ConnectionRefusedError("Next proxy refused CONNECT")

                    server_socket = next_proxy_socket
                else:
                    server_socket = socket.create_connection((server_host, server_port))

                server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
                if self.proxy_enable:
                    # Through an upstream proxy the certificate cannot be
                    # verified against the tunnel endpoint, so verification is
                    # disabled on this leg.
                    server_context.check_hostname = False
                    server_context.verify_mode = ssl.CERT_NONE
                else:
                    server_context.load_default_certs()

                ssl_server_socket = server_context.wrap_socket(
                    server_socket,
                    server_hostname=server_host,
                    do_handshake_on_connect=True,
                )

                try:
                    # Peek at the first decrypted request so the full URL
                    # (host + path) can be filtered and logged.
                    first_request = ssl_client_socket.recv(4096).decode(errors="ignore")
                    request_line = first_request.split("\r\n")[0]
                    method, path, _ = request_line.split(" ")

                    full_url = f"https://{server_host}{path}"

                    if not self.filter_config.no_filter:
                        # Second, post-handshake filter pass on host+path.
                        self.filter_queue.put(f"{server_host}{path}")
                        result = self.filter_result_queue.get(timeout=5)
                        if result[1] == "Blocked":
                            if not self.logger_config.no_logging_block:
                                self.logger_config.block_logger.info(
                                    "%s - %s - %s",
                                    ssl_client_socket.getpeername()[0],
                                    target,
                                    first_line,
                                )
                            with open(self.html_403, "r", encoding="utf-8") as f:
                                custom_403_page = f.read()
                            response = (
                                f"HTTP/1.1 403 Forbidden\r\n"
                                f"Content-Length: {len(custom_403_page)}\r\n"
                                f"\r\n"
                                f"{custom_403_page}"
                            )
                            ssl_client_socket.sendall(response.encode())
                            ssl_client_socket.close()
                            # NOTE(review): ssl_server_socket is not closed on
                            # this early return — possible socket leak; confirm.
                            self.active_connections.pop(threading.get_ident(), None)
                            return

                    if not self.logger_config.no_logging_access:
                        self.logger_config.access_logger.info(
                            "%s - %s - %s %s",
                            ssl_client_socket.getpeername()[0],
                            f"https://{server_host}",
                            method,
                            full_url,
                        )

                    # Replay the buffered first request to the real server.
                    ssl_server_socket.sendall(first_request.encode())

                except ValueError:
                    # Raised by the 3-way unpack when the request line is not
                    # "METHOD PATH VERSION"; relaying still proceeds below.
                    self.console_logger.error(
                        "Error parsing request: malformed request line."
                    )

                except (socket.error, ssl.SSLError) as e:
                    self.console_logger.error("Network or SSL error : %s", str(e))

                # Bidirectional relay of the decrypted streams.
                self.transfer_data_between_sockets(ssl_client_socket, ssl_server_socket)

            except ssl.SSLError as e:
                self.console_logger.error("SSL error: %s", str(e))
            except socket.error as e:
                self.console_logger.error("Socket error: %s", str(e))
            finally:
                client_socket.close()
                self.active_connections.pop(threading.get_ident(), None)

        else:
            # --- Plain tunnel path: no interception, relay raw bytes. ---
            try:
                server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                server_socket.connect((server_host, server_port))
                client_socket.sendall(b"HTTP/1.1 200 Connection Established\r\n\r\n")
                if not self.logger_config.no_logging_access:
                    self.logger_config.access_logger.info(
                        "%s - %s - %s",
                        client_socket.getpeername()[0],
                        f"https://{server_host}",
                        first_line,
                    )
                self.transfer_data_between_sockets(client_socket, server_socket)
            except (
                socket.timeout,
                socket.gaierror,
                ConnectionRefusedError,
                OSError,
            ) as e:
                self.console_logger.error(
                    "Error connecting to the server %s: %s", server_host, e
                )
                response = (
                    f"HTTP/1.1 502 Bad Gateway\r\n"
                    f"Content-Length: {len('Bad Gateway')} \r\n"
                    f"\r\n"
                    f"Bad Gateway"
                )
                client_socket.sendall(response.encode())
                client_socket.close()

    def transfer_data_between_sockets(self, client_socket, server_socket):
        """
        Transfers data between the client socket and server socket.

        Args:
            client_socket (socket): The socket object for the client connection.
            server_socket (socket): The socket object for the server connection.
        """
        sockets = [client_socket, server_socket]
        thread_id = threading.get_ident()

        # Record the real target endpoint once, if the caller has not already
        # filled it in (the HTTP handler path sets it before connecting).
        if (
            thread_id in self.active_connections
            and "target_ip" not in self.active_connections[thread_id]
        ):
            try:
                target_ip, target_port = server_socket.getpeername()
                self.active_connections[thread_id]["target_ip"] = target_ip
                self.active_connections[thread_id]["target_port"] = target_port
            except OSError as e:
                self.console_logger.debug("Could not get peer name: %s", e)

        try:
            # Pump bytes in both directions until either side closes.
            while True:
                # 1s select timeout so the loop re-polls rather than blocking forever.
                readable, _, _ = select.select(sockets, [], [], 1)
                for sock in readable:
                    data = sock.recv(4096)
                    if len(data) == 0:
                        # Peer closed: tear down both sides and deregister.
                        self.console_logger.debug("Closing connection.")
                        client_socket.close()
                        server_socket.close()
                        self.active_connections.pop(threading.get_ident(), None)
                        return
                    if sock is client_socket:
                        server_socket.sendall(data)
                        # NOTE(review): raises KeyError if thread_id is not in
                        # active_connections (only guarded above for target_ip);
                        # the except below does not catch KeyError — confirm.
                        self.active_connections[thread_id]["bytes_sent"] += len(data)
                    else:
                        client_socket.sendall(data)
                        self.active_connections[thread_id]["bytes_received"] += len(
                            data
                        )
        except (socket.error, OSError):
            # Any transport error ends the relay; close both ends and deregister.
            client_socket.close()
            server_socket.close()
            self.active_connections.pop(threading.get_ident(), None)
File without changes
@@ -0,0 +1,83 @@
1
+ """
2
+ pyproxy.modules.cancel_inspect.py
3
+
4
+ This module contains functions and a process to load and monitor cancel inspection entries.
5
+ It reads a file containing cancel inspection data and checks whether specific entries exist
6
+ in that file. The file is monitored in a background thread for live updates.
7
+
8
+ Functions:
9
+ - load_cancel_inspect: Loads the cancel inspection list from a file into a list.
10
+ - cancel_inspect_process: Process that listens for URL-like entries and checks
11
+ if they exist in the cancel inspection list.
12
+ """
13
+
14
+ import multiprocessing
15
+ import time
16
+ import sys
17
+ import threading
18
+
19
+
20
def load_cancel_inspect(cancel_inspect_path: str) -> list:
    """
    Loads cancel inspection entries from a file into a list.

    Each line is stripped of surrounding whitespace (including the trailing
    newline) so entries compare equal to bare hostnames; blank lines are
    skipped. Previously the raw line (newline included) was stored, so
    membership tests against a plain hostname could never match.

    Args:
        cancel_inspect_path (str): The path to the file containing the entries.

    Returns:
        list: A list containing each non-empty, stripped line from the file.
    """
    cancel_inspect = []

    with open(cancel_inspect_path, "r", encoding="utf-8") as f:
        for line in f:
            entry = line.strip()
            if entry:  # ignore blank lines
                cancel_inspect.append(entry)

    return cancel_inspect
37
+
38
+
39
def cancel_inspect_process(
    queue: multiprocessing.Queue,
    result_queue: multiprocessing.Queue,
    cancel_inspect_path: str,
) -> None:
    """
    Process that monitors the cancel inspection file and checks if received entries exist in it.

    A daemon thread reloads the file every five seconds into a shared list;
    the main loop answers membership queries until interrupted.

    Args:
        queue (multiprocessing.Queue): A queue to receive entries to check.
        result_queue (multiprocessing.Queue): A queue to send back True/False depending on match.
        cancel_inspect_path (str): Path to the file containing cancel inspection entries.
    """
    state_manager = multiprocessing.Manager()
    entries = state_manager.list(load_cancel_inspect(cancel_inspect_path))

    monitor_failed = threading.Event()

    def watch_file() -> None:
        # Reload the whole list every 5 seconds; flag the main loop on error.
        try:
            while True:
                entries[:] = load_cancel_inspect(cancel_inspect_path)
                time.sleep(5)
        except (IOError, ValueError) as e:
            print(f"File monitor error: {e}")
            monitor_failed.set()

    threading.Thread(target=watch_file, daemon=True).start()

    while True:
        if monitor_failed.is_set():
            print("Error detected in file monitor thread, terminating process.")
            sys.exit(1)

        try:
            # Answer each query with a plain membership test.
            candidate = queue.get()
            result_queue.put(candidate in entries)
        except KeyboardInterrupt:
            break
@@ -0,0 +1,78 @@
1
+ """
2
+ pyproxy.modules.custom_header.py
3
+
4
+ This module contains functions and a process to load and monitor custom header entries.
5
+ It reads a file with custom header data and checks if specific entries exist in it.
6
+ The file is monitored in a background thread for live updates.
7
+
8
+ Functions:
9
+ - load_custom_header: Loads custom header entries from a file into a list.
10
+ - custom_header_process: Process that listens for header-like entries and checks
11
+ if they exist in the custom header list.
12
+ """
13
+
14
+ import multiprocessing
15
+ import time
16
+ import sys
17
+ import threading
18
+ import json
19
+
20
+
21
def load_custom_header(custom_header_path: str) -> dict:
    """
    Read the custom-header definition file and return its parsed contents.

    Args:
        custom_header_path (str): Path to the JSON file holding the custom headers.

    Returns:
        dict: The custom header data parsed from the file.
    """
    with open(custom_header_path, "r", encoding="utf-8") as handle:
        data = json.load(handle)
    return data
33
+
34
+
35
def custom_header_process(
    queue: multiprocessing.Queue,
    result_queue: multiprocessing.Queue,
    custom_header_path: str,
) -> None:
    """
    Process that monitors the custom header file and checks if received entries exist in it.

    A daemon thread reloads the file every five seconds into a shared dict;
    the main loop answers lookups (URL -> header dict) until interrupted.

    Args:
        queue (multiprocessing.Queue): A queue to receive header-like entries to check.
        result_queue (multiprocessing.Queue): A queue to send back True/False depending on match.
        custom_header_path (str): Path to the file containing custom header entries.
    """
    state_manager = multiprocessing.Manager()
    header_map = state_manager.dict(load_custom_header(custom_header_path))

    monitor_failed = threading.Event()

    def watch_file() -> None:
        # Refresh the shared mapping every 5 seconds; flag the main loop on error.
        try:
            while True:
                refreshed = load_custom_header(custom_header_path)
                header_map.clear()
                header_map.update(refreshed)
                time.sleep(5)
        except (IOError, ValueError) as e:
            print(f"File monitor error: {e}")
            monitor_failed.set()

    threading.Thread(target=watch_file, daemon=True).start()

    while True:
        if monitor_failed.is_set():
            print("Error detected in file monitor thread, terminating process.")
            sys.exit(1)

        try:
            # Reply with the headers configured for this URL ({} when absent).
            lookup_url = queue.get()
            result_queue.put(header_map.get(lookup_url, {}))
        except KeyboardInterrupt:
            break