pyproxytools 0.3.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- benchmark/benchmark.py +165 -0
- benchmark/utils/__init__.py +0 -0
- benchmark/utils/html.py +179 -0
- benchmark/utils/req.py +43 -0
- pyproxy/__init__.py +13 -0
- pyproxy/handlers/__init__.py +0 -0
- pyproxy/handlers/client.py +126 -0
- pyproxy/handlers/http.py +197 -0
- pyproxy/handlers/https.py +308 -0
- pyproxy/modules/__init__.py +0 -0
- pyproxy/modules/cancel_inspect.py +83 -0
- pyproxy/modules/custom_header.py +78 -0
- pyproxy/modules/filter.py +151 -0
- pyproxy/modules/shortcuts.py +85 -0
- pyproxy/monitoring/__init__.py +0 -0
- pyproxy/monitoring/web.py +279 -0
- pyproxy/pyproxy.py +107 -0
- pyproxy/server.py +334 -0
- pyproxy/utils/__init__.py +0 -0
- pyproxy/utils/args.py +176 -0
- pyproxy/utils/config.py +110 -0
- pyproxy/utils/crypto.py +52 -0
- pyproxy/utils/http_req.py +53 -0
- pyproxy/utils/logger.py +46 -0
- pyproxy/utils/version.py +0 -0
- pyproxytools-0.3.2.dist-info/METADATA +130 -0
- pyproxytools-0.3.2.dist-info/RECORD +40 -0
- pyproxytools-0.3.2.dist-info/WHEEL +5 -0
- pyproxytools-0.3.2.dist-info/entry_points.txt +2 -0
- pyproxytools-0.3.2.dist-info/licenses/LICENSE +21 -0
- pyproxytools-0.3.2.dist-info/top_level.txt +3 -0
- tests/modules/__init__.py +0 -0
- tests/modules/test_cancel_inspect.py +67 -0
- tests/modules/test_custom_header.py +70 -0
- tests/modules/test_filter.py +185 -0
- tests/modules/test_shortcuts.py +119 -0
- tests/utils/__init__.py +0 -0
- tests/utils/test_crypto.py +110 -0
- tests/utils/test_http_req.py +69 -0
- tests/utils/test_logger.py +68 -0
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
"""
|
|
2
|
+
pyproxy.modules.filter.py
|
|
3
|
+
|
|
4
|
+
This module contains functions and a process to filter and block domains and URLs.
|
|
5
|
+
It loads blocked domain names and URLs from specified files, then listens for
|
|
6
|
+
incoming requests to check if the domain or URL should be blocked.
|
|
7
|
+
|
|
8
|
+
Functions:
|
|
9
|
+
- load_blacklist: Loads blocked FQDNs and URLs from files into sets for fast lookup.
|
|
10
|
+
- filter_process: The process that checks whether a domain or URL is blocked.
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import multiprocessing
|
|
14
|
+
import time
|
|
15
|
+
import sys
|
|
16
|
+
import threading
|
|
17
|
+
from urllib.parse import urlparse
|
|
18
|
+
import requests
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def load_blacklist(
    blocked_sites_path: str, blocked_url_path: str, filter_mode: str
) -> tuple:
    """
    Load blocked FQDNs and URLs from files or HTTP sources into sets for fast lookup.

    Args:
        blocked_sites_path (str): The path or URL to the file containing blocked FQDNs.
        blocked_url_path (str): The path or URL to the file containing blocked URLs.
        filter_mode (str): "local" to read from local files, "http" to fetch over
            HTTP. Any other value yields two empty sets.

    Returns:
        tuple: ``(blocked_sites, blocked_urls)`` — two sets of blocked entries.
            (The original docstring/annotation said ``set``, but a 2-tuple has
            always been returned; callers index ``[0]``/``[1]`` or unpack.)

    Raises:
        requests.exceptions.RequestException: If an HTTP source cannot be fetched.
    """
    blocked_sites = set()
    blocked_url = set()

    def _clean(lines) -> set:
        # Strip whitespace and drop blank entries: an empty string in the set
        # would make every startswith("") check downstream match, i.e. a single
        # blank line in the list file would block ALL traffic.
        return {entry for entry in (line.strip() for line in lines) if entry}

    def load_from_file(file_path: str) -> set:
        with open(file_path, "r", encoding="utf-8") as f:
            return _clean(f)

    def load_from_http(url: str) -> set:
        try:
            response = requests.get(url, timeout=3)
            response.raise_for_status()
        except requests.exceptions.RequestException as e:
            # Re-raise with the source URL in the message; chain the original
            # exception so the root cause is preserved in tracebacks.
            raise requests.exceptions.RequestException(
                f"Failed to load data from {url}: {e}"
            ) from e
        return _clean(response.text.splitlines())

    if filter_mode == "local":
        blocked_sites = load_from_file(blocked_sites_path)
        blocked_url = load_from_file(blocked_url_path)
    elif filter_mode == "http":
        blocked_sites = load_from_http(blocked_sites_path)
        blocked_url = load_from_http(blocked_url_path)

    return blocked_sites, blocked_url
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def filter_process(
    queue: multiprocessing.Queue,
    result_queue: multiprocessing.Queue,
    filter_mode: str,
    blocked_sites_path: str,
    blocked_url_path: str,
    refresh_interval=5,
) -> None:
    """
    Process that listens for requests and checks if the domain/URL should be blocked.

    Args:
        queue (multiprocessing.Queue): A queue to receive URL/domain for checking.
        result_queue (multiprocessing.Queue): A queue to send back the result of
            the filtering as a ``(subject, "Blocked"|"Allowed")`` tuple.
        filter_mode (str): Filter list mode ("local" or "http").
        blocked_sites_path (str): The path to the file containing blocked FQDNs.
        blocked_url_path (str): The path to the file containing blocked URLs.
        refresh_interval (int): Interval in seconds to reload the blacklist files.
    """
    manager = multiprocessing.Manager()
    # Load both lists with a single call; the original called load_blacklist()
    # once per dict key, reading every source twice at startup.
    initial_sites, initial_urls = load_blacklist(
        blocked_sites_path, blocked_url_path, filter_mode
    )
    blocked_data = manager.dict({"sites": initial_sites, "urls": initial_urls})

    error_event = threading.Event()

    def file_monitor() -> None:
        # Periodically reload the blacklists so edits are picked up while the
        # proxy is running; on I/O failure, signal the main loop to terminate.
        try:
            while True:
                new_blocked_sites, new_blocked_url = load_blacklist(
                    blocked_sites_path, blocked_url_path, filter_mode
                )
                blocked_data["sites"] = new_blocked_sites
                blocked_data["urls"] = new_blocked_url

                time.sleep(refresh_interval)
        except (IOError, ValueError) as e:
            print(f"File monitor error: {e}")
            error_event.set()

    monitor_thread = threading.Thread(target=file_monitor, daemon=True)
    monitor_thread.start()

    while True:
        if error_event.is_set():
            print("Error detected in file monitor thread, terminating process.")
            sys.exit(1)

        try:
            request = queue.get()

            if "://" in request:
                parsed = urlparse(request)
                # urlparse().hostname is None for malformed URLs (e.g.
                # "http://"); fall back to "" so the startswith() checks below
                # cannot raise AttributeError.
                server_host = parsed.hostname or ""
                url_path = parsed.path if parsed.path else "/"
                full_url = server_host + url_path
            else:
                # "host:port" form — keep only the host part. str.split()
                # always returns at least one element, so no guard is needed.
                server_host = request.split(":")[0]
                full_url = server_host

            # A literal "*" entry in the sites list blocks everything;
            # otherwise match by prefix against the blocked hosts.
            if "*" in blocked_data["sites"] or any(
                server_host.startswith(blocked_host)
                for blocked_host in blocked_data["sites"]
            ):
                result_queue.put((server_host, "Blocked"))
            elif any(
                full_url.startswith(blocked_url) for blocked_url in blocked_data["urls"]
            ):
                result_queue.put((full_url, "Blocked"))
            else:
                result_queue.put((server_host, "Allowed"))

        except KeyboardInterrupt:
            break
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
"""
|
|
2
|
+
pyproxy.modules.shortcuts.py
|
|
3
|
+
|
|
4
|
+
This module contains functions and a process to load and manage URL shortcuts.
|
|
5
|
+
It loads shortcuts (alias to URL mappings) from a specified file, and provides
|
|
6
|
+
a process that listens for requests to resolve an alias to its corresponding URL.
|
|
7
|
+
|
|
8
|
+
Functions:
|
|
9
|
+
- load_shortcuts: Loads URL alias mappings from a file into a dictionary for fast lookup.
|
|
10
|
+
- shortcuts_process: The process that listens for alias requests and resolves them to URLs.
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import multiprocessing
|
|
14
|
+
import time
|
|
15
|
+
import sys
|
|
16
|
+
import threading
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def load_shortcuts(shortcuts_path: str) -> dict:
    """
    Read ``alias=URL`` mappings from a file into a dictionary for fast lookup.

    Lines without an ``=`` separator are ignored; whitespace around the
    alias and the URL is stripped. Only the first ``=`` splits the line,
    so URLs containing ``=`` survive intact.

    Args:
        shortcuts_path (str): The path to the file containing alias=URL mappings.

    Returns:
        dict: A dictionary mapping aliases to URLs.
    """
    mappings = {}

    with open(shortcuts_path, "r", encoding="utf-8") as handle:
        for raw_line in handle:
            entry = raw_line.strip()
            if "=" not in entry:
                continue
            alias, _, target = entry.partition("=")
            mappings[alias.strip()] = target.strip()

    return mappings
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def shortcuts_process(
    queue: multiprocessing.Queue,
    result_queue: multiprocessing.Queue,
    shortcuts_path: str,
    refresh_interval=5,
) -> None:
    """
    Process that listens for alias requests and resolves them to URLs.

    Args:
        queue (multiprocessing.Queue): A queue to receive alias for URL resolution.
        result_queue (multiprocessing.Queue): A queue to send back the resolved URL
            (``None`` when the alias is unknown).
        shortcuts_path (str): The path to the file containing alias=URL mappings.
        refresh_interval (int): Interval in seconds to reload the shortcuts file.
            Defaults to 5, the previously hard-coded value, so existing callers
            are unaffected; this matches filter_process's signature.
    """
    manager = multiprocessing.Manager()
    shortcuts_data = manager.dict({"shortcuts": load_shortcuts(shortcuts_path)})

    error_event = threading.Event()

    def file_monitor() -> None:
        # Periodically reload the shortcuts file so edits are picked up while
        # running; on I/O failure, signal the main loop to terminate.
        try:
            while True:
                new_shortcuts = load_shortcuts(shortcuts_path)

                shortcuts_data["shortcuts"] = new_shortcuts

                time.sleep(refresh_interval)
        except (IOError, ValueError) as e:
            print(f"File monitor error: {e}")
            error_event.set()

    monitor_thread = threading.Thread(target=file_monitor, daemon=True)
    monitor_thread.start()

    while True:
        if error_event.is_set():
            print("Error detected in file monitor thread, terminating process.")
            sys.exit(1)

        try:
            alias = queue.get()
            # dict.get() yields None for unknown aliases; consumers treat that
            # as "no shortcut configured".
            url = shortcuts_data["shortcuts"].get(alias)
            result_queue.put(url)

        except KeyboardInterrupt:
            break
|
|
File without changes
|
|
@@ -0,0 +1,279 @@
|
|
|
1
|
+
"""
|
|
2
|
+
pyproxy.monitoring.web.py
|
|
3
|
+
|
|
4
|
+
This module defines a monitoring system for the ProxyServer that provides
|
|
5
|
+
information about the server's processes, threads, active connections, and
|
|
6
|
+
subprocesses. It includes an HTTP server implemented with Flask to expose
|
|
7
|
+
monitoring endpoints for the proxy server.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import os
|
|
11
|
+
import threading
|
|
12
|
+
import multiprocessing
|
|
13
|
+
import logging
|
|
14
|
+
from datetime import datetime
|
|
15
|
+
from typing import List, Dict, Union
|
|
16
|
+
from flask import Flask, jsonify, render_template
|
|
17
|
+
from flask_httpauth import HTTPBasicAuth
|
|
18
|
+
from werkzeug.security import check_password_hash, generate_password_hash
|
|
19
|
+
import psutil
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def start_flask_server(proxy_server, flask_port, flask_pass, debug) -> None:
    """
    Starts the Flask server for monitoring the ProxyServer. It creates and
    runs an HTTP server that exposes the proxy server's status, including
    process and thread details, subprocess statuses, and active connections.

    Args:
        proxy_server (ProxyServer): The ProxyServer instance to monitor.
        flask_port: TCP port the monitoring UI listens on.
        flask_pass: Password for the single "admin" user (HTTP Basic auth).
        debug: When falsy, werkzeug request logging is silenced (ERROR only).

    The server exposes three authenticated routes:
        - '/'           : Renders a simple index page.
        - '/monitoring' : Returns a JSON response with the monitoring data.
        - '/config'     : Returns the proxy's effective configuration as JSON.

    Note:
        This call blocks: app.run() does not return until the server stops.
    """

    class ProxyMonitor:
        """
        Monitors the status of the ProxyServer, including process, thread,
        and subprocess information, as well as active client connections.

        Args:
            proxy_server (ProxyServer): The ProxyServer instance to monitor.
        """

        def __init__(self, proxy_server):
            # Keep a reference to the server so the collector methods can read
            # its subprocess handles and active_connections mapping.
            self.proxy_server = proxy_server

        def get_process_info(
            self,
        ) -> Dict[str, Union[int, str, List[Dict[str, Union[int, str]]]]]:
            """
            Retrieves overall process information for the ProxyServer,
            including the PID, name, status, and details about threads,
            subprocesses, and active connections.

            Returns:
                dict: A dictionary containing the process information.
            """
            process_info = {
                "pid": os.getpid(),
                "name": "ProxyServer",
                "status": "running",
                # Process creation time via psutil, formatted for display.
                "start_time": datetime.fromtimestamp(
                    psutil.Process(os.getpid()).create_time()
                ).strftime("%Y-%m-%d %H:%M:%S"),
                "threads": self.get_threads_info(),
                "subprocesses": self.get_subprocesses_info(),
                "active_connections": self.get_active_connections(),
            }
            return process_info

        def get_threads_info(self) -> List[Dict[str, Union[int, str]]]:
            """
            Retrieves information about the threads running in the ProxyServer.

            Returns:
                list: A list of dictionaries, each containing information
                    about a thread (id, name, status).
            """
            threads_info = []
            # threading.enumerate() lists threads of THIS process only.
            for thread in threading.enumerate():
                threads_info.append(
                    {
                        "thread_id": thread.ident,
                        "name": thread.name,
                        "status": self.get_thread_status(thread),
                    }
                )
            return threads_info

        def get_thread_status(self, thread: threading.Thread) -> str:
            """
            Gets the status of a given thread.

            Args:
                thread (threading.Thread): The thread whose status is to be retrieved.

            Returns:
                str: The status of the thread ('running', 'terminated', or 'unknown').
            """
            try:
                if thread.is_alive():
                    return "running"
                return "terminated"
            except AttributeError:
                # Defensive: a non-Thread object without is_alive().
                return "unknown"

        def get_subprocesses_info(
            self,
        ) -> Dict[str, Dict[str, Union[str, List[Dict[str, Union[int, str]]]]]]:
            """
            Retrieves the status of the ProxyServer's subprocesses, including
            filtering, shortcuts, cancel inspection, and custom header processes.

            Returns:
                dict: A dictionary containing subprocess statuses, keyed by
                    subprocess name. Dead or unstarted subprocesses are omitted.
            """
            subprocesses_info = {}

            # Known worker process handles exposed by ProxyServer.
            subprocesses = {
                "filter": self.proxy_server.filter_proc,
                "shortcuts": self.proxy_server.shortcuts_proc,
                "cancel_inspect": self.proxy_server.cancel_inspect_proc,
                "custom_header": self.proxy_server.custom_header_proc,
            }

            for name, process in subprocesses.items():
                # Only live processes are reported; None means never started.
                if process is not None and process.is_alive():
                    subprocesses_info[name] = self.get_subprocess_status(process, name)
            return subprocesses_info

        def get_subprocess_status(
            self, process: multiprocessing.Process, name: str
        ) -> Dict[str, Union[str, None, List[Dict[str, Union[int, str]]]]]:
            """
            Retrieves the status of a subprocess.

            Args:
                process (multiprocessing.Process): The subprocess to check.
                name (str): The name of the subprocess.

            Returns:
                dict: A dictionary containing the subprocess status
                    (pid, status, name, threads).
            """
            if process is None:
                # Unreachable from get_subprocesses_info (it filters None),
                # kept as a guard for direct callers.
                return {"status": "not started", "name": name, "threads": []}
            try:
                status = "running" if process.is_alive() else "terminated"
                threads_info = self.get_subprocess_threads_info(process)
            except AttributeError:
                status = "terminated"
                threads_info = []
            return {
                "pid": process.pid if hasattr(process, "pid") else None,
                "status": status,
                "name": name,
                "threads": threads_info,
            }

        def get_subprocess_threads_info(
            self, process: multiprocessing.Process
        ) -> List[Dict[str, Union[int, str]]]:
            """
            Retrieves the threads associated with a subprocess.

            Args:
                process (multiprocessing.Process): The subprocess to check.

            Returns:
                list: A list of dictionaries containing thread information;
                    empty if the process vanished or access is denied.
            """
            threads_info = []
            try:
                for proc_thread in psutil.Process(process.pid).threads():
                    threads_info.append(
                        {
                            "thread_id": proc_thread.id,
                            "name": f"Thread-{proc_thread.id}",
                            "status": self.get_thread_status_by_pid(proc_thread.id),
                        }
                    )
            except (psutil.NoSuchProcess, psutil.AccessDenied):
                # Best-effort: the subprocess may have exited between checks.
                pass
            return threads_info

        def get_thread_status_by_pid(self, thread_id: int) -> str:
            """
            Attempts to retrieve the status of a thread by its PID.

            Args:
                thread_id (int): The thread's ID.

            Returns:
                str: The status of the thread ('running' or 'terminated').

            NOTE(review): psutil.Process() expects a process id; passing a
            thread id relies on the platform exposing thread ids in the pid
            namespace (true on Linux) — confirm on other platforms.
            """
            try:
                process = psutil.Process(thread_id)
                if process.is_running():
                    return "running"
                return "terminated"
            except psutil.NoSuchProcess:
                return "terminated"

        def get_active_connections(self) -> List[Dict[str, Union[int, Dict]]]:
            """
            Retrieves information about the active client connections to the ProxyServer.

            Returns:
                list: A list of dictionaries containing information about active
                    connections, one per handler thread.
            """
            return [
                {"thread_id": thread_id, **conn}
                for thread_id, conn in self.proxy_server.active_connections.items()
            ]

    auth = HTTPBasicAuth()

    # Single fixed user; only the password comes from configuration.
    users = {"admin": generate_password_hash(flask_pass)}

    @auth.verify_password
    def verify_password(username, password):
        # flask_httpauth contract: return the username on success, None to
        # reject the request with a 401.
        if username in users and check_password_hash(users.get(username), password):
            return username
        return None

    app = Flask(__name__, static_folder="static")
    if not debug:
        # Silence per-request werkzeug access logs unless debugging.
        log = logging.getLogger("werkzeug")
        log.setLevel(logging.ERROR)

    @app.route("/")
    @auth.login_required
    def index():
        """
        Renders the index page for the Flask application.

        Returns:
            str: The rendered HTML content of the index page.
        """
        return render_template("index.html")

    @app.route("/monitoring", methods=["GET"])
    @auth.login_required
    def monitoring():
        """
        Returns the monitoring data for the ProxyServer in JSON format.

        Returns:
            Response: A JSON response containing the server's process information.
        """
        # A fresh monitor per request: the data is collected on demand.
        monitor = ProxyMonitor(proxy_server)
        return jsonify(monitor.get_process_info())

    @app.route("/config", methods=["GET"])
    @auth.login_required
    def config():
        """
        Returns the proxy server's effective configuration as JSON.

        Returns:
            Response: A JSON response with host/port, debug flag, 403 page
                path, logger/filter/SSL sub-configs (or null), and flask_port.
        """
        config_data = {
            "host": proxy_server.host_port[0],
            "port": proxy_server.host_port[1],
            "debug": proxy_server.debug,
            "html_403": proxy_server.html_403,
            "logger_config": (
                proxy_server.logger_config.to_dict()
                if proxy_server.logger_config
                else None
            ),
            "filter_config": (
                proxy_server.filter_config.to_dict()
                if proxy_server.filter_config
                else None
            ),
            "ssl_config": (
                proxy_server.ssl_config.to_dict() if proxy_server.ssl_config else None
            ),
            "flask_port": proxy_server.flask_port,
        }
        return jsonify(config_data)

    # Binds on all interfaces by design (monitoring UI); blocks until stopped.
    app.run(host="0.0.0.0", port=flask_port)  # nosec
|
pyproxy/pyproxy.py
ADDED
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
"""
|
|
2
|
+
This script implements a lightweight and fast Python-based proxy server.
|
|
3
|
+
It listens for client requests, filters URLs based on a list, and allows or blocks access
|
|
4
|
+
to those URLs. The proxy can handle both HTTP and HTTPS requests, and logs access and block events.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from .server import ProxyServer
|
|
8
|
+
from .utils.args import parse_args, load_config, get_config_value, str_to_bool
|
|
9
|
+
from .utils.config import ProxyConfigLogger, ProxyConfigFilter, ProxyConfigSSL
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def main():
    """
    Main entry point of the proxy server.

    Parses command-line arguments, loads the configuration file, resolves all
    configuration values, builds the logger/filter/SSL configuration objects,
    and starts the proxy server (proxy.start() blocks until shutdown).

    NOTE(review): the get_config_value(args, config, key, section, default)
    call shape suggests CLI args override the config file, which overrides the
    built-in defaults — confirm against pyproxy/utils/args.py.
    """
    args = parse_args()
    config = load_config(args.config_file)

    # Core server settings.
    host = get_config_value(args, config, "host", "Server", "0.0.0.0")  # nosec
    port = int(get_config_value(args, config, "port", "Server", 8080))  # nosec
    debug = get_config_value(args, config, "debug", "Logging", False)
    html_403 = get_config_value(args, config, "html_403", "Files", "assets/403.html")
    shortcuts = get_config_value(
        args, config, "shortcuts", "Options", "config/shortcuts.txt"
    )
    custom_header = get_config_value(
        args, config, "custom_header", "Options", "config/custom_header.json"
    )
    authorized_ips = get_config_value(
        args, config, "authorized_ips", "Options", "config/authorized_ips.txt"
    )
    # Monitoring web UI settings (consumed by pyproxy.monitoring.web).
    # NOTE(review): unlike "port", flask_port/proxy_port are not cast to int
    # here — confirm downstream handling accepts strings from the config file.
    flask_port = get_config_value(args, config, "flask_port", "Monitoring", 5000)
    flask_pass = get_config_value(args, config, "flask_pass", "Monitoring", "password")
    # Optional upstream proxy chaining.
    proxy_enable = get_config_value(args, config, "proxy_enable", "Proxy", False)
    proxy_host = get_config_value(args, config, "proxy_host", "Proxy", "127.0.0.1")
    proxy_port = get_config_value(args, config, "proxy_port", "Proxy", 8081)

    # Access/block log destinations and their on/off switches.
    logger_config = ProxyConfigLogger(
        access_log=get_config_value(
            args, config, "access_log", "Logging", "logs/access.log"
        ),
        block_log=get_config_value(
            args, config, "block_log", "Logging", "logs/block.log"
        ),
        no_logging_access=str_to_bool(
            get_config_value(args, config, "no_logging_access", "Logging", False)
        ),
        no_logging_block=str_to_bool(
            get_config_value(args, config, "no_logging_block", "Logging", False)
        ),
    )

    # Domain/URL filtering sources ("local" files or "http" lists).
    filter_config = ProxyConfigFilter(
        no_filter=str_to_bool(
            get_config_value(args, config, "no_filter", "Filtering", False)
        ),
        filter_mode=get_config_value(args, config, "filter_mode", "Filtering", "local"),
        blocked_sites=get_config_value(
            args, config, "blocked_sites", "Filtering", "config/blocked_sites.txt"
        ),
        blocked_url=get_config_value(
            args, config, "blocked_url", "Filtering", "config/blocked_url.txt"
        ),
    )

    # TLS interception (SSL inspect) material and the opt-out list.
    ssl_config = ProxyConfigSSL(
        ssl_inspect=str_to_bool(
            get_config_value(args, config, "ssl_inspect", "Security", False)
        ),
        inspect_ca_cert=get_config_value(
            args, config, "inspect_ca_cert", "Security", "certs/ca/cert.pem"
        ),
        inspect_ca_key=get_config_value(
            args, config, "inspect_ca_key", "Security", "certs/ca/key.pem"
        ),
        inspect_certs_folder=get_config_value(
            args, config, "inspect_certs_folder", "Security", "certs/"
        ),
        cancel_inspect=get_config_value(
            args, config, "cancel_inspect", "Security", "config/cancel_inspect.txt"
        ),
    )

    proxy = ProxyServer(
        host=host,
        port=port,
        debug=str_to_bool(debug),
        logger_config=logger_config,
        filter_config=filter_config,
        ssl_config=ssl_config,
        flask_port=flask_port,
        flask_pass=flask_pass,
        html_403=html_403,
        shortcuts=shortcuts,
        custom_header=custom_header,
        authorized_ips=authorized_ips,
        proxy_enable=str_to_bool(proxy_enable),
        proxy_host=proxy_host,
        proxy_port=proxy_port,
    )

    # Blocks until the server is stopped.
    proxy.start()


if __name__ == "__main__":
    main()
|