pyresolvers-1.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyresolvers/__init__.py +28 -0
- pyresolvers/__main__.py +103 -0
- pyresolvers/lib/__init__.py +0 -0
- pyresolvers/lib/core/__init__.py +0 -0
- pyresolvers/lib/core/__version__.py +2 -0
- pyresolvers/lib/core/input.py +167 -0
- pyresolvers/lib/core/output.py +66 -0
- pyresolvers/validator.py +342 -0
- pyresolvers-1.0.0.dist-info/METADATA +386 -0
- pyresolvers-1.0.0.dist-info/RECORD +13 -0
- pyresolvers-1.0.0.dist-info/WHEEL +5 -0
- pyresolvers-1.0.0.dist-info/entry_points.txt +2 -0
- pyresolvers-1.0.0.dist-info/top_level.txt +1 -0
pyresolvers/__init__.py
ADDED
@@ -0,0 +1,28 @@
"""
PyResolvers - High-Performance DNS Resolver Validation & Speed Testing

Modern async DNS validator with speed testing and ordering.

Example:
    >>> from pyresolvers import Validator
    >>> validator = Validator()
    >>> results = validator.validate_by_speed(['1.1.1.1', '8.8.8.8'])
    >>> for server, latency in results:
    ...     print(f"{server}: {latency:.2f}ms")

High concurrency:
    >>> validator = Validator(concurrency=100)
    >>> results = validator.validate_by_speed(large_list)

Async usage:
    >>> import asyncio
    >>> results = await validator.validate_by_speed_async(servers)
"""

from __future__ import annotations

from .lib.core.__version__ import __version__
from .validator import ValidationResult, Validator

__all__ = ['Validator', 'ValidationResult', '__version__']
__version__ = __version__
pyresolvers/__main__.py
ADDED
@@ -0,0 +1,103 @@
#!/usr/bin/env python3
"""PyResolvers CLI."""

from __future__ import annotations

import json
import os
import signal
import sys
from typing import Any, List

from .lib.core.input import InputHelper, InputParser
from .lib.core.output import Level, OutputHelper
from .validator import Validator


def handle_interrupt(signum: int, frame: Any) -> None:
    os._exit(0)


def output_results(validator: Validator, targets: List[str], args: Any, out: OutputHelper) -> None:
    """Output validation results."""
    fmt = args.output_format

    if fmt == 'json':
        result = validator.to_json(targets, args.min_speed, args.max_speed)
        count = json.loads(result).get('count', 0)
        out.terminal(Level.INFO, 0, f"Found {count} valid servers")
        if not args.silent:
            print(f"\n{result}")
        if args.output:
            with open(args.output, 'w', encoding='utf-8') as f:
                f.write(result)

    elif fmt == 'text-with-speed':
        result = validator.to_text(targets, args.min_speed, args.max_speed, True)
        lines = [l for l in result.strip().split('\n') if l]
        out.terminal(Level.INFO, 0, f"Found {len(lines)} valid servers")
        if not args.silent:
            for line in lines:
                if parts := line.split():
                    out.terminal(Level.ACCEPTED, parts[0], parts[1] if len(parts) > 1 else "")
        if args.output:
            with open(args.output, 'w', encoding='utf-8') as f:
                f.write(result)

    else:  # text
        results = validator.validate_by_speed(targets, args.min_speed, args.max_speed)
        out.terminal(Level.INFO, 0, f"Found {len(results)} valid servers")

        if results and not args.silent:
            for server, latency in results:
                out.terminal(Level.ACCEPTED, server, f"{latency:.2f}ms")

        if args.output:
            text = validator.to_text(targets, args.min_speed, args.max_speed)
            with open(args.output, 'w', encoding='utf-8') as f:
                f.write(text)
        elif args.silent:
            for server, _ in results:
                print(server, flush=True)


def main() -> None:
    signal.signal(signal.SIGINT, handle_interrupt)

    args = InputParser().parse(sys.argv[1:])
    out = OutputHelper(args)
    out.print_banner()

    targets = list(InputHelper.return_targets(args))
    out.terminal(Level.INFO, 0, f"Testing {len(targets)} servers")

    if args.max_speed:
        out.terminal(Level.INFO, 0, f"Max speed filter: {args.max_speed}ms")
    if args.min_speed:
        out.terminal(Level.INFO, 0, f"Min speed filter: {args.min_speed}ms")

    validator = Validator(
        baseline_domain=args.rootdomain,
        query_prefix=args.query,
        concurrency=int(args.threads),
        timeout=int(args.timeout),
        use_fast_timeout=False,  # Disabled for maximum coverage
        verbose=args.verbose
    )

    try:
        out.terminal(Level.INFO, 0, "Establishing baseline...")
        output_results(validator, targets, args, out)
    except RuntimeError as e:
        out.terminal(Level.ERROR, 0, str(e))
        sys.exit(1)
    except KeyboardInterrupt:
        out.terminal(Level.ERROR, 0, "Interrupted")
        sys.exit(130)
    except Exception as e:
        out.terminal(Level.ERROR, 0, f"Error: {str(e)}")
        sys.exit(1)


if __name__ == "__main__":
    main()
pyresolvers/lib/__init__.py
File without changes

pyresolvers/lib/core/__init__.py
File without changes
pyresolvers/lib/core/input.py
ADDED
@@ -0,0 +1,167 @@
#!/usr/bin/env python3
"""Command-line argument parsing and input handling."""

from __future__ import annotations

import sys
from argparse import ArgumentParser
from pathlib import Path
from urllib.parse import urlparse
from urllib.request import urlopen
from urllib.error import URLError, HTTPError


class InputHelper:
    """Helper for processing input targets."""

    @staticmethod
    def process_targets(parser, arg):
        """Process targets from URL or file."""
        if InputHelper.validate_url(arg):
            targets = InputHelper.process_url(parser, arg)
        else:
            filename = InputHelper.validate_filename(parser, arg)
            if filename:
                targets = InputHelper.process_file(filename)

        if not targets:
            raise ValueError("No targets provided or empty list")
        return targets

    @staticmethod
    def validate_url(string):
        """Check if string is a valid URL."""
        try:
            result = urlparse(string)
            return bool(result.scheme)
        except Exception:
            return False

    @staticmethod
    def validate_filename(parser, arg):
        """Validate and resolve file path."""
        try:
            path = Path(arg).expanduser().resolve()
            if not path.is_file():
                parser.error(f"File {arg} does not exist or is not a valid URL")
            return str(path)
        except Exception as e:
            parser.error(f"Invalid file path: {e}")

    @staticmethod
    def process_url(parser, url):
        """Fetch targets from URL."""
        try:
            with urlopen(url, timeout=30) as response:
                if response.status != 200:
                    parser.error(f"HTTP {response.status} from {url}")
                content = response.read().decode('utf-8')
                return content.split()
        except HTTPError as e:
            parser.error(f"HTTP {e.code} from {url}")
        except URLError as e:
            parser.error(f"Failed to fetch {url}: {e.reason}")
        except Exception as e:
            parser.error(f"Error fetching {url}: {e}")

    @staticmethod
    def process_file(path):
        """Load targets from file."""
        with open(path, 'r', encoding='utf-8') as f:
            return [line.strip() for line in f if line.strip()]

    @staticmethod
    def check_positive(parser, arg):
        """Validate positive integer."""
        try:
            value = int(arg)
            if value <= 0:
                parser.error(f"{arg} must be a positive integer")
            return arg
        except ValueError:
            parser.error(f"{arg} is not a valid integer")

    @staticmethod
    def return_targets(arguments):
        """Return final target list with exclusions applied."""
        targets = set()
        exclusions = set()

        if arguments.target:
            targets.add(arguments.target)
        else:
            targets.update(arguments.target_list)

        if arguments.exclusion:
            exclusions.add(arguments.exclusion)
        elif arguments.exclusions_list:
            exclusions.update(arguments.exclusions_list)

        targets -= exclusions

        if not targets:
            raise ValueError("No targets remaining after exclusions")
        return targets


class InputParser:
    """Command-line argument parser."""

    def __init__(self):
        self._parser = self.setup_parser()

    def parse(self, argv):
        """Parse command-line arguments."""
        return self._parser.parse_args(argv)

    @staticmethod
    def setup_parser():
        """Setup argument parser."""
        parser = ArgumentParser(description='DNS Resolver Validator with Speed Testing')

        targets = parser.add_mutually_exclusive_group(required=False)
        targets.add_argument('-t', dest='target', help='Target DNS server IP')
        targets.add_argument(
            '-tL', dest='target_list',
            default="https://public-dns.info/nameservers.txt",
            type=lambda x: InputHelper.process_targets(parser, x),
            help='File or URL with DNS server IPs (default: public-dns.info)'
        )

        exclusions = parser.add_mutually_exclusive_group()
        exclusions.add_argument('-e', dest='exclusion', help='Exclude specific server')
        exclusions.add_argument(
            '-eL', dest='exclusions_list',
            type=lambda x: InputHelper.process_targets(parser, x),
            help='File or URL with servers to exclude'
        )

        parser.add_argument('-o', '--output', help='Output file for results')
        parser.add_argument('-r', dest='rootdomain', default="bet365.com",
                            help='Root domain for testing (default: bet365.com)')
        parser.add_argument('-q', dest='query', default="dnsvalidator",
                            help='Query prefix for NXDOMAIN testing')
        parser.add_argument('-threads', dest='threads', default=5,
                            type=lambda x: InputHelper.check_positive(parser, x),
                            help='Max concurrent threads (default: 5)')
        parser.add_argument('-timeout', dest='timeout', default=600,
                            type=lambda x: InputHelper.check_positive(parser, x),
                            help='Timeout in seconds (default: 600)')
        parser.add_argument('--no-color', dest='nocolor', action='store_true',
                            help='Disable colored output')

        output_types = parser.add_mutually_exclusive_group()
        output_types.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                                  help='Enable verbose output')
        output_types.add_argument('--silent', dest='silent', action='store_true',
                                  help='Only output valid server IPs')

        parser.add_argument('--format', dest='output_format', default='text',
                            choices=['text', 'json', 'text-with-speed'],
                            help='Output format (default: text)')
        parser.add_argument('--max-speed', dest='max_speed', type=float,
                            help='Max response time in ms (filter slower servers)')
        parser.add_argument('--min-speed', dest='min_speed', type=float,
                            help='Min response time in ms (filter faster servers)')

        return parser
pyresolvers/lib/core/output.py
ADDED
@@ -0,0 +1,66 @@
"""Output formatting and terminal display."""

from __future__ import annotations

from enum import IntEnum
from time import localtime, strftime
from typing import Any, Union

from colorclass import Color, disable_all_colors

from pyresolvers.lib.core.__version__ import __version__


class Level(IntEnum):
    """Log level enumeration."""
    VERBOSE, INFO, ACCEPTED, REJECTED, ERROR = range(5)


class OutputHelper:
    """Formatted terminal output handler."""

    _FORMATS = {
        Level.VERBOSE: Color('{autoblue}[VERBOSE]{/autoblue}'),
        Level.INFO: Color('{autoyellow}[INFO]{/autoyellow}'),
        Level.ACCEPTED: Color('{autogreen}[ACCEPTED]{/autogreen}'),
        Level.REJECTED: Color('{autored}[REJECTED]{/autored}'),
        Level.ERROR: Color('{autobgyellow}{autored}[ERROR]{/autored}{/autobgyellow}')
    }
    _SEP = "=" * 55

    def __init__(self, arguments: Any) -> None:
        if getattr(arguments, 'nocolor', False):
            disable_all_colors()
        self.verbose = getattr(arguments, 'verbose', False)
        self.silent = getattr(arguments, 'silent', False)
        self.output = getattr(arguments, 'output', None)

    def print_banner(self) -> None:
        """Print application banner."""
        if not self.silent:
            print(f"{self._SEP}\npyresolvers v{__version__} - DNS Resolver Validator\n{self._SEP}", flush=True)

    def terminal(self, level: Level, target: Union[str, int], message: str = "") -> None:
        """Print formatted message."""
        if level == Level.VERBOSE and not self.verbose:
            return

        if self.silent:
            if level == Level.ACCEPTED:
                print(target, flush=True)
            return

        leader = self._FORMATS.get(level, '[#]')
        time_str = strftime("%H:%M:%S", localtime())

        if target == 0 or target == "0":
            print(f'[{time_str}] {leader} {message}', flush=True)
        else:
            print(f'[{time_str}] {leader} [{target}] {message}', flush=True)

        if self.output and level == Level.ACCEPTED:
            try:
                with open(self.output, 'a', encoding='utf-8') as f:
                    f.write(f"{target}\n")
            except IOError:
                pass
pyresolvers/validator.py
ADDED
@@ -0,0 +1,342 @@
#!/usr/bin/env python3
"""High-performance async DNS resolver validation."""

from __future__ import annotations

import asyncio
import json
import random
import string
import time
from dataclasses import asdict, dataclass
from ipaddress import IPv4Address, IPv6Address, ip_address
from typing import AsyncIterator, Dict, List, Optional, Tuple

try:
    import aiodns
    AIODNS_AVAILABLE = True
except ImportError:
    AIODNS_AVAILABLE = False

# Configuration
TRUSTED_RESOLVERS = ["1.1.1.1", "8.8.8.8"]
TEST_DOMAINS = ["bet365.com", "telegram.com"]
POISON_CHECK_DOMAINS = ["facebook.com", "paypal.com", "google.com", "bet365.com", "wikileaks.com"]
BASELINE_DOMAIN = "bet365.com"
QUERY_PREFIX = "dnsvalidator"
DEFAULT_CONCURRENCY = 50
DEFAULT_TIMEOUT = 5
FAST_TIMEOUT = 1  # Quick timeout for dead server detection
SUBDOMAIN_LENGTH = 10
BATCH_SIZE = 100


@dataclass
class ValidationResult:
    """DNS resolver validation result."""
    server: str
    valid: bool
    latency_ms: float
    error: Optional[str] = None

    def to_dict(self) -> Dict:
        return asdict(self)


class Validator:
    """High-performance async DNS resolver validator."""

    def __init__(
        self,
        trusted_resolvers: Optional[List[str]] = None,
        test_domains: Optional[List[str]] = None,
        poison_check_domains: Optional[List[str]] = None,
        baseline_domain: str = BASELINE_DOMAIN,
        query_prefix: str = QUERY_PREFIX,
        concurrency: int = DEFAULT_CONCURRENCY,
        timeout: int = DEFAULT_TIMEOUT,
        use_fast_timeout: bool = False,
        batch_size: int = BATCH_SIZE,
        verbose: bool = False
    ) -> None:
        if not AIODNS_AVAILABLE:
            raise ImportError(
                "aiodns required for Validator. Install with: pip install aiodns\n"
                "Or: pip install -r requirements.txt"
            )

        self.trusted_resolvers = trusted_resolvers or TRUSTED_RESOLVERS
        self.test_domains = test_domains or TEST_DOMAINS
        self.poison_check_domains = poison_check_domains or POISON_CHECK_DOMAINS
        self.baseline_domain = baseline_domain
        self.query_prefix = query_prefix
        self.concurrency = concurrency
        self.timeout = timeout
        self.use_fast_timeout = use_fast_timeout
        self.batch_size = batch_size
        self.verbose = verbose
        self._baseline_ip = ""
        self._baseline_data: Dict[str, Dict] = {}

    @staticmethod
    def _random_subdomain(length: int = SUBDOMAIN_LENGTH) -> str:
        """Generate random subdomain for testing."""
        return ''.join(random.choices(string.ascii_lowercase, k=length))

    @staticmethod
    def _is_valid_ip(ip_str: str) -> bool:
        """Validate IPv4/IPv6 address format."""
        try:
            return isinstance(ip_address(ip_str), (IPv4Address, IPv6Address))
        except ValueError:
            return False

    def _log(self, msg: str) -> None:
        """Log message if verbose mode enabled."""
        if self.verbose:
            print(msg)

    async def _setup_baseline_single(self, resolver_ip: str) -> bool:
        """Setup baseline from single trusted resolver."""
        self._log(f"[INFO] {resolver_ip} - Establishing baseline")
        try:
            resolver = aiodns.DNSResolver(nameservers=[resolver_ip], timeout=self.timeout)
            data = {}

            # Get baseline IP
            result = await resolver.query(self.baseline_domain, 'A')
            data["ip"] = self._baseline_ip = result[0].host

            # Test domains in parallel
            domain_tasks = [resolver.query(domain, 'A') for domain in self.test_domains]
            domain_results = await asyncio.gather(*domain_tasks, return_exceptions=True)

            data["domains"] = {}
            for domain, result in zip(self.test_domains, domain_results):
                if not isinstance(result, Exception):
                    data["domains"][domain] = result[0].host

            # NXDOMAIN check
            try:
                await resolver.query(self.query_prefix + self.baseline_domain, 'A')
                data["nxdomain"] = False
            except aiodns.error.DNSError:
                data["nxdomain"] = True

            self._baseline_data[resolver_ip] = data
            return True
        except Exception as e:
            self._log(f"[ERROR] {resolver_ip} - {str(e)}")
            return False

    async def _setup_baseline(self) -> bool:
        """Setup baseline from all trusted resolvers in parallel."""
        tasks = [self._setup_baseline_single(ip) for ip in self.trusted_resolvers]
        results = await asyncio.gather(*tasks, return_exceptions=True)
        success_count = sum(1 for r in results if r is True)
        return success_count == len(self.trusted_resolvers)

    async def _check_poisoning(self, resolver: aiodns.DNSResolver, server: str) -> Optional[str]:
        """Check for DNS poisoning with parallel queries."""
        subdomains = [f"{self._random_subdomain()}.{domain}" for domain in self.poison_check_domains]
        tasks = [resolver.query(subdomain, 'A') for subdomain in subdomains]
        results = await asyncio.gather(*tasks, return_exceptions=True)

        for subdomain, result in zip(subdomains, results):
            if not isinstance(result, Exception):
                self._log(f"[ERROR] {server} - Poisoning detected: {subdomain}")
                return "DNS poisoning"
        return None

    async def _check_nxdomain_and_baseline(
        self, resolver: aiodns.DNSResolver, server: str
    ) -> Tuple[bool, bool, Optional[str]]:
        """Combined NXDOMAIN and baseline validation check."""
        subdomain = f"{self._random_subdomain()}.{self.baseline_domain}"

        try:
            # Check NXDOMAIN and baseline in parallel
            nxdomain_task = resolver.query(subdomain, 'A')
            baseline_task = resolver.query(self.baseline_domain, 'A')

            nxdomain_result, baseline_result = await asyncio.gather(
                nxdomain_task, baseline_task, return_exceptions=True
            )

            # NXDOMAIN should fail
            has_nxdomain = isinstance(nxdomain_result, Exception)

            # Baseline should match
            baseline_matches = False
            if not isinstance(baseline_result, Exception):
                resolved_ip = baseline_result[0].host
                baseline_matches = resolved_ip == self._baseline_ip

            return has_nxdomain, baseline_matches, None

        except Exception as e:
            return False, False, f"Error: {str(e)}"

    def _matches_baseline(self, has_nxdomain: bool) -> bool:
        """Verify resolver matches baseline behavior."""
        matches = sum(
            1 for data in self._baseline_data.values()
            if data.get("ip") == self._baseline_ip and data.get("nxdomain") == has_nxdomain
        )
        return matches == len(self.trusted_resolvers)

    async def _validate_server(self, server: str) -> ValidationResult:
        """Validate single DNS server with fast timeout."""
        if not self._is_valid_ip(server):
            return ValidationResult(server, False, -1, "Invalid IP")

        self._log(f"[INFO] {server} - Validating...")
        start = time.time()

        # Use fast timeout for quick dead server detection
        timeout = FAST_TIMEOUT if self.use_fast_timeout else self.timeout

        try:
            resolver = aiodns.DNSResolver(nameservers=[server], timeout=timeout)

            # Check poisoning
            error = await self._check_poisoning(resolver, server)
            if error:
                return ValidationResult(server, False, -1, error)

            # If fast timeout worked, use full timeout for validation
            if self.use_fast_timeout and timeout < self.timeout:
                resolver = aiodns.DNSResolver(nameservers=[server], timeout=self.timeout)

            # Combined NXDOMAIN and baseline check
            has_nxdomain, baseline_matches, error = await self._check_nxdomain_and_baseline(resolver, server)
            if error:
                return ValidationResult(server, False, -1, error)

            latency = (time.time() - start) * 1000
            valid = baseline_matches and self._matches_baseline(has_nxdomain)

            self._log(f"[{'OK' if valid else 'FAIL'}] {server} - {latency:.2f}ms")
            return ValidationResult(server, valid, latency, None if valid else "Invalid")

        except asyncio.TimeoutError:
            return ValidationResult(server, False, (time.time() - start) * 1000, "Timeout")
        except Exception as e:
            return ValidationResult(server, False, (time.time() - start) * 1000, str(e))

    async def _validate_batch(self, servers: List[str]) -> List[ValidationResult]:
        """Validate batch of servers with concurrency limit."""
        semaphore = asyncio.Semaphore(self.concurrency)

        async def bounded_validate(server: str) -> ValidationResult:
            async with semaphore:
                return await self._validate_server(server)

        tasks = [bounded_validate(server) for server in servers]
        return await asyncio.gather(*tasks)

    async def validate_async(self, servers: List[str]) -> List[ValidationResult]:
        """Validate multiple DNS servers asynchronously."""
        if not await self._setup_baseline():
            raise RuntimeError("Baseline setup failed")

        # Process in batches for memory efficiency
        all_results = []
        for i in range(0, len(servers), self.batch_size):
            batch = servers[i:i + self.batch_size]
            results = await self._validate_batch(batch)
            all_results.extend(results)

        return all_results

    async def validate_by_speed_async(
        self, servers: List[str], min_ms: Optional[float] = None, max_ms: Optional[float] = None
    ) -> List[Tuple[str, float]]:
        """Get valid servers ordered by speed."""
        results = await self.validate_async(servers)

        # Filter and sort
        filtered = [
            (r.server, r.latency_ms)
            for r in results
            if r.valid and r.latency_ms > 0
        ]

        if min_ms:
            filtered = [(s, t) for s, t in filtered if t >= min_ms]
        if max_ms:
            filtered = [(s, t) for s, t in filtered if t <= max_ms]

        return sorted(filtered, key=lambda x: x[1])

    async def validate_streaming_async(
        self, servers: List[str], min_ms: Optional[float] = None, max_ms: Optional[float] = None
    ) -> AsyncIterator[Tuple[str, float]]:
        """Stream valid servers as they're validated (memory efficient)."""
        if not await self._setup_baseline():
            raise RuntimeError("Baseline setup failed")

        semaphore = asyncio.Semaphore(self.concurrency)

        async def validate_and_filter(server: str) -> Optional[Tuple[str, float]]:
            async with semaphore:
                result = await self._validate_server(server)
                if result.valid and result.latency_ms > 0:
                    if (not min_ms or result.latency_ms >= min_ms) and \
                       (not max_ms or result.latency_ms <= max_ms):
                        return (result.server, result.latency_ms)
                return None

        # Process in batches and yield results
        for i in range(0, len(servers), self.batch_size):
            batch = servers[i:i + self.batch_size]
            tasks = [validate_and_filter(server) for server in batch]
            results = await asyncio.gather(*tasks)

            for result in results:
                if result is not None:
                    yield result

    async def to_json_async(
        self, servers: List[str], min_ms: Optional[float] = None, max_ms: Optional[float] = None, pretty: bool = True
    ) -> str:
        """Export validation results as JSON."""
        results = await self.validate_by_speed_async(servers, min_ms, max_ms)
        output = {
            "servers": [{"ip": s, "latency_ms": round(t, 2)} for s, t in results],
            "count": len(results),
            "filters": {"min_ms": min_ms, "max_ms": max_ms}
        }
        return json.dumps(output, indent=2 if pretty else None)

    async def to_text_async(
        self, servers: List[str], min_ms: Optional[float] = None, max_ms: Optional[float] = None, show_speed: bool = False
    ) -> str:
        """Export validation results as plain text."""
        results = await self.validate_by_speed_async(servers, min_ms, max_ms)
        if show_speed:
            return '\n'.join(f"{s} {t:.2f}ms" for s, t in results)
        return '\n'.join(s for s, _ in results)

    # Sync wrappers for compatibility
    def validate(self, servers: List[str]) -> List[ValidationResult]:
        """Validate servers (sync wrapper)."""
        return asyncio.run(self.validate_async(servers))

    def validate_by_speed(
        self, servers: List[str], min_ms: Optional[float] = None, max_ms: Optional[float] = None
    ) -> List[Tuple[str, float]]:
        """Get valid servers ordered by speed (sync wrapper)."""
        return asyncio.run(self.validate_by_speed_async(servers, min_ms, max_ms))

    def to_json(
        self, servers: List[str], min_ms: Optional[float] = None, max_ms: Optional[float] = None, pretty: bool = True
    ) -> str:
        """Export as JSON (sync wrapper)."""
        return asyncio.run(self.to_json_async(servers, min_ms, max_ms, pretty))

    def to_text(
        self, servers: List[str], min_ms: Optional[float] = None, max_ms: Optional[float] = None, show_speed: bool = False
    ) -> str:
        """Export as text (sync wrapper)."""
        return asyncio.run(self.to_text_async(servers, min_ms, max_ms, show_speed))
pyresolvers-1.0.0.dist-info/METADATA
ADDED
@@ -0,0 +1,386 @@
Metadata-Version: 2.4
Name: pyresolvers
Version: 1.0.0
Summary: High-performance async DNS resolver validation and speed testing library
Home-page: https://github.com/PigeonSec/pyresolvers
Author: Karl
Author-email:
License: GPL-3.0
Project-URL: Homepage, https://github.com/PigeonSec/pyresolvers
Project-URL: Documentation, https://github.com/PigeonSec/pyresolvers#readme
Project-URL: Repository, https://github.com/PigeonSec/pyresolvers
Project-URL: Bug Tracker, https://github.com/PigeonSec/pyresolvers/issues
Keywords: dns,resolver,validation,speed-test,async,networking
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3.14
Classifier: Topic :: Internet :: Name Service (DNS)
Classifier: Topic :: System :: Networking
Requires-Python: >=3.8
Description-Content-Type: text/markdown
Requires-Dist: aiodns>=3.1.0
Requires-Dist: pycares>=4.0.0
Requires-Dist: colorclass>=2.2.2
Dynamic: home-page
Dynamic: requires-python

<div align="center">

<img src="logo.webp" alt="PyResolvers Logo" width="200" height="200">

# PyResolvers

**High-Performance Async DNS Resolver Validation & Speed Testing**

[](https://github.com/PigeonSec/pyresolvers/actions/workflows/test.yml)
[](https://www.python.org/)
[](https://pypi.org/project/pyresolvers/)
[](https://www.gnu.org/licenses/gpl-3.0)
[](https://github.com/psf/black)

*Validate DNS resolvers, measure response times, identify the fastest servers*

</div>

---

## Overview

PyResolvers is a high-performance async Python library and CLI tool for validating DNS resolvers. It performs comprehensive validation (baseline comparison, poisoning detection, NXDOMAIN verification) and orders results by speed.

### Features

- ⚡ **High-Performance Async** - 2-3x faster than thread-based validators
- 🚀 **Speed Testing** - Measures and orders resolvers by latency
- 🔍 **Comprehensive Validation** - Multiple validation layers
- 🛡️ **Poisoning Detection** - Identifies DNS hijacking
- 📊 **Multiple Formats** - JSON, plain text, text+speed
- 🎯 **Speed Filtering** - Filter by min/max latency thresholds

### Performance

| Method | Time (5 servers) | Improvement |
|--------|------------------|-------------|
| Original (unoptimized) | 5.36s | baseline |
| **PyResolvers** | **2.32s** | **56.7% faster** ⚡ |
| Speedup | | **2.31x** |

---

## Installation

```bash
git clone https://github.com/PigeonSec/pyresolvers.git
cd pyresolvers
python3 -m venv venv
source venv/bin/activate  # Windows: venv\Scripts\activate
pip install -e .
```

**Or install from PyPI:**

```bash
pip install pyresolvers
```

**Requirements**: Python 3.8+, aiodns, pycares, colorclass

---

## Quick Start

### Command Line

```bash
# Test single resolver
pyresolvers -t 1.1.1.1

# Test from file
pyresolvers -tL dns_servers.txt

# Get fastest resolvers (< 50ms)
pyresolvers -tL resolvers.txt --max-speed 50 --format text-with-speed

# Export as JSON
pyresolvers -tL resolvers.txt --format json -o valid_dns.json
```

### Python Library

```python
from pyresolvers import Validator

# Basic usage
validator = Validator()
servers = ['1.1.1.1', '8.8.8.8', '9.9.9.9']
results = validator.validate_by_speed(servers)

for server, latency in results:
    print(f"{server}: {latency:.2f}ms")
```

```python
# High concurrency for large lists
validator = Validator(concurrency=100)
results = validator.validate_by_speed(large_server_list, max_ms=100)
```

```python
# Async usage
import asyncio

async def main():
    validator = Validator(concurrency=200)
    results = await validator.validate_by_speed_async(servers)
    return results

results = asyncio.run(main())
```

---

## Examples

### CLI Usage

```bash
# Speed filtering
pyresolvers -tL resolvers.txt --min-speed 10 --max-speed 100

# Silent mode (IPs only)
pyresolvers -tL resolvers.txt --silent

# Exclude specific servers
pyresolvers -tL all_resolvers.txt -e 8.8.8.8

# High performance (100 concurrent)
pyresolvers -tL large_list.txt -threads 100
```

### Library Usage

**Filter by Speed:**
```python
validator = Validator(concurrency=50)
fast = validator.validate_by_speed(servers, max_ms=50)
```

**Detailed Results:**
```python
results = validator.validate(servers)
for r in results:
    if r.valid:
        print(f"✓ {r.server}: {r.latency_ms:.2f}ms")
    else:
        print(f"✗ {r.server}: {r.error}")
```

**JSON Export:**
```python
json_output = validator.to_json(servers, max_ms=100, pretty=True)
with open('valid_dns.json', 'w') as f:
    f.write(json_output)
```

**Streaming (Memory Efficient):**
```python
async def process_huge_list():
    validator = Validator(concurrency=100)
    async for server, latency in validator.validate_streaming_async(servers):
        print(f"{server}: {latency:.2f}ms")

asyncio.run(process_huge_list())
```

### Cronjob Example

**Bash Script:**
```bash
#!/bin/bash
# /usr/local/bin/dns_monitor.sh

API_URL="https://api.example.com/dns/update"
OUTPUT_DIR="/var/lib/dns-monitor"

mkdir -p "$OUTPUT_DIR"

# Validate and save
pyresolvers -tL https://public-dns.info/nameservers.txt \
    --max-speed 100 \
    --format json \
    -o "$OUTPUT_DIR/resolvers.json"

# Send to API
curl -X POST "$API_URL" \
    -H "Content-Type: application/json" \
    -d @"$OUTPUT_DIR/resolvers.json"
```

**Crontab:**
```cron
# Run every 6 hours
0 */6 * * * /usr/local/bin/dns_monitor.sh >> /var/log/dns-monitor.log 2>&1
```

---

## API Reference

### Validator

High-performance async DNS validator.

```python
Validator(
    trusted_resolvers: Optional[List[str]] = None,    # ["1.1.1.1", "8.8.8.8"]
    test_domains: Optional[List[str]] = None,          # ["bet365.com", "telegram.com"]
    poison_check_domains: Optional[List[str]] = None,
    baseline_domain: str = "bet365.com",
    query_prefix: str = "dnsvalidator",
    concurrency: int = 50,              # Async concurrency
    timeout: int = 5,                   # DNS timeout (seconds)
    use_fast_timeout: bool = False,     # Fast dead server detection (optional speedup)
    batch_size: int = 100,              # Memory management
    verbose: bool = False
)
```

**Methods:**

- `validate(servers)` → `List[ValidationResult]` - Validate servers
- `validate_by_speed(servers, min_ms, max_ms)` → `List[Tuple[str, float]]` - Get valid servers ordered by speed
- `to_json(servers, min_ms, max_ms)` → `str` - Export as JSON
- `to_text(servers, min_ms, max_ms, show_speed)` → `str` - Export as text

**Async Methods:**

- `await validate_async(servers)` - Async validation
- `await validate_by_speed_async(servers, min_ms, max_ms)` - Async speed validation
- `async for server, latency in validate_streaming_async(servers)` - Async streaming

### ValidationResult

```python
@dataclass
class ValidationResult:
    server: str
    valid: bool
    latency_ms: float
    error: Optional[str] = None
```
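
Each result can be turned into a plain dict via `to_dict()` (defined in `validator.py`), which is convenient for custom serialization. A minimal sketch, assuming `aiodns` is installed and the listed resolvers are reachable:

```python
import json

from pyresolvers import Validator

validator = Validator()
results = validator.validate(['1.1.1.1', '8.8.8.8'])

# to_dict() yields {'server': ..., 'valid': ..., 'latency_ms': ..., 'error': ...}
records = [r.to_dict() for r in results]
print(json.dumps(records, indent=2))
```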

---

## CLI Options

| Option | Description |
|--------|-------------|
| `-t SERVER` | Test single server |
| `-tL FILE/URL` | Test from file or URL |
| `-e SERVER` | Exclude server |
| `-eL FILE/URL` | Exclude from file/URL |
| `-r DOMAIN` | Baseline domain (default: bet365.com) |
| `-threads N` | Concurrency (default: 5) |
| `-timeout N` | Timeout seconds (default: 600) |
| `-o FILE` | Output file |
| `--format FORMAT` | text, json, text-with-speed |
| `--max-speed MS` | Max latency filter (ms) |
| `--min-speed MS` | Min latency filter (ms) |
| `--silent` | Only output IPs |
| `-v, --verbose` | Verbose output |
| `--no-color` | Disable colors |

---

## Performance Tips

- **Concurrency**: 50-100 for best performance on most systems
- **Timeout**: Lower (3s) for speed, higher (10s+) for thoroughness
- **Fast Timeout**: Enable (`use_fast_timeout=True`) for 30-50% speedup (may miss slow servers)
- **Batch Size**: Increase for more memory, decrease for less
- **Network**: Run from VPS to avoid ISP throttling
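
Taken together, a rough sketch of two configurations based on these tips (all keyword arguments come from the `Validator` constructor above; the exact numbers are illustrative, not prescriptive):

```python
from pyresolvers import Validator

# Speed-oriented: high concurrency, short timeout, fast dead-server detection.
fast = Validator(concurrency=100, timeout=3, use_fast_timeout=True)

# Coverage-oriented: default concurrency, generous timeout, no fast timeout.
thorough = Validator(concurrency=50, timeout=10, use_fast_timeout=False)
```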

### Optimization Features

1. **Async I/O** - Non-blocking DNS queries with aiodns
2. **Parallel Baseline** - Queries trusted resolvers simultaneously
3. **Fast Timeout** - 1s initial timeout for dead server detection
4. **Combined Queries** - Reduces DNS round trips
5. **Batch Processing** - Memory-efficient for huge lists
6. **Streaming** - Progressive results without holding all in memory

---

## How It Works

1. **Baseline** - Query trusted DNS (Cloudflare, Google) for ground truth
2. **Poisoning Check** - Test random subdomains to detect hijacking
3. **NXDOMAIN** - Verify correct NXDOMAIN behavior
4. **Baseline Compare** - Ensure responses match baseline
5. **Speed Test** - Measure latency and order results
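
The sketch below illustrates steps 2 and 3 against a single resolver, using the same `aiodns` calls as `validator.py`; the target resolver and domain are examples only, and a real run should also compare answers against a trusted baseline as the library does:

```python
import asyncio
import random
import string

import aiodns


async def spot_check(server: str, domain: str = "bet365.com") -> None:
    resolver = aiodns.DNSResolver(nameservers=[server], timeout=5)

    # Step 2: a random subdomain should NOT resolve on an honest resolver.
    junk = ''.join(random.choices(string.ascii_lowercase, k=10)) + "." + domain
    try:
        await resolver.query(junk, 'A')
        print(f"{server}: resolved a random subdomain -> possible hijacking/wildcarding")
    except aiodns.error.DNSError:
        # Step 3: NXDOMAIN is the expected outcome here.
        print(f"{server}: NXDOMAIN behaviour looks correct")

    # The real domain should resolve; compare this IP with a trusted resolver's answer.
    answers = await resolver.query(domain, 'A')
    print(f"{server}: {domain} -> {answers[0].host}")


asyncio.run(spot_check("1.1.1.1"))
```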

---

## Important Notes

### Thread Count

Keep concurrency reasonable (50-100) to avoid triggering rate limits. Very high concurrency may be blocked by ISPs or DNS providers.

### Domain Selection

Use **non-geolocated** domains for baseline (bet365.com works well). Avoid google.com, facebook.com as they return different IPs by location.
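
A minimal sketch of overriding the defaults (the domains shown are simply the library's defaults; substitute any stable, non-geolocated domains you trust):

```python
from pyresolvers import Validator

validator = Validator(
    baseline_domain="bet365.com",                 # single ground-truth domain
    test_domains=["bet365.com", "telegram.com"],  # resolved during baseline setup
    trusted_resolvers=["1.1.1.1", "8.8.8.8"],     # used to establish the baseline
)
```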

---

## Project Structure

```
pyresolvers/
├── pyresolvers/
│   ├── __init__.py          # Package exports
│   ├── __main__.py          # CLI entry
│   ├── validator.py         # Async validation
│   └── lib/core/
│       ├── input.py         # CLI args
│       ├── output.py        # Formatting
│       └── __version__.py
├── requirements.txt
├── setup.py
└── README.md
```

---

## License

GNU General Public License v3.0 - see [LICENSE](LICENSE)

---

## Acknowledgments

Based on [dnsvalidator](https://github.com/vortexau/dnsvalidator) by:

- **James McLean** ([@vortexau](https://twitter.com/vortexau))
- **Michael Skelton** ([@codingo_](https://twitter.com/codingo_))

Enhanced with async architecture, speed testing, and performance optimizations by Karl.

---

<div align="center">

**[⬆ back to top](#pyresolvers)**

Made with ❤️ by Karl | Based on dnsvalidator by @vortexau & @codingo_

</div>
pyresolvers-1.0.0.dist-info/RECORD
ADDED
@@ -0,0 +1,13 @@
pyresolvers/__init__.py,sha256=aIdA6BsOAIAGHqX_2P5xmOPe2BrY48u9GUM3muia1Ws,830
pyresolvers/__main__.py,sha256=nmnFQoziQgqmeWgOJt5eNYzFNdzsOdprhJB7-rylxaQ,3385
pyresolvers/validator.py,sha256=EpgDH40bb5vZlWSozUFIpQQtELuZR0HcIsMRk9gRl-c,13761
pyresolvers/lib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pyresolvers/lib/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pyresolvers/lib/core/__version__.py,sha256=eJIiVTKvshwRqBCzqfJOiJwmYYTZkPA-nQ1po_XxDO8,23
pyresolvers/lib/core/input.py,sha256=JkRH5c9AsW_c5Isiy9b8LoCIAKlNQDOS1QJqF0Letmc,6362
pyresolvers/lib/core/output.py,sha256=rFLLiPtQhS4od1oZjT4MnMDmSa3j1EfCIM1YePTuriU,2237
pyresolvers-1.0.0.dist-info/METADATA,sha256=yGM0b7hcsYUwHAu_Upnc7evHb6TM0n6SwvdjC1u6a6A,10855
pyresolvers-1.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
pyresolvers-1.0.0.dist-info/entry_points.txt,sha256=kkUh0xu55gO9WRwxiMbguLC4DEy3aHd7e5YDUTzdN6I,58
pyresolvers-1.0.0.dist-info/top_level.txt,sha256=55wYsw2v1_vxEjWtRIdfEmB5yNCl3cNL-69tSLagC1k,12
pyresolvers-1.0.0.dist-info/RECORD,,
pyresolvers-1.0.0.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
pyresolvers