oneforall-kjl 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. OneForAll/__init__.py +15 -0
  2. OneForAll/brute.py +503 -0
  3. OneForAll/common/check.py +41 -0
  4. OneForAll/common/crawl.py +10 -0
  5. OneForAll/common/database.py +277 -0
  6. OneForAll/common/domain.py +63 -0
  7. OneForAll/common/ipasn.py +42 -0
  8. OneForAll/common/ipreg.py +139 -0
  9. OneForAll/common/lookup.py +28 -0
  10. OneForAll/common/module.py +369 -0
  11. OneForAll/common/query.py +9 -0
  12. OneForAll/common/records.py +363 -0
  13. OneForAll/common/request.py +264 -0
  14. OneForAll/common/resolve.py +173 -0
  15. OneForAll/common/search.py +78 -0
  16. OneForAll/common/similarity.py +138 -0
  17. OneForAll/common/tablib/__init__.py +0 -0
  18. OneForAll/common/tablib/format.py +89 -0
  19. OneForAll/common/tablib/tablib.py +360 -0
  20. OneForAll/common/tldextract.py +240 -0
  21. OneForAll/common/utils.py +789 -0
  22. OneForAll/config/__init__.py +17 -0
  23. OneForAll/config/api.py +94 -0
  24. OneForAll/config/default.py +255 -0
  25. OneForAll/config/log.py +38 -0
  26. OneForAll/config/setting.py +108 -0
  27. OneForAll/export.py +72 -0
  28. OneForAll/modules/altdns.py +216 -0
  29. OneForAll/modules/autotake/github.py +105 -0
  30. OneForAll/modules/certificates/censys_api.py +73 -0
  31. OneForAll/modules/certificates/certspotter.py +48 -0
  32. OneForAll/modules/certificates/crtsh.py +84 -0
  33. OneForAll/modules/certificates/google.py +48 -0
  34. OneForAll/modules/certificates/myssl.py +46 -0
  35. OneForAll/modules/certificates/racent.py +49 -0
  36. OneForAll/modules/check/axfr.py +97 -0
  37. OneForAll/modules/check/cdx.py +44 -0
  38. OneForAll/modules/check/cert.py +58 -0
  39. OneForAll/modules/check/csp.py +94 -0
  40. OneForAll/modules/check/nsec.py +58 -0
  41. OneForAll/modules/check/robots.py +44 -0
  42. OneForAll/modules/check/sitemap.py +44 -0
  43. OneForAll/modules/collect.py +70 -0
  44. OneForAll/modules/crawl/archivecrawl.py +59 -0
  45. OneForAll/modules/crawl/commoncrawl.py +59 -0
  46. OneForAll/modules/datasets/anubis.py +45 -0
  47. OneForAll/modules/datasets/bevigil.py +50 -0
  48. OneForAll/modules/datasets/binaryedge_api.py +50 -0
  49. OneForAll/modules/datasets/cebaidu.py +45 -0
  50. OneForAll/modules/datasets/chinaz.py +45 -0
  51. OneForAll/modules/datasets/chinaz_api.py +49 -0
  52. OneForAll/modules/datasets/circl_api.py +49 -0
  53. OneForAll/modules/datasets/cloudflare_api.py +130 -0
  54. OneForAll/modules/datasets/dnsdb_api.py +51 -0
  55. OneForAll/modules/datasets/dnsdumpster.py +52 -0
  56. OneForAll/modules/datasets/dnsgrep.py +44 -0
  57. OneForAll/modules/datasets/fullhunt.py +48 -0
  58. OneForAll/modules/datasets/hackertarget.py +45 -0
  59. OneForAll/modules/datasets/ip138.py +45 -0
  60. OneForAll/modules/datasets/ipv4info_api.py +73 -0
  61. OneForAll/modules/datasets/netcraft.py +66 -0
  62. OneForAll/modules/datasets/passivedns_api.py +51 -0
  63. OneForAll/modules/datasets/qianxun.py +61 -0
  64. OneForAll/modules/datasets/rapiddns.py +45 -0
  65. OneForAll/modules/datasets/riddler.py +45 -0
  66. OneForAll/modules/datasets/robtex.py +58 -0
  67. OneForAll/modules/datasets/securitytrails_api.py +56 -0
  68. OneForAll/modules/datasets/sitedossier.py +57 -0
  69. OneForAll/modules/datasets/spyse_api.py +62 -0
  70. OneForAll/modules/datasets/sublist3r.py +45 -0
  71. OneForAll/modules/datasets/urlscan.py +45 -0
  72. OneForAll/modules/datasets/windvane.py +92 -0
  73. OneForAll/modules/dnsquery/mx.py +35 -0
  74. OneForAll/modules/dnsquery/ns.py +35 -0
  75. OneForAll/modules/dnsquery/soa.py +35 -0
  76. OneForAll/modules/dnsquery/spf.py +35 -0
  77. OneForAll/modules/dnsquery/txt.py +35 -0
  78. OneForAll/modules/enrich.py +72 -0
  79. OneForAll/modules/finder.py +206 -0
  80. OneForAll/modules/intelligence/alienvault.py +50 -0
  81. OneForAll/modules/intelligence/riskiq_api.py +58 -0
  82. OneForAll/modules/intelligence/threatbook_api.py +50 -0
  83. OneForAll/modules/intelligence/threatminer.py +45 -0
  84. OneForAll/modules/intelligence/virustotal.py +60 -0
  85. OneForAll/modules/intelligence/virustotal_api.py +59 -0
  86. OneForAll/modules/iscdn.py +86 -0
  87. OneForAll/modules/search/ask.py +69 -0
  88. OneForAll/modules/search/baidu.py +96 -0
  89. OneForAll/modules/search/bing.py +79 -0
  90. OneForAll/modules/search/bing_api.py +78 -0
  91. OneForAll/modules/search/fofa_api.py +74 -0
  92. OneForAll/modules/search/gitee.py +71 -0
  93. OneForAll/modules/search/github_api.py +86 -0
  94. OneForAll/modules/search/google.py +83 -0
  95. OneForAll/modules/search/google_api.py +77 -0
  96. OneForAll/modules/search/hunter_api.py +72 -0
  97. OneForAll/modules/search/quake_api.py +72 -0
  98. OneForAll/modules/search/shodan_api.py +53 -0
  99. OneForAll/modules/search/so.py +75 -0
  100. OneForAll/modules/search/sogou.py +72 -0
  101. OneForAll/modules/search/wzsearch.py +68 -0
  102. OneForAll/modules/search/yahoo.py +81 -0
  103. OneForAll/modules/search/yandex.py +80 -0
  104. OneForAll/modules/search/zoomeye_api.py +73 -0
  105. OneForAll/modules/srv.py +75 -0
  106. OneForAll/modules/wildcard.py +319 -0
  107. OneForAll/oneforall.py +275 -0
  108. OneForAll/takeover.py +168 -0
  109. OneForAll/test.py +23 -0
  110. oneforall_kjl-0.1.1.dist-info/METADATA +18 -0
  111. oneforall_kjl-0.1.1.dist-info/RECORD +114 -0
  112. oneforall_kjl-0.1.1.dist-info/WHEEL +5 -0
  113. oneforall_kjl-0.1.1.dist-info/entry_points.txt +2 -0
  114. oneforall_kjl-0.1.1.dist-info/top_level.txt +1 -0
OneForAll/common/utils.py
@@ -0,0 +1,789 @@
+import os
+import re
+import sys
+import time
+import json
+import socket
+import random
+import string
+import platform
+import subprocess
+from urllib.parse import scheme_chars
+from ipaddress import IPv4Address, ip_address
+from distutils.version import LooseVersion
+from pathlib import Path
+from stat import S_IXUSR
+
+import requests
+import tenacity
+from dns.resolver import Resolver
+
+from common.database import Database
+from common.domain import Domain
+from common.records import Record, RecordCollection
+from config import settings
+from config.log import logger
+
+user_agents = [
+    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
+    '(KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36',
+    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 '
+    '(KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36',
+    'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
+    '(KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36',
+    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:54.0) Gecko/20100101 Firefox/68.0',
+    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:61.0) '
+    'Gecko/20100101 Firefox/68.0',
+    'Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/68.0']
+
+IP_RE = re.compile(r'^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$') # pylint: disable=line-too-long
+SCHEME_RE = re.compile(r'^([' + scheme_chars + ']+:)?//')
+
+
+def gen_random_ip():
+    """
+    Generate random decimal IP string
+    """
+    while True:
+        ip = IPv4Address(random.randint(0, 2 ** 32 - 1))
+        if ip.is_global:
+            return ip.exploded
+
+
+def gen_fake_header():
+    """
+    Generate fake request headers
+    """
+    headers = settings.request_default_headers.copy()
+    if not isinstance(headers, dict):
+        headers = dict()
+    if settings.enable_random_ua:
+        ua = random.choice(user_agents)
+        headers['User-Agent'] = ua
+    headers['Accept-Encoding'] = 'gzip, deflate'
+    return headers
+
+
+def get_random_header():
+    """
+    Get random header
+    """
+    headers = gen_fake_header()
+    if not isinstance(headers, dict):
+        headers = None
+    return headers
+
+
+def get_random_proxy():
+    """
+    Get random proxy
+    """
+    try:
+        return random.choice(settings.request_proxy_pool)
+    except IndexError:
+        return None
+
+
+def get_proxy():
+    """
+    Get proxy
+    """
+    if settings.enable_request_proxy:
+        return get_random_proxy()
+    return None
+
+
+def split_list(ls, size):
+    """
+    Split list
+
+    :param list ls: list
+    :param int size: size
+    :return list: result
+
+    >>> split_list([1, 2, 3, 4], 3)
+    [[1, 2, 3], [4]]
+    """
+    if size == 0:
+        return ls
+    return [ls[i:i + size] for i in range(0, len(ls), size)]
+
+
+def match_main_domain(domain):
+    if not isinstance(domain, str):
+        return None
+    item = domain.lower().strip()
+    return Domain(item).match()
+
+
+def read_target_file(target):
+    domains = list()
+    with open(target, encoding='utf-8', errors='ignore') as file:
+        for line in file:
+            domain = match_main_domain(line)
+            if not domain:
+                continue
+            domains.append(domain)
+    sorted_domains = sorted(set(domains), key=domains.index)
+    return sorted_domains
+
+
+def get_from_target(target):
+    domains = set()
+    if isinstance(target, str):
+        if target.endswith('.txt'):
+            logger.log('FATAL', 'Use targets parameter for multiple domain names')
+            exit(1)
+        domain = match_main_domain(target)
+        if not domain:
+            return domains
+        domains.add(domain)
+    return domains
+
+
+def get_from_targets(targets):
+    domains = set()
+    if not isinstance(targets, str):
+        return domains
+    try:
+        path = Path(targets)
+    except Exception as e:
+        logger.log('ERROR', e.args)
+        return domains
+    if path.exists() and path.is_file():
+        domains = read_target_file(targets)
+        return domains
+    return domains
+
+
+def get_domains(target, targets=None):
+    logger.log('DEBUG', f'Getting domains')
+    target_domains = get_from_target(target)
+    targets_domains = get_from_targets(targets)
+    domains = list(target_domains.union(targets_domains))
+    if targets_domains:
+        domains = sorted(domains, key=targets_domains.index)  # sort by the original index order in targets
+    if not domains:
+        logger.log('ERROR', f'Did not get a valid domain name')
+    logger.log('DEBUG', f'The obtained domains \n{domains}')
+    return domains
+
+
+def check_dir(dir_path):
+    if not dir_path.exists():
+        logger.log('INFOR', f'{dir_path} does not exist, directory will be created')
+        dir_path.mkdir(parents=True, exist_ok=True)
+
+
+def check_path(path, name, fmt):
+    """
+    Check the output path of results
+
+    :param path: save path
+    :param name: export name
+    :param fmt: save format
+    :return: save path
+    """
+    filename = f'{name}.{fmt}'
+    default_path = settings.result_save_dir.joinpath(filename)
+    if isinstance(path, str):
+        path = repr(path).replace('\\', '/')  # replace backslashes in the path with forward slashes
+        path = path.replace('\'', '')  # remove the extra escaping
+    else:
+        path = default_path
+    path = Path(path)
+    if path.is_dir():  # the input is a directory
+        path = path.joinpath(filename)
+    parent_dir = path.parent
+    if not parent_dir.exists():
+        logger.log('ALERT', f'{parent_dir} does not exist, directory will be created')
+        parent_dir.mkdir(parents=True, exist_ok=True)
+    if path.exists():
+        logger.log('ALERT', f'The {path} exists and will be overwritten')
+    return path
+
+
+def check_format(fmt):
+    """
+    Check the export format
+
+    :param fmt: the export format passed in
+    :return: export format
+    """
+    formats = ['csv', 'json', ]
+    if fmt in formats:
+        return fmt
+    else:
+        logger.log('ALERT', f'Does not support {fmt} format')
+        logger.log('ALERT', 'So use csv format by default')
+        return 'csv'
+
+
+def load_json(path):
+    with open(path) as fp:
+        return json.load(fp)
+
+
+def save_to_db(name, data, module):
+    """
+    Save request results to database
+
+    :param str name: table name
+    :param list data: data to be saved
+    :param str module: module name
+    """
+    db = Database()
+    db.drop_table(name)
+    db.create_table(name)
+    db.save_db(name, data, module)
+    db.close()
+
+
+def save_to_file(path, data):
+    """
+    Save data to a file
+
+    :param path: save path
+    :param data: data to save
+    :return: whether the save succeeded
+    """
+    try:
+        with open(path, 'w', errors='ignore', newline='') as file:
+            file.write(data)
+            return True
+    except TypeError:
+        with open(path, 'wb') as file:
+            file.write(data)
+            return True
+    except Exception as e:
+        logger.log('ERROR', e.args)
+        return False
+
+
+def check_response(method, resp):
+    """
+    Check the response and log the JSON message of abnormal responses
+
+    :param method: request method
+    :param resp: response object
+    :return: whether the response is normal
+    """
+    if resp.status_code == 200 and resp.content:
+        return True
+    logger.log('ALERT', f'{method} {resp.url} {resp.status_code} - '
+                        f'{resp.reason} {len(resp.content)}')
+    content_type = resp.headers.get('Content-Type')
+    if content_type and 'json' in content_type and resp.content:
+        try:
+            msg = resp.json()
+        except Exception as e:
+            logger.log('DEBUG', e.args)
+        else:
+            logger.log('ALERT', msg)
+    return False
+
+
+def mark_subdomain(old_data, now_data):
+    """
+    Mark newly added subdomains and return the new dataset
+
+    :param list old_data: previous subdomain data
+    :param list now_data: current subdomain data
+    :return: marked subdomain data
+    :rtype: list
+    """
+    # first time collecting subdomains
+    mark_data = now_data.copy()
+    if not old_data:
+        for index, item in enumerate(mark_data):
+            item['new'] = 1
+            mark_data[index] = item
+        return mark_data
+    # not the first time collecting subdomains
+    old_subdomains = {item.get('subdomain') for item in old_data}
+    for index, item in enumerate(mark_data):
+        subdomain = item.get('subdomain')
+        if subdomain in old_subdomains:
+            item['new'] = 0
+        else:
+            item['new'] = 1
+        mark_data[index] = item
+    return mark_data
+
+
+def remove_invalid_string(string):
+    # Excel cells cannot store the following illegal characters directly
+    return re.sub(r'[\000-\010]|[\013-\014]|[\016-\037]', r'', string)
+
+
+def export_all_results(path, name, fmt, datas):
+    path = check_path(path, name, fmt)
+    logger.log('ALERT', f'The subdomain result for all main domains: {path}')
+    row_list = list()
+    for row in datas:
+        if 'header' in row:
+            row.pop('header')
+        if 'response' in row:
+            row.pop('response')
+        keys = row.keys()
+        values = row.values()
+        row_list.append(Record(keys, values))
+    rows = RecordCollection(iter(row_list))
+    content = rows.export(fmt)
+    if fmt == 'csv':
+        content = '\ufeff' + content
+    save_to_file(path, content)
+
+
+def export_all_subdomains(alive, path, name, datas):
+    path = check_path(path, name, 'txt')
+    logger.log('ALERT', f'The txt subdomain result for all main domains: {path}')
+    subdomains = set()
+    for row in datas:
+        subdomain = row.get('subdomain')
+        if alive:
+            if not row.get('alive'):
+                continue
+            subdomains.add(subdomain)
+        else:
+            subdomains.add(subdomain)
+    data = '\n'.join(subdomains)
+    save_to_file(path, data)
+
+
+def export_all(alive, fmt, path, datas):
+    """
+    Export all result data
+
+    :param bool alive: only export alive subdomain results
+    :param str fmt: export file format
+    :param str path: export file path
+    :param list datas: result data to export
+    """
+    fmt = check_format(fmt)
+    timestamp = get_timestring()
+    name = f'all_subdomain_result_{timestamp}'
+    export_all_results(path, name, fmt, datas)
+    export_all_subdomains(alive, path, name, datas)
+
+
+def dns_resolver():
+    """
+    DNS resolver
+    """
+    resolver = Resolver()
+    resolver.nameservers = settings.resolver_nameservers
+    resolver.timeout = settings.resolver_timeout
+    resolver.lifetime = settings.resolver_lifetime
+    return resolver
+
+
+def dns_query(qname, qtype):
+    """
+    Query the DNS records of a domain
+
+    :param str qname: domain to query
+    :param str qtype: query type
+    :return: query result
+    """
+    logger.log('TRACE', f'Try to query {qtype} record of {qname}')
+    resolver = dns_resolver()
+    try:
+        answer = resolver.query(qname, qtype)
+    except Exception as e:
+        logger.log('TRACE', e.args)
+        logger.log('TRACE', f'Query {qtype} record of {qname} failed')
+        return None
+    else:
+        logger.log('TRACE', f'Query {qtype} record of {qname} succeeded')
+        return answer
+
+
+def get_timestamp():
+    return int(time.time())
+
+
+def get_timestring():
+    return time.strftime('%Y%m%d_%H%M%S', time.localtime(time.time()))
+
+
+def get_classname(classobj):
+    return classobj.__class__.__name__
+
+
+def python_version():
+    return sys.version
+
+
+def calc_alive(data):
+    return len(list(filter(lambda item: item.get('alive') == 1, data)))
+
+
+def count_alive(name):
+    db = Database()
+    result = db.count_alive(name)
+    count = result.scalar()
+    db.close()
+    return count
+
+
+def get_subdomains(data):
+    return set(map(lambda item: item.get('subdomain'), data))
+
+
+def set_id_none(data):
+    new_data = []
+    for item in data:
+        item['id'] = None
+        new_data.append(item)
+    return new_data
+
+
+def get_filtered_data(data):
+    filtered_data = []
+    for item in data:
+        resolve = item.get('resolve')
+        if resolve != 1:
+            filtered_data.append(item)
+    return filtered_data
+
+
+def get_sample_banner(headers):
+    temp_list = []
+    server = headers.get('Server')
+    if server:
+        temp_list.append(server)
+    via = headers.get('Via')
+    if via:
+        temp_list.append(via)
+    power = headers.get('X-Powered-By')
+    if power:
+        temp_list.append(power)
+    banner = ','.join(temp_list)
+    return banner
+
+
+def check_ip_public(ip_list):
+    for ip_str in ip_list:
+        ip = ip_address(ip_str)
+        if not ip.is_global:
+            return 0
+    return 1
+
+
+def ip_is_public(ip_str):
+    ip = ip_address(ip_str)
+    if not ip.is_global:
+        return 0
+    return 1
+
+
+def get_request_count():
+    return os.cpu_count() * 16
+
+
+def uniq_dict_list(dict_list):
+    return list(filter(lambda name: dict_list.count(name) == 1, dict_list))
+
+
+def delete_file(*paths):
+    for path in paths:
+        try:
+            path.unlink()
+        except Exception as e:
+            logger.log('ERROR', e.args)
+
+
+@tenacity.retry(stop=tenacity.stop_after_attempt(3),
+                wait=tenacity.wait_fixed(2))
+def check_net():
+    times = 0
+    while True:
+        times += 1
+        urls = ['https://www.baidu.com', 'https://www.bing.com',
+                'https://www.cloudflare.com', 'https://www.akamai.com/',
+                'https://www.fastly.com/', 'https://www.amazon.com/']
+        url = random.choice(urls)
+        logger.log('DEBUG', f'Trying to access {url}')
+        header = get_random_header()
+        proxy = get_proxy()
+        timeout = settings.request_timeout_second
+        verify = settings.request_ssl_verify
+        session = requests.Session()
+        session.trust_env = False
+        session = requests.Session()
+        session.trust_env = False
+        try:
+            rsp = session.get(url, headers=header, proxies=proxy,
+                              timeout=timeout, verify=verify)
+        except Exception as e:
+            logger.log('ERROR', e.args)
+            logger.log('ALERT', f'Unable to access Internet, retrying for the {times}th time')
+        else:
+            if rsp.status_code == 200:
+                logger.log('DEBUG', 'Access to Internet OK')
+                return True
+        if times >= 3:
+            logger.log('ALERT', 'Access to Internet failed')
+            return False
+
+
+def check_dep():
+    logger.log('INFOR', 'Checking dependent environment')
+    implementation = platform.python_implementation()
+    version = platform.python_version()
+    if implementation != 'CPython':
+        logger.log('FATAL', f'OneForAll only passed the test under CPython')
+        exit(1)
+    if LooseVersion(version) < LooseVersion('3.6'):
+        logger.log('FATAL', 'OneForAll requires Python 3.6 or higher')
+        exit(1)
+
+
+def get_net_env():
+    logger.log('INFOR', 'Checking network environment')
+    try:
+        result = check_net()
+    except Exception as e:
+        logger.log('DEBUG', e.args)
+        logger.log('ALERT', 'Please check your network environment.')
+        return False
+    return result
+
+
+def check_version(local):
+    logger.log('INFOR', 'Checking for the latest version')
+    api = 'https://api.github.com/repos/shmilylty/OneForAll/releases/latest'
+    header = get_random_header()
+    proxy = get_proxy()
+    timeout = settings.request_timeout_second
+    verify = settings.request_ssl_verify
+    session = requests.Session()
+    session.trust_env = False
+    try:
+        resp = session.get(url=api, headers=header, proxies=proxy,
+                           timeout=timeout, verify=verify)
+        resp_json = resp.json()
+        latest = resp_json['tag_name']
+    except Exception as e:
+        logger.log('ALERT', 'An error occurred while checking the latest version')
+        logger.log('DEBUG', e.args)
+        return
+    if latest > local:
+        change = resp_json.get("body")
+        logger.log('ALERT', f'The current version is {local} '
+                            f'but the latest version is {latest}')
+        logger.log('ALERT', f'The {latest} version mainly has the following changes')
+        logger.log('ALERT', change)
+    else:
+        logger.log('INFOR', f'The current version {local} is already the latest version')
+
+
+def get_main_domain(domain):
+    if not isinstance(domain, str):
+        return None
+    return Domain(domain).registered()
+
+
+def call_massdns(massdns_path, dict_path, ns_path, output_path, log_path,
+                 query_type='A', process_num=1, concurrent_num=10000,
+                 quiet_mode=False):
+    logger.log('DEBUG', 'Start running massdns')
+    quiet = ''
+    if quiet_mode:
+        quiet = '--quiet'
+    status_format = settings.brute_status_format
+    socket_num = settings.brute_socket_num
+    resolve_num = settings.brute_resolve_num
+    cmd = f'{massdns_path} {quiet} --status-format {status_format} ' \
+          f'--processes {process_num} --socket-count {socket_num} ' \
+          f'--hashmap-size {concurrent_num} --resolvers {ns_path} ' \
+          f'--resolve-count {resolve_num} --type {query_type} ' \
+          f'--flush --output J --outfile {output_path} ' \
+          f'--root --error-log {log_path} {dict_path} --filter OK ' \
+          f'--sndbuf 0 --rcvbuf 0'
+    logger.log('DEBUG', f'Run command {cmd}')
+    subprocess.run(args=cmd, shell=True)
+    logger.log('DEBUG', f'Finished massdns')
+
+
+def get_massdns_path(massdns_dir):
+    path = settings.brute_massdns_path
+    if path:
+        return path
+    system = platform.system().lower()
+    machine = platform.machine().lower()
+    name = f'massdns_{system}_{machine}'
+    if system == 'windows':
+        name = f'massdns.exe'
+        if machine == 'amd64':
+            massdns_dir = massdns_dir.joinpath('windows', 'x64')
+        else:
+            massdns_dir = massdns_dir.joinpath('windows', 'x86')
+    path = massdns_dir.joinpath(name)
+    path.chmod(S_IXUSR)
+    if not path.exists():
+        logger.log('FATAL', 'There is no massdns for this platform or architecture')
+        logger.log('INFOR', 'Please try to compile massdns yourself '
+                            'and specify the path in the configuration')
+        exit(0)
+    return path
+
+
+def is_subname(name):
+    chars = string.ascii_lowercase + string.digits + '.-'
+    for char in name:
+        if char not in chars:
+            return False
+    return True
+
+
+def ip_to_int(ip):
+    if isinstance(ip, int):
+        return ip
+    try:
+        ipv4 = IPv4Address(ip)
+    except Exception as e:
+        logger.log('ERROR', e.args)
+        return 0
+    return int(ipv4)
+
+
+def match_subdomains(domain, html, distinct=True, fuzzy=True):
+    """
+    Use regexp to match subdomains
+
+    :param str domain: main domain
+    :param str html: response html text
+    :param bool distinct: deduplicate results or not (default True)
+    :param bool fuzzy: fuzzy match subdomain or not (default True)
+    :return set/list: result set or list
+    """
+    logger.log('TRACE', f'Use regexp to match subdomains in the response body')
+    if fuzzy:
+        regexp = r'(?:[a-z0-9](?:[a-z0-9\-]{0,61}[a-z0-9])?\.){0,}' \
+                 + domain.replace('.', r'\.')
+        result = re.findall(regexp, html, re.I)
+        if not result:
+            return set()
+        deal = map(lambda s: s.lower(), result)
+        if distinct:
+            return set(deal)
+        else:
+            return list(deal)
+    else:
+        regexp = r'(?:\>|\"|\'|\=|\,)(?:http\:\/\/|https\:\/\/)?' \
+                 r'(?:[a-z0-9](?:[a-z0-9\-]{0,61}[a-z0-9])?\.){0,}' \
+                 + domain.replace('.', r'\.')
+        result = re.findall(regexp, html, re.I)
+        if not result:
+            return set()
+        regexp = r'(?:http://|https://)'
+        deal = map(lambda s: re.sub(regexp, '', s[1:].lower()), result)
+        if distinct:
+            return set(deal)
+        else:
+            return list(deal)
+
+
+def check_random_subdomain(subdomains):
+    if not subdomains:
+        logger.log('ALERT', f'The generated dictionary is empty')
+        return
+    for subdomain in subdomains:
+        if subdomain:
+            logger.log('ALERT', f'Please check whether {subdomain} is correct or not')
+            return
+
+
+def get_url_resp(url):
+    logger.log('INFOR', f'Attempting to request {url}')
+    timeout = settings.request_timeout_second
+    verify = settings.request_ssl_verify
+    session = requests.Session()
+    session.trust_env = False
+    try:
+        resp = session.get(url, params=None, timeout=timeout, verify=verify)
+    except Exception as e:
+        logger.log('ALERT', f'Error request {url}')
+        logger.log('DEBUG', e.args)
+        return None
+    return resp
+
+
+def decode_resp_text(resp):
+    content = resp.content
+    if not content:
+        return str('')
+    try:
+        # first try strict utf-8 decoding
+        content = str(content, encoding='utf-8', errors='strict')
+    except (LookupError, TypeError, UnicodeError):
+        try:
+            # then try strict gb18030 decoding
+            content = str(content, encoding='gb18030', errors='strict')
+        except (LookupError, TypeError, UnicodeError):
+            # finally fall back to decoding with replacement characters
+            content = str(content, errors='replace')
+    return content
+
+
+def sort_by_subdomain(data):
+    return sorted(data, key=lambda item: item.get('subdomain'))
+
+
+def looks_like_ip(maybe_ip):
+    """Does the given str look like an IP address?"""
+    if not maybe_ip[0].isdigit():
+        return False
+
+    try:
+        socket.inet_aton(maybe_ip)
+        return True
+    except (AttributeError, UnicodeError):
+        if IP_RE.match(maybe_ip):
+            return True
+    except socket.error:
+        return False
+
+
+def deal_data(domain):
+    db = Database()
+    db.remove_invalid(domain)
+    db.deduplicate_subdomain(domain)
+    db.close()
+
+
+def get_data(domain):
+    db = Database()
+    data = db.get_data(domain).as_dict()
+    db.close()
+    return data
+
+
+def clear_data(domain):
+    db = Database()
+    db.drop_table(domain)
+    db.close()
+
+
+def get_ns_path(in_china=None, enable_wildcard=None, ns_ip_list=None):
+    data_dir = settings.data_storage_dir
+    path = data_dir.joinpath('nameservers.txt')
+    if in_china:
+        path = data_dir.joinpath('nameservers_cn.txt')
+    if not enable_wildcard:
+        return path
+    if not ns_ip_list:
+        return path
+    path = settings.authoritative_dns_path
+    ns_data = '\n'.join(ns_ip_list)
+    save_to_file(path, ns_data)
+    return path
+
+
+def init_table(domain):
+    db = Database()
+    db.drop_table(domain)
+    db.create_table(domain)
+    db.close()