oneforall-kjl 0.1.1 (tar.gz)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. oneforall_kjl-0.1.1/OneForAll/__init__.py +15 -0
  2. oneforall_kjl-0.1.1/OneForAll/brute.py +503 -0
  3. oneforall_kjl-0.1.1/OneForAll/common/check.py +41 -0
  4. oneforall_kjl-0.1.1/OneForAll/common/crawl.py +10 -0
  5. oneforall_kjl-0.1.1/OneForAll/common/database.py +277 -0
  6. oneforall_kjl-0.1.1/OneForAll/common/domain.py +63 -0
  7. oneforall_kjl-0.1.1/OneForAll/common/ipasn.py +42 -0
  8. oneforall_kjl-0.1.1/OneForAll/common/ipreg.py +139 -0
  9. oneforall_kjl-0.1.1/OneForAll/common/lookup.py +28 -0
  10. oneforall_kjl-0.1.1/OneForAll/common/module.py +369 -0
  11. oneforall_kjl-0.1.1/OneForAll/common/query.py +9 -0
  12. oneforall_kjl-0.1.1/OneForAll/common/records.py +363 -0
  13. oneforall_kjl-0.1.1/OneForAll/common/request.py +264 -0
  14. oneforall_kjl-0.1.1/OneForAll/common/resolve.py +173 -0
  15. oneforall_kjl-0.1.1/OneForAll/common/search.py +78 -0
  16. oneforall_kjl-0.1.1/OneForAll/common/similarity.py +138 -0
  17. oneforall_kjl-0.1.1/OneForAll/common/tablib/__init__.py +0 -0
  18. oneforall_kjl-0.1.1/OneForAll/common/tablib/format.py +89 -0
  19. oneforall_kjl-0.1.1/OneForAll/common/tablib/tablib.py +360 -0
  20. oneforall_kjl-0.1.1/OneForAll/common/tldextract.py +240 -0
  21. oneforall_kjl-0.1.1/OneForAll/common/utils.py +789 -0
  22. oneforall_kjl-0.1.1/OneForAll/config/__init__.py +17 -0
  23. oneforall_kjl-0.1.1/OneForAll/config/api.py +94 -0
  24. oneforall_kjl-0.1.1/OneForAll/config/default.py +255 -0
  25. oneforall_kjl-0.1.1/OneForAll/config/log.py +38 -0
  26. oneforall_kjl-0.1.1/OneForAll/config/setting.py +108 -0
  27. oneforall_kjl-0.1.1/OneForAll/export.py +72 -0
  28. oneforall_kjl-0.1.1/OneForAll/modules/altdns.py +216 -0
  29. oneforall_kjl-0.1.1/OneForAll/modules/autotake/github.py +105 -0
  30. oneforall_kjl-0.1.1/OneForAll/modules/certificates/censys_api.py +73 -0
  31. oneforall_kjl-0.1.1/OneForAll/modules/certificates/certspotter.py +48 -0
  32. oneforall_kjl-0.1.1/OneForAll/modules/certificates/crtsh.py +84 -0
  33. oneforall_kjl-0.1.1/OneForAll/modules/certificates/google.py +48 -0
  34. oneforall_kjl-0.1.1/OneForAll/modules/certificates/myssl.py +46 -0
  35. oneforall_kjl-0.1.1/OneForAll/modules/certificates/racent.py +49 -0
  36. oneforall_kjl-0.1.1/OneForAll/modules/check/axfr.py +97 -0
  37. oneforall_kjl-0.1.1/OneForAll/modules/check/cdx.py +44 -0
  38. oneforall_kjl-0.1.1/OneForAll/modules/check/cert.py +58 -0
  39. oneforall_kjl-0.1.1/OneForAll/modules/check/csp.py +94 -0
  40. oneforall_kjl-0.1.1/OneForAll/modules/check/nsec.py +58 -0
  41. oneforall_kjl-0.1.1/OneForAll/modules/check/robots.py +44 -0
  42. oneforall_kjl-0.1.1/OneForAll/modules/check/sitemap.py +44 -0
  43. oneforall_kjl-0.1.1/OneForAll/modules/collect.py +70 -0
  44. oneforall_kjl-0.1.1/OneForAll/modules/crawl/archivecrawl.py +59 -0
  45. oneforall_kjl-0.1.1/OneForAll/modules/crawl/commoncrawl.py +59 -0
  46. oneforall_kjl-0.1.1/OneForAll/modules/datasets/anubis.py +45 -0
  47. oneforall_kjl-0.1.1/OneForAll/modules/datasets/bevigil.py +50 -0
  48. oneforall_kjl-0.1.1/OneForAll/modules/datasets/binaryedge_api.py +50 -0
  49. oneforall_kjl-0.1.1/OneForAll/modules/datasets/cebaidu.py +45 -0
  50. oneforall_kjl-0.1.1/OneForAll/modules/datasets/chinaz.py +45 -0
  51. oneforall_kjl-0.1.1/OneForAll/modules/datasets/chinaz_api.py +49 -0
  52. oneforall_kjl-0.1.1/OneForAll/modules/datasets/circl_api.py +49 -0
  53. oneforall_kjl-0.1.1/OneForAll/modules/datasets/cloudflare_api.py +130 -0
  54. oneforall_kjl-0.1.1/OneForAll/modules/datasets/dnsdb_api.py +51 -0
  55. oneforall_kjl-0.1.1/OneForAll/modules/datasets/dnsdumpster.py +52 -0
  56. oneforall_kjl-0.1.1/OneForAll/modules/datasets/dnsgrep.py +44 -0
  57. oneforall_kjl-0.1.1/OneForAll/modules/datasets/fullhunt.py +48 -0
  58. oneforall_kjl-0.1.1/OneForAll/modules/datasets/hackertarget.py +45 -0
  59. oneforall_kjl-0.1.1/OneForAll/modules/datasets/ip138.py +45 -0
  60. oneforall_kjl-0.1.1/OneForAll/modules/datasets/ipv4info_api.py +73 -0
  61. oneforall_kjl-0.1.1/OneForAll/modules/datasets/netcraft.py +66 -0
  62. oneforall_kjl-0.1.1/OneForAll/modules/datasets/passivedns_api.py +51 -0
  63. oneforall_kjl-0.1.1/OneForAll/modules/datasets/qianxun.py +61 -0
  64. oneforall_kjl-0.1.1/OneForAll/modules/datasets/rapiddns.py +45 -0
  65. oneforall_kjl-0.1.1/OneForAll/modules/datasets/riddler.py +45 -0
  66. oneforall_kjl-0.1.1/OneForAll/modules/datasets/robtex.py +58 -0
  67. oneforall_kjl-0.1.1/OneForAll/modules/datasets/securitytrails_api.py +56 -0
  68. oneforall_kjl-0.1.1/OneForAll/modules/datasets/sitedossier.py +57 -0
  69. oneforall_kjl-0.1.1/OneForAll/modules/datasets/spyse_api.py +62 -0
  70. oneforall_kjl-0.1.1/OneForAll/modules/datasets/sublist3r.py +45 -0
  71. oneforall_kjl-0.1.1/OneForAll/modules/datasets/urlscan.py +45 -0
  72. oneforall_kjl-0.1.1/OneForAll/modules/datasets/windvane.py +92 -0
  73. oneforall_kjl-0.1.1/OneForAll/modules/dnsquery/mx.py +35 -0
  74. oneforall_kjl-0.1.1/OneForAll/modules/dnsquery/ns.py +35 -0
  75. oneforall_kjl-0.1.1/OneForAll/modules/dnsquery/soa.py +35 -0
  76. oneforall_kjl-0.1.1/OneForAll/modules/dnsquery/spf.py +35 -0
  77. oneforall_kjl-0.1.1/OneForAll/modules/dnsquery/txt.py +35 -0
  78. oneforall_kjl-0.1.1/OneForAll/modules/enrich.py +72 -0
  79. oneforall_kjl-0.1.1/OneForAll/modules/finder.py +206 -0
  80. oneforall_kjl-0.1.1/OneForAll/modules/intelligence/alienvault.py +50 -0
  81. oneforall_kjl-0.1.1/OneForAll/modules/intelligence/riskiq_api.py +58 -0
  82. oneforall_kjl-0.1.1/OneForAll/modules/intelligence/threatbook_api.py +50 -0
  83. oneforall_kjl-0.1.1/OneForAll/modules/intelligence/threatminer.py +45 -0
  84. oneforall_kjl-0.1.1/OneForAll/modules/intelligence/virustotal.py +60 -0
  85. oneforall_kjl-0.1.1/OneForAll/modules/intelligence/virustotal_api.py +59 -0
  86. oneforall_kjl-0.1.1/OneForAll/modules/iscdn.py +86 -0
  87. oneforall_kjl-0.1.1/OneForAll/modules/search/ask.py +69 -0
  88. oneforall_kjl-0.1.1/OneForAll/modules/search/baidu.py +96 -0
  89. oneforall_kjl-0.1.1/OneForAll/modules/search/bing.py +79 -0
  90. oneforall_kjl-0.1.1/OneForAll/modules/search/bing_api.py +78 -0
  91. oneforall_kjl-0.1.1/OneForAll/modules/search/fofa_api.py +74 -0
  92. oneforall_kjl-0.1.1/OneForAll/modules/search/gitee.py +71 -0
  93. oneforall_kjl-0.1.1/OneForAll/modules/search/github_api.py +86 -0
  94. oneforall_kjl-0.1.1/OneForAll/modules/search/google.py +83 -0
  95. oneforall_kjl-0.1.1/OneForAll/modules/search/google_api.py +77 -0
  96. oneforall_kjl-0.1.1/OneForAll/modules/search/hunter_api.py +72 -0
  97. oneforall_kjl-0.1.1/OneForAll/modules/search/quake_api.py +72 -0
  98. oneforall_kjl-0.1.1/OneForAll/modules/search/shodan_api.py +53 -0
  99. oneforall_kjl-0.1.1/OneForAll/modules/search/so.py +75 -0
  100. oneforall_kjl-0.1.1/OneForAll/modules/search/sogou.py +72 -0
  101. oneforall_kjl-0.1.1/OneForAll/modules/search/wzsearch.py +68 -0
  102. oneforall_kjl-0.1.1/OneForAll/modules/search/yahoo.py +81 -0
  103. oneforall_kjl-0.1.1/OneForAll/modules/search/yandex.py +80 -0
  104. oneforall_kjl-0.1.1/OneForAll/modules/search/zoomeye_api.py +73 -0
  105. oneforall_kjl-0.1.1/OneForAll/modules/srv.py +75 -0
  106. oneforall_kjl-0.1.1/OneForAll/modules/wildcard.py +319 -0
  107. oneforall_kjl-0.1.1/OneForAll/oneforall.py +275 -0
  108. oneforall_kjl-0.1.1/OneForAll/takeover.py +168 -0
  109. oneforall_kjl-0.1.1/OneForAll/test.py +23 -0
  110. oneforall_kjl-0.1.1/PKG-INFO +18 -0
  111. oneforall_kjl-0.1.1/README.md +0 -0
  112. oneforall_kjl-0.1.1/oneforall_kjl.egg-info/PKG-INFO +18 -0
  113. oneforall_kjl-0.1.1/oneforall_kjl.egg-info/SOURCES.txt +116 -0
  114. oneforall_kjl-0.1.1/oneforall_kjl.egg-info/dependency_links.txt +1 -0
  115. oneforall_kjl-0.1.1/oneforall_kjl.egg-info/entry_points.txt +2 -0
  116. oneforall_kjl-0.1.1/oneforall_kjl.egg-info/top_level.txt +1 -0
  117. oneforall_kjl-0.1.1/pyproject.toml +34 -0
  118. oneforall_kjl-0.1.1/setup.cfg +4 -0
@@ -0,0 +1,15 @@
+ """
+ oneforall packaging
+ """
+
+ __version__ = "0.1.1"
+ __author__ = "Your Name"
+ __email__ = "your.email@example.com"
+
+ import common
+ import config
+ import data
+ import images
+ import modules
+ import thirdparty
+ import oneforall, brute, takeover
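
Note on the imports above: `import common`, `import config`, and the rest are flat (non-relative) imports, so they only resolve when the installed OneForAll directory itself is on sys.path; importing the distribution as an ordinary package will otherwise fail. Below is a minimal sketch of how a consumer might account for that before using the modules, assuming the top-level package is named OneForAll as the file layout suggests; the find_spec-based lookup is illustrative and not part of the package.

import sys
import importlib.util
from pathlib import Path

# Put the installed OneForAll directory on sys.path so that the flat imports
# used throughout the package (import common, import config, ...) can resolve.
spec = importlib.util.find_spec('OneForAll')
if spec and spec.submodule_search_locations:
    pkg_dir = Path(list(spec.submodule_search_locations)[0])
    if str(pkg_dir) not in sys.path:
        sys.path.insert(0, str(pkg_dir))
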
@@ -0,0 +1,503 @@
+ #!/usr/bin/python3
+ # coding=utf-8
+
+ """
+ OneForAll subdomain brute module
+
+ :copyright: Copyright (c) 2019, Jing Ling. All rights reserved.
+ :license: GNU General Public License v3.0, see LICENSE for more details.
+ """
+ import gc
+ import json
+ import time
+
+ import exrex
+ import fire
+
+ import export
+ from common import utils
+ from config import settings
+ from common.module import Module
+ from modules import wildcard
+ from config.log import logger
+
+
+ def gen_subdomains(expression, path):
+     """
+     Generate subdomains
+
+     :param str expression: generate subdomains expression
+     :param str path: path of wordlist
+     :return set subdomains: list of subdomains
+     """
+     subdomains = set()
+     with open(path, encoding='utf-8', errors='ignore') as fd:
+         for line in fd:
+             word = line.strip().lower()
+             if len(word) == 0:
+                 continue
+             if not utils.is_subname(word):
+                 continue
+             if word.startswith('.'):
+                 word = word[1:]
+             if word.endswith('.'):
+                 word = word[:-1]
+             subdomain = expression.replace('*', word)
+             subdomains.add(subdomain)
+     size = len(subdomains)
+     logger.log('DEBUG', f'The size of the dictionary generated by {path} is {size}')
+     if size == 0:
+         logger.log('ALERT', 'Please check the dictionary content!')
+     else:
+         utils.check_random_subdomain(subdomains)
+     return subdomains
+
+
+ def gen_fuzz_subdomains(expression, rule, fuzzlist):
+     """
+     Generate subdomains based on fuzz mode
+
+     :param str expression: generate subdomains expression
+     :param str rule: regexp rule
+     :param str fuzzlist: fuzz dictionary
+     :return set subdomains: list of subdomains
+     """
+     subdomains = set()
+     if fuzzlist:
+         fuzz_domain = gen_subdomains(expression, fuzzlist)
+         subdomains.update(fuzz_domain)
+     if rule:
+         fuzz_count = exrex.count(rule)
+         if fuzz_count > 10000000:
+             logger.log('ALERT', f'The dictionary generated by this rule is too large: '
+                                 f'{fuzz_count} > 10000000')
+         for fuzz_string in exrex.generate(rule):
+             fuzz_string = fuzz_string.lower()
+             if not fuzz_string.isalnum():
+                 continue
+             fuzz_domain = expression.replace('*', fuzz_string)
+             subdomains.add(fuzz_domain)
+         utils.check_random_subdomain(subdomains)
+     logger.log('DEBUG', f'Dictionary size based on fuzz mode: {len(subdomains)}')
+     return subdomains
+
+
+ def gen_word_subdomains(expression, path):
+     """
+     Generate subdomains based on word mode
+
+     :param str expression: generate subdomains expression
+     :param str path: path of wordlist
+     :return set subdomains: list of subdomains
+     """
+     subdomains = gen_subdomains(expression, path)
+     logger.log('DEBUG', f'Dictionary based on word mode size: {len(subdomains)}')
+     return subdomains
+
+
+ def query_domain_ns_a(ns_list):
+     logger.log('INFOR', f'Querying A record from authoritative name server: {ns_list} ')
+     if not isinstance(ns_list, list):
+         return list()
+     ns_ip_list = []
+     resolver = utils.dns_resolver()
+     for ns in ns_list:
+         try:
+             answer = resolver.query(ns, 'A')
+         except Exception as e:
+             logger.log('ERROR', e.args)
+             logger.log('ERROR', f'Query authoritative name server {ns} A record error')
+             continue
+         if answer:
+             for item in answer:
+                 ns_ip_list.append(item.address)
+     logger.log('INFOR', f'Authoritative name server A record result: {ns_ip_list}')
+     return ns_ip_list
+
+
+ def query_domain_ns(domain):
+     logger.log('INFOR', f'Querying NS records of {domain}')
+     domain = utils.get_main_domain(domain)
+     resolver = utils.dns_resolver()
+     try:
+         answer = resolver.query(domain, 'NS')
+     except Exception as e:
+         logger.log('ERROR', e.args)
+         logger.log('ERROR', f'Querying NS records of {domain} error')
+         return list()
+     ns = [item.to_text() for item in answer]
+     logger.log('INFOR', f'{domain}\'s authoritative name server is {ns}')
+     return ns
+
+
+ def check_dict():
+     if not settings.enable_check_dict:
+         return
+     sec = settings.check_time
+     logger.log('ALERT', f'You have {sec} seconds to check '
+                         f'whether the configuration is correct or not')
+     logger.log('ALERT', f'If you want to exit, please use `Ctrl + C`')
+     try:
+         time.sleep(sec)
+     except KeyboardInterrupt:
+         logger.log('INFOR', 'Due to configuration incorrect, exited')
+         exit(0)
+
+
+ def gen_result_infos(items, infos, subdomains, appear_times, wc_ips, wc_ttl):
+     qname = items.get('name')[:-1]  # strip the trailing `.`
+     reason = items.get('status')
+     resolver = items.get('resolver')
+     data = items.get('data')
+     answers = data.get('answers')
+     info = dict()
+     cnames = list()
+     ips = list()
+     ip_times = list()
+     cname_times = list()
+     ttls = list()
+     is_valid_flags = list()
+     have_a_record = False
+     for answer in answers:
+         if answer.get('type') != 'A':
+             continue
+         have_a_record = True
+         ttl = answer.get('ttl')
+         ttls.append(ttl)
+         name = answer.get('name')
+         cname = name[:-1].lower()  # strip the trailing `.`
+         cnames.append(cname)
+         cname_num = appear_times.get(cname)
+         cname_times.append(cname_num)
+         ip = answer.get('data')
+         ips.append(ip)
+         ip_num = appear_times.get(ip)
+         ip_times.append(ip_num)
+         isvalid, reason = wildcard.is_valid_subdomain(ip, ip_num, cname, cname_num, ttl, wc_ttl, wc_ips)
+         logger.log('TRACE', f'{ip} effective: {isvalid} reason: {reason}')
+         is_valid_flags.append(isvalid)
+     if not have_a_record:
+         logger.log('TRACE', f'All query result of {qname} no A record{answers}')
+     # to save memory, only record subdomains that have an A record and pass the validity checks
+     if have_a_record and all(is_valid_flags):
+         info['resolve'] = 1
+         info['reason'] = reason
+         info['ttl'] = ttls
+         info['cname'] = cnames
+         info['ip'] = ips
+         info['ip_times'] = ip_times
+         info['cname_times'] = cname_times
+         info['resolver'] = resolver
+         infos[qname] = info
+         subdomains.append(qname)
+     return infos, subdomains
+
+
+ def stat_appear_times(result_path):
+     logger.log('INFOR', f'Counting IP cname appear times')
+     times = dict()
+     logger.log('DEBUG', f'Reading {result_path}')
+     with open(result_path) as fd:
+         for line in fd:
+             line = line.strip()
+             try:
+                 items = json.loads(line)
+             except Exception as e:
+                 logger.log('ERROR', e.args)
+                 logger.log('ERROR', f'Error parsing {result_path} '
+                                     f'line {line} Skip this line')
+                 continue
+             status = items.get('status')
+             if status != 'NOERROR':
+                 continue
+             data = items.get('data')
+             if 'answers' not in data:
+                 continue
+             answers = data.get('answers')
+             for answer in answers:
+                 if answer.get('type') == 'A':
+                     ip = answer.get('data')
+                     # setdefault: a first appearance initialises the count to 0
+                     value_one = times.setdefault(ip, 0)
+                     times[ip] = value_one + 1
+                     name = answer.get('data')
+                     cname = name[:-1].lower()  # strip the trailing `.`
+                     # setdefault: a first appearance initialises the count to 0
+                     value_two = times.setdefault(cname, 0)
+                     times[cname] = value_two + 1
+                 if answer.get('type') == 'CNAME':
+                     name = answer.get('data')
+                     cname = name[:-1].lower()  # strip the trailing `.`
+                     # setdefault: a first appearance initialises the count to 0
+                     value_three = times.setdefault(cname, 0)
+                     times[cname] = value_three + 1
+     return times
+
+
+ def deal_output(output_path, appear_times, wildcard_ips, wildcard_ttl):
+     logger.log('INFOR', f'Processing result')
+     infos = dict()  # holds the information gathered for every domain
+     subdomains = list()  # holds every subdomain that passes the validity check
+     logger.log('DEBUG', f'Processing {output_path}')
+     with open(output_path) as fd:
+         for line in fd:
+             line = line.strip()
+             try:
+                 items = json.loads(line)
+             except Exception as e:
+                 logger.log('ERROR', e.args)
+                 logger.log('ERROR', f'Error parsing {line} Skip this line')
+                 continue
+             qname = items.get('name')[:-1]  # strip the trailing `.`
+             status = items.get('status')
+             if status != 'NOERROR':
+                 logger.log('TRACE', f'Found {qname}\'s result {status} '
+                                     f'while processing {line}')
+                 continue
+             data = items.get('data')
+             if 'answers' not in data:
+                 logger.log('TRACE', f'Processing {line}, {qname} no response')
+                 continue
+             infos, subdomains = gen_result_infos(items, infos, subdomains,
+                                                  appear_times, wildcard_ips,
+                                                  wildcard_ttl)
+     return infos, subdomains
+
+
+ def save_brute_dict(dict_path, dict_set):
+     dict_data = '\n'.join(dict_set)
+     if not utils.save_to_file(dict_path, dict_data):
+         logger.log('FATAL', 'Saving dictionary error')
+         exit(1)
+
+
+ def delete_file(dict_path, output_path):
+     if settings.delete_generated_dict:
+         dict_path.unlink()
+     if settings.delete_massdns_result:
+         output_path.unlink()
+
+
+ class Brute(Module):
+     """
+     OneForAll subdomain brute module
+
+     Example:
+         brute.py --target domain.com --word True run
+         brute.py --targets ./domains.txt --word True run
+         brute.py --target domain.com --word True --concurrent 2000 run
+         brute.py --target domain.com --word True --wordlist subnames.txt run
+         brute.py --target domain.com --word True --recursive True --depth 2 run
+         brute.py --target d.com --fuzz True --place m.*.d.com --rule '[a-z]' run
+         brute.py --target d.com --fuzz True --place m.*.d.com --fuzzlist subnames.txt run
+
+     Note:
+         --fmt csv/json (result format)
+         --path Result path (default None, automatically generated)
+
+     :param str target: One domain (target or targets must be provided)
+     :param str targets: File path of one domain per line
+     :param int concurrent: Number of concurrent (default 2000)
+     :param bool word: Use word mode generate dictionary (default False)
+     :param str wordlist: Dictionary path used in word mode (default use ./config/default.py)
+     :param bool recursive: Use recursion (default False)
+     :param int depth: Recursive depth (default 2)
+     :param str nextlist: Dictionary file path used by recursive (default use ./config/default.py)
+     :param bool fuzz: Use fuzz mode generate dictionary (default False)
+     :param bool alive: Only export alive subdomains (default False)
+     :param str place: Designated fuzz position (required if use fuzz mode)
+     :param str rule: Specify the regexp rules used in fuzz mode (required if use fuzz mode)
+     :param str fuzzlist: Dictionary path used in fuzz mode (default use ./config/default.py)
+     :param bool export: Export the results (default True)
+     :param str fmt: Result format (default csv)
+     :param str path: Result directory (default None)
+     """
+     def __init__(self, target=None, targets=None, concurrent=None,
+                  word=False, wordlist=None, recursive=False, depth=None,
+                  nextlist=None, fuzz=False, place=None, rule=None, fuzzlist=None,
+                  export=True, alive=True, fmt='csv', path=None):
+         Module.__init__(self)
+         self.module = 'Brute'
+         self.source = 'Brute'
+         self.target = target
+         self.targets = targets
+         self.concurrent_num = concurrent or settings.brute_concurrent_num
+         self.word = word
+         self.wordlist = wordlist or settings.brute_wordlist_path
+         self.recursive_brute = recursive or settings.enable_recursive_brute
+         self.recursive_depth = depth or settings.brute_recursive_depth
+         self.recursive_nextlist = nextlist or settings.recursive_nextlist_path
+         self.fuzz = fuzz or settings.enable_fuzz
+         self.place = place or settings.fuzz_place
+         self.rule = rule or settings.fuzz_rule
+         self.fuzzlist = fuzzlist or settings.fuzz_list
+         self.export = export
+         self.alive = alive
+         self.fmt = fmt
+         self.path = path
+         self.bulk = False  # whether this is a bulk brute-force run
+         self.domains = list()  # all domains to be brute forced
+         self.domain = str()  # the domain currently being brute forced
+         self.ips_times = dict()  # appearance counts of IPs
+         self.enable_wildcard = None  # whether the current domain uses wildcard resolution
+         self.quite = False
+
+     def gen_brute_dict(self, domain):
+         logger.log('INFOR', f'Generating dictionary for {domain}')
+         dict_set = set()
+         # if domain is a subdomain of self.domain rather than self.domain itself,
+         # generate the dictionary for recursive brute forcing
+         if self.word:
+             self.place = ''
+         if not self.place:
+             self.place = '*.' + domain
+         wordlist = self.wordlist
+         main_domain = utils.get_main_domain(domain)
+         if domain != main_domain:
+             wordlist = self.recursive_nextlist
+         if self.word:
+             word_subdomains = gen_word_subdomains(self.place, wordlist)
+             dict_set.update(word_subdomains)
+         if self.fuzz:
+             fuzz_subdomains = gen_fuzz_subdomains(self.place, self.rule, self.fuzzlist)
+             dict_set.update(fuzz_subdomains)
+         count = len(dict_set)
+         logger.log('INFOR', f'Dictionary size: {count}')
+         if count > 10000000:
+             logger.log('ALERT', f'The generated dictionary is '
+                                 f'too large {count} > 10000000')
+         return dict_set
+
+     def check_brute_params(self):
+         if not (self.word or self.fuzz):
+             logger.log('FATAL', f'Please specify at least one brute mode')
+             exit(1)
+         if len(self.domains) > 1:
+             self.bulk = True
+         if self.fuzz:
+             if self.place is None:
+                 logger.log('FATAL', f'No fuzz position specified')
+                 exit(1)
+             if self.rule is None and self.fuzzlist is None:
+                 logger.log('FATAL', f'No fuzz rules or fuzz dictionary specified')
+                 exit(1)
+             if self.bulk:
+                 logger.log('FATAL', f'Cannot use fuzz mode in the bulk brute')
+                 exit(1)
+             if self.recursive_brute:
+                 logger.log('FATAL', f'Cannot use recursive brute in fuzz mode')
+                 exit(1)
+             fuzz_count = self.place.count('*')
+             if fuzz_count < 1:
+                 logger.log('FATAL', f'No fuzz position specified')
+                 exit(1)
+             if fuzz_count > 1:
+                 logger.log('FATAL', f'Only one fuzz position can be specified')
+                 exit(1)
+             if self.domain not in self.place:
+                 logger.log('FATAL', f'Incorrect domain for fuzz')
+                 exit(1)
+
+     def init_dict_path(self):
+         data_dir = settings.data_storage_dir
+         if self.wordlist is None:
+             self.wordlist = settings.brute_wordlist_path or data_dir.joinpath('subnames.txt')
+         if self.recursive_nextlist is None:
+             self.recursive_nextlist = settings.recursive_nextlist_path or data_dir.joinpath('subnames_next.txt')
+
+     def main(self, domain):
+         start = time.time()
+         logger.log('INFOR', f'Blasting {domain} ')
+         massdns_dir = settings.third_party_dir.joinpath('massdns')
+         result_dir = settings.result_save_dir
+         temp_dir = result_dir.joinpath('temp')
+         utils.check_dir(temp_dir)
+         massdns_path = utils.get_massdns_path(massdns_dir)
+         timestring = utils.get_timestring()
+
+         wildcard_ips = list()  # wildcard resolution IP list
+         wildcard_ttl = int()  # wildcard resolution TTL value
+         ns_list = query_domain_ns(self.domain)
+         ns_ip_list = query_domain_ns_a(ns_list)  # A records of the authoritative name servers
+         if self.enable_wildcard is None:
+             self.enable_wildcard = wildcard.detect_wildcard(domain)
+
+         if self.enable_wildcard:
+             wildcard_ips, wildcard_ttl = wildcard.collect_wildcard_record(domain, ns_ip_list)
+         ns_path = utils.get_ns_path(settings.use_china_nameservers, self.enable_wildcard, ns_ip_list)
+
+         dict_set = self.gen_brute_dict(domain)
+
+         dict_name = f'generated_subdomains_{domain}_{timestring}.txt'
+         dict_path = temp_dir.joinpath(dict_name)
+         save_brute_dict(dict_path, dict_set)
+         del dict_set
+         gc.collect()
+
+         output_name = f'resolved_result_{domain}_{timestring}.json'
+         output_path = temp_dir.joinpath(output_name)
+         log_path = result_dir.joinpath('massdns.log')
+         check_dict()
+         logger.log('INFOR', f'Running massdns to brute subdomains')
+         utils.call_massdns(massdns_path, dict_path, ns_path, output_path,
+                            log_path, quiet_mode=self.quite,
+                            concurrent_num=self.concurrent_num)
+         appear_times = stat_appear_times(output_path)
+         self.infos, self.subdomains = deal_output(output_path, appear_times,
+                                                   wildcard_ips, wildcard_ttl)
+         delete_file(dict_path, output_path)
+         end = time.time()
+         self.elapse = round(end - start, 1)
+         logger.log('ALERT', f'{self.source} module takes {self.elapse} seconds, '
+                             f'found {len(self.subdomains)} subdomains of {domain}')
+         logger.log('DEBUG', f'{self.source} module found subdomains of {domain}: '
+                             f'{self.subdomains}')
+         self.gen_result()
+         self.save_db()
+         return self.subdomains
+
+     def run(self):
+         logger.log('INFOR', f'Start running {self.source} module')
+         self.domains = utils.get_domains(self.target, self.targets)
+         for self.domain in self.domains:
+             self.results = list()  # reset the results
+             all_subdomains = list()
+             self.init_dict_path()
+             self.check_brute_params()
+             if self.recursive_brute:
+                 logger.log('INFOR', f'Start recursively brute the 1 layer subdomain'
+                                     f' of {self.domain}')
+             valid_subdomains = self.main(self.domain)
+
+             all_subdomains.extend(valid_subdomains)
+
+             # recursively brute force the next layer of subdomains
+             # fuzz mode does not use recursive brute forcing
+             if self.recursive_brute:
+                 for layer_num in range(1, self.recursive_depth):
+                     # layer 1 has already been brute forced, so the actual depth here is layer_num + 1
+                     logger.log('INFOR', f'Start recursively brute the {layer_num + 1} '
+                                         f'layer subdomain of {self.domain}')
+                     for subdomain in all_subdomains:
+                         self.place = '*.' + subdomain
+                         # only subdomains at the current layer are brute forced for the next layer
+                         num = subdomain.count('.') - self.domain.count('.')
+                         if num == layer_num:
+                             valid_subdomains = self.main(subdomain)
+                             all_subdomains.extend(valid_subdomains)
+
+             logger.log('INFOR', f'Finished {self.source} module to brute {self.domain}')
+             if not self.path:
+                 name = f'{self.domain}_brute_result.{self.fmt}'
+                 self.path = settings.result_save_dir.joinpath(name)
+             # export from the database
+             if self.export:
+                 export.export_data(self.domain,
+                                    alive=self.alive,
+                                    limit='resolve',
+                                    path=self.path,
+                                    fmt=self.fmt)
+
+
+ if __name__ == '__main__':
+     fire.Fire(Brute)
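
For reference, the Example block in the Brute docstring above documents the fire-based CLI usage (for instance, python brute.py --target domain.com --word True run). The same options map one-to-one onto the Brute constructor, so a programmatic call looks roughly like the sketch below; example.com is a placeholder target, and the sketch assumes the sys.path adjustment noted after __init__.py so that the flat imports inside brute.py resolve.

from brute import Brute  # OneForAll/brute.py

# Equivalent of: brute.py --target example.com --word True --concurrent 2000 run
brute = Brute(
    target='example.com',  # single domain; use targets='./domains.txt' for a batch
    word=True,             # word mode: expand the wordlist into candidate subdomains
    concurrent=2000,       # massdns concurrency
    export=True,           # export results when finished
    fmt='csv',             # result format, csv or json
)
brute.run()  # generates the dictionary, resolves it with massdns, filters wildcard noise, exports
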
@@ -0,0 +1,41 @@
+ import requests
+ from config.log import logger
+ from common.module import Module
+
+
+ class Check(Module):
+     """
+     Check base class
+     """
+
+     def __init__(self):
+         Module.__init__(self)
+         self.request_status = 1
+
+     def to_check(self, filenames):
+         urls = set()
+         urls_www = set()
+         for filename in filenames:
+             urls.update((
+                 f'http://{self.domain}/{filename}',
+                 f'https://{self.domain}/{filename}',
+             ))
+             urls_www.update((
+                 f'http://www.{self.domain}/{filename}',
+                 f'https://www.{self.domain}/{filename}'
+             ))
+         self.check_loop(urls)
+         self.check_loop(urls_www)
+
+     def check_loop(self, urls):
+         for url in urls:
+             self.header = self.get_header()
+             self.proxy = self.get_proxy(self.source)
+             try:
+                 resp = self.get(url, check=False, ignore=True, raise_error=True)
+             except requests.exceptions.ConnectTimeout:
+                 logger.log('DEBUG', f'Connection to {url} timed out, so break check')
+                 break
+             self.subdomains = self.collect_subdomains(resp)
+             if self.subdomains:
+                 break
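
Check only implements the shared probing loop; the concrete modules listed above (modules/check/robots.py, modules/check/sitemap.py, and so on, whose bodies are not shown in this excerpt) are expected to set the target domain and pass the filenames to probe. Below is a hypothetical subclass sketching that contract; the class name, attribute values, and filename are illustrative rather than taken from the package.

from common.check import Check


class RobotsCheck(Check):
    """Illustrative check that probes robots.txt for referenced subdomains."""

    def __init__(self, domain):
        Check.__init__(self)
        self.domain = domain         # used by to_check() to build the candidate URLs
        self.module = 'Check'
        self.source = 'RobotsCheck'  # name passed to get_proxy() in check_loop()

    def run(self):
        # to_check() builds http/https URLs for both the bare and www. hosts,
        # fetches each via check_loop(), and stops at the first response that
        # yields subdomains (stored on self.subdomains).
        self.to_check(('robots.txt',))
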
@@ -0,0 +1,10 @@
+ from common.module import Module
+
+
+ class Crawl(Module):
+     """
+     Crawl base class
+     """
+
+     def __init__(self):
+         Module.__init__(self)