oneforall-kjl 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- OneForAll/__init__.py +15 -0
- OneForAll/brute.py +503 -0
- OneForAll/common/check.py +41 -0
- OneForAll/common/crawl.py +10 -0
- OneForAll/common/database.py +277 -0
- OneForAll/common/domain.py +63 -0
- OneForAll/common/ipasn.py +42 -0
- OneForAll/common/ipreg.py +139 -0
- OneForAll/common/lookup.py +28 -0
- OneForAll/common/module.py +369 -0
- OneForAll/common/query.py +9 -0
- OneForAll/common/records.py +363 -0
- OneForAll/common/request.py +264 -0
- OneForAll/common/resolve.py +173 -0
- OneForAll/common/search.py +78 -0
- OneForAll/common/similarity.py +138 -0
- OneForAll/common/tablib/__init__.py +0 -0
- OneForAll/common/tablib/format.py +89 -0
- OneForAll/common/tablib/tablib.py +360 -0
- OneForAll/common/tldextract.py +240 -0
- OneForAll/common/utils.py +789 -0
- OneForAll/config/__init__.py +17 -0
- OneForAll/config/api.py +94 -0
- OneForAll/config/default.py +255 -0
- OneForAll/config/log.py +38 -0
- OneForAll/config/setting.py +108 -0
- OneForAll/export.py +72 -0
- OneForAll/modules/altdns.py +216 -0
- OneForAll/modules/autotake/github.py +105 -0
- OneForAll/modules/certificates/censys_api.py +73 -0
- OneForAll/modules/certificates/certspotter.py +48 -0
- OneForAll/modules/certificates/crtsh.py +84 -0
- OneForAll/modules/certificates/google.py +48 -0
- OneForAll/modules/certificates/myssl.py +46 -0
- OneForAll/modules/certificates/racent.py +49 -0
- OneForAll/modules/check/axfr.py +97 -0
- OneForAll/modules/check/cdx.py +44 -0
- OneForAll/modules/check/cert.py +58 -0
- OneForAll/modules/check/csp.py +94 -0
- OneForAll/modules/check/nsec.py +58 -0
- OneForAll/modules/check/robots.py +44 -0
- OneForAll/modules/check/sitemap.py +44 -0
- OneForAll/modules/collect.py +70 -0
- OneForAll/modules/crawl/archivecrawl.py +59 -0
- OneForAll/modules/crawl/commoncrawl.py +59 -0
- OneForAll/modules/datasets/anubis.py +45 -0
- OneForAll/modules/datasets/bevigil.py +50 -0
- OneForAll/modules/datasets/binaryedge_api.py +50 -0
- OneForAll/modules/datasets/cebaidu.py +45 -0
- OneForAll/modules/datasets/chinaz.py +45 -0
- OneForAll/modules/datasets/chinaz_api.py +49 -0
- OneForAll/modules/datasets/circl_api.py +49 -0
- OneForAll/modules/datasets/cloudflare_api.py +130 -0
- OneForAll/modules/datasets/dnsdb_api.py +51 -0
- OneForAll/modules/datasets/dnsdumpster.py +52 -0
- OneForAll/modules/datasets/dnsgrep.py +44 -0
- OneForAll/modules/datasets/fullhunt.py +48 -0
- OneForAll/modules/datasets/hackertarget.py +45 -0
- OneForAll/modules/datasets/ip138.py +45 -0
- OneForAll/modules/datasets/ipv4info_api.py +73 -0
- OneForAll/modules/datasets/netcraft.py +66 -0
- OneForAll/modules/datasets/passivedns_api.py +51 -0
- OneForAll/modules/datasets/qianxun.py +61 -0
- OneForAll/modules/datasets/rapiddns.py +45 -0
- OneForAll/modules/datasets/riddler.py +45 -0
- OneForAll/modules/datasets/robtex.py +58 -0
- OneForAll/modules/datasets/securitytrails_api.py +56 -0
- OneForAll/modules/datasets/sitedossier.py +57 -0
- OneForAll/modules/datasets/spyse_api.py +62 -0
- OneForAll/modules/datasets/sublist3r.py +45 -0
- OneForAll/modules/datasets/urlscan.py +45 -0
- OneForAll/modules/datasets/windvane.py +92 -0
- OneForAll/modules/dnsquery/mx.py +35 -0
- OneForAll/modules/dnsquery/ns.py +35 -0
- OneForAll/modules/dnsquery/soa.py +35 -0
- OneForAll/modules/dnsquery/spf.py +35 -0
- OneForAll/modules/dnsquery/txt.py +35 -0
- OneForAll/modules/enrich.py +72 -0
- OneForAll/modules/finder.py +206 -0
- OneForAll/modules/intelligence/alienvault.py +50 -0
- OneForAll/modules/intelligence/riskiq_api.py +58 -0
- OneForAll/modules/intelligence/threatbook_api.py +50 -0
- OneForAll/modules/intelligence/threatminer.py +45 -0
- OneForAll/modules/intelligence/virustotal.py +60 -0
- OneForAll/modules/intelligence/virustotal_api.py +59 -0
- OneForAll/modules/iscdn.py +86 -0
- OneForAll/modules/search/ask.py +69 -0
- OneForAll/modules/search/baidu.py +96 -0
- OneForAll/modules/search/bing.py +79 -0
- OneForAll/modules/search/bing_api.py +78 -0
- OneForAll/modules/search/fofa_api.py +74 -0
- OneForAll/modules/search/gitee.py +71 -0
- OneForAll/modules/search/github_api.py +86 -0
- OneForAll/modules/search/google.py +83 -0
- OneForAll/modules/search/google_api.py +77 -0
- OneForAll/modules/search/hunter_api.py +72 -0
- OneForAll/modules/search/quake_api.py +72 -0
- OneForAll/modules/search/shodan_api.py +53 -0
- OneForAll/modules/search/so.py +75 -0
- OneForAll/modules/search/sogou.py +72 -0
- OneForAll/modules/search/wzsearch.py +68 -0
- OneForAll/modules/search/yahoo.py +81 -0
- OneForAll/modules/search/yandex.py +80 -0
- OneForAll/modules/search/zoomeye_api.py +73 -0
- OneForAll/modules/srv.py +75 -0
- OneForAll/modules/wildcard.py +319 -0
- OneForAll/oneforall.py +275 -0
- OneForAll/takeover.py +168 -0
- OneForAll/test.py +23 -0
- oneforall_kjl-0.1.1.dist-info/METADATA +18 -0
- oneforall_kjl-0.1.1.dist-info/RECORD +114 -0
- oneforall_kjl-0.1.1.dist-info/WHEEL +5 -0
- oneforall_kjl-0.1.1.dist-info/entry_points.txt +2 -0
- oneforall_kjl-0.1.1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,277 @@
|
|
1
|
+
"""
|
2
|
+
SQLite database initialization and operation
|
3
|
+
"""
|
4
|
+
|
5
|
+
from common import records
|
6
|
+
|
7
|
+
from common.records import Connection
|
8
|
+
from config.log import logger
|
9
|
+
from config import settings
|
10
|
+
|
11
|
+
|
12
|
+
class Database(object):
    """
    SQLite database initialization and operation.

    Thin wrapper over the `records` library: opens (or creates) a SQLite
    database and exposes helpers for managing per-domain subdomain result
    tables.
    """

    def __init__(self, db_path=None):
        # db_path may be a filesystem path, an existing records Connection,
        # or None (connect to the default result database).
        self.conn = self.get_conn(db_path)

    @staticmethod
    def get_conn(db_path):
        """
        Get database connection

        :param db_path: Database path (str, Connection or None)
        :return: db_conn: SQLite database connection
        """
        logger.log('TRACE', 'Establishing database connection')
        if isinstance(db_path, Connection):
            # Already an open connection - reuse it as-is
            return db_path
        protocol = 'sqlite:///'
        if not db_path:
            # Empty path: connect to the default result database
            db_path = f'{protocol}{settings.result_save_dir}/result.sqlite3'
        else:
            db_path = f'{protocol}{db_path}'
        db = records.Database(db_path)  # creates the database file if missing
        logger.log('TRACE', f'Use the database: {db_path}')
        return db.get_connection()

    def query(self, sql):
        """
        Execute a SQL statement.

        :param str sql: SQL statement to execute
        :return: query results, or None when execution failed
        """
        try:
            results = self.conn.query(sql)
        except Exception as e:
            logger.log('ERROR', e.args)
            return None
        return results

    def create_table(self, table_name):
        """
        Create table

        :param str table_name: table name
        """
        table_name = table_name.replace('.', '_')
        if self.exist_table(table_name):
            logger.log('TRACE', f'{table_name} table already exists')
            return
        logger.log('TRACE', f'Creating {table_name} table')
        self.query(f'create table "{table_name}" ('
                   'id integer primary key,'
                   'alive int,'
                   'request int,'
                   'resolve int,'
                   'url text,'
                   'subdomain text,'
                   'port int,'
                   'level int,'
                   'cname text,'
                   'ip text,'
                   'public int,'
                   'cdn int,'
                   'status int,'
                   'reason text,'
                   'title text,'
                   'banner text,'
                   'header text,'
                   'history text,'
                   'response text,'
                   'ip_times text,'
                   'cname_times text,'
                   'ttl text,'
                   'cidr text,'
                   'asn text,'
                   'org text,'
                   'addr text,'
                   'isp text,'
                   'resolver text,'
                   'module text,'
                   'source text,'
                   'elapse float,'
                   'find int)')

    def insert_table(self, table_name, result):
        """
        Insert one result row into the table.

        :param str table_name: table name
        :param dict result: one result row keyed by column name
        """
        table_name = table_name.replace('.', '_')
        self.conn.query(
            f'insert into "{table_name}" '
            f'(id, alive, resolve, request, url, subdomain, port, level,'
            f'cname, ip, public, cdn, status, reason, title, banner, header,'
            f'history, response, ip_times, cname_times, ttl, cidr, asn, org,'
            f'addr, isp, resolver, module, source, elapse, find) '
            f'values (:id, :alive, :resolve, :request, :url,'
            f':subdomain, :port, :level, :cname, :ip, :public, :cdn,'
            f':status, :reason, :title, :banner, :header, :history, :response,'
            f':ip_times, :cname_times, :ttl, :cidr, :asn, :org, :addr, :isp,'
            f':resolver, :module, :source, :elapse, :find)', **result)

    def save_db(self, table_name, results, module_name=None):
        """
        Save the results of each module in the database

        :param str table_name: table name
        :param list results: results list
        :param str module_name: module
        """
        logger.log('TRACE', f'Saving the subdomain results of {table_name} '
                            f'found by module {module_name} into database')
        table_name = table_name.replace('.', '_')
        if results:
            try:
                self.conn.bulk_query(
                    f'insert into "{table_name}" '
                    f'(id, alive, resolve, request, url, subdomain, port, level, '
                    f'cname, ip, public, cdn, status, reason, title, banner, header, '
                    f'history, response, ip_times, cname_times, ttl, cidr, asn, org, '
                    f'addr, isp, resolver, module, source, elapse, find) '
                    f'values (:id, :alive, :resolve, :request, :url, '
                    f':subdomain, :port, :level, :cname, :ip, :public, :cdn,'
                    f':status, :reason, :title, :banner, :header, :history, :response, '
                    f':ip_times, :cname_times, :ttl, :cidr, :asn, :org, :addr, :isp, '
                    f':resolver, :module, :source, :elapse, :find)', results)
            except Exception as e:
                logger.log('ERROR', e)

    def exist_table(self, table_name):
        """
        Determine table exists

        :param str table_name: table name
        :return bool: Whether table exists
        """
        table_name = table_name.replace('.', '_')
        logger.log('TRACE', f'Determining whether the {table_name} table exists')
        # fix: use single-quoted SQL string literals; double quotes are an
        # SQLite misfeature (identifier fallback) and may be disabled (DQS=0)
        results = self.query(f"select count() from sqlite_master where type = 'table' and"
                             f" name = '{table_name}'")
        if results is None:
            # fix: query() returns None on error; previously this crashed
            # with AttributeError on .scalar()
            return False
        return results.scalar() != 0

    def copy_table(self, table_name, bak_table_name):
        """
        Copy table to create backup

        :param str table_name: table name
        :param str bak_table_name: new table name
        """
        table_name = table_name.replace('.', '_')
        bak_table_name = bak_table_name.replace('.', '_')
        logger.log('TRACE', f'Copying {table_name} table to {bak_table_name} new table')
        self.query(f'drop table if exists "{bak_table_name}"')
        self.query(f'create table "{bak_table_name}" '
                   f'as select * from "{table_name}"')

    def clear_table(self, table_name):
        """
        Clear the table

        :param str table_name: table name
        """
        table_name = table_name.replace('.', '_')
        logger.log('TRACE', f'Clearing data in table {table_name}')
        self.query(f'delete from "{table_name}"')

    def drop_table(self, table_name):
        """
        Delete table

        :param str table_name: table name
        """
        table_name = table_name.replace('.', '_')
        logger.log('TRACE', f'Deleting {table_name} table')
        self.query(f'drop table if exists "{table_name}"')

    def rename_table(self, table_name, new_table_name):
        """
        Rename table name

        :param str table_name: old table name
        :param str new_table_name: new table name
        """
        table_name = table_name.replace('.', '_')
        new_table_name = new_table_name.replace('.', '_')
        logger.log('TRACE', f'Renaming {table_name} table to {new_table_name} table')
        self.query(f'alter table "{table_name}" '
                   f'rename to "{new_table_name}"')

    def deduplicate_subdomain(self, table_name):
        """
        Deduplicate subdomains in the table

        Keeps the row with the smallest id for each subdomain.

        :param str table_name: table name
        """
        table_name = table_name.replace('.', '_')
        logger.log('TRACE', f'Deduplicating subdomains in {table_name} table')
        self.query(f'delete from "{table_name}" where '
                   f'id not in (select min(id) '
                   f'from "{table_name}" group by subdomain)')

    def remove_invalid(self, table_name):
        """
        Remove nulls or invalid subdomains in the table

        :param str table_name: table name
        """
        table_name = table_name.replace('.', '_')
        logger.log('TRACE', f'Removing invalid subdomains in {table_name} table')
        self.query(f'delete from "{table_name}" where '
                   f'subdomain is null or resolve == 0')

    def get_data(self, table_name):
        """
        Get all the data in the table

        :param str table_name: table name
        :return: all rows of the table
        """
        table_name = table_name.replace('.', '_')
        logger.log('TRACE', f'Get all the data from {table_name} table')
        return self.query(f'select * from "{table_name}"')

    def export_data(self, table_name, alive, limit):
        """
        Get part of the data in the table

        :param str table_name: table name
        :param any alive: export only alive subdomains when truthy
        :param str limit: limit value ('resolve' or 'request' filters on
                          that flag instead of alive)
        """
        table_name = table_name.replace('.', '_')
        sql = f'select id, alive, request, resolve, url, subdomain, level,' \
              f'cname, ip, public, cdn, port, status, reason, title, banner,' \
              f'cidr, asn, org, addr, isp, source from "{table_name}" '
        if alive and limit:
            if limit in ['resolve', 'request']:
                sql += f' where {limit} = 1'
        elif alive:
            sql += ' where alive = 1'
        sql += ' order by subdomain'
        logger.log('TRACE', f'Get the data from {table_name} table')
        return self.query(sql)

    def count_alive(self, table_name):
        """
        Count the alive subdomains in the table.

        :param str table_name: table name
        :return: query result holding the count
        """
        table_name = table_name.replace('.', '_')
        sql = f'select count() from "{table_name}" where alive = 1'
        return self.query(sql)

    def get_resp_by_url(self, table_name, url):
        """
        Get the stored response body for a URL.

        :param str table_name: table name
        :param str url: url whose response to fetch
        :return: response text
        """
        table_name = table_name.replace('.', '_')
        sql = f'select response from "{table_name}" where url = "{url}"'
        logger.log('TRACE', f'Get response data from {url}')
        return self.query(sql).scalar()

    def get_data_by_fields(self, table_name, fields):
        """
        Get only the specified columns from the table.

        :param str table_name: table name
        :param list fields: column names to select
        :return: query result rows
        """
        table_name = table_name.replace('.', '_')
        field_str = ', '.join(fields)
        sql = f'select {field_str} from "{table_name}"'
        logger.log('TRACE', f'Get specified field data {fields} from {table_name} table')
        return self.query(sql)

    def update_data_by_url(self, table_name, info, url):
        """
        Update the columns given in `info` for the row matching `url`.

        NOTE(review): values are interpolated into the SQL string directly;
        safe only for trusted internal values.

        :param str table_name: table name
        :param dict info: column -> new value mapping
        :param str url: url identifying the row to update
        """
        table_name = table_name.replace('.', '_')
        field_str = ', '.join(map(lambda kv: f'{kv[0]} = "{kv[1]}"', info.items()))
        sql = f'update "{table_name}" set {field_str} where url = "{url}"'
        return self.query(sql)

    def close(self):
        """
        Close the database connection
        """
        self.conn.close()
|
@@ -0,0 +1,63 @@
|
|
1
|
+
import re
|
2
|
+
from common import tldextract
|
3
|
+
from config import settings
|
4
|
+
|
5
|
+
|
6
|
+
class Domain(object):
    """
    Processing domain class

    :param str string: input string
    """

    def __init__(self, string):
        self.string = str(string)
        # Regex for a syntactically valid (possibly punycode) domain name
        self.regexp = r'\b((?=[a-z0-9-]{1,63}\.)(xn--)?[a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,63}\b'
        self.domain = None

    def match(self):
        """
        Find the first domain-looking substring in the input string.

        :return: matched domain string, or None when nothing matches
        """
        found = re.search(self.regexp, self.string, re.I)
        return found.group() if found else None

    def extract(self):
        """
        Split the matched domain into subdomain/domain/suffix parts.

        >>> d = Domain('www.example.com')
        <domain.Domain object>
        >>> d.extract()
        ExtractResult(subdomain='www', domain='example', suffix='com')

        :return: extracted domain results, or None when no domain matched
        """
        cache_file = settings.data_storage_dir.joinpath('public_suffix_list.dat')
        extractor = tldextract.TLDExtract(cache_file)
        matched = self.match()
        return extractor(matched) if matched else None

    def registered(self):
        """
        registered domain

        >>> d = Domain('www.example.com')
        <domain.Domain object>
        >>> d.registered()
        example.com

        :return: registered domain result
        """
        if not settings.use_tld_extract:
            # Extraction disabled by configuration: trust the raw input
            return self.string
        extracted = self.extract()
        if extracted:
            return extracted.registered_domain
        return None
|
@@ -0,0 +1,42 @@
|
|
1
|
+
import zipfile
|
2
|
+
|
3
|
+
from common.utils import ip_to_int
|
4
|
+
from config.setting import data_storage_dir
|
5
|
+
from common.database import Database
|
6
|
+
|
7
|
+
|
8
|
+
def get_db_path():
    """
    Return the path of the ip2location SQLite database.

    Extracts ip2location.db from the bundled ip2location.zip on first use;
    subsequent calls find the extracted file and return immediately.

    :return: path of the ip2location.db file
    """
    zip_path = data_storage_dir.joinpath('ip2location.zip')
    db_path = data_storage_dir.joinpath('ip2location.db')
    if db_path.exists():
        return db_path
    # fix: close the zip archive after extraction (original leaked the handle)
    with zipfile.ZipFile(str(zip_path)) as zf:
        zf.extract('ip2location.db', data_storage_dir)
    return db_path
|
16
|
+
|
17
|
+
|
18
|
+
class IPAsnInfo(Database):
    """Query ASN information (CIDR, AS number, organization) for an IP."""

    def __init__(self):
        path = get_db_path()
        Database.__init__(self, path)

    def find(self, ip):
        """
        Look up ASN info for an IP in the ip2location database.

        :param ip: IP address (dotted string or integer form)
        :return: dict with 'cidr', 'asn' and 'org' keys ('' when not found)
        """
        info = {'cidr': '', 'asn': '', 'org': ''}
        if isinstance(ip, (int, str)):
            ip = ip_to_int(ip)
        else:
            # Unsupported input type: return the empty defaults
            return info
        sql = f'SELECT * FROM asn WHERE ip_from <= {ip} AND ip_to >= {ip} LIMIT 1;'
        result = self.query(sql)
        if not hasattr(result, 'dataset'):
            # query() failed and returned None
            return info
        rows = result.as_dict()
        if not rows:
            # fix: the IP falls in no known range; previously rows[0]
            # raised IndexError here
            return info
        row = rows[0]
        info['cidr'] = row['cidr']
        info['asn'] = f"AS{row['asn']}"
        info['org'] = row['as']
        return info
|
38
|
+
|
39
|
+
|
40
|
+
if __name__ == "__main__":
    # Ad-hoc manual check: print ASN info for a sample public address.
    info = IPAsnInfo().find("188.81.94.77")
    print(info)
|
@@ -0,0 +1,139 @@
|
|
1
|
+
"""
|
2
|
+
" ip2region python searcher client module
|
3
|
+
"
|
4
|
+
" Author: koma<komazhang@foxmail.com>
|
5
|
+
" Date : 2015-11-06
|
6
|
+
"""
|
7
|
+
import io
|
8
|
+
import sys
|
9
|
+
import socket
|
10
|
+
import struct
|
11
|
+
|
12
|
+
from config import settings
|
13
|
+
|
14
|
+
|
15
|
+
class IpRegInfo(object):
    # ip2region searcher client: binary-searches the index blocks of an
    # ip2region .db file to map an IP to its region record.

    # Size of one index block entry: start ip (4) + end ip (4) + data ptr (4)
    __INDEX_BLOCK_LENGTH = 12
    __TOTAL_HEADER_LENGTH = 8192

    # Open database file handle (set by init_database)
    __f = None
    __headerSip = []
    __headerPtr = []
    __headerLen = 0
    # First / last index block pointers and block count (lazily read)
    __indexSPtr = 0
    __indexLPtr = 0
    __indexCount = 0
    # Whole db file contents, read once on first memory_search call
    __dbBinStr = ''

    def __init__(self, db_file):
        self.init_database(db_file)

    def memory_search(self, ip):
        """
        " memory search method
        " param: ip
        """
        if not ip.isdigit():
            # dotted-quad string: convert to its integer form first
            ip = self.ip2long(ip)

        if self.__dbBinStr == '':
            self.__dbBinStr = self.__f.read()  # read all the contents in file
            # super block: first 8 bytes hold the first/last index pointers
            self.__indexSPtr = self.get_long(self.__dbBinStr, 0)
            self.__indexLPtr = self.get_long(self.__dbBinStr, 4)
            self.__indexCount = int((self.__indexLPtr - self.__indexSPtr) /
                                    self.__INDEX_BLOCK_LENGTH) + 1

        # binary search over the index blocks for the range containing ip
        l, h, data_ptr = (0, self.__indexCount, 0)
        while l <= h:
            m = int((l + h) >> 1)
            p = self.__indexSPtr + m * self.__INDEX_BLOCK_LENGTH
            sip = self.get_long(self.__dbBinStr, p)

            if ip < sip:
                h = m - 1
            else:
                eip = self.get_long(self.__dbBinStr, p + 4)
                if ip > eip:
                    l = m + 1
                else:
                    # sip <= ip <= eip: found the block; read its data pointer
                    data_ptr = self.get_long(self.__dbBinStr, p + 8)
                    break

        if data_ptr == 0:
            raise Exception("Data pointer not found")

        return self.return_data(data_ptr)

    def init_database(self, db_file):
        """
        " initialize the database for search
        " param: dbFile
        """
        try:
            self.__f = io.open(db_file, "rb")
        except IOError as e:
            # NOTE(review): exits the whole process when the db file is
            # missing/unreadable
            print("[Error]: %s" % e)
            sys.exit()

    def return_data(self, data_ptr):
        """
        " get ip data from db file by data start ptr
        " param: data ptr
        """
        # data_ptr packs the record length in the high byte and the file
        # offset in the low 3 bytes
        data_len = (data_ptr >> 24) & 0xFF
        data_ptr = data_ptr & 0x00FFFFFF

        self.__f.seek(data_ptr)
        data = self.__f.read(data_len)

        # record layout: 4-byte city id followed by the utf-8 region string
        info = {"city_id": self.get_long(data, 0),
                "region": data[4:].decode('utf-8')}
        return info

    @staticmethod
    def ip2long(ip):
        # Convert dotted-quad IPv4 string to its 32-bit integer value
        _ip = socket.inet_aton(ip)
        return struct.unpack("!L", _ip)[0]

    @staticmethod
    def is_ip(ip):
        # Syntactic check that ip is a valid dotted-quad IPv4 address
        p = ip.split(".")
        if len(p) != 4:
            return False
        for pp in p:
            if not pp.isdigit():
                return False
            if len(pp) > 3:
                return False
            if int(pp) > 255:
                return False
        return True

    @staticmethod
    def get_long(b, offset):
        # Read a 4-byte unsigned int from buffer b at offset; 0 when the
        # buffer is too short
        if len(b[offset:offset + 4]) == 4:
            return struct.unpack('I', b[offset:offset + 4])[0]
        return 0

    def close(self):
        # Release the file handle and cached buffers
        if self.__f is not None:
            self.__f.close()
        self.__dbBinStr = None
        self.__headerPtr = None
        self.__headerSip = None
125
|
+
|
126
|
+
class IpRegData(IpRegInfo):
    """ip2region lookup bound to the bundled ip2region.db data file."""

    def __init__(self):
        db_path = settings.data_storage_dir.joinpath('ip2region.db')
        IpRegInfo.__init__(self, db_path)

    def query(self, ip):
        """
        Query address and ISP information for an IP.

        :param ip: IP address string
        :return: dict with 'addr' and 'isp' keys
        """
        region = self.memory_search(ip).get('region')
        # region format: pipe-separated fields, last one is the ISP;
        # '0' marks an unknown field
        *addr_parts, isp = region.split('|')
        addr = ''.join(part for part in addr_parts if part != '0')
        if isp == '0':
            isp = '未知'
        return {'addr': addr, 'isp': isp}
|
@@ -0,0 +1,28 @@
|
|
1
|
+
from common.module import Module
|
2
|
+
from common import utils
|
3
|
+
from config.log import logger
|
4
|
+
|
5
|
+
|
6
|
+
class Lookup(Module):
    """
    DNS query base class
    """

    def __init__(self):
        Module.__init__(self)
        # Record type to query (e.g. 'MX', 'NS', 'TXT'), set by subclasses
        self.qtype = ''

    def query(self):
        """
        Query the DNS record of type self.qtype for the domain and collect
        any subdomains appearing in the answers.

        :return: collected subdomains, or None when the query fails
        """
        answer = utils.dns_query(self.domain, self.qtype)
        if answer is None:
            return None
        for item in answer:
            text = item.to_text()
            self.subdomains.update(self.match_subdomains(text))
            logger.log('DEBUG', text)
        return self.subdomains
|