dorothy2 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +21 -0
- data/Gemfile +4 -0
- data/LICENSE +644 -0
- data/README.md +231 -0
- data/Rakefile +1 -0
- data/bin/dorothy_start +176 -0
- data/bin/dorothy_stop +28 -0
- data/bin/dparser_start +66 -0
- data/bin/dparser_stop +23 -0
- data/dorothy2.gemspec +30 -0
- data/etc/ddl/dorothive.ddl +1803 -0
- data/etc/dorothy copy.yml.example +39 -0
- data/etc/sandboxes.yml.example +20 -0
- data/etc/sources.yml.example +32 -0
- data/lib/doroParser.rb +518 -0
- data/lib/dorothy2/BFM.rb +156 -0
- data/lib/dorothy2/MAM.rb +239 -0
- data/lib/dorothy2/Settings.rb +35 -0
- data/lib/dorothy2/deep_symbolize.rb +67 -0
- data/lib/dorothy2/do-init.rb +296 -0
- data/lib/dorothy2/do-logger.rb +43 -0
- data/lib/dorothy2/do-parsers.rb +468 -0
- data/lib/dorothy2/do-utils.rb +223 -0
- data/lib/dorothy2/environment.rb +29 -0
- data/lib/dorothy2/version.rb +3 -0
- data/lib/dorothy2/vtotal.rb +84 -0
- data/lib/dorothy2.rb +470 -0
- data/share/img/Dorothy-Basic.pdf +0 -0
- data/share/img/Setup-Advanced.pdf +0 -0
- data/share/img/The_big_picture.pdf +0 -0
- data/test/tc_dorothy_full.rb +95 -0
- data/var/log/parser.log +0 -0
- metadata +260 -0
@@ -0,0 +1,468 @@
|
|
1
|
+
# Copyright (C) 2010-2013 marco riccardi.
|
2
|
+
# This file is part of Dorothy - http://www.honeynet.it/dorothy
|
3
|
+
# See the file 'LICENSE' for copying permission.
|
4
|
+
|
5
|
+
#To install on MacOSX
|
6
|
+
#Go to postgres website and download the OSX installer
|
7
|
+
#none:ruby-pg-0.7.9.2008.01.28 root# gem install ruby-pg -- --with-pg-config="/Library/PostgreSQL/9.0/bin/pg_config" --with-pgsql-lib=/Library/PostgreSQL/9.0/lib --with-pgsql-include=/Library/PostgreSQL/9.0/include
|
8
|
+
#
|
9
|
+
#To install on Debian
|
10
|
+
#apt-get install postgres-8.3
|
11
|
+
#apt-get install libpq-dev
|
12
|
+
#sudo gem install pg -- --with-pgsql-lib=/usr/lib/postgresql/8.3/lib/ --with-pg-config=/usr/bin/pg_config
|
13
|
+
|
14
|
+
|
15
|
+
#include Pcap
|
16
|
+
|
17
|
+
module DoroParser
|
18
|
+
|
19
|
+
# Extracts the queried hostname(s) out of a raw DNS payload.
class Mydns

  # ASCII control characters (decimal 0-27) that appear as label-length
  # separators inside raw DNS packet data.  The original expression listed
  # each code point individually and contained invalid octal escapes
  # (\008, \009, \018, \019 -- 8 and 9 are not octal digits) plus a missing
  # "|" between \021 and \022; a character-class range is correct and
  # covers the intended decimal 0-27 span.
  CONTROL_CHARS = /[\x00-\x1b]/

  # Domain-name shaped token: dot-separated labels (max 63 chars each)
  # followed by a 2-6 letter TLD.
  DOMAIN_RE = /([a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}/

  # data: raw packet bytes (anything responding to #to_s).  Control bytes
  # are replaced with "." so the embedded query name becomes a plain
  # dotted hostname that the regexp can pick up.
  def initialize(data)
    raw = data.to_s.gsub(CONTROL_CHARS, '.')
    # String#grep was removed in Ruby 1.9 (the 1.8-era original iterated
    # lines implicitly); enumerate lines explicitly and keep the first
    # domain-looking match per line, exactly as the old code produced.
    @query = raw.each_line.map { |line| line[DOMAIN_RE] }.compact
  end

  # Returns the extracted query names (Array of String) or nil when the
  # instance variable is unset.
  def query
    return @query if @query
  end

  # Truthy when a query array was built.  Note: initialize always builds
  # an Array (possibly empty), and an empty Array is truthy in Ruby --
  # preserved as in the original.
  def query?
    return true if @query
  end

  def dns_class
    #todo
  end

  def type
    #todo
  end

  def response
    #todo
  end
end
|
45
|
+
|
46
|
+
|
47
|
+
# Legacy one-shot payload classifier: sniffs IRC commands, mail headers,
# or HTTP request lines out of raw traffic and stashes the captures in
# instance variables (@irc / @mail / @http flag which branch matched).
# Superseded by Parser below; kept for reference.
class Parser_old

  def initialize(data)
    # NOTE(review): Iconv was removed from the Ruby stdlib in 2.0 -- this
    # code targets 1.8/1.9.
    noutf = Iconv.new('US-ASCII//TRANSLIT', 'UTF-8')
    if data and data =~ /(USER |USERHOST |PRIVMSG |PASS |NICK |JOIN |MODE |MSG |KCIK |rssr )(.*)\n/
      # IRC-style verb at the start of a line ("KCIK"/"rssr" look like
      # obfuscated/reversed variants seen in bot traffic).
      @irc = true
      @command = $1
      begin
        # NOTE(review): the begin body is entirely commented out, so the
        # rescue below can never fire and @command2 is never assigned.
        #@command2 = noutf.iconv($2).gsub(/"|'|\\/, "-")
      rescue
        @command2 = "null"
      end
    elsif data =~ /from\W*([0-9]{1,3}(\.[0-9]{1,3}){3}).*by\W*(([a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}).*From:.*<(\S+)>\W*To:.*<(\S+)>.*Subject:(.*)\W*Date/m
      # Received-header style mail blob: capture source IP ($1), relay
      # host ($3), sender ($6), recipient ($7) and subject ($8).
      @mail = true
      @fromip = $1
      @by =$3
      @from = $6
      @to = $7
      @subj = $8.chomp.lstrip

    elsif data and data =~ /^(GET|POST)\s+(\S+)/
      # Plain HTTP request line.
      @http = true
      @method = $1
      @path = $2

    end
  end

end
|
77
|
+
|
78
|
+
|
79
|
+
# Heuristic protocol classifier plus per-protocol wrapper classes.
class Parser
  attr_reader :service

  # Classifies payload +a+ and returns an IRC or SMTP wrapper object,
  # or nil when no pattern matched.
  # NOTE(review): SMTP.init is not defined anywhere (SMTP has no .init
  # class method) -- SMTP.new was probably intended, so mail-looking
  # payloads raise NoMethodError here.  Left untouched pending confirmation.
  def self.guess(a)

    if a =~ /USER |USERHOST |PRIVMSG |PASS |NICK |JOIN |MODE |MSG/
      t = IRC.new a
    elsif a =~ /from(.*)From(.*)Subject/m
      t = SMTP.init a
    elsif a =~ /(\d*)(.*)\n/
      t = SMTP.new a
    end

    return t
  end

  # Wrapper for a single IRC protocol line: the verb and its argument text.
  class IRC
    attr_reader :command
    attr_reader :content

    def initialize(data)
      if data =~ /(USER |USERHOST |PRIVMSG |PASS |NICK |JOIN |MODE |MSG )(.*)\n/
        # NOTE(review): Iconv was removed from the stdlib in Ruby 2.0.
        noutf = Iconv.new('US-ASCII//TRANSLIT', 'UTF-8')
        @command = $1
        # Transliterate to ASCII and replace quote/backslash chars with "-".
        @content = noutf.iconv($2).gsub(/"|'|\\/, "-") #xcode bug ->>> ")
      end
      #return true if !@command.nil?
      return self
    end

  end

  # Wrapper and utility methods for SMTP traffic.
  class SMTP
    #todo make a initialize class
    attr_reader :hcmd
    attr_reader :hcont
    attr_accessor :body
    attr_accessor :rdata

    # Parses a complete mail message with TMail and returns the message object.
    def self.body(data)

      email = TMail::Mail.parse(data)
      return email

    end

    # True when +data+ looks like an SMTP envelope command; nil otherwise.
    def self.header?(data)

      if data =~ /(MAIL FROM: |EHLO |HELO |TO: |RSET)(.*)\n/
        return true
      end

    end

    # True when +data+ appears to contain a full message (headers + body).
    def self.hasbody?(data)

      if data =~ /from(.*)From(.*)Subject/m
        return true
      end

    end

    # Splits a server reply line into [status code, text]; nil on no match.
    def self.response(data)

      if data =~ /(\d*)(.*)\n/
        rcode = $1
        rcont = $2
        return rcode, rcont
      end

    end

    # Captures the envelope command (@hcmd) and its argument (@hcont).
    def initialize a

      if a =~ /(MAIL FROM: |EHLO |HELO |TO: |RSET)(.*)\n/
        @hcmd = $1
        @hcont = $2
      end

    end

  end

end
|
171
|
+
|
172
|
+
# Resolves GeoIP data (city, country code, coordinates, ASN) for an IP.
# Every field falls back to the literal string "null" when lookup fails
# or the address is inside the analysis LAN.
class Geoinfo
  attr_reader :updated
  attr_reader :country
  attr_reader :coord
  attr_reader :asn
  attr_reader :city
  attr_reader :updated # NOTE(review): duplicate of the first attr_reader :updated

  # NOTE(review): this runs at class-definition time, so it sets a
  # class-level instance variable -- it does NOT pre-populate the @updated
  # that the instance reader above exposes.
  @updated = "null"
  # Analysis LAN; addresses inside it are never geo-located.
  LOCALNET = "10.10.10.0/24"

  # ip: dotted-quad String.  Relies on the GEOIP / GEPASN constants
  # pointing at MaxMind .dat files.
  # NOTE(review): "GEPASN" looks like a typo for "GEOASN" -- confirm the
  # constant name against the file that defines it.
  def initialize(ip)
    # NOTE(review): Iconv was removed from the stdlib in Ruby 2.0.
    noutf = Iconv.new('US-ASCII//TRANSLIT', 'UTF-8')
    @updated = 'null' #TODO take the creation file date of the .dat archive

    #year = geoip.database_info.grep(/(\S+) (\d{4})(\d{2})(\d{2})/){$2}
    #month = geoip.database_info.grep(/(\S+) (\d{4})(\d{2})(\d{2})/){$3}
    #day = geoip.database_info.grep(/(\S+) (\d{4})(\d{2})(\d{2})/){$4}
    #@updated = year.to_s + "/" + month.to_s + "/" + day.to_s
    localnetwork = IPAddr.new(LOCALNET)

    if !localnetwork.include?(ip)

      begin

        geoip = GeoIP.new(GEOIP)
        geoasn = GeoIP.new(GEPASN)

        if geoip.country(ip)
          # Transliterate non-ASCII city names and strip quoting characters.
          @city = noutf.iconv(geoip.country(ip).city_name).gsub(/"|'|\\/, "-") #xcode bug ->>> ")
          @country = geoip.country(ip).country_code2
          # "lat,lon" with any stray parentheses removed.
          @coord = geoip.country(ip).latitude.to_s.gsub(/\(|\)/,'') + "," + geoip.country(ip).longitude.to_s.gsub(/\(|\)/,'')

        else

          @city, @country, @coord = "null", "null", "null"

        end

        # Keep only the numeric part of the "ASxxxx" identifier.
        # NOTE(review): String#grep was removed in Ruby 1.9 -- this line is
        # 1.8-era and will raise NoMethodError on modern Rubies.
        @asn = (geoasn.asn(ip) ? geoasn.asn(ip).as_num.to_s.grep(/\d+/){$&} : "null" )

      rescue
        LOGGER_PARSER.fatal "GEO", "Error while fetching GeoIP dat file for IP: " + ip
        LOGGER_PARSER.fatal "GEO", "#{$!}"
        @city, @country, @coord, @asn = "null", "null", "null", "null"
      end

    else
      # Local/RFC1918 analysis address: nothing to look up.
      @city, @country, @coord, @asn = "null", "null", "null", "null"

    end
  end

end
|
229
|
+
|
230
|
+
|
231
|
+
# Summary of an HTTP transaction pulled out of an Xtractr flow query.
# Every attribute degrades to the literal string 'null' when the
# corresponding http.* field is absent from the flow.
class DoroHttp
  attr_reader :contype
  attr_reader :method
  attr_reader :uri
  attr_reader :ssl
  attr_reader :size
  attr_accessor :data

  # flowdeep: an Xtractr flow result set (responds to #values).
  def initialize(flowdeep)
    @data    = 'null'
    @method  = first_value(flowdeep, 'http.request.method')
    @ssl     = false #TODO
    @size    = first_value(flowdeep, 'http.content.length')
    @uri     = first_value(flowdeep, 'http.request.uri')
    @contype = first_value(flowdeep, 'http.content.type')
  end

  private

  # First value reported for +field+, or 'null' when the field is missing.
  def first_value(flowdeep, field)
    hit = flowdeep.values(field)[0]
    hit ? hit.value : 'null'
  end
end
|
249
|
+
|
250
|
+
# Wraps a raw DNS payload via Net::DNS and normalizes the interesting
# fields.  Unset fields carry the literal string 'null' (the DB layer
# expects that marker).
class DoroDNS
  attr_accessor :dns      # the parsed Net::DNS::Packet
  attr_accessor :ttl
  attr_accessor :name
  attr_accessor :type
  attr_accessor :type_i
  attr_accessor :cls
  attr_accessor :cls_i
  attr_accessor :address
  attr_accessor :data

  # c: raw DNS message bytes.
  def initialize(c)
    @dns = Net::DNS::Packet::parse(c)
    if qry? #is a query
      q = @dns.question.first
      @cls_i = q.qClass #Net::DNS::Question.parse(c[12..offset]).qClass
      @name = q.qName
      @type_i = q.qType
      @ttl = 'null'
      @address = 'null'
      @data = 'null'

    # NOTE(review): qry? is defined as @dns.answer.empty?, so whenever this
    # elsif is reached its condition is necessarily true.
    elsif !@dns.answer.empty? #contain an asnwer
      #TODO only the first answer is parsed
      # NOTE(review): @dns.answer.each.first is just @dns.answer.first.
      a = @dns.answer.each.first
      @ttl = a.ttl
      @name = a.name

      @type = a.type
      @type_i = Net::DNS::RR::Types.new @type

      @cls = a.cls
      @cls_i = Net::DNS::RR::Classes.new @cls

      # Record-type specific payload: A/AAAA carry an address,
      # MX/CNAME carry a target name in @data.
      case @type

        when "A"
          @address = @dns.answer.each.first.address
          @data = 'null'

        when "AAAA"
          @address = @dns.answer.each.first.address
          @data = 'null'

        when "MX" then

          @data = @dns.answer.each.first.exchange
          @address = 'null'

        when "CNAME" then

          @data = @dns.answer.each.first.cname
          @address = 'null'

        else

          @address = 'null'
          @data = 'null'

      end
    end

  end

  # True when the packet has no answer section, i.e. it is a query.
  def qry?
    @dns.answer.empty?
  end

end
|
322
|
+
|
323
|
+
# Descriptor for a downloaded malware sample stored on disk.
class DoroFile
  attr_accessor :sha2
  attr_accessor :cont
  attr_reader :path
  attr_reader :date
  attr_accessor :size

  # hash: sample identifier; becomes the on-disk filename under ./downloads.
  def initialize(hash)
    download_dir = "./downloads"
    @path = "#{download_dir}/#{hash}.exe"
    @date = Time.new.strftime("%m/%d/%y %H:%M:%S")
  end

  # SHA-256 digest of +content+; returns the Digest object after feeding
  # it (so callers can ask for hexdigest etc.).  The digest is also kept
  # in a class-level @sha2, as the original did.
  def self.sha2(content)
    @sha2 = Digest::SHA2.new
    @sha2 << content
  end
end
|
341
|
+
|
342
|
+
|
343
|
+
# Convenience queries layered on top of Mu::Xtractr for inspecting a
# pcapr-indexed capture: HTTP summaries, flow greps, stream dumps, and a
# couple of Zeus-specific heuristics.
class Doroxtractr < Mu::Xtractr #PcaprLocal::Xtractr.new

  #def lol(id)
  # self.create "http://172.20.250.13:8080/pcaps/1/pcap/#{id}"
  #end

  # Prints one line per HTTP flow and returns the Array of flow ids.
  # fast != 0 skips the body-length column (which re-reads the stream).
  # NOTE(review): parameter +v+ is unused.
  def summaryhttp(fast=0, v=0)
    ids = []
    self.flows('flow.service:HTTP').each { |flow|
      method = self.flows("flow.id:#{flow.id}").values('http.request.method')[0].value
      if fast == 0
        puts "#{flow.id} #{flow.src.address} > #{flow.dst.address} - #{method} - #{flow.stream.flow.contents.first.body.length}"
      else
        puts "#{flow.id} #{flow.src.address} > #{flow.dst.address} - #{method}"
      end
      ids.push(flow.id)
    }
    return ids
  end

  # Human-readable dump of a single flow plus its timestamp.
  def flowinfo(id)
    f = self.flows("flow.id:#{id}").first.inspect
    f << self.flows("flow.id:#{id}").first.time.to_s
    return f
  end

  #Find the HTTP requests made by the host (Zeus uses it to send stolen data to its dropzone)
  #The biggest post refers to the STATS one (by default is sent every 20 min)
  #the smallest post refers to the LOG one (by default is sent every minute)
  #the biggest GET refers to the Configuration file downloaded by the Zeus C&C
  # Returns a [flow_id, body_length] pair: the smallest matching flow for
  # "ping", the biggest for "stat"/"conf"; 1 on a bad +type+ argument.
  def findzeusdata(re, type, cc='192.168.10.3')
    flowids = {}
    self.flows("flow.service:HTTP flow.dst: #{cc}").each do |flow|
      method = self.flows("flow.id:#{flow.id}").values('http.request.method')[0].value
      flowids[flow.id] = flow.stream.flow.contents.first.body.length if method =~ /#{Regexp.escape(re)}/
    end
    if type == "ping"
      return flowids.sort {|a,b| a[1]<=>b[1]}.first
    elsif type == "stat" || type == "conf"
      return flowids.sort {|a,b| a[1]<=>b[1]}.last
    else
      puts "Error, choose one argument from: ping, stat, conf"
      return 1
    end
  end

  #Find the HTTP GET request made by the host (Zeus uses it to send stolen data to its dropzone)
  #Is the first get request made to the C&C [!?]
  # NOTE(review): +cc+ is not defined in this scope (no local, parameter,
  # or reader) -- calling this raises NameError.  It probably needs a cc
  # parameter like findzeusdata.
  def findconfget
    self.flows("flow.service:HTTP flow.dst: #{cc}")
  end

  # Prints HTTP flows whose request method matches +re+ (literal match).
  def summaryhttpmethod(re, fast=0)
    self.flows('flow.service:HTTP').each { |flow|
      flowdeep = self.flows("flow.id:#{flow.id}")
      if fast == 0
        puts "#{flow.id} #{flow.src.address} > #{flow.dst.address} - #{flow.stream.flow.contents.first.body.length}" if flowdeep.values('http.request.method')[0] && flowdeep.values('http.request.method')[0].value =~ /#{Regexp.escape(re)}/
      else
        puts "#{flow.id} #{flow.src.address} > #{flow.dst.address}" if flowdeep.values('http.request.method')[0] && flowdeep.values('http.request.method')[0].value =~ /#{Regexp.escape(re)}/
      end
    }
  end

  # One line per flow in the capture; verbose == 1 adds timing/size columns.
  def flowsummary(verbose=0)
    self.flows.each { |flow|
      flowdeep = self.flows("flow.id:#{flow.id}")
      if verbose == 1
        puts "#{flow.id}: #{flow.time} : #{flow.src.address} > #{flow.dst.address} - #{flow.packets} - #{flow.bytes} - #{flow.duration} - #{flow.title}"
      else
        puts "| #{flow.id}: #{flow.src.address} > #{flow.service.name} > #{flow.dst.address} : #{flow.title}"
      end
    }
  end

  # NOTE(review): the inner interpolations reference +flow+, but the block
  # variable is +f+ -- this method raises NameError as written.
  def summaryport(port)
    self.flows("flow.dport:#{port}").each do |f|
      f.contents.each do |c|
        puts "#{f.id}: #{flow.id} #{flow.src.address} > #{flow.dst.address} #{f.title} : #{c.body.length}"
      end
    end
  end

  # Greps every message of flow +id+ against +re+; prints hits and returns
  # a truthy value when at least one message matched.
  def flowgrep(id, re)
    self.flows("flow.id:#{id}").each do |f|
      @t = false
      f.stream.each do |mex|
        if mex.bytes =~ /#{re}/
          puts "#{f.id}: > #{f.dst.address} - #{$1}"
          @t = true
        end
      end
    end
    return @t
  end

  # Returns the flow's messages as an Array of [bytes, direction] pairs.
  def streamdata(id)
    data = []
    self.flows("flow.id:#{id}").each do |f|
      f.stream.each do |mex|
        t = [mex.bytes, mex.dir]
        data.push t
      end
    end
    return data
  end

  #Retrieve the content of a specified flow-ID
  def flowcontent(id)
    body = ""
    self.flows("flow.id:#{id}").each do |flow|
      flow.contents.each do |c|
        body << c.body
      end
    end
    return body
  end

end
|
467
|
+
|
468
|
+
end
|
@@ -0,0 +1,223 @@
|
|
1
|
+
# Copyright (C) 2010-2013 marco riccardi.
|
2
|
+
# This file is part of Dorothy - http://www.honeynet.it/dorothy
|
3
|
+
# See the file 'LICENSE' for copying permission.
|
4
|
+
|
5
|
+
module Dorothy
|
6
|
+
|
7
|
+
# Stateless file/database utility helpers.
module Util

  extend self

  # Dumps +string+ into +file+, truncating any previous content.
  def write(file, string)
    File.open(file, 'w') { |f| f.write(string) }
  end

  # Thin wrapper around File.exist?.
  def exists?(file)
    File.exist?(file)
  end

  # (Re)creates the Dorothive schema by feeding the DDL to psql.
  # Destroys all existing data; asks for interactive confirmation unless
  # +force+ is truthy.
  def init_db(force=false)
    LOGGER.warn "DB", "The database is going to be initialized, all the data present will be lost. Continue?(write yes)"
    answ = force ? "yes" : gets.chop

    if answ == "yes"
      begin
        #ugly, I know, but couldn't find a better and easier way..
        psql = "psql -h #{DoroSettings.dorothive[:dbhost]} -U #{DoroSettings.dorothive[:dbuser]} -f #{DoroSettings.dorothive[:ddl]}"
        raise 'An error occurred' unless system psql
        LOGGER.info "DB", "Database correctly initialized."
      rescue => e
        LOGGER.error "DB", $!
        LOGGER.debug "DB", e.inspect
      end
    else
      LOGGER.error "DB", "Database untouched, quitting."
    end
  end

end
|
39
|
+
|
40
|
+
# SCP file retrieval over SSH.
module Ssh

  extend self

  # Copies the remote +file+ on +host+ to the local +dest+ path,
  # authenticating as +user+/+pass+ on the given +port+.
  def download(host, user, pass, file, dest, port=22)
    options = { :password => pass, :port => port }
    Net::SSH.start(host, user, options) do |session|
      session.scp.download! file, dest
    end
  end
end
|
50
|
+
|
51
|
+
# Thin data-access layer over the "dorothy" PostgreSQL schema.
# NOTE(review): every query below interpolates values directly into the
# SQL string; acceptable for trusted internal data, but PGconn#exec_params
# would be safer.
class Insertdb

  # Opens a connection using the credentials from DoroSettings.
  def initialize
    @db = PGconn.open(:host=> DoroSettings.dorothive[:dbhost], :dbname=>DoroSettings.dorothive[:dbname], :user=>DoroSettings.dorothive[:dbuser], :password=>DoroSettings.dorothive[:dbpass])
  end

  # Opens a transaction.
  def begin_t
    @db.exec("BEGIN")
  end

  # Commits the current transaction.
  def commit
    @db.exec("COMMIT")
  end

  # Raw transaction status code from the driver.
  def status
    @db.transaction_status
  end

  # Closes the connection.
  def close
    @db.close
  end

  # Logs and rolls back the current transaction.
  def rollback
    LOGGER.error "DB", "DB ROLLBACK"
    @db.exec("ROLLBACK")
  end

  # Inserts +values+ (one element per column, in table order) into
  # dorothy.+table+.  The literals "default", "null", "lastval()" and
  # anything containing "currval" are passed through unquoted; everything
  # else is single-quoted.  Returns false on error.
  def insert(table,values)
    n = 1
    @sqlstring = ""

    values.each { |value|
      if value == "default"
        value1 = value
      elsif value == "null"
        value1 = value
      elsif value == "lastval()"
        value1 = value
      elsif value =~ /currval/
        value1 = value
      else
        value1 = "'#{value}'"
      end
      # NOTE(review): the bare "elsif" below has no condition -- Ruby
      # parses the following line as the condition, so the append happens
      # as a side effect of evaluating it.  It behaves like "else" by
      # accident; left untouched.
      if n == values.size
        @sqlstring << value1
      elsif
        @sqlstring << value1 + ","
      end
      n += 1
    }
    #p "Inserting in dorothy.#{table}:"
    #p "#{@sqlstring}"

    begin
      @db.exec("INSERT into dorothy.#{table} values (#{@sqlstring})")
    rescue => e
      LOGGER.debug "DB", $!
      LOGGER.debug "DB", e.inspect
      #self.rollback
      return false
      #exit 1
    end

    #p "Insertion OK"

  end

  # Inserts a pre-formatted VALUES string into dorothy.+table+.
  # Returns false on error.
  def raw_insert(table, data)
    begin
      @db.exec("INSERT into dorothy.#{table} values (#{data})")
    rescue
      LOGGER.error "DB", "#{$!}"
      #self.rollback
      return false
      #exit 1
    end
  end

  # SELECT * from dorothy.+table+ filtered on up to three column=value
  # equality pairs; returns the PG result set.
  def select(table, column, value, column2=nil, value2=nil, column3=nil, value3=nil)
    column2&&value2 ? ( column3&&value3 ? chk = @db.exec("SELECT * from dorothy.#{table} where #{column} = '#{value}' AND #{column2} = '#{value2}' AND #{column3} = '#{value3}' ") : chk = @db.exec("SELECT * from dorothy.#{table} where #{column} = '#{value}' AND #{column2} = '#{value2}'")) : chk = @db.exec("SELECT * from dorothy.#{table} where #{column} = '#{value}'")

    #puts ".::WARNING #{value} already present in dorothy.#{table}".red.bold if chk
    return chk
  end

  # Advances and returns the analyses id sequence as an Integer.
  def get_anal_id
    @db.exec("SELECT nextval('dorothy.analyses_id_seq')").first["nextval"].to_i
  end

  # Escapes binary data for insertion into a bytea column.
  def self.escape_bytea(data)
    escaped = PGconn.escape_bytea data
    return escaped
  end

  # True when dorothy.+table+ contains no rows.
  def table_empty?(table)
    @db.exec("SELECT CASE WHEN EXISTS (SELECT * FROM dorothy.#{table} LIMIT 1) THEN FALSE ELSE TRUE END").first["case"] == "t" ? true : false
  end

  # NOTE(review): interpolates +proto+, which is not defined here (the
  # role/ip parameters are unused) -- raises NameError when called.
  def update_proto(role, ip)
    @db.exec("UPDATE dorothy.host_roles set app_protocol = '#{proto}' where id = currval('connections_id_seq')")
  end

  # Marks the traffic dump identified by +hash+ as parsed.
  def set_analyzed(hash)
    @db.exec("UPDATE dorothy.traffic_dumps set parsed = true where hash = '#{hash}'")
  end

  # Current value of sequence dorothy.+seq+.
  def find_seq(seq)
    @db.exec("SELECT currval('dorothy.#{seq}')")
  end

  # Empties dorothy.+table+ (and dependent tables, via CASCADE).
  def flush_table(table)
    @db.exec("TRUNCATE dorothy.#{table} CASCADE")
  end

  # Returns every sample hash row present in the DB.
  def malware_list
    malwares = []
    @db.exec("SELECT samples.hash FROM dorothy.samples").each do |q|
      malwares.push q
    end
    return malwares
  end

  # Collects the not-yet-parsed traffic dumps into @pcaps.
  # NOTE(review): the method never returns @pcaps explicitly (it returns
  # whatever the begin/rescue evaluates to), and the rescue concatenates a
  # String with the exception object ($!), which raises TypeError on
  # modern Rubies -- both look unintended; confirm before relying on them.
  def find_pcap
    @pcaps = []
    begin
      @db.exec("SELECT traffic_dumps.hash, traffic_dumps.pcapr_id, traffic_dumps.size, traffic_dumps.binary, traffic_dumps.parsed, samples.md5 as \"sample\", analyses.date as \"date\" FROM dorothy.traffic_dumps, dorothy.samples, dorothy.analyses WHERE analyses.traffic_dump = traffic_dumps.hash AND analyses.sample = samples.hash AND traffic_dumps.parsed = false").each do |q|
        @pcaps.push q
      end
    rescue
      LOGGER.error "DB","Error while fetching traffic_dumps table\n " + $!
    end

  end

  # Reserves a free sandbox VM: flips is_available to false and returns
  # [id, hostname, ipaddress, username, password]; false when none free.
  def find_vm
    vm = @db.exec("SELECT id, hostname, ipaddress, username, password FROM dorothy.sandboxes where is_available is true").first
    if vm.nil?
      LOGGER.warn "DB","At this time there are no free VM available"
      return false
    else
      @db.exec("UPDATE dorothy.sandboxes set is_available = false where id = '#{vm["id"]}'")
      return vm["id"].to_i, vm["hostname"], vm["ipaddress"], vm["username"], vm["password"]
    end
  end

  # Releases VM +vmid+.  Returns true on success, false on error or when
  # the VM was already marked available.
  def free_vm(vmid)
    r = @db.exec("SELECT hostname FROM dorothy.sandboxes where id = '#{vmid}' AND is_available is false")
    if !r.first.nil? #check if the issued VM is already free
      begin
        @db.exec("UPDATE dorothy.sandboxes set is_available = true where id = '#{vmid}'")
        LOGGER.info "DB", "VM #{vmid} succesfully released"
        return true
      rescue
        LOGGER.error "DB", "An error occurred while releasing the VM"
        LOGGER.debug "DB", $!
        return false
      end
    else
      LOGGER.warn "DB", "Dorothy is trying to release the VM #{vmid} that is already available!!"
      return false
    end
  end

  # Startup reset: marks every sandbox as available.
  def vm_init
    @db.exec("UPDATE dorothy.sandboxes set is_available = true")
    LOGGER.debug "DB", "All VM are now available"
    #TODO - revert them too?
  end

end
|
222
|
+
|
223
|
+
end
|
@@ -0,0 +1,29 @@
|
|
1
|
+
module Process
  # Supply daemon for pre ruby 1.9
  # Adapted from lib/active_support/core_ext/process/daemon.rb
  #
  # Detaches the current process from its controlling terminal and runs it
  # in the background, double-forking so it can never reacquire a tty.
  # nochdir: keep the current working directory instead of chdir-ing to "/".
  # noclose: keep stdin/stdout/stderr instead of redirecting them to /dev/null.
  # Only defined when the running Ruby does not already provide
  # Process.daemon (see the trailing "unless" modifier).
  def self.daemon(nochdir = nil, noclose = nil)
    exit! if fork # Parent exits, child continues.
    Process.setsid # Become session leader.
    exit! if fork # Zap session leader. See [1].

    unless nochdir
      Dir.chdir "/" # Release old working directory.
    end

    unless noclose
      STDIN.reopen "/dev/null" # Free file descriptors and
      STDOUT.reopen "/dev/null", "a" # point them somewhere sensible.
      STDERR.reopen '/dev/null', 'a'
    end

    trap("TERM") { exit }

    return 0

  end unless self.respond_to? :daemon
end
|
25
|
+
|
26
|
+
module Dorothy
|
27
|
+
ROOT = File.expand_path(File.dirname(File.dirname(__FILE__)))
|
28
|
+
$: << ROOT
|
29
|
+
end
|