dorothy2 1.2.0 → 2.0.0

Files changed (47)
  1. checksums.yaml +8 -8
  2. data/CHANGELOG +39 -14
  3. data/README.md +80 -62
  4. data/UPDATE +6 -14
  5. data/bin/dorothy2 +472 -0
  6. data/dorothy2.gemspec +22 -16
  7. data/etc/ddl/dorothive.ddl +619 -373
  8. data/etc/sources.yml.example +27 -2
  9. data/lib/doroGUI.rb +232 -0
  10. data/lib/doroParser.rb +34 -78
  11. data/lib/dorothy2.rb +288 -248
  12. data/lib/dorothy2/BFM.rb +114 -61
  13. data/lib/dorothy2/DEM.rb +3 -1
  14. data/lib/dorothy2/NAM.rb +2 -2
  15. data/lib/dorothy2/Settings.rb +2 -1
  16. data/lib/dorothy2/VSM.rb +2 -1
  17. data/lib/dorothy2/deep_symbolize.rb +2 -7
  18. data/lib/dorothy2/do-init.rb +286 -19
  19. data/lib/dorothy2/do-logger.rb +1 -1
  20. data/lib/dorothy2/do-utils.rb +382 -33
  21. data/lib/dorothy2/version.rb +1 -1
  22. data/lib/dorothy2/vtotal.rb +30 -20
  23. data/lib/mu/xtractr.rb +11 -11
  24. data/lib/mu/xtractr/stream.rb +1 -1
  25. data/lib/www/public/reset.css +153 -0
  26. data/lib/www/public/style.css +65 -0
  27. data/lib/www/views/analyses.erb +28 -0
  28. data/lib/www/views/email.erb +63 -0
  29. data/lib/www/views/flows.erb +30 -0
  30. data/lib/www/views/layout.erb +27 -0
  31. data/lib/www/views/profile.erb +49 -0
  32. data/lib/www/views/queue.erb +28 -0
  33. data/lib/www/views/resume.erb +135 -0
  34. data/lib/www/views/resume.erb~ +88 -0
  35. data/lib/www/views/samples.erb +20 -0
  36. data/lib/www/views/upload.erb +154 -0
  37. data/share/img/The_big_picture.pdf +0 -0
  38. data/test/tc_dorothy_full.rb +3 -0
  39. metadata +169 -70
  40. data/TODO +0 -27
  41. data/bin/dorothy_start +0 -225
  42. data/bin/dorothy_stop +0 -28
  43. data/bin/dparser_start +0 -94
  44. data/bin/dparser_stop +0 -31
  45. data/etc/dorothy copy.yml.example +0 -39
  46. data/etc/extensions.yml +0 -41
  47. data/share/update-dorothive.sql +0 -19
@@ -20,7 +20,7 @@ class DoroLogger < Logger
   end
 
   def debug(progname, text, &block)
-    add(DEBUG, text, progname, &block)
+    add(DEBUG, text, progname, &block) if VERBOSE
   end
 
   def warn(progname, text, &block)
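
Note: with this change DoroLogger#debug only writes when the global VERBOSE flag is set; the other log levels are unaffected. A minimal sketch, assuming DoroLogger is constructed like Ruby's stdlib Logger and VERBOSE is set at startup (both are assumptions for illustration):

    VERBOSE = true                       # assumption: toggled by a --verbose flag at startup
    logger = DoroLogger.new(STDOUT)      # assumption: DoroLogger accepts a Logger-style device
    logger.debug("VSM", "connecting")    # emitted only because VERBOSE is true
    logger.warn("VSM", "retrying")       # always emitted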
@@ -14,10 +14,83 @@ module Dorothy
      File.open(file , 'w') {|f| f.write(string) }
    end
 
+    def get_time(local=Time.new)
+      time = local
+      case local.class.to_s
+        when 'Time'
+          time.utc.strftime("%Y-%m-%d %H:%M:%S")
+        when 'DateTime'
+          time.strftime("%Y-%m-%d %H:%M:%S")
+        else
+          time
+      end
+    end
+
    def exists?(file)
      File.exist?(file)
    end
 
+
+    def load_profile(p_name)
+      p = YAML.load_file(DoroSettings.env[:home] + '/etc/profiles.yml').select {|k| k == p_name}.first
+
+      if p.nil?
+        LOGGER.warn "PROFILE", "Warning, the profile specified (#{p_name}) doesn't exist in profiles.yml. Skipping"
+        false
+      else
+        p
+      end
+
+    end
+
+    def check_pid_file(file)
+      if File.exist? file
+        # If we get Errno::ESRCH then process does not exist and
+        # we can safely cleanup the pid file.
+        pid = File.read(file).to_i
+        begin
+          Process.kill(0, pid)
+        rescue Errno::ESRCH
+          stale_pid = true
+        end
+
+        unless stale_pid
+          puts "[" + "+".red + "] " + "[Dorothy]".yellow + " Dorothy is already running (pid=#{pid})"
+          false
+        end
+        true
+      end
+    end
+
+    def create_pid_file(file, pid)
+      File.open(file, "w") { |f| f.puts pid }
+
+      ## Sends SIGTERM to process in pidfile. Server should trap this
+      # and shutdown cleanly.
+      at_exit do
+        if File.exist? file
+          File.unlink file
+        end
+      end
+    end
+
+    def stop_process(doro_module)
+
+      pid_file = DoroSettings.env[:pidfiles] + '/' + doro_module + '.pid'
+
+      doro_module.upcase!
+
+      puts "[" + "+".red + "]" + " The #{doro_module} module is shutting now.."
+
+      if pid_file and File.exist? pid_file
+        pid = Integer(File.read(pid_file))
+        Process.kill(-2,-pid)
+        puts "[" + "+".red + "]" + " The #{doro_module} module (PID #{pid}) was terminated"
+      else
+        puts "[" + "+".red + "]" + "Can't find PID file, is #{doro_module} really running?"
+      end
+    end
+
    def init_db(ddl=DoroSettings.dorothive[:ddl], force=false)
      LOGGER.warn "DB", "The database is going to be initialized with the file #{ddl}. If the Dorothive is already present, " + "all its data will be lost".red + ". Continue?(write yes)"
      answ = "yes"
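
Note: the new Util.get_time helper added above normalizes timestamps before they are written to the database; a quick sketch of its behaviour as implemented:

    Util.get_time                          # current time, formatted "%Y-%m-%d %H:%M:%S" in UTC
    Util.get_time(Time.now)                # Time objects are converted to UTC, then formatted
    Util.get_time(DateTime.now)            # DateTime objects are formatted as-is
    Util.get_time("2014-01-31 10:15:00")   # any other value is returned unchanged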
@@ -26,7 +99,14 @@ module Dorothy
      if answ == "yes"
        begin
          #ugly, I know, but couldn't find a better and easier way..
-          raise 'An error occurred' unless system "sh -c 'psql -h #{DoroSettings.dorothive[:dbhost]} -U #{DoroSettings.dorothive[:dbuser]} -f #{ddl} 1> /dev/null'"
+          LOGGER.info "DB", "Creating DB #{DoroSettings.dorothive[:dbname]}"
+          if system "sh -c 'createdb -h #{DoroSettings.dorothive[:dbhost]} -U #{DoroSettings.dorothive[:dbuser]} -e #{DoroSettings.dorothive[:dbname]} 1> /dev/null'"
+            LOGGER.info "DB", "Importing the dorothive DDL from #{ddl}"
+            system "sh -c 'psql -d #{DoroSettings.dorothive[:dbname]} -h #{DoroSettings.dorothive[:dbhost]} -U #{DoroSettings.dorothive[:dbuser]} -f #{ddl} 1> /dev/null'"
+          else
+            raise 'An error occurred'
+          end
+
          LOGGER.info "DB", "Database correctly initialized. Now you can restart Dorothy!"
        rescue => e
          LOGGER.error "DB", $!
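
Note: database initialization is now split into two steps, creating the database and then importing the DDL. The shell equivalent of the commands built in the strings above looks roughly like this (host, user and database name come from DoroSettings.dorothive):

    createdb -h <dbhost> -U <dbuser> -e <dbname>
    psql -d <dbname> -h <dbhost> -U <dbuser> -f etc/ddl/dorothive.ddl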
@@ -37,6 +117,7 @@ module Dorothy
      end
    end
 
+
  end
 
  module Ssh
@@ -50,6 +131,73 @@ module Dorothy
    end
  end
 
+  module QueueManager
+    extend self
+
+    def add(f, sourceinfo, profile, priority, mail_id=nil)
+
+      bin = Loadmalw.new(f)
+
+      if bin.size == 0 || bin.sha.empty?
+        LOGGER.warn "BFM", "Warning - Empty file #{bin.filename}, deleting and skipping.."
+        FileUtils.rm bin.binpath
+        return false
+      end
+
+      begin
+        push_malw(bin, sourceinfo, profile, priority, mail_id)
+      rescue => e
+        LOGGER.error "DB", $!
+        LOGGER.debug "DB", e.backtrace
+        raise e
+      end
+
+    end
+
+    #push the binary meta info into the DB
+    def push_malw(bin, sourceinfo, profile, priority, mail_id)
+
+      db = Insertdb.new
+      db.begin_t
+
+      unless db.select("samples", "sha256", bin.sha).one? #is bin.sha already present in my db?
+        samplevalues = [bin.sha, bin.size, bin.binpath_repo, bin.filename, bin.md5, bin.type ]
+
+        if db.insert("samples", samplevalues) #no it isn't, insert it
+          #Move the binary to the bin repo
+          LOGGER.debug "BFM", "Moving file from the source's directory to the Dorothy's repository"
+          FileUtils.mv(bin.binpath,bin.binpath_repo, :force => true)
+        else
+          raise "A DB error occurred"
+        end
+
+      else #yes it is, don't insert in sample table
+        date = db.select("sightings", "sample", bin.sha).first["date"]
+        LOGGER.warn "BFM", "The binary #{bin.sha} was already added on #{date}"
+        FileUtils.rm bin.binpath
+      end
+
+
+      #Add to sighting
+      sigh_id = db.get_sighting_id
+      sighvalues = [bin.sha, db.check_source_db(sourceinfo)["id"], bin.ctime, sigh_id, mail_id]
+      raise "A DB error occurred" unless db.insert("sightings", sighvalues)
+
+      # explanation: I don't want to insert the same malware twice but I do want to
+      # insert the sighting value anyway ("the malware X has been downloaded 1 time but
+      # has been spoted 32 times")
+
+      #Add to the queue
+      @id = db.analysis_queue_add(bin.sha, sourceinfo, bin.filename, profile, priority, nil, sigh_id )
+
+      db.commit
+      db.close
+
+      @id
+
+    end
+  end
+
  class Insertdb
 
    def initialize
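
Note: the new QueueManager module wraps the sample-queuing logic (hashing, dedup, sighting and queue insert) in a single call. A minimal usage sketch; the path, source name, profile and priority are placeholder values:

    # hypothetical sample downloaded by a feed module
    queue_id = Dorothy::QueueManager.add("/tmp/sample.exe", "mailbox", "default", 1)
    puts "queued as analysis ##{queue_id}" if queue_id    # add returns false for empty files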
@@ -87,6 +235,8 @@ module Dorothy
          value1 = value
        elsif value == "null"
          value1 = value
+        elsif value == nil
+          value1 = "null"
        elsif value == "lastval()"
          value1 = value
        elsif value =~ /currval/
@@ -110,9 +260,8 @@ module Dorothy
 
      begin
        @db.exec("INSERT into dorothy.#{table} values (#{@sqlstring})")
-      rescue => e
-        LOGGER.debug "DB", $!
-        LOGGER.debug "DB", e.inspect
+      rescue PG::Error => err
+        LOGGER.error "DB", err.result.error_field( PG::Result::PG_DIAG_MESSAGE_PRIMARY )
        #self.rollback
        return false
        #exit 1
@@ -125,8 +274,8 @@ module Dorothy
    def raw_insert(table, data)
      begin
        @db.exec("INSERT into dorothy.#{table} values (#{data})")
-      rescue
-        LOGGER.error "DB", "#{$!}"
+      rescue PG::Error => err
+        LOGGER.error "DB", err.result.error_field( PG::Result::PG_DIAG_MESSAGE_PRIMARY )
        #self.rollback
        return false
        #exit 1
@@ -144,6 +293,19 @@ module Dorothy
      @db.exec("SELECT nextval('dorothy.analyses_id_seq')").first["nextval"].to_i
    end
 
+    def get_email_id
+      @db.exec("SELECT nextval('dorothy.emails_id_seq')").first["nextval"].to_i
+    end
+
+    def get_sighting_id
+      @db.exec("SELECT nextval('dorothy.sightings_id_seq')").first["nextval"].to_i
+    end
+
+    def get_curr_queue_id
+      @db.exec("SELECT currval('dorothy.queue_id_seq')").first["currval"].to_i
+    end
+
+
    def self.escape_bytea(data)
      escaped = PGconn.escape_bytea data
      return escaped
@@ -154,7 +316,7 @@ module Dorothy
    end
 
    def update_sample_path(sample, path)
-      @db.exec("UPDATE dorothy.samples set path = '#{path}' where sha256 = #{sample}")
+      @db.exec("UPDATE dorothy.samples set path = '#{path}' where sha256 = '#{sample}'")
    end
 
    def set_analyzed(hash)
@@ -174,25 +336,170 @@ module Dorothy
      @db.exec("SELECT samples.sha256 FROM dorothy.samples").each do |q|
        malwares.push q
      end
-      return malwares
+      malwares
    end
 
+    def push_email_data(m, forwarded_by='null')
+      #m is a message object from the Mail class
+      id = get_email_id
+      values = " '#{m.from[0]}', E'#{m.subject}', E'#{Insertdb.escape_bytea(m.raw_source)}', '#{id}', null, null, null, null, null, '#{Util.get_time(m.date)}', '#{m.message_id}', '#{m.has_attachments?}', '#{m.charset}', '#{Digest::SHA2.hexdigest(m.body.raw_source)}', #{forwarded_by}"
+      raise "A DB error occurred while adding data into the emails table" unless raw_insert('emails', values)
+
+      #adding receivers
+      #TO
+      m.to_addrs.each {|addr| raise "A DB error occurred while adding data into the emails_receivers table" unless insert('email_receivers', [addr, id, 'to'])}
+      #CC
+      m.cc_addrs.each {|addr| raise "A DB error occurred while adding data into the emails_receivers table" unless insert('email_receivers', [addr, id, 'cc'])}
+
+
+      id
+    end
+
+    def analysis_queue_add(bin, source, filename, profile='default', priority=0, user='system', sigh_id)
+      id = "default"
+      time = Util.get_time
+      values = [id, time, bin, priority, profile, check_source_db(source)["id"], user, filename, "pending", sigh_id.to_i]
+
+      raise "A DB error occurred while adding data into the anaylsis_queue table" unless insert("analysis_queue", values)
+
+      @id = get_curr_queue_id #race condition?
+    end
+
+    def analysis_queue_mark(id,status)
+      begin
+        @db.exec("UPDATE dorothy.analysis_queue set status = '#{status}' where id = '#{id}'")
+      end
+    rescue PG::Error => err
+      LOGGER.error "DB","Error while updating analysis_queue_mark_analysed table: " + err.result.error_field( PG::Result::PG_DIAG_MESSAGE_PRIMARY )
+      raise err
+    end
+
+    def analysis_queue_pull
+      @bins = []
+      begin
+        @db.exec("SELECT analysis_queue.id, analysis_queue.binary, samples.path, analysis_queue.filename, analysis_queue.priority, analysis_queue.profile, analysis_queue.source, analysis_queue.date FROM dorothy.analysis_queue, dorothy.samples WHERE analysis_queue.binary = samples.sha256 AND analysis_queue.status = 'pending' ORDER BY analysis_queue.priority DESC, analysis_queue.id ASC").each do |q|
+          @bins.push q
+        end
+      rescue PG::Error => err
+        LOGGER.error "DB","Error while fetching traffic_dumps table " + err.result.error_field( PG::Result::PG_DIAG_MESSAGE_PRIMARY )
+      end
+      @bins
+    end
+
+    #Mark all the pending analyses as analyzed
+    def analysis_queue_mark_all
+      analysis_queue_pull.each do |qentry|
+        analysis_queue_mark(qentry["id"], "analysed")
+      end
+      LOGGER.debug "DB", "Pending analyses removed from the queue"
+    end
+
+    #List pending analyses
+    def analysis_queue_view
+      LOGGER.info "QUEUE", "Pending analyses:"
+      puts "\n[" + "-".red + "] " + "\tID\tAdded\t\t\tSource\tFilename"
+      puts "[" + "-".red + "] " + "\t--\t-----\t\t\t------\t--------\n"
+
+      analysis_queue_pull.each do |qentry|
+        puts "[" + "*".red + "] " + "\t#{qentry["id"]}\t#{qentry["date"]}\t#{qentry["source"]}\t#{qentry["filename"]}"
+        puts ""
+      end
+    end
+
+    def find_last_conf_chksum(conf)
+      begin
+        r = @db.exec("SELECT cfg_chk.md5_chksum FROM dorothy.cfg_chk WHERE cfg_chk.conf_file = '#{conf}' ORDER BY cfg_chk.id DESC LIMIT 1")
+        r.first.nil? ? nil : r.first["md5_chksum"]
+      rescue PG::Error => err
+        LOGGER.error "DB","Error while fetching conf_chk table " + err.result.error_field( PG::Result::PG_DIAG_MESSAGE_PRIMARY )
+      end
+    end
+
+    def disable_source_db(id)
+      begin
+        @db.exec("UPDATE dorothy.sources set disabled = true, last_modified = '#{Util.get_time}'where id = '#{id}'")
+        true
+      rescue PG::Error => err
+        LOGGER.error "DB", "An error occurred while adding data into sources table " + err.result.error_field( PG::Result::PG_DIAG_MESSAGE_PRIMARY )
+        false
+      end
+    end
+
+    def check_source_db(source)
+      begin
+        r = @db.exec("SELECT sources.id, sources.sname, sources.stype, sources.host, sources.localdir FROM dorothy.sources WHERE sources.disabled = FALSE AND sources.sname = '#{source}'")
+        r.first.nil? ? nil : r.first
+      rescue PG::Error => err
+        LOGGER.error "DB", "An error occurred while accessing sources table" + err.result.error_field( PG::Result::PG_DIAG_MESSAGE_PRIMARY )
+      end
+    end
+
+    def enabled_sources_db
+      begin
+        r = @db.exec("SELECT sources.id, sources.sname, sources.stype, sources.host, sources.localdir FROM dorothy.sources WHERE sources.disabled = FALSE")
+        r.first.nil? ? nil : r
+      rescue PG::Error => err
+        LOGGER.error "DB", "An error occurred while accessing sources table" + err.result.error_field( PG::Result::PG_DIAG_MESSAGE_PRIMARY )
+      end
+    end
+
+
+    def check_sources_modifications(source)
+
+      db_sources = enabled_sources_db
+
+      unless db_sources.nil?
+        db_sources.each do |s|
+          unless source.has_key?(s["sname"])
+            LOGGER.warn "CheckCONF", "#{s["sname"]} was removed, disabling"
+            disable_source_db(s["id"])
+          end
+        end
+      end
+
+
+      source.each do |k,v|
+        values = ['default', k, v["type"], 'default', v["host"], 0, Util.get_time, Util.get_time, v["localdir"]]
+
+        db_source = check_source_db(k)
+        if db_source.nil?
+          LOGGER.warn "CheckCONF", "#{k} Added"
+          insert("sources", values)
+        elsif v["type"] != db_source["stype"] || v["host"] != db_source["host"] || v["localdir"] != db_source["localdir"]
+
+          LOGGER.warn "CheckCONF", "#{k} MODIFIED"
+
+          disable_source_db(db_source["id"])
+          LOGGER.warn "CheckCONF", "#{k} DISABLED"
+
+          insert("sources", values)
+          LOGGER.warn "CheckCONF", "#{k} ADDED"
+        end
+      end
+    end
+
+
+
    def find_pcap
      @pcaps = []
      begin
        @db.exec("SELECT traffic_dumps.sha256, traffic_dumps.pcapr_id, traffic_dumps.size, traffic_dumps.binary, traffic_dumps.parsed, samples.md5 as \"sample\", analyses.date as \"date\", analyses.id as \"anal_id\" FROM dorothy.traffic_dumps, dorothy.samples, dorothy.analyses WHERE analyses.traffic_dump = traffic_dumps.sha256 AND analyses.sample = samples.sha256 AND traffic_dumps.parsed = false").each do |q|
        @pcaps.push q
      end
-      rescue
-        LOGGER.error "DB","Error while fetching traffic_dumps table\n " + $!
+      rescue PG::Error => err
+        LOGGER.error "DB","Error while fetching traffic_dumps table " + err.result.error_field( PG::Result::PG_DIAG_MESSAGE_PRIMARY )
      end
 
    end
 
-    def find_vm
-      vm = @db.exec("SELECT id, hostname, ipaddress, username, password FROM dorothy.sandboxes where is_available is true").first
+    def get_curr_malwares_id
+      @db.exec("SELECT nextval('dorothy.malwares_id_seq')").first["nextval"].to_i
+    end
+
+    def find_vm(os_type, os_version, os_lang)
+      vm = @db.exec("SELECT id, hostname, ipaddress, username, password FROM dorothy.sandboxes where os = '#{os_type}' AND version = '#{os_version}' AND os_lang = '#{os_lang}' AND is_available is true").first
      if vm.nil?
-        LOGGER.debug "DB","At this time there are no free VM available" if VERBOSE
+        LOGGER.debug "DB","At this time there are no free VM available that matches the selected profile" if VERBOSE
        return false
      else
        @db.exec("UPDATE dorothy.sandboxes set is_available = false where id = '#{vm["id"]}'")
@@ -207,9 +514,8 @@ module Dorothy
        @db.exec("UPDATE dorothy.sandboxes set is_available = true where id = '#{vmid}'")
        LOGGER.info "DB", "VM #{vmid} succesfully released"
        return true
-      rescue
-        LOGGER.error "DB", "An error occurred while releasing the VM"
-        LOGGER.debug "DB", $!
+      rescue PG::Error => err
+        LOGGER.error "DB", "An error occurred while releasing the VM " + err.result.error_field( PG::Result::PG_DIAG_MESSAGE_PRIMARY )
        return false
      end
    else
@@ -226,6 +532,44 @@ module Dorothy
 
  end
 
+  ##CLASS MAILER FROM SALVATORE
+  class Mailer
+    attr_reader :n_emails
+    attr_reader :delete_once_downloaded
+
+    def initialize(account)
+
+      @n_emails = account[:n_emails]
+      @delete_once_downloaded = account[:delete_once_downloaded]
+
+      @mailbox = Mail.defaults do
+        retriever_method :pop3,
+                         :address => account[:address],
+                         :user_name => account[:username],
+                         :password => account[:password],
+                         :port => account[:port],
+                         :enable_ssl => account[:ssl]
+      end
+    end
+
+    def read_from_string(string)
+      Mail.read_from_string(string)
+    end
+
+
+    def get_emails
+      begin
+        @emails = @mailbox.find(:what => :first, :count => @n_emails, :order => :asc, :delete_after_find => @delete_once_downloaded)
+      rescue Net::POPError => e
+        LOGGER.error "MAIL", e.message
+        raise
+      end
+
+    end
+
+
+  end
+
  class Loadmalw
    attr_reader :pcaprid
    attr_reader :type
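
Note: the Mailer class is a thin wrapper around the mail gem's POP3 retriever. A short sketch of pulling messages with it, assuming the retriever returns an array of Mail::Message objects; the account hash mirrors the keys read in initialize, and the values are placeholders:

    account = { :address => "pop.example.com", :port => 995, :ssl => true,
                :username => "dorothy", :password => "secret",
                :n_emails => 10, :delete_once_downloaded => false }

    mailer = Dorothy::Mailer.new(account)
    mailer.get_emails.each { |mail| puts "#{mail.from.first}: #{mail.subject}" }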
@@ -234,39 +578,44 @@ module Dorothy
    attr_reader :md5
    attr_reader :binpath
    attr_reader :filename
-    attr_reader :full_filename #here i'm sure that the file has an extension and can be executed by windows
+
+    #Here i'm sure that the file has an extension and can be executed by windows
+    attr_reader :full_filename
    attr_reader :ctime
    attr_reader :size
    attr_reader :pcapsize
    attr_reader :extension
-    attr_accessor :sourceinfo #used for storing info about where the binary come from (if needed)
 
-    # attr_accessor :dir_home
+    #Used for storing info about where the binary comes from (if needed)
+    attr_accessor :sourceinfo
+
+    #binaries' repository where all the samples go.
+    attr_reader :binpath_repo
+
+    #Analysis folder where the files will be created
    attr_accessor :dir_pcap
    attr_accessor :dir_bin
    attr_accessor :dir_screens
    attr_accessor :dir_downloads
 
-    def initialize(file)
+    def initialize(file, change_filename=nil)
 
      fm = FileMagic.new
-      sha = Digest::SHA2.new
-      md5 = Digest::MD5.new
      @binpath = file
-      @filename = File.basename(file)
-      @extension = File.extname(file)[1..-1]
+      change_filename ||= File.basename(file).strip
+
+      @filename = change_filename
+      @extension = File.extname(change_filename)[1..-1]
+
+
+      @md5 = Digest::MD5.hexdigest(File.read(file))
+      @sha = Digest::SHA2.hexdigest(File.read(file))
 
-      File.open(file, 'rb') do |fh1|
-        while buffer1 = fh1.read(1024)
-          @sha = sha << buffer1
-          @md5 = md5 << buffer1
-        end
-      end
 
-      @sha = @sha.to_s
-      @md5 = @md5.to_s.rstrip
      @sourceinfo = nil
 
+      @binpath_repo = DoroSettings.env[:bins_repository] + '/' + @md5
+
      timetmp = File.ctime(file)
      @ctime= timetmp.strftime("%m/%d/%y %H:%M:%S")
      @type = fm.file(file)
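
Note: Loadmalw now hashes the whole file with Digest::MD5/SHA2.hexdigest and accepts an optional filename override. A minimal construction sketch; the paths and names are placeholders:

    bin = Dorothy::Loadmalw.new("/tmp/downloaded.bin", "invoice.exe")
    bin.filename       # => "invoice.exe"
    bin.extension      # => "exe"
    bin.md5            # hex MD5 of the file contents
    bin.binpath_repo   # repository path derived from the MD5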
@@ -307,4 +656,4 @@ module Dorothy
  end
 
 
-end
+end