mihari 5.4.1 → 5.4.3

Sign up to get free protection for your applications and to get access to all the features.
Files changed (68) hide show
  1. checksums.yaml +4 -4
  2. data/frontend/package-lock.json +145 -146
  3. data/frontend/package.json +8 -8
  4. data/frontend/src/swagger.yaml +306 -272
  5. data/lib/mihari/analyzers/base.rb +0 -4
  6. data/lib/mihari/analyzers/binaryedge.rb +4 -44
  7. data/lib/mihari/analyzers/censys.rb +4 -20
  8. data/lib/mihari/analyzers/circl.rb +2 -26
  9. data/lib/mihari/analyzers/crtsh.rb +2 -17
  10. data/lib/mihari/analyzers/dnstwister.rb +1 -3
  11. data/lib/mihari/analyzers/greynoise.rb +5 -4
  12. data/lib/mihari/analyzers/hunterhow.rb +8 -23
  13. data/lib/mihari/analyzers/onyphe.rb +5 -39
  14. data/lib/mihari/analyzers/otx.rb +2 -38
  15. data/lib/mihari/analyzers/passivetotal.rb +3 -41
  16. data/lib/mihari/analyzers/securitytrails.rb +3 -41
  17. data/lib/mihari/analyzers/shodan.rb +7 -39
  18. data/lib/mihari/analyzers/urlscan.rb +2 -38
  19. data/lib/mihari/analyzers/virustotal_intelligence.rb +2 -25
  20. data/lib/mihari/analyzers/zoomeye.rb +17 -83
  21. data/lib/mihari/cli/alert.rb +11 -0
  22. data/lib/mihari/cli/main.rb +6 -1
  23. data/lib/mihari/clients/base.rb +9 -1
  24. data/lib/mihari/clients/binaryedge.rb +27 -2
  25. data/lib/mihari/clients/censys.rb +32 -2
  26. data/lib/mihari/clients/circl.rb +28 -1
  27. data/lib/mihari/clients/crtsh.rb +9 -2
  28. data/lib/mihari/clients/dnstwister.rb +4 -2
  29. data/lib/mihari/clients/greynoise.rb +31 -4
  30. data/lib/mihari/clients/hunterhow.rb +41 -3
  31. data/lib/mihari/clients/onyphe.rb +25 -3
  32. data/lib/mihari/clients/otx.rb +40 -0
  33. data/lib/mihari/clients/passivetotal.rb +33 -15
  34. data/lib/mihari/clients/securitytrails.rb +44 -0
  35. data/lib/mihari/clients/shodan.rb +30 -2
  36. data/lib/mihari/clients/urlscan.rb +32 -6
  37. data/lib/mihari/clients/virustotal.rb +29 -4
  38. data/lib/mihari/clients/zoomeye.rb +53 -2
  39. data/lib/mihari/commands/alert.rb +42 -0
  40. data/lib/mihari/commands/rule.rb +2 -2
  41. data/lib/mihari/commands/search.rb +20 -59
  42. data/lib/mihari/commands/web.rb +1 -1
  43. data/lib/mihari/config.rb +2 -2
  44. data/lib/mihari/emitters/base.rb +1 -1
  45. data/lib/mihari/emitters/database.rb +2 -2
  46. data/lib/mihari/errors.rb +23 -2
  47. data/lib/mihari/http.rb +7 -1
  48. data/lib/mihari/schemas/alert.rb +14 -0
  49. data/lib/mihari/services/alert_proxy.rb +106 -0
  50. data/lib/mihari/services/alert_runner.rb +22 -0
  51. data/lib/mihari/services/{rule.rb → rule_proxy.rb} +10 -6
  52. data/lib/mihari/services/rule_runner.rb +49 -0
  53. data/lib/mihari/structs/censys.rb +11 -11
  54. data/lib/mihari/structs/greynoise.rb +17 -8
  55. data/lib/mihari/structs/onyphe.rb +7 -7
  56. data/lib/mihari/structs/shodan.rb +5 -5
  57. data/lib/mihari/structs/urlscan.rb +3 -3
  58. data/lib/mihari/structs/virustotal_intelligence.rb +3 -3
  59. data/lib/mihari/version.rb +1 -1
  60. data/lib/mihari/web/endpoints/alerts.rb +22 -0
  61. data/lib/mihari/web/endpoints/rules.rb +8 -8
  62. data/lib/mihari/web/public/assets/{index-61dc587c.js → index-4d7eda9f.js} +1 -1
  63. data/lib/mihari/web/public/index.html +1 -1
  64. data/lib/mihari/web/public/redoc-static.html +29 -27
  65. data/lib/mihari.rb +6 -1
  66. data/mihari.gemspec +9 -10
  67. metadata +28 -37
  68. data/Steepfile +0 -31
@@ -25,9 +25,13 @@ module Mihari
25
25
  def artifacts
26
26
  case type
27
27
  when "host"
28
- host_search
28
+ client.host_search_with_pagination(query).map do |res|
29
+ convert(res)
30
+ end.flatten
29
31
  when "web"
30
- web_search
32
+ client.web_search_with_pagination(query).map do |res|
33
+ convert(res)
34
+ end.flatten
31
35
  else
32
36
  raise InvalidInputError, "#{type} type is not supported." unless valid_type?
33
37
  end
@@ -39,8 +43,6 @@ module Mihari
39
43
 
40
44
  private
41
45
 
42
- PAGE_SIZE = 10
43
-
44
46
  #
45
47
  # Check whether a type is valid or not
46
48
  #
@@ -51,95 +53,27 @@ module Mihari
51
53
  end
52
54
 
53
55
  def client
54
- @client ||= Clients::ZoomEye.new(api_key: api_key)
56
+ @client ||= Clients::ZoomEye.new(api_key: api_key, interval: interval)
55
57
  end
56
58
 
57
59
  #
58
60
  # Convert responses into an array of String
59
61
  #
60
- # @param [Array<Hash>] responses
62
+ # @param [Hash] response
61
63
  #
62
64
  # @return [Array<Mihari::Artifact>]
63
65
  #
64
- def convert_responses(responses)
65
- responses.map do |res|
66
- matches = res["matches"] || []
67
- matches.map do |match|
68
- data = match["ip"]
66
+ def convert(res)
67
+ matches = res["matches"] || []
68
+ matches.map do |match|
69
+ data = match["ip"]
69
70
 
70
- if data.is_a?(Array)
71
- data.map { |d| Artifact.new(data: d, source: source, metadata: match) }
72
- else
73
- Artifact.new(data: data, source: source, metadata: match)
74
- end
71
+ if data.is_a?(Array)
72
+ data.map { |d| Artifact.new(data: d, source: source, metadata: match) }
73
+ else
74
+ Artifact.new(data: data, source: source, metadata: match)
75
75
  end
76
- end.flatten.compact.uniq
77
- end
78
-
79
- #
80
- # Host search
81
- #
82
- # @param [String] query
83
- # @param [Integer] page
84
- #
85
- # @return [Hash, nil]
86
- #
87
- def _host_search(query, page: 1)
88
- client.host_search(query, page: page)
89
- end
90
-
91
- #
92
- # Host search
93
- #
94
- # @return [Array<String>]
95
- #
96
- def host_search
97
- responses = []
98
- (1..pagination_limit).each do |page|
99
- res = _host_search(query, page: page)
100
- break unless res
101
-
102
- total = res["total"].to_i
103
- responses << res
104
- break if total <= page * PAGE_SIZE
105
-
106
- # sleep #{interval} seconds to avoid the rate limitation (if it is set)
107
- sleep_interval
108
- end
109
- convert_responses responses.compact
110
- end
111
-
112
- #
113
- # Web search
114
- #
115
- # @param [String] query
116
- # @param [Integer] page
117
- #
118
- # @return [Hash, nil]
119
- #
120
- def _web_search(query, page: 1)
121
- client.web_search(query, page: page)
122
- end
123
-
124
- #
125
- # Web search
126
- #
127
- # @return [Array<String>]
128
- #
129
- def web_search
130
- responses = []
131
- (1..pagination_limit).each do |page|
132
- res = _web_search(query, page: page)
133
- break unless res
134
-
135
- total = res["total"].to_i
136
- responses << res
137
- break if total <= page * PAGE_SIZE
138
-
139
- # sleep #{interval} seconds to avoid the rate limitation (if it is set)
140
- sleep_interval
141
- end
142
- convert_responses responses.compact
76
+ end.flatten
143
77
  end
144
78
  end
145
79
  end
@@ -0,0 +1,11 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "mihari/commands/alert"
4
+
5
+ module Mihari
6
+ module CLI
7
+ class Alert < Base
8
+ include Mihari::Commands::Alert
9
+ end
10
+ end
11
+ end
@@ -3,14 +3,16 @@
3
3
  require "thor"
4
4
 
5
5
  # Commands
6
+ require "mihari/commands/alert"
7
+ require "mihari/commands/database"
6
8
  require "mihari/commands/search"
7
9
  require "mihari/commands/version"
8
10
  require "mihari/commands/web"
9
- require "mihari/commands/database"
10
11
 
11
12
  # CLIs
12
13
  require "mihari/cli/base"
13
14
 
15
+ require "mihari/cli/alert"
14
16
  require "mihari/cli/database"
15
17
  require "mihari/cli/rule"
16
18
 
@@ -26,6 +28,9 @@ module Mihari
26
28
 
27
29
  desc "rule", "Sub commands for rule"
28
30
  subcommand "rule", Rule
31
+
32
+ desc "alert", "Sub commands for alert"
33
+ subcommand "alert", Alert
29
34
  end
30
35
  end
31
36
  end
@@ -9,17 +9,25 @@ module Mihari
9
9
  # @return [Hash]
10
10
  attr_reader :headers
11
11
 
12
+ # @return [Integer, nil]
13
+ attr_reader :interval
14
+
12
15
  #
13
16
  # @param [String] base_url
14
17
  # @param [Hash] headers
15
18
  #
16
- def initialize(base_url, headers: {})
19
+ def initialize(base_url, headers: {}, interval: nil)
17
20
  @base_url = base_url
18
21
  @headers = headers || {}
22
+ @interval = interval
19
23
  end
20
24
 
21
25
  private
22
26
 
27
+ def sleep_interval
28
+ sleep(interval) if interval
29
+ end
30
+
23
31
  #
24
32
  # @param [String] path
25
33
  #
@@ -7,13 +7,14 @@ module Mihari
7
7
  # @param [String] base_url
8
8
  # @param [String, nil] api_key
9
9
  # @param [Hash] headers
10
 + # @param [Integer, nil] interval
10
11
  #
11
- def initialize(base_url = "https://api.binaryedge.io/v2", api_key:, headers: {})
12
+ def initialize(base_url = "https://api.binaryedge.io/v2", api_key:, headers: {}, interval: nil)
12
13
  raise(ArgumentError, "'api_key' argument is required") unless api_key
13
14
 
14
15
  headers["x-key"] = api_key
15
16
 
16
- super(base_url, headers: headers)
17
+ super(base_url, headers: headers, interval: interval)
17
18
  end
18
19
 
19
20
  #
@@ -33,6 +34,30 @@ module Mihari
33
34
  res = get("/query/search", params: params)
34
35
  JSON.parse(res.body.to_s)
35
36
  end
37
+
38
+ #
39
+ # @param [String] query
40
+ # @param [Integer, nil] only_ips
41
+ # @param [Integer] pagination_limit
42
+ #
43
+ # @return [Enumerable<Hash>]
44
+ #
45
+ def search_with_pagination(query, only_ips: nil, pagination_limit: Mihari.config.pagination_limit)
46
+ Enumerator.new do |y|
47
+ (1..pagination_limit).each do |page|
48
+ res = search(query, page: page, only_ips: only_ips)
49
+
50
+ page_size = res["pagesize"].to_i
51
+ events = res["events"] || []
52
+
53
+ y.yield res
54
+
55
+ break if events.length < page_size
56
+
57
+ sleep_interval
58
+ end
59
+ end
60
+ end
36
61
  end
37
62
  end
38
63
  end
@@ -10,14 +10,15 @@ module Mihari
10
10
  # @param [String, nil] id
11
11
  # @param [String, nil] secret
12
12
  # @param [Hash] headers
13
+ # @param [Integer, nil] interval
13
14
  #
14
- def initialize(base_url = "https://search.censys.io", id:, secret:, headers: {})
15
+ def initialize(base_url = "https://search.censys.io", id:, secret:, headers: {}, interval: nil)
15
16
  raise(ArgumentError, "'id' argument is required") if id.nil?
16
17
  raise(ArgumentError, "'secret' argument is required") if secret.nil?
17
18
 
18
19
  headers["authorization"] = "Basic #{Base64.strict_encode64("#{id}:#{secret}")}"
19
20
 
20
- super(base_url, headers: headers)
21
+ super(base_url, headers: headers, interval: interval)
21
22
  end
22
23
 
23
24
  #
@@ -37,6 +38,35 @@ module Mihari
37
38
  res = get("/api/v2/hosts/search", params: params)
38
39
  Structs::Censys::Response.from_dynamic! JSON.parse(res.body.to_s)
39
40
  end
41
+
42
+ #
43
+ # @param [String] query
44
+ # @param [Integer, nil] per_page
45
+ # @param [Integer] pagination_limit
46
+ #
47
+ # @return [Enumerable<Structs::Censys::Response>]
48
+ #
49
+ def search_with_pagination(query, per_page: nil, pagination_limit: Mihari.config.pagination_limit)
50
+ cursor = nil
51
+
52
+ Enumerator.new do |y|
53
+ pagination_limit.times do
54
+ res = search(query, per_page: per_page, cursor: cursor)
55
+
56
+ y.yield res
57
+
58
+ cursor = res.result.links.next
59
+ # NOTE: Censys's search API is unstable recently
60
+ # it may returns empty links or empty string cursors
61
+ # - Empty links: "links": {}
62
+ # - Empty cursors: "links": { "next": "", "prev": "" }
63
+ # So it needs to check both cases
64
+ break if cursor.nil? || cursor.empty?
65
+
66
+ sleep_interval
67
+ end
68
+ end
69
+ end
40
70
  end
41
71
  end
42
72
  end
@@ -20,6 +20,34 @@ module Mihari
20
20
  super(base_url, headers: headers)
21
21
  end
22
22
 
23
+ #
24
+ # Passive DNS search
25
+ #
26
+ # @param [String] query
27
+ #
28
+ # @return [Array<String>]
29
+ #
30
+ def passive_dns_search(query)
31
+ results = dns_query(query)
32
+ results.filter_map do |result|
33
+ type = result["rrtype"]
34
+ (type == "A") ? result["rdata"] : nil
35
+ end.uniq
36
+ end
37
+
38
+ #
39
+ # Passive SSL search
40
+ #
41
+ # @param [String] query
42
+ #
43
+ # @return [Array<String>]
44
+ #
45
+ def passive_ssl_search(query)
46
+ result = ssl_cquery(query)
47
+ seen = result["seen"] || []
48
+ seen.uniq
49
+ end
50
+
23
51
  #
24
52
  # @param [String] query
25
53
  #
@@ -40,7 +68,6 @@ module Mihari
40
68
 
41
69
  private
42
70
 
43
- #
44
71
  #
45
72
  # @param [String] path
46
73
  # @param [Hash] params
@@ -18,13 +18,20 @@ module Mihari
18
18
  # @param [String, nil] match "=", "ILIKE", "LIKE", "single", "any" or nil
19
19
  # @param [String, nil] exclude "expired" or nil
20
20
  #
21
- # @return [Array<Hash>]
21
+ # @return [Array<Mihari::Artifact>]
22
22
  #
23
23
  def search(identity, match: nil, exclude: nil)
24
24
  params = { identity: identity, match: match, exclude: exclude, output: "json" }.compact
25
25
 
26
26
  res = get("/", params: params)
27
- JSON.parse(res.body.to_s)
27
+ parsed = JSON.parse(res.body.to_s)
28
+
29
+ parsed.map do |result|
30
+ values = result["name_value"].to_s.lines.map(&:chomp)
31
+ values.map do |value|
32
+ Artifact.new(data: value, metadata: result)
33
+ end
34
+ end.flatten
28
35
  end
29
36
  end
30
37
  end
@@ -16,11 +16,13 @@ module Mihari
16
16
  #
17
17
  # @param [String] domain
18
18
  #
19
- # @return [Hash]
19
+ # @return [Array<String>]
20
20
  #
21
21
  def fuzz(domain)
22
22
  res = get("/api/fuzz/#{to_hex(domain)}")
23
- JSON.parse(res.body.to_s)
23
+ res = JSON.parse(res.body.to_s)
24
+ fuzzy_domains = res["fuzzy_domains"] || []
25
+ fuzzy_domains.map { |d| d["domain"] }
24
26
  end
25
27
 
26
28
  private
@@ -3,32 +3,59 @@
3
3
  module Mihari
4
4
  module Clients
5
5
  class GreyNoise < Base
6
+ PAGE_SIZE = 10_000
7
+
6
8
  #
7
9
  # @param [String] base_url
8
10
  # @param [String, nil] api_key
9
11
  # @param [Hash] headers
12
+ # @param [Integer, nil] interval
10
13
  #
11
- def initialize(base_url = "https://api.greynoise.io", api_key:, headers: {})
14
+ def initialize(base_url = "https://api.greynoise.io", api_key:, headers: {}, interval: nil)
12
15
  raise(ArgumentError, "'api_key' argument is required") unless api_key
13
16
 
14
17
  headers["key"] = api_key
15
- super(base_url, headers: headers)
18
+ super(base_url, headers: headers, interval: interval)
16
19
  end
17
20
 
18
21
  #
19
22
  # GNQL (GreyNoise Query Language) is a domain-specific query language that uses Lucene deep under the hood
20
23
  #
21
24
  # @param [String] query GNQL query string
22
- # @param [Integer, nil] size Maximum amount of results to grab
25
+ # @param [Integer] size Maximum amount of results to grab
23
26
  # @param [Integer, nil] scroll Scroll token to paginate through results
24
27
  #
25
28
  # @return [Hash]
26
29
  #
27
- def gnql_search(query, size: nil, scroll: nil)
30
+ def gnql_search(query, size: PAGE_SIZE, scroll: nil)
28
31
  params = { query: query, size: size, scroll: scroll }.compact
29
32
  res = get("/v2/experimental/gnql", params: params)
30
33
  Structs::GreyNoise::Response.from_dynamic! JSON.parse(res.body.to_s)
31
34
  end
35
+
36
+ #
37
+ # @param [String] query
38
+ # @param [Integer] size
39
+ # @param [Integer] pagination_limit
40
+ #
41
+ # @return [Enumerable<Structs::GreyNoise::Response>]
42
+ #
43
+ def gnql_search_with_pagination(query, size: PAGE_SIZE, pagination_limit: Mihari.config.pagination_limit)
44
+ scroll = nil
45
+
46
+ Enumerator.new do |y|
47
+ pagination_limit.times do
48
+ res = gnql_search(query, size: size, scroll: scroll)
49
+
50
+ y.yield res
51
+
52
+ scroll = res.scroll
53
+ break if scroll.nil?
54
+
55
+ sleep_interval
56
+ end
57
+ end
58
+ end
32
59
  end
33
60
  end
34
61
  end
@@ -5,6 +5,8 @@ require "base64"
5
5
  module Mihari
6
6
  module Clients
7
7
  class HunterHow < Base
8
+ PAGE_SIZE = 100
9
+
8
10
  # @return [String]
9
11
  attr_reader :api_key
10
12
 
@@ -12,11 +14,12 @@ module Mihari
12
14
  # @param [String] base_url
13
15
  # @param [String, nil] api_key
14
16
  # @param [Hash] headers
17
+ # @param [Integer, nil] interval
15
18
  #
16
- def initialize(base_url = "https://api.hunter.how/", api_key:, headers: {})
19
+ def initialize(base_url = "https://api.hunter.how/", api_key:, headers: {}, interval: nil)
17
20
  raise(ArgumentError, "'api_key' argument is required") unless api_key
18
21
 
19
- super(base_url, headers: headers)
22
+ super(base_url, headers: headers, interval: interval)
20
23
 
21
24
  @api_key = api_key
22
25
  end
@@ -30,7 +33,7 @@ module Mihari
30
33
  #
31
34
  # @return [Structs::HunterHow::Response]
32
35
  #
33
- def search(query, start_time:, end_time:, page: 1, page_size: 10)
36
+ def search(query, start_time:, end_time:, page: 1, page_size: PAGE_SIZE)
34
37
  params = {
35
38
  query: Base64.urlsafe_encode64(query),
36
39
  page: page,
@@ -42,6 +45,41 @@ module Mihari
42
45
  res = get("/search", params: params)
43
46
  Structs::HunterHow::Response.from_dynamic! JSON.parse(res.body.to_s)
44
47
  end
48
+
49
+ #
50
+ # @param [String] query String used to query our data
51
+ # @param [Integer] page_size Default 100, Maximum: 100
52
+ # @param [Integer] pagination_limit
53
+ # @param [String] start_time
54
+ # @param [String] end_time
55
+ #
56
+ # @return [Enumerable<Structs::HunterHow::Response>]
57
+ #
58
+ def search_with_pagination(
59
+ query,
60
+ start_time:,
61
+ end_time:,
62
+ page_size: PAGE_SIZE,
63
+ pagination_limit: Mihari.config.pagination_limit
64
+ )
65
+ Enumerator.new do |y|
66
+ (1..pagination_limit).each do |page|
67
+ res = search(
68
+ query,
69
+ start_time: start_time,
70
+ end_time: end_time,
71
+ page: page,
72
+ page_size: page_size
73
+ )
74
+
75
+ y.yield res
76
+
77
+ break if res.data.list.length < page_size
78
+
79
+ sleep_interval
80
+ end
81
+ end
82
+ end
45
83
  end
46
84
  end
47
85
  end
@@ -3,6 +3,8 @@
3
3
  module Mihari
4
4
  module Clients
5
5
  class Onyphe < Base
6
+ PAGE_SIZE = 10
7
+
6
8
  # @return [String]
7
9
  attr_reader :api_key
8
10
 
@@ -11,10 +13,10 @@ module Mihari
11
13
  # @param [String, nil] api_key
12
14
  # @param [Hash] headers
13
15
  #
14
- def initialize(base_url = "https://www.onyphe.io", api_key:, headers: {})
16
+ def initialize(base_url = "https://www.onyphe.io", api_key:, headers: {}, interval: nil)
15
17
  raise(ArgumentError, "'api_key' argument is required") if api_key.nil?
16
18
 
17
- super(base_url, headers: headers)
19
+ super(base_url, headers: headers, interval: interval)
18
20
 
19
21
  @api_key = api_key
20
22
  end
@@ -23,13 +25,33 @@ module Mihari
23
25
  # @param [String] query
24
26
  # @param [Integer] page
25
27
  #
26
- # @return [Hash]
28
+ # @return [Structs::Onyphe::Response]
27
29
  #
28
30
  def datascan(query, page: 1)
29
31
  params = { page: page, apikey: api_key }
30
32
  res = get("/api/v2/simple/datascan/#{query}", params: params)
31
33
  Structs::Onyphe::Response.from_dynamic! JSON.parse(res.body.to_s)
32
34
  end
35
+
36
+ #
37
+ # @param [String] query
38
+ # @param [Integer] pagination_limit
39
+ #
40
+ # @return [Enumerable<Structs::Onyphe::Response>]
41
+ #
42
+ def datascan_with_pagination(query, pagination_limit: Mihari.config.pagination_limit)
43
+ Enumerator.new do |y|
44
+ (1..pagination_limit).each do |page|
45
+ res = datascan(query, page: page)
46
+
47
+ y.yield res
48
+
49
+ break if res.total <= page * PAGE_SIZE
50
+
51
+ sleep_interval
52
+ end
53
+ end
54
+ end
33
55
  end
34
56
  end
35
57
  end
@@ -15,6 +15,46 @@ module Mihari
15
15
  super(base_url, headers: headers)
16
16
  end
17
17
 
18
+ #
19
+ # Domain search
20
+ #
21
+ # @param [String] query
22
+ #
23
+ # @return [Array<String>]
24
+ #
25
+ def domain_search(query)
26
+ res = query_by_domain(query)
27
+ return [] if res.nil?
28
+
29
+ records = res["passive_dns"] || []
30
+ records.filter_map do |record|
31
+ record_type = record["record_type"]
32
+ address = record["address"]
33
+
34
+ address if record_type == "A"
35
+ end.uniq
36
+ end
37
+
38
+ #
39
+ # IP search
40
+ #
41
+ # @param [String] query
42
+ #
43
+ # @return [Array<String>]
44
+ #
45
+ def ip_search(query)
46
+ res = query_by_ip(query)
47
+ return [] if res.nil?
48
+
49
+ records = res["passive_dns"] || []
50
+ records.filter_map do |record|
51
+ record_type = record["record_type"]
52
+ hostname = record["hostname"]
53
+
54
+ hostname if record_type == "A"
55
+ end.uniq
56
+ end
57
+
18
58
  #
19
59
  # @param [String] ip
20
60
  #
@@ -21,35 +21,53 @@ module Mihari
21
21
  end
22
22
 
23
23
  #
24
- # @param [String] query
25
- #
26
- def ssl_search(query)
27
- params = { query: query }
28
- _get("/v2/ssl-certificate/history", params: params)
29
- end
30
-
24
+ # Passive DNS search
31
25
  #
32
26
  # @param [String] query
33
27
  #
34
- # @return [Hash]
28
+ # @return [Array<String>]
35
29
  #
36
30
  def passive_dns_search(query)
37
31
  params = { query: query }
38
- _get("/v2/dns/passive/unique", params: params)
32
+ res = _get("/v2/dns/passive/unique", params: params)
33
+ res["results"] || []
39
34
  end
40
35
 
41
36
  #
42
- # @param [String] query the domain being queried
43
- # @param [String] field whether to return historical results
37
+ # Reverse whois search
44
38
  #
45
- # @return [Hash]
39
+ # @param [String] query
40
+ #
41
+ # @return [Array<Mihari::Artifact>]
46
42
  #
47
- def reverse_whois_search(query:, field:)
43
+ def reverse_whois_search(query)
48
44
  params = {
49
45
  query: query,
50
- field: field
46
+ field: "email"
51
47
  }.compact
52
- _get("/v2/whois/search", params: params)
48
+ res = _get("/v2/whois/search", params: params)
49
+ results = res["results"] || []
50
+ results.map do |result|
51
+ data = result["domain"]
52
+ Artifact.new(data: data, metadata: result)
53
+ end.flatten
54
+ end
55
+
56
+ #
57
+ # Passive SSL search
58
+ #
59
+ # @param [String] query
60
+ #
61
+ # @return [Array<Mihari::Artifact>]
62
+ #
63
+ def ssl_search(query)
64
+ params = { query: query }
65
+ res = _get("/v2/ssl-certificate/history", params: params)
66
+ results = res["results"] || []
67
+ results.map do |result|
68
+ data = result["ipAddresses"]
69
+ data.map { |d| Artifact.new(data: d, metadata: result) }
70
+ end.flatten
53
71
  end
54
72
 
55
73
  private