mihari 3.9.1 → 3.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. checksums.yaml +4 -4
  2. data/README.md +1 -0
  3. data/docker/Dockerfile +1 -1
  4. data/lib/mihari/analyzers/binaryedge.rb +5 -0
  5. data/lib/mihari/analyzers/censys.rb +5 -0
  6. data/lib/mihari/analyzers/greynoise.rb +65 -0
  7. data/lib/mihari/analyzers/onyphe.rb +5 -0
  8. data/lib/mihari/analyzers/rule.rb +7 -0
  9. data/lib/mihari/analyzers/shodan.rb +16 -5
  10. data/lib/mihari/analyzers/urlscan.rb +37 -13
  11. data/lib/mihari/analyzers/virustotal_intelligence.rb +5 -0
  12. data/lib/mihari/analyzers/zoomeye.rb +8 -0
  13. data/lib/mihari/cli/analyzer.rb +3 -0
  14. data/lib/mihari/commands/greynoise.rb +21 -0
  15. data/lib/mihari/commands/search.rb +3 -2
  16. data/lib/mihari/commands/urlscan.rb +1 -2
  17. data/lib/mihari/schemas/configuration.rb +3 -2
  18. data/lib/mihari/schemas/rule.rb +9 -1
  19. data/lib/mihari/structs/greynoise.rb +55 -0
  20. data/lib/mihari/structs/shodan.rb +6 -6
  21. data/lib/mihari/structs/urlscan.rb +51 -0
  22. data/lib/mihari/types.rb +4 -0
  23. data/lib/mihari/version.rb +1 -1
  24. data/lib/mihari/web/public/index.html +1 -1
  25. data/lib/mihari/web/public/redoc-static.html +1 -1
  26. data/lib/mihari/web/public/static/js/app.0a0cc502.js +21 -0
  27. data/lib/mihari/web/public/static/js/app.0a0cc502.js.map +1 -0
  28. data/lib/mihari/web/public/static/js/app.14008741.js +21 -0
  29. data/lib/mihari/web/public/static/js/app.14008741.js.map +1 -0
  30. data/lib/mihari/web/public/static/js/app.fbc19869.js +21 -0
  31. data/lib/mihari/web/public/static/js/app.fbc19869.js.map +1 -0
  32. data/lib/mihari.rb +6 -2
  33. data/mihari.gemspec +7 -5
  34. data/sig/lib/mihari/analyzers/binaryedge.rbs +2 -0
  35. data/sig/lib/mihari/analyzers/censys.rbs +2 -0
  36. data/sig/lib/mihari/analyzers/onyphe.rbs +2 -0
  37. data/sig/lib/mihari/analyzers/shodan.rbs +2 -0
  38. data/sig/lib/mihari/analyzers/urlscan.rbs +5 -2
  39. data/sig/lib/mihari/analyzers/virustotal_intelligence.rbs +2 -0
  40. data/sig/lib/mihari/analyzers/zoomeye.rbs +2 -0
  41. data/sig/lib/mihari/structs/greynoise.rbs +30 -0
  42. data/sig/lib/mihari/structs/shodan.rbs +3 -3
  43. data/sig/lib/mihari/structs/urlscan.rbs +28 -0
  44. metadata +52 -12
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: d2e67ff3e1ae2bf328a9a77ef7c9a88dce779749c422490a97106d3529a9a3b1
4
- data.tar.gz: a71ee49c8fcb0b06e180739a588930783dabbc40078d234f791314dd3f8af9b4
3
+ metadata.gz: 2c8fda839e1bb1ec4733b7da17e20e3b47bd5795d5aca25371d09e5a0fa9a575
4
+ data.tar.gz: a4551c61c625bd08167608051750bf1785a31ad5215e95e857753fc5fed31d00
5
5
  SHA512:
6
- metadata.gz: 41c30a97d80e6d96f425230401b7f0ee947979dff6b2a8c458bb72c38ed34c2577bed1b007f25e2607cacc1c70f6d7de183722bac0b604f0c54f582758db1e53
7
- data.tar.gz: 6cb1d47e4efec3fb54bd93ddacf5e8f55e423d3b28c1e3c6ae2ea852898b88d3995d8eb767e89535ba50bfffb6b533ce946068eb348cfbd33afe41ace587a5df
6
+ metadata.gz: 81a1ca91b65b03f98bb3504a85c8113b2cf189a4911109e113da1058fb9a1bd61c8807e6f3bec72484786b89e68d0027782d38013676bd6ae78aa57434315972
7
+ data.tar.gz: b42b1658310a7ee87940fbebb6507edca2187d6be92957a149f9234af8bf32ba06e702c401d321b553901dc56094964e78ff602549734f51abbf24b4b7a1fa4d
data/README.md CHANGED
@@ -38,6 +38,7 @@ Mihari supports the following services by default.
38
38
  - [crt.sh](https://crt.sh/)
39
39
  - [DN Pedia](https://dnpedia.com/)
40
40
  - [dnstwister](https://dnstwister.report/)
41
+ - [GreyNoise](https://www.greynoise.io/)
41
42
  - [Onyphe](https://onyphe.io)
42
43
  - [OTX](https://otx.alienvault.com/)
43
44
  - [PassiveTotal](https://community.riskiq.com/)
data/docker/Dockerfile CHANGED
@@ -1,4 +1,4 @@
1
- FROM ruby:3.0.2-alpine3.13
1
+ FROM ruby:3.0.3-alpine3.13
2
2
 
3
3
  RUN apk --no-cache add git build-base ruby-dev sqlite-dev postgresql-dev mysql-client mysql-dev \
4
4
  && gem install pg mysql2 \
@@ -10,6 +10,8 @@ module Mihari
10
10
  option :description, default: proc { "query = #{query}" }
11
11
  option :tags, default: proc { [] }
12
12
 
13
+ option :interval, default: proc { 0 }
14
+
13
15
  def artifacts
14
16
  results = search
15
17
  return [] unless results || results.empty?
@@ -55,6 +57,9 @@ module Mihari
55
57
 
56
58
  responses << res
57
59
  break if total <= page * PAGE_SIZE
60
+
61
+ # sleep #{interval} seconds to avoid the rate limitation (if it is set)
62
+ sleep interval
58
63
  end
59
64
  responses
60
65
  end
@@ -10,6 +10,8 @@ module Mihari
10
10
  option :description, default: proc { "query = #{query}" }
11
11
  option :tags, default: proc { [] }
12
12
 
13
+ option :interval, default: proc { 0 }
14
+
13
15
  def artifacts
14
16
  search
15
17
  end
@@ -33,6 +35,9 @@ module Mihari
33
35
 
34
36
  cursor = response.result.links.next
35
37
  break if cursor == ""
38
+
39
+ # sleep #{interval} seconds to avoid the rate limitation (if it is set)
40
+ sleep interval
36
41
  end
37
42
 
38
43
  artifacts.flatten.uniq(&:data)
@@ -0,0 +1,65 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "greynoise"
4
+
5
+ module Mihari
6
+ module Analyzers
7
+ class GreyNoise < Base
8
+ param :query
9
+ option :title, default: proc { "GreyNoise search" }
10
+ option :description, default: proc { "query = #{query}" }
11
+ option :tags, default: proc { [] }
12
+
13
+ def artifacts
14
+ res = Structs::GreyNoise::Response.from_dynamic!(search)
15
+ res.data.map do |datum|
16
+ build_artifact datum
17
+ end
18
+ end
19
+
20
+ private
21
+
22
+ PAGE_SIZE = 10_000
23
+
24
+ def configuration_keys
25
+ %w[greynoise_api_key]
26
+ end
27
+
28
+ def api
29
+ @api ||= ::GreyNoise::API.new(key: Mihari.config.greynoise_api_key)
30
+ end
31
+
32
+ #
33
+ # Search
34
+ #
35
+ # @return [Hash]
36
+ #
37
+ def search
38
+ api.experimental.gnql(query, size: PAGE_SIZE)
39
+ end
40
+
41
+ #
42
+ # Build an artifact from a GreyNoise search API response
43
+ #
44
+ # @param [Structs::GreyNoise::Datum] datum
45
+ #
46
+ # @return [Artifact]
47
+ #
48
+ def build_artifact(datum)
49
+ as = AutonomousSystem.new(asn: normalize_asn(datum.metadata.asn))
50
+
51
+ geolocation = Geolocation.new(
52
+ country: datum.metadata.country,
53
+ country_code: datum.metadata.country_code
54
+ )
55
+
56
+ Artifact.new(
57
+ data: datum.ip,
58
+ source: source,
59
+ autonomous_system: as,
60
+ geolocation: geolocation
61
+ )
62
+ end
63
+ end
64
+ end
65
+ end
@@ -11,6 +11,8 @@ module Mihari
11
11
  option :description, default: proc { "query = #{query}" }
12
12
  option :tags, default: proc { [] }
13
13
 
14
+ option :interval, default: proc { 0 }
15
+
14
16
  def artifacts
15
17
  responses = search
16
18
  return [] unless responses
@@ -59,6 +61,9 @@ module Mihari
59
61
 
60
62
  total = res.total
61
63
  break if total <= page * PAGE_SIZE
64
+
65
+ # sleep #{interval} seconds to avoid the rate limitation (if it is set)
66
+ sleep interval
62
67
  end
63
68
  responses
64
69
  end
@@ -11,6 +11,7 @@ module Mihari
11
11
  "crtsh" => Crtsh,
12
12
  "dnpedia" => DNPedia,
13
13
  "dnstwister" => DNSTwister,
14
+ "greynoise" => GreyNoise,
14
15
  "onyphe" => Onyphe,
15
16
  "otx" => OTX,
16
17
  "passivetotal" => PassiveTotal,
@@ -63,6 +64,12 @@ module Mihari
63
64
  klass = get_analyzer_class(analyzer_name)
64
65
 
65
66
  query = params[:query]
67
+
68
+ # set interval in the top level
69
+ options = params[:options] || {}
70
+ interval = options[:interval]
71
+ params[:interval] = interval
72
+
66
73
  analyzer = klass.new(query, **params)
67
74
 
68
75
  # Use #normalized_artifacts method to get artifacts as Array<Mihari::Artifact>
@@ -10,6 +10,8 @@ module Mihari
10
10
  option :description, default: proc { "query = #{query}" }
11
11
  option :tags, default: proc { [] }
12
12
 
13
+ option :interval, default: proc { 0 }
14
+
13
15
  def artifacts
14
16
  results = search
15
17
  return [] unless results || results.empty?
@@ -58,10 +60,14 @@ module Mihari
58
60
  responses = []
59
61
  (1..Float::INFINITY).each do |page|
60
62
  res = search_with_page(query, page: page)
63
+
61
64
  break unless res
62
65
 
63
66
  responses << res
64
67
  break if res["total"].to_i <= page * PAGE_SIZE
68
+
69
+ # sleep #{interval} seconds to avoid the rate limitation (if it is set)
70
+ sleep interval
65
71
  rescue JSON::ParserError
66
72
  # ignore JSON::ParserError
67
73
  # ref. https://github.com/ninoseki/mihari/issues/197
@@ -78,11 +84,16 @@ module Mihari
78
84
  # @return [Artifact]
79
85
  #
80
86
  def build_artifact(match)
81
- as = AutonomousSystem.new(asn: normalize_asn(match.asn))
82
- geolocation = Geolocation.new(
83
- country: match.location.country_name,
84
- country_code: match.location.country_code
85
- )
87
+ as = nil
88
+ as = AutonomousSystem.new(asn: normalize_asn(match.asn)) unless match.asn.nil?
89
+
90
+ geolocation = nil
91
+ if !match.location.country_name.nil? && !match.location.country_code.nil?
92
+ geolocation = Geolocation.new(
93
+ country: match.location.country_name,
94
+ country_code: match.location.country_code
95
+ )
96
+ end
86
97
 
87
98
  Artifact.new(
88
99
  data: match.ip_str,
@@ -2,8 +2,6 @@
2
2
 
3
3
  require "urlscan"
4
4
 
5
- SUPPORTED_DATA_TYPES = %w[url domain ip].freeze
6
-
7
5
  module Mihari
8
6
  module Analyzers
9
7
  class Urlscan < Base
@@ -12,7 +10,11 @@ module Mihari
12
10
  option :description, default: proc { "query = #{query}" }
13
11
  option :tags, default: proc { [] }
14
12
  option :allowed_data_types, default: proc { SUPPORTED_DATA_TYPES }
15
- option :use_similarity, default: proc { false }
13
+
14
+ option :interval, default: proc { 0 }
15
+
16
+ SUPPORTED_DATA_TYPES = %w[url domain ip].freeze
17
+ SIZE = 1000
16
18
 
17
19
  def initialize(*args, **kwargs)
18
20
  super
@@ -21,16 +23,15 @@ module Mihari
21
23
  end
22
24
 
23
25
  def artifacts
24
- result = search
25
- return [] unless result
26
-
27
- results = result["results"] || []
26
+ responses = search
27
+ results = responses.map(&:results).flatten
28
28
 
29
29
  allowed_data_types.map do |type|
30
- results.filter_map do |match|
31
- match.dig "page", type
30
+ results.filter_map do |result|
31
+ page = result.page
32
+ page.send(type.to_sym)
32
33
  end.uniq
33
- end.flatten
34
+ end.flatten.compact
34
35
  end
35
36
 
36
37
  private
@@ -43,15 +44,38 @@ module Mihari
43
44
  @api ||= ::UrlScan::API.new(Mihari.config.urlscan_api_key)
44
45
  end
45
46
 
47
+ #
48
+ # Search with search_after option
49
+ #
50
+ # @return [Structs::Urlscan::Response]
51
+ #
52
+ def search_with_search_after(search_after: nil)
53
+ res = api.search(query, size: SIZE, search_after: search_after)
54
+ Structs::Urlscan::Response.from_dynamic! res
55
+ end
56
+
46
57
  #
47
58
  # Search
48
59
  #
49
- # @return [Array<Hash>]
60
+ # @return [Array<Structs::Urlscan::Response>]
50
61
  #
51
62
  def search
52
- return api.pro.similar(query) if use_similarity
63
+ responses = []
64
+
65
+ search_after = nil
66
+ loop do
67
+ res = search_with_search_after(search_after: search_after)
68
+ responses << res
69
+
70
+ break if res.results.length < SIZE
71
+
72
+ search_after = res.results.last.sort.join(",")
73
+
74
+ # sleep #{interval} seconds to avoid the rate limitation (if it is set)
75
+ sleep interval
76
+ end
53
77
 
54
- api.search(query, size: 10_000)
78
+ responses
55
79
  end
56
80
 
57
81
  #
@@ -10,6 +10,8 @@ module Mihari
10
10
  option :description, default: proc { "query = #{query}" }
11
11
  option :tags, default: proc { [] }
12
12
 
13
+ option :interval, default: proc { 0 }
14
+
13
15
  def initialize(*args, **kwargs)
14
16
  super
15
17
 
@@ -54,6 +56,9 @@ module Mihari
54
56
  break if response.meta.cursor.nil?
55
57
 
56
58
  cursor = response.meta.cursor
59
+
60
+ # sleep #{interval} seconds to avoid the rate limitation (if it is set)
61
+ sleep interval
57
62
  end
58
63
 
59
64
  responses
@@ -11,6 +11,8 @@ module Mihari
11
11
  option :tags, default: proc { [] }
12
12
  option :type, default: proc { "host" }
13
13
 
14
+ option :interval, default: proc { 0 }
15
+
14
16
  def artifacts
15
17
  case type
16
18
  when "host"
@@ -87,6 +89,9 @@ module Mihari
87
89
  total = res["total"].to_i
88
90
  responses << res
89
91
  break if total <= page * PAGE_SIZE
92
+
93
+ # sleep #{interval} seconds to avoid the rate limitation (if it is set)
94
+ sleep interval
90
95
  end
91
96
  convert_responses responses.compact
92
97
  end
@@ -119,6 +124,9 @@ module Mihari
119
124
  total = res["total"].to_i
120
125
  responses << res
121
126
  break if total <= page * PAGE_SIZE
127
+
128
+ # sleep #{interval} seconds to avoid the rate limitation (if it is set)
129
+ sleep interval
122
130
  end
123
131
  convert_responses responses.compact
124
132
  end
@@ -6,6 +6,7 @@ require "mihari/commands/circl"
6
6
  require "mihari/commands/crtsh"
7
7
  require "mihari/commands/dnpedia"
8
8
  require "mihari/commands/dnstwister"
9
+ require "mihari/commands/greynoise"
9
10
  require "mihari/commands/onyphe"
10
11
  require "mihari/commands/otx"
11
12
  require "mihari/commands/passivetotal"
@@ -25,6 +26,7 @@ module Mihari
25
26
  class Analyzer < Base
26
27
  class_option :ignore_old_artifacts, type: :boolean, default: false, desc: "Whether to ignore old artifacts from checking or not."
27
28
  class_option :ignore_threshold, type: :numeric, default: 0, desc: "Number of days to define whether an artifact is old or not."
29
+ class_option :interval, type: :numeric, default: 0, desc: "Seconds of the interval while calling API in a row."
28
30
  class_option :config, type: :string, desc: "Path to the config file"
29
31
 
30
32
  include Mihari::Commands::BinaryEdge
@@ -33,6 +35,7 @@ module Mihari
33
35
  include Mihari::Commands::Crtsh
34
36
  include Mihari::Commands::DNPedia
35
37
  include Mihari::Commands::DNSTwister
38
+ include Mihari::Commands::GreyNoise
36
39
  include Mihari::Commands::JSON
37
40
  include Mihari::Commands::Onyphe
38
41
  include Mihari::Commands::OTX
@@ -0,0 +1,21 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Mihari
4
+ module Commands
5
+ module GreyNoise
6
+ def self.included(thor)
7
+ thor.class_eval do
8
+ desc "greynoise [QUERY]", "GreyNoise search"
9
+ method_option :title, type: :string, desc: "title"
10
+ method_option :description, type: :string, desc: "description"
11
+ method_option :tags, type: :array, desc: "tags"
12
+ def greynoise(query)
13
+ with_error_handling do
14
+ run_analyzer Analyzers::GreyNoise, query: query, options: options
15
+ end
16
+ end
17
+ end
18
+ end
19
+ end
20
+ end
21
+ end
@@ -10,6 +10,9 @@ module Mihari
10
10
  desc "search [RULE]", "Search by a rule"
11
11
  method_option :config, type: :string, desc: "Path to the config file"
12
12
  def search_by_rule(rule)
13
+ # load configuration
14
+ load_configuration
15
+
13
16
  # convert str(YAML) to hash or str(path/YAML file) to hash
14
17
  rule = load_rule(rule)
15
18
 
@@ -77,8 +80,6 @@ module Mihari
77
80
  # @return [nil]
78
81
  #
79
82
  def run_rule_analyzer(analyzer, ignore_old_artifacts: false, ignore_threshold: 0)
80
- load_configuration
81
-
82
83
  analyzer.ignore_old_artifacts = ignore_old_artifacts
83
84
  analyzer.ignore_threshold = ignore_threshold
84
85
 
@@ -9,8 +9,7 @@ module Mihari
9
9
  method_option :title, type: :string, desc: "title"
10
10
  method_option :description, type: :string, desc: "description"
11
11
  method_option :tags, type: :array, desc: "tags"
12
- method_option :target_type, type: :string, default: "url", desc: "target type to fetch from search results (target type should be 'url', 'domain' or 'ip')"
13
- method_option :use_similarity, type: :boolean, default: false, desc: "use similarity API or not"
12
+ method_option :allowed_data_types, type: :array, default: ["url", "ip", "domain"], desc: "types to fetch from search results ('url', 'domain' or 'ip')"
14
13
  def urlscan(query)
15
14
  with_error_handling do
16
15
  run_analyzer Analyzers::Urlscan, query: query, options: options
@@ -13,6 +13,8 @@ module Mihari
13
13
  optional(:censys_secret).value(:string)
14
14
  optional(:circl_passive_password).value(:string)
15
15
  optional(:circl_passive_username).value(:string)
16
+ optional(:database).value(:string)
17
+ optional(:greynoise_api_key).value(:string)
16
18
  optional(:ipinfo_api_key).value(:string)
17
19
  optional(:misp_api_endpoint).value(:string)
18
20
  optional(:misp_api_key).value(:string)
@@ -30,10 +32,9 @@ module Mihari
30
32
  optional(:thehive_api_key).value(:string)
31
33
  optional(:urlscan_api_key).value(:string)
32
34
  optional(:virustotal_api_key).value(:string)
33
- optional(:zoomeye_api_key).value(:string)
34
35
  optional(:webhook_url).value(:string)
35
36
  optional(:webhook_use_json_body).value(:bool)
36
- optional(:database).value(:string)
37
+ optional(:zoomeye_api_key).value(:string)
37
38
  end
38
39
 
39
40
  class ConfigurationContract < Dry::Validation::Contract
@@ -7,33 +7,41 @@ require "mihari/schemas/macros"
7
7
 
8
8
  module Mihari
9
9
  module Schemas
10
+ AnalyzerOptions = Dry::Schema.Params do
11
+ optional(:interval).value(:integer)
12
+ end
13
+
10
14
  Analyzer = Dry::Schema.Params do
11
15
  required(:analyzer).value(Types::AnalyzerTypes)
12
16
  required(:query).value(:string)
17
+ optional(:options).hash(AnalyzerOptions)
13
18
  end
14
19
 
15
20
  Spyse = Dry::Schema.Params do
16
21
  required(:analyzer).value(Types::String.enum("spyse"))
17
22
  required(:query).value(:string)
18
23
  required(:type).value(Types::String.enum("ip", "domain"))
24
+ optional(:options).hash(AnalyzerOptions)
19
25
  end
20
26
 
21
27
  ZoomEye = Dry::Schema.Params do
22
28
  required(:analyzer).value(Types::String.enum("zoomeye"))
23
29
  required(:query).value(:string)
24
30
  required(:type).value(Types::String.enum("host", "web"))
31
+ optional(:options).hash(AnalyzerOptions)
25
32
  end
26
33
 
27
34
  Crtsh = Dry::Schema.Params do
28
35
  required(:analyzer).value(Types::String.enum("crtsh"))
29
36
  required(:query).value(:string)
30
37
  optional(:exclude_expired).value(:bool).default(true)
38
+ optional(:options).hash(AnalyzerOptions)
31
39
  end
32
40
 
33
41
  Urlscan = Dry::Schema.Params do
34
42
  required(:analyzer).value(Types::String.enum("urlscan"))
35
43
  required(:query).value(:string)
36
- optional(:use_similarity).value(:bool).default(true)
44
+ optional(:options).hash(AnalyzerOptions)
37
45
  end
38
46
 
39
47
  Rule = Dry::Schema.Params do
@@ -0,0 +1,55 @@
1
+ require "json"
2
+ require "dry/struct"
3
+
4
+ module Mihari
5
+ module Structs
6
+ module GreyNoise
7
+ class Metadata < Dry::Struct
8
+ attribute :country, Types::String
9
+ attribute :country_code, Types::String
10
+ attribute :asn, Types::String
11
+
12
+ def self.from_dynamic!(d)
13
+ d = Types::Hash[d]
14
+ new(
15
+ country: d.fetch("country"),
16
+ country_code: d.fetch("country_code"),
17
+ asn: d.fetch("asn")
18
+ )
19
+ end
20
+ end
21
+
22
+ class Datum < Dry::Struct
23
+ attribute :ip, Types::String
24
+ attribute :metadata, Metadata
25
+
26
+ def self.from_dynamic!(d)
27
+ d = Types::Hash[d]
28
+ new(
29
+ ip: d.fetch("ip"),
30
+ metadata: Metadata.from_dynamic!(d.fetch("metadata"))
31
+ )
32
+ end
33
+ end
34
+
35
+ class Response < Dry::Struct
36
+ attribute :complete, Types::Bool
37
+ attribute :count, Types::Int
38
+ attribute :data, Types.Array(Datum)
39
+ attribute :message, Types::String
40
+ attribute :query, Types::String
41
+
42
+ def self.from_dynamic!(d)
43
+ d = Types::Hash[d]
44
+ new(
45
+ complete: d.fetch("complete"),
46
+ count: d.fetch("count"),
47
+ data: d.fetch("data").map { |x| Datum.from_dynamic!(x) },
48
+ message: d.fetch("message"),
49
+ query: d.fetch("query")
50
+ )
51
+ end
52
+ end
53
+ end
54
+ end
55
+ end
@@ -5,20 +5,20 @@ module Mihari
5
5
  module Structs
6
6
  module Shodan
7
7
  class Location < Dry::Struct
8
- attribute :country_code, Types::String
9
- attribute :country_name, Types::String
8
+ attribute :country_code, Types::String.optional
9
+ attribute :country_name, Types::String.optional
10
10
 
11
11
  def self.from_dynamic!(d)
12
12
  d = Types::Hash[d]
13
13
  new(
14
- country_code: d.fetch("country_code"),
15
- country_name: d.fetch("country_name")
14
+ country_code: d["country_code"],
15
+ country_name: d["country_name"]
16
16
  )
17
17
  end
18
18
  end
19
19
 
20
20
  class Match < Dry::Struct
21
- attribute :asn, Types::String
21
+ attribute :asn, Types::String.optional
22
22
  attribute :hostnames, Types.Array(Types::String)
23
23
  attribute :location, Location
24
24
  attribute :domains, Types.Array(Types::String)
@@ -27,7 +27,7 @@ module Mihari
27
27
  def self.from_dynamic!(d)
28
28
  d = Types::Hash[d]
29
29
  new(
30
- asn: d.fetch("asn"),
30
+ asn: d["asn"],
31
31
  hostnames: d.fetch("hostnames"),
32
32
  location: Location.from_dynamic!(d.fetch("location")),
33
33
  domains: d.fetch("domains"),
@@ -0,0 +1,51 @@
1
+ require "json"
2
+ require "dry/struct"
3
+
4
+ module Mihari
5
+ module Structs
6
+ module Urlscan
7
+ class Page < Dry::Struct
8
+ attribute :domain, Types::String.optional
9
+ attribute :ip, Types::String.optional
10
+ attribute :url, Types::String
11
+
12
+ def self.from_dynamic!(d)
13
+ d = Types::Hash[d]
14
+ new(
15
+ domain: d["domain"],
16
+ ip: d["ip"],
17
+ url: d.fetch("url")
18
+ )
19
+ end
20
+ end
21
+
22
+ class Result < Dry::Struct
23
+ attribute :page, Page
24
+ attribute :id, Types::String
25
+ attribute :sort, Types.Array(Types::String | Types::Integer)
26
+
27
+ def self.from_dynamic!(d)
28
+ d = Types::Hash[d]
29
+ new(
30
+ page: Page.from_dynamic!(d.fetch("page")),
31
+ id: d.fetch("_id"),
32
+ sort: d.fetch("sort")
33
+ )
34
+ end
35
+ end
36
+
37
+ class Response < Dry::Struct
38
+ attribute :results, Types.Array(Result)
39
+ attribute :has_more, Types::Bool
40
+
41
+ def self.from_dynamic!(d)
42
+ d = Types::Hash[d]
43
+ new(
44
+ results: d.fetch("results").map { |x| Result.from_dynamic!(x) },
45
+ has_more: d.fetch("has_more")
46
+ )
47
+ end
48
+ end
49
+ end
50
+ end
51
+ end
data/lib/mihari/types.rb CHANGED
@@ -8,17 +8,21 @@ module Mihari
8
8
  Nil = Strict::Nil
9
9
  Hash = Strict::Hash
10
10
  String = Strict::String
11
+ Bool = Strict::Bool
11
12
  Double = Strict::Float | Strict::Integer
12
13
  DateTime = Strict::DateTime
13
14
 
14
15
  DataTypes = Types::String.enum(*ALLOWED_DATA_TYPES)
15
16
 
17
+ UrlscanDataTypes = Types::String.enum("ip", "domain", "url")
18
+
16
19
  AnalyzerTypes = Types::String.enum(
17
20
  "binaryedge",
18
21
  "censys",
19
22
  "circl",
20
23
  "dnpedia",
21
24
  "dnstwister",
25
+ "greynoise",
22
26
  "onyphe",
23
27
  "otx",
24
28
  "passivetotal",
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Mihari
4
- VERSION = "3.9.1"
4
+ VERSION = "3.11.0"
5
5
  end
@@ -1 +1 @@
1
- <!DOCTYPE html><html lang="en"><head><meta charset="utf-8"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta name="viewport" content="width=device-width,initial-scale=1"><link rel="icon" href="/static/favicon.ico"><title>Mihari</title><link href="/static/js/app.6b636b62.js" rel="preload" as="script"></head><body><noscript><strong>We're sorry but Mihari doesn't work properly without JavaScript enabled. Please enable it to continue.</strong></noscript><div id="app"></div><script src="/static/js/app.6b636b62.js"></script></body></html>
1
+ <!DOCTYPE html><html lang="en"><head><meta charset="utf-8"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta name="viewport" content="width=device-width,initial-scale=1"><link rel="icon" href="/static/favicon.ico"><title>Mihari</title><link href="/static/js/app.fbc19869.js" rel="preload" as="script"></head><body><noscript><strong>We're sorry but Mihari doesn't work properly without JavaScript enabled. Please enable it to continue.</strong></noscript><div id="app"></div><script src="/static/js/app.fbc19869.js"></script></body></html>