mihari 3.8.1 → 3.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/test.yml +6 -7
  3. data/config.ru +1 -1
  4. data/lib/mihari/analyzers/greynoise.rb +65 -0
  5. data/lib/mihari/analyzers/rule.rb +1 -0
  6. data/lib/mihari/analyzers/shodan.rb +3 -1
  7. data/lib/mihari/cli/analyzer.rb +2 -0
  8. data/lib/mihari/commands/greynoise.rb +21 -0
  9. data/lib/mihari/commands/search.rb +3 -2
  10. data/lib/mihari/commands/web.rb +9 -5
  11. data/lib/mihari/database.rb +1 -1
  12. data/lib/mihari/errors.rb +2 -0
  13. data/lib/mihari/mixins/configuration.rb +12 -2
  14. data/lib/mihari/models/alert.rb +29 -54
  15. data/lib/mihari/models/artifact.rb +3 -0
  16. data/lib/mihari/schemas/configuration.rb +3 -2
  17. data/lib/mihari/structs/alert.rb +45 -0
  18. data/lib/mihari/structs/greynoise.rb +55 -0
  19. data/lib/mihari/structs/ipinfo.rb +3 -4
  20. data/lib/mihari/structs/shodan.rb +2 -2
  21. data/lib/mihari/types.rb +2 -0
  22. data/lib/mihari/version.rb +1 -1
  23. data/lib/mihari/web/api.rb +43 -0
  24. data/lib/mihari/web/app.rb +48 -30
  25. data/lib/mihari/web/endpoints/alerts.rb +74 -0
  26. data/lib/mihari/web/endpoints/artifacts.rb +92 -0
  27. data/lib/mihari/web/endpoints/command.rb +32 -0
  28. data/lib/mihari/web/endpoints/configs.rb +22 -0
  29. data/lib/mihari/web/endpoints/ip_addresses.rb +27 -0
  30. data/lib/mihari/web/endpoints/sources.rb +18 -0
  31. data/lib/mihari/web/endpoints/tags.rb +38 -0
  32. data/lib/mihari/web/entities/alert.rb +23 -0
  33. data/lib/mihari/web/entities/artifact.rb +24 -0
  34. data/lib/mihari/web/entities/autonomous_system.rb +9 -0
  35. data/lib/mihari/web/entities/command.rb +14 -0
  36. data/lib/mihari/web/entities/config.rb +16 -0
  37. data/lib/mihari/web/entities/dns.rb +10 -0
  38. data/lib/mihari/web/entities/geolocation.rb +10 -0
  39. data/lib/mihari/web/entities/ip_address.rb +13 -0
  40. data/lib/mihari/web/entities/message.rb +9 -0
  41. data/lib/mihari/web/entities/reverse_dns.rb +9 -0
  42. data/lib/mihari/web/entities/source.rb +9 -0
  43. data/lib/mihari/web/entities/tag.rb +13 -0
  44. data/lib/mihari/web/entities/whois.rb +16 -0
  45. data/lib/mihari/web/public/grape.rb +73 -0
  46. data/lib/mihari/web/public/index.html +1 -1
  47. data/lib/mihari/web/public/redoc-static.html +53 -27
  48. data/lib/mihari/web/public/static/js/app.0a0cc502.js +21 -0
  49. data/lib/mihari/web/public/static/js/app.0a0cc502.js.map +1 -0
  50. data/lib/mihari/web/public/static/js/app.14008741.js +21 -0
  51. data/lib/mihari/web/public/static/js/app.14008741.js.map +1 -0
  52. data/lib/mihari/web/public/static/js/app.378da3dc.js +50 -0
  53. data/lib/mihari/web/public/static/js/app.378da3dc.js.map +1 -0
  54. data/lib/mihari/web/public/static/js/app.6b636b62.js +50 -0
  55. data/lib/mihari/web/public/static/js/app.6b636b62.js.map +1 -0
  56. data/lib/mihari.rb +8 -14
  57. data/mihari.gemspec +10 -6
  58. data/sig/lib/mihari/analyzers/rule.rbs +1 -1
  59. data/sig/lib/mihari/models/alert.rbs +3 -31
  60. data/sig/lib/mihari/structs/alert.rbs +27 -0
  61. data/sig/lib/mihari/structs/greynoise.rbs +30 -0
  62. data/sig/lib/mihari/structs/shodan.rbs +1 -1
  63. data/sig/lib/mihari/web/app.rbs +2 -2
  64. metadata +150 -76
  65. data/lib/mihari/serializers/alert.rb +0 -14
  66. data/lib/mihari/serializers/artifact.rb +0 -18
  67. data/lib/mihari/serializers/autonomous_system.rb +0 -9
  68. data/lib/mihari/serializers/dns.rb +0 -11
  69. data/lib/mihari/serializers/geolocation.rb +0 -11
  70. data/lib/mihari/serializers/reverse_dns.rb +0 -11
  71. data/lib/mihari/serializers/tag.rb +0 -11
  72. data/lib/mihari/serializers/whois.rb +0 -11
  73. data/lib/mihari/web/controllers/alerts_controller.rb +0 -74
  74. data/lib/mihari/web/controllers/analyzers_controller.rb +0 -38
  75. data/lib/mihari/web/controllers/artifacts_controller.rb +0 -94
  76. data/lib/mihari/web/controllers/base_controller.rb +0 -22
  77. data/lib/mihari/web/controllers/command_controller.rb +0 -26
  78. data/lib/mihari/web/controllers/config_controller.rb +0 -13
  79. data/lib/mihari/web/controllers/ip_address_controller.rb +0 -21
  80. data/lib/mihari/web/controllers/sources_controller.rb +0 -12
  81. data/lib/mihari/web/controllers/tags_controller.rb +0 -30
  82. data/lib/mihari/web/helpers/json.rb +0 -53
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 0a3dcac61a2835aa2f940ab90edf363b10293d0537baf8b1748212686770495f
-  data.tar.gz: 1f1bb0515227d8cb842e511b3045298bec2b981706ddacd0750b9a6a11aa2625
+  metadata.gz: c4d53239fe8be60cdc674dc22c14f801147a71144649cec83f4fd3caaf2c99fa
+  data.tar.gz: 71b9b3746164b1f3eec9b98a807a6940e85a9184991efd7dd9abe5bd9b3f3578
 SHA512:
-  metadata.gz: ce4ad498c64026a3b349c927dcd5f98bb59415a232acaaf279cde6f45d8f4af18bb62776568c092e261c936e5574f487a02f1fc4089f8af05a367578f06864b5
-  data.tar.gz: a5223e8a9f2060374df5cb21f6dbcfd475ce66408a04356131471fbb0321ac4622fec0df7ebf354ae2b46c537460a8738beb3dda5ac35c3a11588a4a97642712
+  metadata.gz: 6bee10443bd55e0ac483e8495c508bcce95dc56073aeb99f6c1566f38fb73707140baa838c07212c39795017d109aef8e9478f560bc3d04a21cbb949a48c9b61
+  data.tar.gz: c35f757168a737c37dcbf9a54d0408eabdfbf807a7a57db77c59c10f9a5b803d6731c615f3360db93b05647ee5e6cdcd35518baf4a7a7efe8d73e198d7f2664e
data/.github/workflows/test.yml CHANGED
@@ -43,17 +43,16 @@ jobs:
 
     steps:
       - uses: actions/checkout@v2
-      - name: Set up Ruby 2.7
-        uses: ruby/setup-ruby@v1
-        with:
-          ruby-version: ${{ matrix.ruby }}
-          bundler-cache: true
 
       - name: Install dependencies
         run: |
           sudo apt-get -yqq install libpq-dev libmysqlclient-dev
-          gem install bundler
-          bundle install
+
+      - name: Set up Ruby
+        uses: ruby/setup-ruby@v1
+        with:
+          ruby-version: ${{ matrix.ruby }}
+          bundler-cache: true
 
       - name: Test with PostgreSQL
         env:
data/config.ru CHANGED
@@ -4,4 +4,4 @@ require "./lib/mihari"
 # set rack env as development
 ENV["RACK_ENV"] ||= "development"
 
-run Mihari::App
+run Mihari::App.instance
data/lib/mihari/analyzers/greynoise.rb ADDED
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require "greynoise"
+
+module Mihari
+  module Analyzers
+    class GreyNoise < Base
+      param :query
+      option :title, default: proc { "GreyNoise search" }
+      option :description, default: proc { "query = #{query}" }
+      option :tags, default: proc { [] }
+
+      def artifacts
+        res = Structs::GreyNoise::Response.from_dynamic!(search)
+        res.data.map do |datum|
+          build_artifact datum
+        end
+      end
+
+      private
+
+      PAGE_SIZE = 10_000
+
+      def configuration_keys
+        %w[greynoise_api_key]
+      end
+
+      def api
+        @api ||= ::GreyNoise::API.new(key: Mihari.config.greynoise_api_key)
+      end
+
+      #
+      # Search
+      #
+      # @return [Hash]
+      #
+      def search
+        api.experimental.gnql(query, size: PAGE_SIZE)
+      end
+
+      #
+      # Build an artifact from a GreyNoise search API response
+      #
+      # @param [Structs::GreyNoise::Datum] datum
+      #
+      # @return [Artifact]
+      #
+      def build_artifact(datum)
+        as = AutonomousSystem.new(asn: normalize_asn(datum.metadata.asn))
+
+        geolocation = Geolocation.new(
+          country: datum.metadata.country,
+          country_code: datum.metadata.country_code
+        )
+
+        Artifact.new(
+          data: datum.ip,
+          source: source,
+          autonomous_system: as,
+          geolocation: geolocation
+        )
+      end
+    end
+  end
+end
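The new analyzer follows the same dry-initializer pattern as the other analyzers: a positional query plus optional title, description, and tags. A minimal usage sketch, assuming greynoise_api_key is present in the loaded configuration; the GNQL query, title, and tag below are illustrative, not taken from this diff:

require "mihari"

# Build the analyzer with an illustrative GNQL query
analyzer = Mihari::Analyzers::GreyNoise.new(
  "classification:malicious tags:Mirai",
  title: "Mirai scanners",
  tags: ["mirai"]
)

# #artifacts runs the GNQL search and returns Mihari::Artifact records,
# each carrying the IP plus autonomous-system and geolocation metadata
analyzer.artifacts.each { |artifact| puts artifact.data }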
data/lib/mihari/analyzers/rule.rb CHANGED
@@ -11,6 +11,7 @@ module Mihari
      "crtsh" => Crtsh,
      "dnpedia" => DNPedia,
      "dnstwister" => DNSTwister,
+     "greynoise" => GreyNoise,
      "onyphe" => Onyphe,
      "otx" => OTX,
      "passivetotal" => PassiveTotal,
data/lib/mihari/analyzers/shodan.rb CHANGED
@@ -78,7 +78,9 @@ module Mihari
      # @return [Artifact]
      #
      def build_artifact(match)
-        as = AutonomousSystem.new(asn: normalize_asn(match.asn))
+        as = nil
+        as = AutonomousSystem.new(asn: normalize_asn(match.asn)) unless match.asn.nil?
+
        geolocation = Geolocation.new(
          country: match.location.country_name,
          country_code: match.location.country_code
data/lib/mihari/cli/analyzer.rb CHANGED
@@ -6,6 +6,7 @@ require "mihari/commands/circl"
 require "mihari/commands/crtsh"
 require "mihari/commands/dnpedia"
 require "mihari/commands/dnstwister"
+require "mihari/commands/greynoise"
 require "mihari/commands/onyphe"
 require "mihari/commands/otx"
 require "mihari/commands/passivetotal"
@@ -33,6 +34,7 @@ module Mihari
      include Mihari::Commands::Crtsh
      include Mihari::Commands::DNPedia
      include Mihari::Commands::DNSTwister
+     include Mihari::Commands::GreyNoise
      include Mihari::Commands::JSON
      include Mihari::Commands::Onyphe
      include Mihari::Commands::OTX
data/lib/mihari/commands/greynoise.rb ADDED
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Mihari
+  module Commands
+    module GreyNoise
+      def self.included(thor)
+        thor.class_eval do
+          desc "greynoise [QUERY]", "GreyNoise search"
+          method_option :title, type: :string, desc: "title"
+          method_option :description, type: :string, desc: "description"
+          method_option :tags, type: :array, desc: "tags"
+          def greynoise(query)
+            with_error_handling do
+              run_analyzer Analyzers::GreyNoise, query: query, options: options
+            end
+          end
+        end
+      end
+    end
+  end
+end
data/lib/mihari/commands/search.rb CHANGED
@@ -10,6 +10,9 @@ module Mihari
          desc "search [RULE]", "Search by a rule"
          method_option :config, type: :string, desc: "Path to the config file"
          def search_by_rule(rule)
+            # load configuration
+            load_configuration
+
            # convert str(YAML) to hash or str(path/YAML file) to hash
            rule = load_rule(rule)
 
@@ -77,8 +80,6 @@
      # @return [nil]
      #
      def run_rule_analyzer(analyzer, ignore_old_artifacts: false, ignore_threshold: 0)
-        load_configuration
-
        analyzer.ignore_old_artifacts = ignore_old_artifacts
        analyzer.ignore_threshold = ignore_threshold
 
data/lib/mihari/commands/web.rb CHANGED
@@ -6,19 +6,23 @@ module Mihari
      def self.included(thor)
        thor.class_eval do
          desc "web", "Launch the web app"
-          method_option :port, type: :numeric, default: 9292
-          method_option :host, type: :string, default: "localhost"
+          method_option :port, type: :numeric, default: 9292, desc: "Hostname to listen on"
+          method_option :host, type: :string, default: "localhost", desc: "Port to listen on"
+          method_option :threads, type: :string, default: "0:16", desc: "min:max threads to use"
+          method_option :verbose, type: :boolean, default: true, desc: "Report each request"
          method_option :config, type: :string, desc: "Path to the config file"
          def web
-            port = options["port"].to_i || 9292
-            host = options["host"] || "localhost"
+            port = options["port"]
+            host = options["host"]
+            threads = options["threads"]
+            verbose = options["verbose"]
 
            load_configuration
 
            # set rack env as production
            ENV["RACK_ENV"] ||= "production"
 
-            Mihari::App.run!(port: port, host: host)
+            Mihari::App.run!(port: port, host: host, threads: threads, verbose: verbose)
          end
        end
      end
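The four Thor options are passed straight through to App.run!. A rough sketch of the equivalent programmatic call, assuming configuration has already been loaded; the values shown are simply the defaults from the diff (threads is a Puma-style "min:max" string and verbose toggles per-request reporting):

require "mihari"

ENV["RACK_ENV"] ||= "production"

# Same invocation `mihari web` now performs with its default options
Mihari::App.run!(port: 9292, host: "localhost", threads: "0:16", verbose: true)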
data/lib/mihari/database.rb CHANGED
@@ -106,7 +106,7 @@ module Mihari
    )
  end
 
-  # ActiveRecord::Base.logger = Logger.new STDOUT
+  ActiveRecord::Base.logger = Logger.new($stdout) if ENV["RACK_ENV"] == "development"
  ActiveRecord::Migration.verbose = false
 
  InitialSchema.migrate(:up)
data/lib/mihari/errors.rb CHANGED
@@ -6,4 +6,6 @@ module Mihari
  class InvalidInputError < Error; end
 
  class RetryableError < Error; end
+
+  class FileNotFoundError < Error; end
 end
data/lib/mihari/mixins/configuration.rb CHANGED
@@ -80,10 +80,20 @@ module Mihari
        end
      end
 
+      #
+      # Load configuration file
+      #
+      # @param [String] path
+      #
+      # @return [Hash]
+      #
      def _load_config(path)
-        return YAML.safe_load(File.read(path), symbolize_names: true) if Pathname(path).exist?
+        unless Pathname(path).exist?
+          puts "#{path} does not exist".colorize(:red)
+          raise FileNotFoundError
+        end
 
-        YAML.safe_load(path, symbolize_names: true)
+        YAML.safe_load(File.read(path), symbolize_names: true)
      end
    end
  end
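The practical effect: a missing configuration path is now reported and raised instead of falling through to parsing the path string itself as YAML. A small sketch, assuming a context that includes Mihari::Mixins::Configuration; the file paths are illustrative:

require "mihari"

include Mihari::Mixins::Configuration

_load_config("/etc/mihari/config.yml")
# => Hash of symbolized configuration keys when the file exists

_load_config("/nonexistent/config.yml")
# prints "/nonexistent/config.yml does not exist" (in red) and raises
# Mihari::FileNotFoundError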
data/lib/mihari/models/alert.rb CHANGED
@@ -13,92 +13,67 @@ module Mihari
      #
      # Search alerts
      #
-      # @param [String, nil] artifact_data
-      # @param [String, nil] description
-      # @param [String, nil] source
-      # @param [String, nil] tag_name
-      # @param [String, nil] title
-      # @param [DateTime, nil] from_at
-      # @param [DateTime, nil] to_at
-      # @param [Integer, nil] limit
-      # @param [Integer, nil] page
+      # @param [Structs::Alert::SearchFilterWithPagination] filter
      #
      # @return [Array<Hash>]
      #
-      def search(artifact_data: nil, description: nil, source: nil, tag_name: nil, title: nil, from_at: nil, to_at: nil, limit: 10, page: 1)
-        limit = limit.to_i
+      def search(filter)
+        limit = filter.limit.to_i
        raise ArgumentError, "limit should be bigger than zero" unless limit.positive?
 
-        page = page.to_i
+        page = filter.page.to_i
        raise ArgumentError, "page should be bigger than zero" unless page.positive?
 
        offset = (page - 1) * limit
 
-        relation = build_relation(
-          artifact_data: artifact_data,
-          title: title,
-          description: description,
-          source: source,
-          tag_name: tag_name,
-          from_at: from_at,
-          to_at: to_at
-        )
+        relation = build_relation(filter.without_pagination)
 
        # TODO: improve queires
        alert_ids = relation.limit(limit).offset(offset).order(id: :desc).pluck(:id).uniq
-        alerts = includes(:artifacts, :tags).where(id: [alert_ids]).order(id: :desc)
-
-        alerts.map do |alert|
-          json = Serializers::AlertSerializer.new(alert).as_json
-          json[:artifacts] = json[:artifacts] || []
-          json[:tags] = json[:tags] || []
-          json
-        end
+        includes(:artifacts, :tags).where(id: [alert_ids]).order(id: :desc)
      end
 
      #
      # Count alerts
      #
      # @param [String, nil] artifact_data
-      # @param [String, nil] description
-      # @param [String, nil] source
-      # @param [String, nil] tag_name
-      # @param [String, nil] title
-      # @param [DateTime, nil] from_at
-      # @param [DateTime, nil] to_at
      #
      # @return [Integer]
      #
-      def count(artifact_data: nil, description: nil, source: nil, tag_name: nil, title: nil, from_at: nil, to_at: nil)
-        relation = build_relation(
-          artifact_data: artifact_data,
-          title: title,
-          description: description,
-          source: source,
-          tag_name: tag_name,
-          from_at: from_at,
-          to_at: to_at
-        )
+      def count(filter)
+        relation = build_relation(filter)
        relation.distinct("alerts.id").count
      end
 
      private
 
-      def build_relation(artifact_data: nil, title: nil, description: nil, source: nil, tag_name: nil, from_at: nil, to_at: nil)
-        relation = self
+      def build_relation(filter)
+        artifact_ids = []
+        artifact = Artifact.includes(:autonomous_system, :dns_records, :reverse_dns_names)
+        artifact = artifact.where(data: filter.artifact_data) if filter.artifact_data
+        artifact = artifact.where(autonomous_system: { asn: filter.asn }) if filter.asn
+        artifact = artifact.where(dns_records: { value: filter.dns_record }) if filter.dns_record
+        artifact = artifact.where(reverse_dns_names: { name: filter.reverse_dns_name }) if filter.reverse_dns_name
+        # get artifact ids if there is any valid filter for artifact
+        if filter.has_valid_artifact_filters
+          artifact_ids = artifact.pluck(:id)
+          # set invalid ID if nothing is matched with the filters
+          artifact_ids = [-1] if artifact_ids.empty?
+        end
 
+        relation = self
        relation = relation.includes(:artifacts, :tags)
 
-        relation = relation.where(artifacts: { data: artifact_data }) if artifact_data
-        relation = relation.where(tags: { name: tag_name }) if tag_name
+        relation = relation.where(artifacts: { id: artifact_ids }) unless artifact_ids.empty?
+        relation = relation.where(tags: { name: filter.tag_name }) if filter.tag_name
 
-        relation = relation.where(source: source) if source
-        relation = relation.where(title: title) if title
+        relation = relation.where(source: filter.source) if filter.source
+        relation = relation.where(title: filter.title) if filter.title
 
-        relation = relation.filter(description: { like: "%#{description}%" }) if description
+        relation = relation.filter(description: { like: "%#{filter.description}%" }) if filter.description
 
-        relation = relation.filter(created_at: { gte: from_at }) if from_at
-        relation = relation.filter(created_at: { lte: to_at }) if to_at
+        relation = relation.filter(created_at: { gte: filter.from_at }) if filter.from_at
+        relation = relation.filter(created_at: { lte: filter.to_at }) if filter.to_at
 
        relation
      end
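Alert.search and Alert.count now take a filter struct (defined in data/lib/mihari/structs/alert.rb below) instead of a long keyword list, and search returns an ActiveRecord relation rather than pre-serialized hashes. A minimal sketch, assuming the database has been initialized; the filter values are illustrative:

require "mihari"

filter = Mihari::Structs::Alert::SearchFilterWithPagination.new(
  source: "shodan",      # illustrative filter values
  tag_name: "example",
  page: 1,
  limit: 10
)

alerts = Mihari::Alert.search(filter)                    # relation of matching alerts
total  = Mihari::Alert.count(filter.without_pagination)  # number of matching alerts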
data/lib/mihari/models/artifact.rb CHANGED
@@ -29,10 +29,13 @@ module Mihari
 
    validates_with ArtifactValidator
 
+    attr_accessor :tags
+
    def initialize(attributes)
      super
 
      self.data_type = TypeChecker.type(data)
+      self.tags = []
    end
 
    #
data/lib/mihari/schemas/configuration.rb CHANGED
@@ -13,6 +13,8 @@ module Mihari
      optional(:censys_secret).value(:string)
      optional(:circl_passive_password).value(:string)
      optional(:circl_passive_username).value(:string)
+      optional(:database).value(:string)
+      optional(:greynoise_api_key).value(:string)
      optional(:ipinfo_api_key).value(:string)
      optional(:misp_api_endpoint).value(:string)
      optional(:misp_api_key).value(:string)
@@ -30,10 +32,9 @@ module Mihari
      optional(:thehive_api_key).value(:string)
      optional(:urlscan_api_key).value(:string)
      optional(:virustotal_api_key).value(:string)
-      optional(:zoomeye_api_key).value(:string)
      optional(:webhook_url).value(:string)
      optional(:webhook_use_json_body).value(:bool)
-      optional(:database).value(:string)
+      optional(:zoomeye_api_key).value(:string)
    end
 
    class ConfigurationContract < Dry::Validation::Contract
data/lib/mihari/structs/alert.rb ADDED
@@ -0,0 +1,45 @@
+require "json"
+require "dry/struct"
+
+module Mihari
+  module Structs
+    module Alert
+      class SearchFilter < Dry::Struct
+        attribute? :artifact_data, Types::String.optional
+        attribute? :description, Types::String.optional
+        attribute? :source, Types::String.optional
+        attribute? :tag_name, Types::String.optional
+        attribute? :title, Types::String.optional
+        attribute? :from_at, Types::DateTime.optional
+        attribute? :to_at, Types::DateTime.optional
+        attribute? :asn, Types::Int.optional
+        attribute? :dns_record, Types::String.optional
+        attribute? :reverse_dns_name, Types::String.optional
+
+        def has_valid_artifact_filters
+          !(artifact_data || asn || dns_record || reverse_dns_name).nil?
+        end
+      end
+
+      class SearchFilterWithPagination < SearchFilter
+        attribute? :page, Types::Int.default(1)
+        attribute? :limit, Types::Int.default(10)
+
+        def without_pagination
+          SearchFilter.new(
+            artifact_data: artifact_data,
+            description: description,
+            from_at: from_at,
+            source: source,
+            tag_name: tag_name,
+            title: title,
+            to_at: to_at,
+            asn: asn,
+            dns_record: dns_record,
+            reverse_dns_name: reverse_dns_name
+          )
+        end
+      end
+    end
+  end
+end
data/lib/mihari/structs/greynoise.rb ADDED
@@ -0,0 +1,55 @@
+require "json"
+require "dry/struct"
+
+module Mihari
+  module Structs
+    module GreyNoise
+      class Metadata < Dry::Struct
+        attribute :country, Types::String
+        attribute :country_code, Types::String
+        attribute :asn, Types::String
+
+        def self.from_dynamic!(d)
+          d = Types::Hash[d]
+          new(
+            country: d.fetch("country"),
+            country_code: d.fetch("country_code"),
+            asn: d.fetch("asn")
+          )
+        end
+      end
+
+      class Datum < Dry::Struct
+        attribute :ip, Types::String
+        attribute :metadata, Metadata
+
+        def self.from_dynamic!(d)
+          d = Types::Hash[d]
+          new(
+            ip: d.fetch("ip"),
+            metadata: Metadata.from_dynamic!(d.fetch("metadata"))
+          )
+        end
+      end
+
+      class Response < Dry::Struct
+        attribute :complete, Types::Bool
+        attribute :count, Types::Int
+        attribute :data, Types.Array(Datum)
+        attribute :message, Types::String
+        attribute :query, Types::String
+
+        def self.from_dynamic!(d)
+          d = Types::Hash[d]
+          new(
+            complete: d.fetch("complete"),
+            count: d.fetch("count"),
+            data: d.fetch("data").map { |x| Datum.from_dynamic!(x) },
+            message: d.fetch("message"),
+            query: d.fetch("query")
+          )
+        end
+      end
+    end
+  end
+end
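These structs mirror the shape of the GNQL search response consumed by the new GreyNoise analyzer. A small parsing sketch using a hand-written hash; the values are illustrative, not real GreyNoise data:

require "mihari"

raw = {
  "complete" => true,
  "count" => 1,
  "message" => "ok",
  "query" => "classification:malicious",
  "data" => [
    { "ip" => "192.0.2.1",
      "metadata" => { "country" => "United States", "country_code" => "US", "asn" => "AS64496" } }
  ]
}

res = Mihari::Structs::GreyNoise::Response.from_dynamic!(raw)
res.data.first.ip            # => "192.0.2.1"
res.data.first.metadata.asn  # => "AS64496"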
data/lib/mihari/structs/ipinfo.rb CHANGED
@@ -18,10 +18,9 @@ module Mihari
        d = Types::Hash[d]
 
        asn = nil
-        org = d["org"]
-        unless org.nil?
-          asn = org.split.first
-          asn = normalize_asn(asn)
+        asn_ = d.dig("asn", "asn")
+        unless asn_.nil?
+          asn = normalize_asn(asn_)
        end
 
        new(
data/lib/mihari/structs/shodan.rb CHANGED
@@ -18,7 +18,7 @@ module Mihari
      end
 
      class Match < Dry::Struct
-        attribute :asn, Types::String
+        attribute :asn, Types::String.optional
        attribute :hostnames, Types.Array(Types::String)
        attribute :location, Location
        attribute :domains, Types.Array(Types::String)
@@ -27,7 +27,7 @@ module Mihari
        def self.from_dynamic!(d)
          d = Types::Hash[d]
          new(
-            asn: d.fetch("asn"),
+            asn: d["asn"],
            hostnames: d.fetch("hostnames"),
            location: Location.from_dynamic!(d.fetch("location")),
            domains: d.fetch("domains"),
data/lib/mihari/types.rb CHANGED
@@ -9,6 +9,7 @@ module Mihari
    Hash = Strict::Hash
    String = Strict::String
    Double = Strict::Float | Strict::Integer
+    DateTime = Strict::DateTime
 
    DataTypes = Types::String.enum(*ALLOWED_DATA_TYPES)
 
@@ -18,6 +19,7 @@ module Mihari
      "circl",
      "dnpedia",
      "dnstwister",
+      "greynoise",
      "onyphe",
      "otx",
      "passivetotal",
data/lib/mihari/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Mihari
-  VERSION = "3.8.1"
+  VERSION = "3.10.0"
 end
data/lib/mihari/web/api.rb ADDED
@@ -0,0 +1,43 @@
+# Entities
+require "mihari/web/entities/message"
+
+require "mihari/web/entities/autonomous_system"
+require "mihari/web/entities/command"
+require "mihari/web/entities/config"
+require "mihari/web/entities/dns"
+require "mihari/web/entities/geolocation"
+require "mihari/web/entities/ip_address"
+require "mihari/web/entities/reverse_dns"
+require "mihari/web/entities/source"
+require "mihari/web/entities/tag"
+require "mihari/web/entities/whois"
+
+require "mihari/web/entities/artifact"
+
+require "mihari/web/entities/alert"
+
+# Endpoints
+require "mihari/web/endpoints/alerts"
+require "mihari/web/endpoints/artifacts"
+require "mihari/web/endpoints/command"
+require "mihari/web/endpoints/configs"
+require "mihari/web/endpoints/ip_addresses"
+require "mihari/web/endpoints/sources"
+require "mihari/web/endpoints/tags"
+
+module Mihari
+  class API < Grape::API
+    prefix "api"
+    format :json
+
+    mount Endpoints::Alerts
+    mount Endpoints::Artifacts
+    mount Endpoints::Command
+    mount Endpoints::Configs
+    mount Endpoints::IPAddresses
+    mount Endpoints::Sources
+    mount Endpoints::Tags
+
+    add_swagger_documentation(api_version: "v1", info: { title: "Mihari API" })
+  end
+end
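The Sinatra controllers and JSON serializer helpers listed as removed in the file list above are replaced by this Grape API plus grape-entity presenters. A rough sketch of serving only the JSON API with Rack; the rackup file name and this wiring are assumptions, not part of the gem (the shipped config.ru runs Mihari::App.instance, which also serves the bundled frontend):

# api_only.ru (illustrative)
require "./lib/mihari"

# A Grape::API subclass is itself a Rack application, so it can be run directly
run Mihari::API

With grape-swagger's defaults, add_swagger_documentation exposes the generated OpenAPI document beneath the prefix, typically at GET /api/swagger_doc.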