logstash-filter-geoip 0.1.0

checksums.yaml ADDED
@@ -0,0 +1,15 @@
+---
+!binary "U0hBMQ==":
+  metadata.gz: !binary |-
+    ZjNjMDZhOTg1OTNhNmU5NTI2YzlhYzkyYzJlZDY4MThhZDA0ZDRkNQ==
+  data.tar.gz: !binary |-
+    NjgzN2Q5YWU1ODgxOWNmMjEwZjVjZjcwMWY4ODIxN2I5M2I5MGQ4Zg==
+SHA512:
+  metadata.gz: !binary |-
+    MDVmMjY0NmE5MmM3NTI2YWJhYTYzNzY4Nzc0YWY3NDAzMWYzZWVmYzNhYjll
+    NjdjODViZGIwOTgxZWMzZmZhYjFiZDU3YmU4NmQ0ZGZlZTU4ZTY0OThiM2Jk
+    YTMxNzVkZWY0YmFiMDIxYjhhM2YxZWZmMGZkMGU4MzdlMzU1MGE=
+  data.tar.gz: !binary |-
+    NDk1NjhmOWMzNWMxMTc3OTNlYzY0MGU1Zjc3M2Q4MjY0NmQyNjRkNTRkMTE2
+    M2ZjYzY4OWE2NDdhMWNiMzc2OGViNGU1ODNjOTRkYzcwMjg0ZDI4YzlhOTky
+    OGU1OTlhZTQwODFhYjNmYjQzODU2MDI3MzA5M2M3NDExM2FmMTM=
data/.gitignore ADDED
@@ -0,0 +1,4 @@
+*.gem
+Gemfile.lock
+.bundle
+vendor
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+source 'http://rubygems.org'
+gem 'rake'
+gem 'gem_publisher'
+gem 'archive-tar-minitar'
data/Rakefile ADDED
@@ -0,0 +1,8 @@
+@files=[ {'url' => 'http://logstash.objects.dreamhost.com/maxmind/GeoLiteCity-2013-01-18.dat.gz', 'sha1' => '15aab9a90ff90c4784b2c48331014d242b86bf82' },
+         {'url' => 'http://logstash.objects.dreamhost.com/maxmind/GeoIPASNum-2014-02-12.dat.gz', 'sha1' => '6f33ca0b31e5f233e36d1f66fbeae36909b58f91' }
+       ]
+
+task :default do
+  system("rake -T")
+end
+
data/lib/logstash/filters/geoip.rb ADDED
@@ -0,0 +1,149 @@
+# encoding: utf-8
+require "logstash/filters/base"
+require "logstash/namespace"
+require "tempfile"
+
+# The GeoIP filter adds information about the geographical location of IP addresses,
+# based on data from the Maxmind database.
+#
+# Starting with version 1.3.0 of Logstash, a [geoip][location] field is created if
+# the GeoIP lookup returns a latitude and longitude. The field is stored in
+# [GeoJSON](http://geojson.org/geojson-spec.html) format. Additionally,
+# the default Elasticsearch template provided with the
+# [elasticsearch output](../outputs/elasticsearch.html)
+# maps the [geoip][location] field to a
+# [geo_point](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/mapping-geo-point-type.html).
+#
+# As this field is a geo\_point _and_ it is still valid GeoJSON, you get
+# the awesomeness of Elasticsearch's geospatial query, facet and filter functions
+# and the flexibility of having GeoJSON for all other applications (like Kibana's
+# [bettermap panel](https://github.com/elasticsearch/kibana/tree/master/src/app/panels/bettermap)).
+#
+# Logstash releases ship with the GeoLiteCity database made available from
+# Maxmind with a CCA-ShareAlike 3.0 license. For more details on GeoLite, see
+# <http://www.maxmind.com/en/geolite>.
+class LogStash::Filters::GeoIP < LogStash::Filters::Base
+  config_name "geoip"
+  milestone 3
+
+  VERSION = '0.1.0'
+
+  # The path to the GeoIP database file which Logstash should use. Country, City, ASN, ISP
+  # and organization databases are supported.
+  #
+  # If not specified, this will default to the GeoLiteCity database that ships
+  # with Logstash.
+  config :database, :validate => :path
+
+  # The field containing the IP address or hostname to map via geoip. If
+  # this field is an array, only the first value will be used.
+  config :source, :validate => :string, :required => true
+
+  # An array of geoip fields to be included in the event.
+  #
+  # Possible fields depend on the database type. By default, all geoip fields
+  # are included in the event.
+  #
+  # For the built-in GeoLiteCity database, the following are available:
+  # `city\_name`, `continent\_code`, `country\_code2`, `country\_code3`, `country\_name`,
+  # `dma\_code`, `ip`, `latitude`, `longitude`, `postal\_code`, `region\_name` and `timezone`.
+  config :fields, :validate => :array
+
+  # Specify the field into which Logstash should store the geoip data.
+  # This can be useful, for example, if you have `src\_ip` and `dst\_ip` fields and
+  # would like the GeoIP information of both IPs.
+  #
+  # If you save the data to a target field other than "geoip" and want to use the
+  # geo\_point related functions in Elasticsearch, you need to alter the template
+  # provided with the Elasticsearch output and configure the output to use the
+  # new template.
+  #
+  # Even if you don't use the geo\_point mapping, the [target][location] field
+  # is still valid GeoJSON.
+  config :target, :validate => :string, :default => 'geoip'
+
+  public
+  def register
+    require "geoip"
+    if @database.nil?
+      @database = ::Dir.glob(::File.expand_path("../../../vendor/", ::File.dirname(__FILE__)) + "/GeoLiteCity*.dat").first
+      if !File.exists?(@database)
+        raise "You must specify 'database => ...' in your geoip filter (I looked for '#{@database}')"
+      end
+    end
+    @logger.info("Using geoip database", :path => @database)
+    # For the purpose of initializing this filter, geoip is initialized here but
+    # not set as a global. The geoip module imposes a mutex, so the filter needs
+    # to re-initialize this later in the filter() thread, and save that access
+    # as a thread-local variable.
+    geoip_initialize = ::GeoIP.new(@database)
+
+    @geoip_type = case geoip_initialize.database_type
+      when GeoIP::GEOIP_CITY_EDITION_REV0, GeoIP::GEOIP_CITY_EDITION_REV1
+        :city
+      when GeoIP::GEOIP_COUNTRY_EDITION
+        :country
+      when GeoIP::GEOIP_ASNUM_EDITION
+        :asn
+      when GeoIP::GEOIP_ISP_EDITION, GeoIP::GEOIP_ORG_EDITION
+        :isp
+      else
+        raise RuntimeError.new "This GeoIP database is not currently supported"
+    end
+
+    @threadkey = "geoip-#{self.object_id}"
+  end # def register
+
+  public
+  def filter(event)
+    return unless filter?(event)
+    geo_data = nil
+
+    # Use thread-local access to GeoIP. The Ruby GeoIP module forces a mutex
+    # around access to the database, which can be overcome with :pread.
+    # Unfortunately, :pread requires the io-extra gem, with C extensions that
+    # aren't supported on JRuby. If / when :pread becomes available, we can stop
+    # needing thread-local access.
+    if !Thread.current.key?(@threadkey)
+      Thread.current[@threadkey] = ::GeoIP.new(@database)
+    end
+
+    begin
+      ip = event[@source]
+      ip = ip.first if ip.is_a? Array
+      geo_data = Thread.current[@threadkey].send(@geoip_type, ip)
+    rescue SocketError => e
+      @logger.error("IP Field contained invalid IP address or hostname", :field => @source, :event => event)
+    rescue Exception => e
+      @logger.error("Unknown error while looking up GeoIP data", :exception => e, :field => @source, :event => event)
+    end
+
+    return if geo_data.nil?
+
+    geo_data_hash = geo_data.to_hash
+    geo_data_hash.delete(:request)
+    event[@target] = {} if event[@target].nil?
+    geo_data_hash.each do |key, value|
+      next if value.nil? || (value.is_a?(String) && value.empty?)
+      if @fields.nil? || @fields.empty? || @fields.include?(key.to_s)
+        # convert key to string (normally a Symbol)
+        if value.is_a?(String)
+          # Some strings from GeoIP don't have the correct encoding...
+          value = case value.encoding
+            # I have found strings coming from GeoIP that are ASCII-8BIT are actually
+            # ISO-8859-1...
+            when Encoding::ASCII_8BIT; value.force_encoding(Encoding::ISO_8859_1).encode(Encoding::UTF_8)
+            when Encoding::ISO_8859_1, Encoding::US_ASCII; value.encode(Encoding::UTF_8)
+            else; value
+          end
+        end
+        event[@target][key.to_s] = value
+      end
+    end # geo_data_hash.each
+    if event[@target].key?('latitude') && event[@target].key?('longitude')
+      # If we have latitude and longitude values, add the location field as GeoJSON array
+      event[@target]['location'] = [ event[@target]["longitude"].to_f, event[@target]["latitude"].to_f ]
+    end
+    filter_matched(event)
+  end # def filter
+end # class LogStash::Filters::GeoIP
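For reference, a minimal pipeline configuration exercising the options documented above might look like the following; the `clientip` field name and the `fields` selection are illustrative, not defaults of the plugin:

filter {
  geoip {
    source => "clientip"
    target => "geoip"
    fields => ["city_name", "country_name", "latitude", "longitude"]
  }
}

With a city-type database, the selected keys end up under [geoip], and because latitude and longitude are present, [geoip][location] is also added as a [longitude, latitude] GeoJSON array.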
data/logstash-filter-geoip.gemspec ADDED
@@ -0,0 +1,28 @@
+Gem::Specification.new do |s|
+
+  s.name          = 'logstash-filter-geoip'
+  s.version       = '0.1.0'
+  s.licenses      = ['Apache License (2.0)']
+  s.summary       = "$summary"
+  s.description   = "$description"
+  s.authors       = ["Elasticsearch"]
+  s.email         = 'richard.pijnenburg@elasticsearch.com'
+  s.homepage      = "http://logstash.net/"
+  s.require_paths = ["lib"]
+
+  # Files
+  s.files = `git ls-files`.split($\) + ::Dir.glob('vendor/*')
+
+  # Tests
+  s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+  # Special flag to let us know this is actually a logstash plugin
+  s.metadata = { "logstash_plugin" => "true", "group" => "filter" }
+
+  # Gem dependencies
+  s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
+
+  s.add_runtime_dependency 'geoip', ['>= 1.3.2']
+
+end
+
data/rakelib/publish.rake ADDED
@@ -0,0 +1,9 @@
+require "gem_publisher"
+
+desc "Publish gem to RubyGems.org"
+task :publish_gem do |t|
+  gem_file = Dir.glob(File.expand_path('../*.gemspec', File.dirname(__FILE__))).first
+  gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
+  puts "Published #{gem}" if gem
+end
+
data/rakelib/vendor.rake ADDED
@@ -0,0 +1,169 @@
+require "net/http"
+require "uri"
+require "digest/sha1"
+
+def vendor(*args)
+  return File.join("vendor", *args)
+end
+
+directory "vendor/" => ["vendor"] do |task, args|
+  mkdir task.name
+end
+
+def fetch(url, sha1, output)
+
+  puts "Downloading #{url}"
+  actual_sha1 = download(url, output)
+
+  if actual_sha1 != sha1
+    fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
+  end
+end # def fetch
+
+def file_fetch(url, sha1)
+  filename = File.basename( URI(url).path )
+  output = "vendor/#{filename}"
+  task output => [ "vendor/" ] do
+    begin
+      actual_sha1 = file_sha1(output)
+      if actual_sha1 != sha1
+        fetch(url, sha1, output)
+      end
+    rescue Errno::ENOENT
+      fetch(url, sha1, output)
+    end
+  end.invoke
+
+  return output
+end
+
+def file_sha1(path)
+  digest = Digest::SHA1.new
+  fd = File.new(path, "r")
+  while true
+    begin
+      digest << fd.sysread(16384)
+    rescue EOFError
+      break
+    end
+  end
+  return digest.hexdigest
+ensure
+  fd.close if fd
+end
+
+def download(url, output)
+  uri = URI(url)
+  digest = Digest::SHA1.new
+  tmp = "#{output}.tmp"
+  Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
+    request = Net::HTTP::Get.new(uri.path)
+    http.request(request) do |response|
+      # Net::HTTP reports the status code as a string; fail on anything other than OK / moved.
+      fail "HTTP fetch failed for #{url}. #{response}" unless ["200", "301"].include?(response.code)
+      size = (response["content-length"].to_i || -1).to_f
+      count = 0
+      File.open(tmp, "w") do |fd|
+        response.read_body do |chunk|
+          fd.write(chunk)
+          digest << chunk
+          if size > 0 && $stdout.tty?
+            count += chunk.bytesize
+            $stdout.write(sprintf("\r%0.2f%%", count / size * 100))
+          end
+        end
+      end
+      $stdout.write("\r \r") if $stdout.tty?
+    end
+  end
+
+  File.rename(tmp, output)
+
+  return digest.hexdigest
+rescue SocketError => e
+  puts "Failure while downloading #{url}: #{e}"
+  raise
+ensure
+  File.unlink(tmp) if File.exist?(tmp)
+end # def download
+
+def untar(tarball, &block)
+  require "archive/tar/minitar"
+  tgz = Zlib::GzipReader.new(File.open(tarball))
+  # Pull out typesdb
+  tar = Archive::Tar::Minitar::Input.open(tgz)
+  tar.each do |entry|
+    path = block.call(entry)
+    next if path.nil?
+    parent = File.dirname(path)
+
+    mkdir_p parent unless File.directory?(parent)
+
+    # Skip this file if the output file is the same size
+    if entry.directory?
+      mkdir path unless File.directory?(path)
+    else
+      entry_mode = entry.instance_eval { @mode } & 0777
+      if File.exists?(path)
+        stat = File.stat(path)
+        # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
+        # expose headers in the entry.
+        entry_size = entry.instance_eval { @size }
+        # If file sizes are same, skip writing.
+        next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
+      end
+      puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
+      File.open(path, "w") do |fd|
+        # eof? check lets us skip empty files. Necessary because the API provided by
+        # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
+        # IO object. Something about empty files in this EntryStream causes
+        # IO.copy_stream to throw "can't convert nil into String" on JRuby
+        # TODO(sissel): File a bug about this.
+        while !entry.eof?
+          chunk = entry.read(16384)
+          fd.write(chunk)
+        end
+        #IO.copy_stream(entry, fd)
+      end
+      File.chmod(entry_mode, path)
+    end
+  end
+  tar.close
+  File.unlink(tarball) if File.file?(tarball)
+end # def untar
+
+def ungz(file)
+
+  outpath = file.gsub('.gz', '')
+  tgz = Zlib::GzipReader.new(File.open(file))
+  begin
+    File.open(outpath, "w") do |out|
+      IO::copy_stream(tgz, out)
+    end
+    File.unlink(file)
+  rescue
+    File.unlink(outpath) if File.file?(outpath)
+    raise
+  end
+  tgz.close
+end
+
+desc "Process any vendor files required for this plugin"
+task "vendor" do |task, args|
+
+  @files.each do |file|
+    download = file_fetch(file['url'], file['sha1'])
+    if download =~ /.tar.gz/
+      prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
+      untar(download) do |entry|
+        if !file['files'].nil?
+          next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
+          out = entry.full_name.split("/").last
+        end
+        File.join('vendor', out)
+      end
+    elsif download =~ /.gz/
+      ungz(download)
+    end
+  end
+
+end
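Given the Gemfile and the `@files` list in the Rakefile above, fetching and unpacking the vendored GeoIP databases should come down to running `bundle install` followed by `rake vendor`: each `.gz` download is verified against its SHA1 and then decompressed into `vendor/`.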
data/spec/filters/geoip_spec.rb ADDED
@@ -0,0 +1,120 @@
+require "spec_helper"
+require "logstash/filters/geoip"
+
+describe LogStash::Filters::GeoIP do
+
+  describe "defaults" do
+    config <<-CONFIG
+      filter {
+        geoip {
+          source => "ip"
+          #database => "vendor/geoip/GeoLiteCity.dat"
+        }
+      }
+    CONFIG
+
+    sample("ip" => "8.8.8.8") do
+      insist { subject }.include?("geoip")
+
+      expected_fields = %w(ip country_code2 country_code3 country_name
+                           continent_code region_name city_name postal_code
+                           latitude longitude dma_code area_code timezone
+                           location )
+      expected_fields.each do |f|
+        insist { subject["geoip"] }.include?(f)
+      end
+    end
+
+    sample("ip" => "127.0.0.1") do
+      # assume geoip fails on localhost lookups
+      reject { subject }.include?("geoip")
+    end
+  end
+
+  describe "Specify the target" do
+    config <<-CONFIG
+      filter {
+        geoip {
+          source => "ip"
+          #database => "vendor/geoip/GeoLiteCity.dat"
+          target => src_ip
+        }
+      }
+    CONFIG
+
+    sample("ip" => "8.8.8.8") do
+      insist { subject }.include?("src_ip")
+
+      expected_fields = %w(ip country_code2 country_code3 country_name
+                           continent_code region_name city_name postal_code
+                           latitude longitude dma_code area_code timezone
+                           location )
+      expected_fields.each do |f|
+        insist { subject["src_ip"] }.include?(f)
+      end
+    end
+
+    sample("ip" => "127.0.0.1") do
+      # assume geoip fails on localhost lookups
+      reject { subject }.include?("src_ip")
+    end
+  end
+
+  describe "correct encodings with default db" do
+    config <<-CONFIG
+      filter {
+        geoip {
+          source => "ip"
+        }
+      }
+    CONFIG
+    expected_fields = %w(ip country_code2 country_code3 country_name
+                         continent_code region_name city_name postal_code
+                         dma_code area_code timezone)
+
+    sample("ip" => "1.1.1.1") do
+      checked = 0
+      expected_fields.each do |f|
+        next unless subject["geoip"][f]
+        checked += 1
+        insist { subject["geoip"][f].encoding } == Encoding::UTF_8
+      end
+      insist { checked } > 0
+    end
+    sample("ip" => "189.2.0.0") do
+      checked = 0
+      expected_fields.each do |f|
+        next unless subject["geoip"][f]
+        checked += 1
+        insist { subject["geoip"][f].encoding } == Encoding::UTF_8
+      end
+      insist { checked } > 0
+    end
+
+  end
+
+  describe "correct encodings with ASN db" do
+    config <<-CONFIG
+      filter {
+        geoip {
+          source => "ip"
+          database => "vendor/geoip/GeoIPASNum.dat"
+        }
+      }
+    CONFIG
+
+
+    sample("ip" => "1.1.1.1") do
+      insist { subject["geoip"]["asn"].encoding } == Encoding::UTF_8
+    end
+    sample("ip" => "187.2.0.0") do
+      insist { subject["geoip"]["asn"].encoding } == Encoding::UTF_8
+    end
+    sample("ip" => "189.2.0.0") do
+      insist { subject["geoip"]["asn"].encoding } == Encoding::UTF_8
+    end
+    sample("ip" => "161.24.0.0") do
+      insist { subject["geoip"]["asn"].encoding } == Encoding::UTF_8
+    end
+  end
+end
metadata ADDED
@@ -0,0 +1,90 @@
+--- !ruby/object:Gem::Specification
+name: logstash-filter-geoip
+version: !ruby/object:Gem::Version
+  version: 0.1.0
+platform: ruby
+authors:
+- Elasticsearch
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2014-11-02 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: logstash
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: 1.4.0
+    - - <
+      - !ruby/object:Gem::Version
+        version: 2.0.0
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: 1.4.0
+    - - <
+      - !ruby/object:Gem::Version
+        version: 2.0.0
+- !ruby/object:Gem::Dependency
+  name: geoip
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: 1.3.2
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: 1.3.2
+description: $description
+email: richard.pijnenburg@elasticsearch.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- .gitignore
+- Gemfile
+- Rakefile
+- lib/logstash/filters/geoip.rb
+- logstash-filter-geoip.gemspec
+- rakelib/publish.rake
+- rakelib/vendor.rake
+- spec/filters/geoip_spec.rb
+- vendor/GeoIPASNum-2014-02-12.dat
+- vendor/GeoLiteCity-2013-01-18.dat
+homepage: http://logstash.net/
+licenses:
+- Apache License (2.0)
+metadata:
+  logstash_plugin: 'true'
+  group: filter
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ! '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ! '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.4.1
+signing_key:
+specification_version: 4
+summary: $summary
+test_files:
+- spec/filters/geoip_spec.rb