bot_detection 1.0.4 → 1.0.5

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,15 @@
 ---
-SHA1:
-  metadata.gz: 1de592441758a62ef6b17cad0dec024dc6626a7b
-  data.tar.gz: 3b0970eab0d212bcb9c42d63ab19e4067c30b558
+!binary "U0hBMQ==":
+  metadata.gz: !binary |-
+    MTY3MjcyZGJiMzgwOGFkN2JkNGUwZDNjY2NjNzUzODNmZGE2ZDdkMA==
+  data.tar.gz: !binary |-
+    MDdjNmFlMzFmZDk5OWIzNDFhZTU2YWZkZmRjYTdiY2EwZGEyYjUwNQ==
 SHA512:
-  metadata.gz: 5b085d9816a53ad6f6a7a1e39f2d5621a15ef1165909e269fff6ff9e69b8461f4404c2712f048c6e871f7e915b120530ffd0f9d39d80f1c6d3d29c46ef37976a
-  data.tar.gz: bb6391774125b04847a251727e1d28aea89872a8a5579345523b902d8674dcf4729c786c02201e422f9c7cf69e50bffa9ec5db138d43254abd8209326a487c80
+  metadata.gz: !binary |-
+    ODFjODA2ZjY3NmQ5NzU3OGZhNWY0M2RkNTAyNmUzMTY4OTZmYmQ5NjU2NjE5
+    YThlODNhNmQzOThlZWU5MDg2MzY1MjMwMzEyMDNjYjRhOWVhNjZmZTAwMDY3
+    ZWExZTk4MzNlYTBiZGI5YjAyMjcwZjQ2NTU1OTgxZjk3ZjIwYjg=
+  data.tar.gz: !binary |-
+    YzRiODFiODBhNjlkYzZjOGQyYWFiNGJjYjFmNGU1MDU0NmIyOTkxYmQ3MWI1
+    NTBhYTY3NzRlYWUyMjJhNzM2MDJhZmQyYzgwOWE3YmJhYmQ4YzQxM2FhYjJm
+    MjQ3MzEyNjIzNDA2MWI3YTM5MWJhNTAxZGM4NGY0YjcyMGYzOWE=
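The checksum values above moved from plain hex strings to YAML `!binary` scalars, i.e. base64-encoded copies of the hex digests. A minimal sketch of reading the new form back (the digest string is taken from the diff above; the commented verification line is only a hypothetical illustration, not part of the gem):

```ruby
require 'base64'
require 'digest'

# Decode a !binary checksum value back to the familiar hex digest.
sha1_hex = Base64.decode64("MTY3MjcyZGJiMzgwOGFkN2JkNGUwZDNjY2NjNzUzODNmZGE2ZDdkMA==")
# => "167272dbb3808ad7bd4e0d3cccc75383fda6d7d0"

# Hypothetical check of a downloaded metadata.gz against that digest:
# Digest::SHA1.hexdigest(File.binread("metadata.gz")) == sha1_hex
```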
data/CHANGELOG.md CHANGED
@@ -1,5 +1,9 @@
 # Changelog
 
+#### Release 1.0.5
+
+- added several new user agents, which have been spotted around, to the list
+
 #### Release 1.0.4
 
 - added several new user agents, which have been spotted around, to the list
@@ -22,4 +22,25 @@ BotDetection::BotUserAgents = [
   "Googlebot (gocrawl v0.4)",
   "Mozilla/5.0 (compatible; Alexabot/1.0; +http://www.alexa.com/help/certifyscan; certifyscan@alexa.com)",
   "Flamingo_SearchEngine (+http://www.flamingosearch.com/bot)", # perhaps a search engine?
+  "ContextAd Bot 1.0",
+  "WeSEE:Ads/PageBot (http://www.wesee.com/bot/)",
+  "Mozilla/5.0 (compatible; GrapeshotCrawler/2.0; +http://www.grapeshot.co.uk/crawler.php)",
+  "Mozilla/5.0 (compatible; grapeFX/0.9; crawler@grapeshot.co.uk",
+  "Mozilla/5.0 (compatible; BLEXBot/1.0; +http://webmeup-crawler.com/)",
+  "CodeGator Crawler v1.0",
+  "German Wikipedia Broken Weblinks Bot; contact: gifti@tools.wmflabs.org",
+  "Mozilla/5.0 (compatible; SeznamBot/3.2; +http://fulltext.sblog.cz/)",
+  "Mozilla/5.0 (compatible; TwitterCrawler)",
+  "rogerbot/1.0 (http://moz.com/help/pro/what-is-rogerbot-, rogerbot-crawler+shiny@moz.com)",
+  "crawler4j (http://code.google.com/p/crawler4j/)",
+  "Mozilla/5.0 (compatible; PaperLiBot/2.1; http://support.paper.li/entries/20023257-what-is-paper-li)",
+  "LivelapBot/0.2 (http://site.livelap.com/crawler)",
+  "LivelapBot/0.2 (http://site.livelap.com/crawler)",
+  "Mozilla/5.0 (Linux; U; Android 4.2.2; en-us; CUBOT X6 Build/JDQ39) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30",
+  "Clicapack Bot",
+  "CRAZYWEBCRAWLER 0.9.1, http://www.crazywebcrawler.com",
+  "Mozilla/4.0 (compatible; MSIE 5.01; Windows 95; MSIECrawler)",
+  "bitlybot",
+  "Mozilla/5.0 (TweetmemeBot/4.0; +http://datasift.com/bot.html) Gecko/20100101 Firefox/31.0",
+  "Mozilla/5.0 (Linux; U; Android 4.2.2; bg-bg; CUBOT X6 Build/JDQ39) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30",
 ]
@@ -48,6 +48,6 @@ protected
   end
 
   def user_agent
-    (request.env['HTTP_USER_AGENT'] || request.user_agent).to_s
+    (request.env['HTTP_USER_AGENT'] || request.user_agent).to_s.strip
   end
 end
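The only code change in this release is the added `.strip` on `user_agent`, presumably so that user agents padded with stray whitespace still compare equal to the entries in `BotDetection::BotUserAgents`. A small illustration of that effect (assuming the gem loads as `require 'bot_detection'`; the `include?` check is illustrative, not the gem's actual matching code):

```ruby
require 'bot_detection'  # assumed require name for the gem

ua = " bitlybot \n"  # a padded user agent, as it might arrive in HTTP_USER_AGENT

BotDetection::BotUserAgents.include?(ua.to_s)        # => false (whitespace breaks the exact match)
BotDetection::BotUserAgents.include?(ua.to_s.strip)  # => true  ("bitlybot" is in the list above)
```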
@@ -1,3 +1,3 @@
 module BotDetection
-  VERSION = "1.0.4"
+  VERSION = "1.0.5"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: bot_detection
 version: !ruby/object:Gem::Version
-  version: 1.0.4
+  version: 1.0.5
 platform: ruby
 authors:
 - Nils Berenbold
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-11-13 00:00:00.000000000 Z
+date: 2014-11-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: public_suffix
@@ -42,14 +42,14 @@ dependencies:
   name: rake
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - '>='
+    - - ! '>='
       - !ruby/object:Gem::Version
         version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - '>='
+    - - ! '>='
       - !ruby/object:Gem::Version
         version: '0'
 description:
@@ -82,17 +82,17 @@ require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - - '>='
+  - - ! '>='
     - !ruby/object:Gem::Version
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - '>='
+  - - ! '>='
    - !ruby/object:Gem::Version
      version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.0.14
+rubygems_version: 2.4.2
 signing_key:
 specification_version: 4
 summary: Detects Search Engine crawlers by reverse DNS lookups.
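The gem's summary describes detection of search engine crawlers via reverse DNS lookups. For context, a minimal sketch of that general technique in Ruby, using a hypothetical `verified_googlebot?` helper; this is an illustration of the approach, not the gem's actual implementation:

```ruby
require 'resolv'

# Illustrative only: verify a claimed Googlebot by reverse + forward DNS.
def verified_googlebot?(ip)
  host = Resolv.getname(ip)  # reverse lookup, e.g. "crawl-66-249-66-1.googlebot.com"
  return false unless host.end_with?(".googlebot.com", ".google.com")
  Resolv.getaddresses(host).include?(ip)  # forward lookup must point back to the same IP
rescue Resolv::ResolvError
  false
end
```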