rate-limiting 1.0.3 → 1.0.4

@@ -1,5 +1,5 @@
  module Rate
  module Limiting
- VERSION = "1.0.3"
+ VERSION = "1.0.4"
  end
  end
@@ -5,6 +5,7 @@ class RateLimiting
 
  def initialize(app, &block)
  @app = app
+ @logger = nil
  @rules = []
  @cache = {}
  block.call(self)
@@ -12,6 +13,7 @@ class RateLimiting
 
  def call(env)
  request = Rack::Request.new(env)
+ @logger = env['rack.logger']
  (limit_header = allowed?(request)) ? respond(env, limit_header) : rate_limit_exceeded(env['HTTP_ACCEPT'])
  end
 
@@ -22,9 +24,9 @@ class RateLimiting
 
  def rate_limit_exceeded(accept)
  case accept.gsub(/;.*/, "").split(',')[0]
- when "text/xml" then message, type = xml_error("403", "Rate Limit Exceeded"), "text/xml"
+ when "text/xml" then message, type = xml_error("403", "Rate Limit Exceeded"), "text/xml"
  when "application/json" then message, type = ["Rate Limit Exceeded"].to_json, "application/json"
- else
+ else
  message, type = ["Rate Limit Exceeded"], "text/html"
  end
  [403, {"Content-Type" => type}, message]
@@ -46,11 +48,13 @@ class RateLimiting
  end
 
  def cache_has?(key)
- case
+ case
  when cache.respond_to?(:has_key?)
  cache.has_key?(key)
  when cache.respond_to?(:get)
  cache.get(key) rescue false
+ when cache.respond_to?(:exist?)
+ cache.exist?(key)
  else false
  end
  end
@@ -61,6 +65,8 @@ class RateLimiting
  return cache[key]
  when cache.respond_to?(:get)
  return cache.get(key) || nil
+ when cache.respond_to?(:fetch)
+ return cache.fetch(key)
  end
  end
 
@@ -74,11 +80,22 @@ class RateLimiting
  end
  when cache.respond_to?(:set)
  cache.set(key, value)
+ when cache.respond_to?(:write)
+ begin
+ cache.write(key, value)
+ rescue TypeError => e
+ cache.write(key, value.to_s)
+ end
  end
  end
 
+ def logger
+ @logger || Rack::NullLogger.new(nil)
+ end
+
  def allowed?(request)
  if rule = find_matching_rule(request)
+ logger.debug "[#{self}] #{request.ip}:#{request.path}: Rate limiting rule matched."
  apply_rule(request, rule)
  else
  true
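
The added `exist?`, `fetch`, and `write` branches let these duck-typed cache helpers talk to an `ActiveSupport::Cache::Store` (for example `Rails.cache`) in addition to a plain `Hash` or a Memcache-style `get`/`set` client. A minimal sketch of a store exposing that interface (illustration only, not part of the gem's source):

```
# Sketch: a stand-in store responding to the ActiveSupport::Cache::Store-style
# methods that the new branches above dispatch on.
class TinyStore
  def initialize
    @data = {}
  end

  def exist?(key)
    @data.key?(key)
  end

  def fetch(key)
    @data[key]
  end

  def write(key, value)
    @data[key] = value
  end
end

# With such a store, the middleware's helpers resolve to:
#   cache_has?(key)     -> store.exist?(key)
#   cache_get(key)      -> store.fetch(key)
#   cache_set(key, val) -> store.write(key, val)
```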
@@ -96,30 +113,35 @@ class RateLimiting
  key = rule.get_key(request)
  if cache_has?(key)
  record = cache_get(key)
- if (reset = record.split(':')[1]) > Time.now.strftime("%d%m%y%H%M%S")
- if (times = record.split(':')[0].to_i) < rule.limit
+ logger.debug "[#{self}] #{request.ip}:#{request.path}: Rate limiting entry: '#{key}' => #{record}"
+ if (reset = Time.at(record.split(':')[1].to_i)) > Time.now
+ # rule hasn't been reset yet
+ times = record.split(':')[0].to_i
+ cache_set(key, "#{times + 1}:#{reset.to_i}")
+ if (times) < rule.limit
+ # within rate limit
  response = get_header(times + 1, reset, rule.limit)
- record = record.gsub(/.*:/, "#{times + 1}:")
  else
+ logger.debug "[#{self}] #{request.ip}:#{request.path}: Rate limited; request rejected."
  return false
  end
  else
- response = get_header(1, reset = rule.get_expiration, rule.limit)
- cache_set(key, "1:" + rule.get_expiration)
+ response = get_header(1, rule.get_expiration, rule.limit)
+ cache_set(key, "1:#{rule.get_expiration.to_i}")
  end
  else
- response = get_header(1, reset = rule.get_expiration, rule.limit)
- cache_set(key, "1:" + rule.get_expiration)
+ response = get_header(1, rule.get_expiration, rule.limit)
+ cache_set(key, "1:#{rule.get_expiration.to_i}")
  end
  response
  end
 
  def get_header(times, reset, limit)
- {'x-RateLimit-Limit' => limit.to_s, 'x-RateLimit-Remaining' => (limit - times).to_s, 'x-RateLimit-Reset' => reset.to_s }
+ {'x-RateLimit-Limit' => limit.to_s, 'x-RateLimit-Remaining' => (limit - times).to_s, 'x-RateLimit-Reset' => reset.strftime("%d%m%y%H%M%S") }
  end
 
  def xml_error(code, message)
  "<?xml version=\"1.0\"?>\n<error>\n <code>#{code}</code>\n <message>#{message}</message>\n</error>"
  end
-
+
  end
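
The rewritten `apply_rule` stores each cache entry as `"<count>:<reset time as Unix epoch>"` and compares the reset time with `Time.at(...) > Time.now`, replacing the old `%d%m%y%H%M%S` string comparison; only the `x-RateLimit-Reset` header keeps the legacy `%d%m%y%H%M%S` format. A rough illustration of how a 1.0.4-style entry is read back (the value shown is made up):

```
# Hypothetical entry written by apply_rule in 1.0.4:
record = "3:1370563200"                     # "<request count>:<reset epoch>"

times = record.split(':')[0].to_i           # => 3 requests seen so far
reset = Time.at(record.split(':')[1].to_i)  # => reset time as a Time object

# The window is still open while reset > Time.now; clients see the reset
# time as reset.strftime("%d%m%y%H%M%S") in the x-RateLimit-Reset header.
```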
@@ -1,7 +1,7 @@
  class Rule
 
  def initialize(options)
- default_options = {
+ default_options = {
  :match => /.*/,
  :metric => :rph,
  :type => :frequency,
@@ -9,9 +9,9 @@ class Rule
  :per_ip => true,
  :per_url => false,
  :token => false
- }
+ }
  @options = default_options.merge(options)
-
+
  end
 
  def match
@@ -20,11 +20,11 @@ class Rule
 
  def limit
  (@options[:type] == :frequency ? 1 : @options[:limit])
- end
+ end
 
  def get_expiration
- (Time.now + ( @options[:type] == :frequency ? get_frequency : get_fixed )).strftime("%d%m%y%H%M%S")
- end
+ (Time.now + ( @options[:type] == :frequency ? get_frequency : get_fixed ))
+ end
 
  def get_frequency
  case @options[:metric]
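
`Rule#get_expiration` now returns a `Time` rather than a pre-formatted `%d%m%y%H%M%S` string, so callers pick the representation themselves: `to_i` for the cache entry, `strftime` for the `x-RateLimit-Reset` header. A small sketch, assuming `get_frequency`/`get_fixed` return a window length in seconds (which is what the `Time.now + ...` arithmetic implies):

```
# Sketch only: exercising Rule#get_expiration as of 1.0.4.
rule = Rule.new(:match => '/json', :metric => :rph, :type => :fixed, :limit => 60)

expires_at   = rule.get_expiration                   # => a Time instance
cache_value  = "1:#{expires_at.to_i}"                # format stored by apply_rule
header_value = expires_at.strftime("%d%m%y%H%M%S")   # x-RateLimit-Reset format
```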
data/readme.md CHANGED
@@ -1,7 +1,7 @@
  Rate Limiting
  ===============
 
- Rate Limiting is a rack middleware that rate-limit HTTP requests in many different ways.
+ Rate Limiting is a rack middleware that rate-limit HTTP requests in many different ways.
  It provides tools for creating rules which can rate-limit routes separately.
 
 
@@ -26,7 +26,7 @@ config/application.rb
  # Add your rules here, ex:
 
  r.define_rule( :match => '/resource', :type => :fixed, :metric => :rph, :limit => 300 )
- r.define_rule(:match => '/html', :limit => 1)
+ r.define_rule(:match => '/html', :limit => 1)
  r.define_rule(:match => '/json', :metric => :rph, :type => :frequency, :limit => 60)
  r.define_rule(:match => '/xml', :metric => :rph, :type => :frequency, :limit => 60)
  r.define_rule(:match => '/token/ip', :limit => 1, :token => :id, :per_ip => true)
@@ -65,7 +65,7 @@ Accepts aimed resource path or Regexp like '/resource' or "/resource/.*"
 
  :fixed - limit requests per time
 
- Examples:
+ Examples:
 
  r.define_rule(:match => "/resource", :metric => :rph, :type => :frequency, :limit => 3)
 
@@ -84,3 +84,50 @@ Examples:
 
  Boolean, true = limit by IP
 
+ ### per_url
+
+ Option used when the match option is a Regexp.
+ If true, it will limit every url caught separately.
+
+ Example:
+
+ r.define_rule(:match => '/resource/.*', :metric => :rph, :type => :fixed, :limit => 1, :per_url => true)
+
+ This example allows 1 request per hour for each url caught ('/resource/url1', '/resource/url2', etc.).
+
+ Limit Entry Storage
+ ----------------
+ By default, the record store used to keep track of request matches is a hash held in the app instance's memory. For a distributed or concurrent application, this will not yield the desired results and should be changed to a different store.
+
+ Set the cache by calling `set_cache` in the configuration block:
+ ```
+ r.set_cache(Rails.cache)
+ ```
+
+ Any traditional store will work, including Memcache, Redis, or an ActiveSupport::Cache::Store. Which store is best is an application-specific decision, but a fast, shared store is highly recommended.
+
+ A more robust cache configuration example:
+ ```
+ store = case
+ when ENV['REDIS_RATE_LIMIT_URL'].present?
+ # use a separate redis DB
+ Redis.new(url: ENV['REDIS_RATE_LIMIT_URL'])
+ when ENV['REDIS_PROVIDER'].present?
+ # no separate redis DB available, share primary redis DB
+ Redis.new(url: ENV[ENV['REDIS_PROVIDER']])
+ when (redis = Redis.new) && (redis.client.connect rescue false)
+ # a standard redis connection on port 6379 is available
+ redis
+ when Rails.application.config.cache_store != :null_store
+ # no redis store is available, use the rails cache
+ Rails.cache
+ else
+ # no distributed store available,
+ # a class instance variable will be used
+ nil
+ end
+
+ r.set_cache(store) if store.present?
+ Rails.logger.debug "=> Rate Limiting Store Configured: #{r.cache}"
+ ```
+
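
Putting the storage and rule pieces together, the configuration block from the readme might look like the following sketch. The surrounding `config.middleware.use` line is assumed from the usual Rails middleware convention and is not quoted from the readme itself:

```
# config/application.rb (sketch only)
config.middleware.use RateLimiting do |r|
  r.set_cache(Rails.cache)   # any shared store, as recommended above
  r.define_rule(:match => '/json', :metric => :rph, :type => :frequency, :limit => 60)
end
```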
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: rate-limiting
  version: !ruby/object:Gem::Version
- version: 1.0.3
+ version: 1.0.4
  prerelease:
  platform: ruby
  authors:
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2012-05-24 00:00:00.000000000 Z
+ date: 2013-06-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rspec
@@ -109,7 +109,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project: rate-limiting
- rubygems_version: 1.8.23
+ rubygems_version: 1.8.24
  signing_key:
  specification_version: 3
  summary: Rack Rate-Limit Gem