wafris 0.0.1 → 0.1.2
- checksums.yaml +4 -4
- data/lib/lua/dist/get_graph_data.lua +81 -0
- data/lib/lua/dist/wafris_core.lua +96 -0
- data/lib/lua/src/get_time_buckets.lua +58 -0
- data/lib/lua/src/queries.lua +14 -0
- data/lib/lua/src/seeds/data_load.lua +104 -0
- data/lib/lua/src/time_bucket.lua +40 -0
- data/lib/wafris/configuration.rb +17 -3
- data/lib/wafris/version.rb +1 -1
- data/lib/wafris.rb +59 -4
- metadata +15 -10
- data/lib/wafris/wafris_core.lua +0 -42
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 05707fe625f71f24a02f26585d89356156b1dc83887195f1042a47856d3c86cf
+  data.tar.gz: 05e798e3298518a0310f1cdf6a68426b2b12f9c066acc74e78833987e26f2e31
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b41d4ee2190f5a88e263ef6905ffe43e6727923df4f3f1a600f203d10ea2010409e95c02c965cc2161fce47d5d541aaa1217beeb05097134fcaecdcfcd4305f1
+  data.tar.gz: dd940d846b7892a4607c78767f2ca364c62ab33782deb76b863d8b393f1a5c82134f0b3f6c76a438f9993e7d7702df67899d9e6485bc5491e03fcc3e02a38c1e
data/lib/lua/dist/get_graph_data.lua
ADDED
@@ -0,0 +1,81 @@
+-- Compiled from:
+-- src/get_time_buckets.lua
+
+-- Code was pulled from https://otland.net/threads/how-convert-timestamp-to-date-type.251657/
+-- An alternate solution is https://gist.github.com/markuman/e96d04139cd8acc33604
+local function get_time_bucket_from_timestamp(unix_time_milliseconds)
+  local function calculate_years_number_of_days(yr)
+    return (yr % 4 == 0 and (yr % 100 ~= 0 or yr % 400 == 0)) and 366 or 365
+  end
+
+  local function get_year_and_day_number(year, days)
+    while days >= calculate_years_number_of_days(year) do
+      days = days - calculate_years_number_of_days(year)
+      year = year + 1
+    end
+    return year, days
+  end
+
+  local function get_month_and_month_day(days, year)
+    local days_in_each_month = {
+      31,
+      (calculate_years_number_of_days(year) == 366 and 29 or 28),
+      31,
+      30,
+      31,
+      30,
+      31,
+      31,
+      30,
+      31,
+      30,
+      31,
+    }
+
+    for month = 1, #days_in_each_month do
+      if days - days_in_each_month[month] <= 0 then
+        return month, days
+      end
+      days = days - days_in_each_month[month]
+    end
+  end
+
+  local unix_time = unix_time_milliseconds / 1000
+  local year = 1970
+  local days = math.ceil(unix_time / 86400)
+  local month = nil
+
+  year, days = get_year_and_day_number(year, days)
+  month, days = get_month_and_month_day(days, year)
+  local hours = math.floor(unix_time / 3600 % 24)
+  -- local minutes, seconds = math.floor(unix_time / 60 % 60), math.floor(unix_time % 60)
+  -- hours = hours > 12 and hours - 12 or hours == 0 and 12 or hours
+  return string.format("%04d-%02d-%02d-%02d", year, month, days, hours)
+end
+
+local function get_time_buckets(unix_time_milliseconds)
+  local time_buckets = {}
+
+  for i = 23, 0, -1 do
+    table.insert(time_buckets, get_time_bucket_from_timestamp(unix_time_milliseconds - (1000 * 60 * 60 * i)))
+  end
+  return time_buckets
+end
+
+local function num_requests(time_bucket)
+  local request_keys = redis.call("KEYS", "unique-requests:" .. time_bucket)
+  redis.call("PFMERGE", "merged_unique-requests", unpack(request_keys))
+  return redis.call("PFCOUNT", "merged_unique-requests")
+end
+
+local graph_data = {}
+local unix_time_milliseconds = ARGV[1]
+local time_buckets = get_time_buckets(unix_time_milliseconds)
+-- use the get_time_buckets method to get each time bucket and
+-- the associated count for that bucket
+for bucket in pairs(time_buckets) do
+  table.insert(graph_data, time_buckets[bucket])
+  table.insert(graph_data, num_requests(time_buckets[bucket]))
+end
+
+return graph_data
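Note: get_graph_data.lua returns graph_data as a flat Redis reply that alternates an hourly bucket label with the merged HyperLogLog count for that bucket. A minimal sketch of consuming that reply from Ruby follows; the variable names and the each_slice pairing are illustrative assumptions, not part of the gem.

  require "redis"

  redis = Redis.new
  # Load the shipped script and call it the way lib/wafris.rb does: EVALSHA with a millisecond timestamp.
  sha = redis.script(:load, File.read("lib/lua/dist/get_graph_data.lua"))
  flat = redis.evalsha(sha, argv: [(Time.now.to_f * 1000).to_i])

  # Reply shape: ["2023-04-23-09", 12, "2023-04-23-10", 40, ...]
  requests_per_hour = flat.each_slice(2).to_h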
data/lib/lua/dist/wafris_core.lua
ADDED
@@ -0,0 +1,96 @@
+local function get_time_bucket_from_timestamp(unix_time_milliseconds)
+  local function calculate_years_number_of_days(yr)
+    return (yr % 4 == 0 and (yr % 100 ~= 0 or yr % 400 == 0)) and 366 or 365
+  end
+
+  local function get_year_and_day_number(year, days)
+    while days >= calculate_years_number_of_days(year) do
+      days = days - calculate_years_number_of_days(year)
+      year = year + 1
+    end
+    return year, days
+  end
+
+  local function get_month_and_month_day(days, year)
+    local days_in_each_month = {
+      31,
+      (calculate_years_number_of_days(year) == 366 and 29 or 28),
+      31,
+      30,
+      31,
+      30,
+      31,
+      31,
+      30,
+      31,
+      30,
+      31,
+    }
+
+    for month = 1, #days_in_each_month do
+      if days - days_in_each_month[month] <= 0 then
+        return month, days
+      end
+      days = days - days_in_each_month[month]
+    end
+  end
+
+  local unix_time = unix_time_milliseconds / 1000
+  local year = 1970
+  local days = math.ceil(unix_time / 86400)
+  local month = nil
+
+  year, days = get_year_and_day_number(year, days)
+  month, days = get_month_and_month_day(days, year)
+  local hours = math.floor(unix_time / 3600 % 24)
+  -- local minutes, seconds = math.floor(unix_time / 60 % 60), math.floor(unix_time % 60)
+  -- hours = hours > 12 and hours - 12 or hours == 0 and 12 or hours
+  return string.format("%04d-%02d-%02d-%02d", year, month, days, hours)
+end
+
+-- For: Relationship of IP to time of Request (Stream)
+local function get_request_id(timestamp, ip, max_requests)
+  timestamp = timestamp or "*"
+  local request_id = redis.call("XADD", "ip-requests-stream", "MAXLEN", "~", max_requests, timestamp, "ip", ip)
+  return request_id
+end
+
+local function add_to_HLL_request_count(timebucket, request_id)
+  redis.call("PFADD", "unique-requests:" .. timebucket, request_id)
+end
+
+-- For: Leaderboard of IPs with Request count as score
+local function increment_timebucket_for_ip(timebucket, ip)
+  redis.call("ZINCRBY", "ip-leader-sset:" .. timebucket, 1, ip)
+end
+
+-- Configuration
+local max_requests = 100000
+local max_requests_per_ip = 10000
+
+local ip = ARGV[1]
+local ip_to_decimal = ARGV[2]
+local unix_time_milliseconds = ARGV[3]
+local unix_time = ARGV[3] / 1000
+
+-- Initialize local variables
+local request_id = get_request_id(nil, ip, max_requests)
+local current_timebucket = get_time_bucket_from_timestamp(unix_time_milliseconds)
+
+-- GRAPH DATA COLLECTION
+add_to_HLL_request_count(current_timebucket, request_id)
+
+-- LEADERBOARD DATA COLLECTION
+increment_timebucket_for_ip(current_timebucket, ip)
+
+-- BLOCKING LOGIC
+-- Safelist Range Check
+if next(redis.call("ZRANGEBYSCORE", "allowed_ranges", ip_to_decimal, "+inf", "LIMIT", 0, 1)) then
+  return "Allowed"
+-- Blocklist Range Check
+elseif next(redis.call("ZRANGEBYSCORE", "blocked_ranges", ip_to_decimal, "+inf", "LIMIT", 0, 1)) then
+  return "Blocked"
+-- No Matches
+else
+  return "Not found"
+end
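Note: wafris_core.lua keys all of its graph and leaderboard writes by an hourly bucket of the form YYYY-MM-DD-HH, derived from the millisecond timestamp passed in ARGV[3]. The Ruby side builds matching labels with strftime (see leader_timebuckets in lib/wafris.rb further down). A rough illustration of the expected label, assuming a UTC timestamp:

  ms = (Time.now.to_f * 1000).to_i
  bucket = Time.at(ms / 1000).utc.strftime("%Y-%m-%d-%H")
  # => e.g. "2023-04-23-14", the suffix used in keys such as
  #    "unique-requests:2023-04-23-14" and "ip-leader-sset:2023-04-23-14"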
data/lib/lua/src/get_time_buckets.lua
ADDED
@@ -0,0 +1,58 @@
+function get_time_bucket_from_timestamp(unix_time_milliseconds)
+  local function calculate_years_number_of_days(yr)
+    return (yr % 4 == 0 and (yr % 100 ~= 0 or yr % 400 == 0)) and 366 or 365
+  end
+
+  local function get_year_and_day_number(year, days)
+    while days >= calculate_years_number_of_days(year) do
+      days = days - calculate_years_number_of_days(year)
+      year = year + 1
+    end
+    return year, days
+  end
+
+  local function get_month_and_month_day(days, year)
+    local days_in_each_month = {
+      31,
+      (calculate_years_number_of_days(year) == 366 and 29 or 28),
+      31,
+      30,
+      31,
+      30,
+      31,
+      31,
+      30,
+      31,
+      30,
+      31,
+    }
+
+    for month = 1, #days_in_each_month do
+      if days - days_in_each_month[month] <= 0 then
+        return month, days
+      end
+      days = days - days_in_each_month[month]
+    end
+  end
+
+  local unix_time = unix_time_milliseconds / 1000
+  local year = 1970
+  local days = math.ceil(unix_time / 86400)
+  local month = nil
+
+  year, days = get_year_and_day_number(year, days)
+  month, days = get_month_and_month_day(days, year)
+  local hours = math.floor(unix_time / 3600 % 24)
+  -- local minutes, seconds = math.floor(unix_time / 60 % 60), math.floor(unix_time % 60)
+  -- hours = hours > 12 and hours - 12 or hours == 0 and 12 or hours
+  return string.format("%04d-%02d-%02d-%02d", year, month, days, hours)
+end
+
+function get_time_buckets(unix_time_milliseconds)
+  local time_buckets = {}
+
+  for i = 23, 0, -1 do
+    table.insert(time_buckets, get_time_bucket_from_timestamp(unix_time_milliseconds - (1000 * 60 * 60 * i)))
+  end
+  return time_buckets
+end
data/lib/lua/src/queries.lua
ADDED
@@ -0,0 +1,14 @@
+local function num_requests(start_time, end_time)
+  local request_keys = redis.call('KEYS', 'unique-requests:*')
+  redis.call('PFMERGE', 'merged_unique-requests', unpack(request_keys))
+  return redis.call('PFCOUNT', 'merged_unique-requests')
+end
+
+local function unique_ips(start_time, end_time)
+  local ip_keys = redis.call('KEYS', 'unique-ips:*')
+  redis.call('PFMERGE', 'merged_unique-ips', unpack(ip_keys))
+  return redis.call('PFCOUNT', 'merged_unique-ips')
+end
+
+redis.debug("Request count: ", num_requests(0, 10000000))
+redis.debug("IP request count: ", unique_ips(0, 10000000))
data/lib/lua/src/seeds/data_load.lua
ADDED
@@ -0,0 +1,104 @@
+-- Template strings below are replaced with generated
+-- data from the ip_data_generator.rb script
+-- local ipArray = { }
+-- local timestampArray = { }
+-- redis.debug("Timestamp count: ", #timestampArray)
+
+local function get_time_bucket_from_timestamp(unix_time_milliseconds)
+  local function calculate_years_number_of_days(yr)
+    return (yr % 4 == 0 and (yr % 100 ~= 0 or yr % 400 == 0)) and 366 or 365
+  end
+
+  local function get_year_and_day_number(year, days)
+    while days >= calculate_years_number_of_days(year) do
+      days = days - calculate_years_number_of_days(year)
+      year = year + 1
+    end
+    return year, days
+  end
+
+  local function get_month_and_month_day(days, year)
+    local days_in_each_month = {
+      31,
+      (calculate_years_number_of_days(year) == 366 and 29 or 28),
+      31,
+      30,
+      31,
+      30,
+      31,
+      31,
+      30,
+      31,
+      30,
+      31,
+    }
+
+    for month = 1, #days_in_each_month do
+      if days - days_in_each_month[month] <= 0 then
+        return month, days
+      end
+      days = days - days_in_each_month[month]
+    end
+  end
+
+  local unix_time = unix_time_milliseconds / 1000
+  local year = 1970
+  local days = math.ceil(unix_time / 86400)
+  local month = nil
+
+  year, days = get_year_and_day_number(year, days)
+  month, days = get_month_and_month_day(days, year)
+  local hours = math.floor(unix_time / 3600 % 24)
+  -- local minutes, seconds = math.floor(unix_time / 60 % 60), math.floor(unix_time % 60)
+  -- hours = hours > 12 and hours - 12 or hours == 0 and 12 or hours
+  return string.format("%04d-%02d-%02d-%02d", year, month, days, hours)
+end
+
+-- For: Relationship of IP to time of Request (Stream)
+local function get_request_id(timestamp, ip, max_requests)
+  timestamp = timestamp or "*"
+  local request_id = redis.call("XADD", "ip-requests-stream", "MAXLEN", "~", max_requests, timestamp, "ip", ip)
+  return request_id
+end
+
+local function add_to_HLL_request_count(timebucket, request_id)
+  redis.call("PFADD", "unique-requests:" .. timebucket, request_id)
+end
+
+-- Configuration
+local max_requests = 100000
+local max_requests_per_ip = 10000
+
+-- Interior of this for loop is what should go into wafris_core.lua
+for i = 1, #timestampArray do
+  -- Setup
+  local ip = ipArray[math.random(#ipArray)]
+  local timestamp = timestampArray[i]
+
+  local request_id = get_request_id(timestamp, ip, max_requests)
+
+  -- GRAPH DATA COLLECTION
+  local current_timebucket = get_time_bucket_from_timestamp(timestamp)
+  add_to_HLL_request_count(current_timebucket, request_id)
+
+  -- For: Looking up Requests an IP has made (Stream) / time of request
+  local ip_stream_key = "ip-stream:" .. ip
+  local ip_stream_id =
+    redis.call("XADD", ip_stream_key, "MAXLEN", "~", max_requests_per_ip, "*", "request_id", request_id)
+
+  -- For: Precalc of Number of Requests (Key)
+  local requests_count_key = "requests-count:" .. current_timebucket
+  redis.call("INCR", requests_count_key)
+
+  -- For: Precalc of Number of Requests from an IP (Key)
+  local ips_count_bucket_key = "ips-count:" .. ip .. ":" .. current_timebucket
+  redis.call("INCR", ips_count_bucket_key)
+
+  -- For: Precalc of Number of Unique IPs making Requests (HLL)
+  local ips_count_hll_key = "unique-ips:" .. current_timebucket
+  redis.call("PFADD", ips_count_hll_key, ip)
+
+  -- For: Leaderboard of IPs with Request count as score
+  local ip_leaderboard_sset_key = "ip-leader-sset:" .. current_timebucket
+  redis.call("ZINCRBY", ip_leaderboard_sset_key, 1, ip)
+end
data/lib/lua/src/time_bucket.lua
ADDED
@@ -0,0 +1,40 @@
+-- Code was pulled from https://otland.net/threads/how-convert-timestamp-to-date-type.251657/
+-- An alternate solution is https://gist.github.com/markuman/e96d04139cd8acc33604
+local function get_time_bucket_from_timestamp(unix_time_milliseconds)
+  local function calculate_years_number_of_days(yr)
+    return (yr % 4 == 0 and (yr % 100 ~= 0 or yr % 400 == 0)) and 366 or 365
+  end
+
+  local function get_year_and_day_number(year, days)
+    while days >= calculate_years_number_of_days(year) do
+      days = days - calculate_years_number_of_days(year)
+      year = year + 1
+    end
+    return year, days
+  end
+
+  local function get_month_and_month_day(days, year)
+    local days_in_each_month = {
+      31,
+      (calculate_years_number_of_days(year) == 366 and 29 or 28),
+      31, 30, 31,30,31,31,30,31,30,31
+    }
+
+    for month = 1, #days_in_each_month do
+      if days - days_in_each_month[month] <= 0 then return month, days end
+      days = days - days_in_each_month[month]
+    end
+  end
+
+  local unix_time = unix_time_milliseconds / 1000
+  local year = 1970
+  local days = math.ceil(unix_time/86400)
+  local month = nil
+
+  year, days = get_year_and_day_number(year, days)
+  month, days = get_month_and_month_day(days, year)
+  local hours = math.floor(unix_time / 3600 % 24)
+  -- local minutes, seconds = math.floor(unix_time / 60 % 60), math.floor(unix_time % 60)
+  -- hours = hours > 12 and hours - 12 or hours == 0 and 12 or hours
+  return string.format("%04d-%02d-%02d-%02d", year, month, days, hours)
+end
data/lib/wafris/configuration.rb
CHANGED
@@ -28,15 +28,29 @@ module Wafris
       CONNECTION_ERROR
     end
 
-    def
-      @
+    def core_sha
+      @core_sha ||= redis.script(:load, wafris_core)
     end
 
     def wafris_core
+      read_lua_dist("wafris_core")
+    end
+
+    def graph_sha
+      @graph_sha ||= redis.script(:load, wafris_graph)
+    end
+
+    def wafris_graph
+      read_lua_dist("get_graph_data")
+    end
+
+    private
+
+    def read_lua_dist(filename)
       File.read(
         File.join(
           File.dirname(__FILE__),
-
+          "../lua/dist/#{filename}.lua"
         )
       )
     end
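Note: the new core_sha and graph_sha readers load the dist scripts into Redis once and memoize the returned SHA1 so callers can use EVALSHA instead of re-sending the script body on every request. A minimal sketch of the same SCRIPT LOAD / EVALSHA pattern outside the gem (argument values are illustrative):

  require "redis"

  redis = Redis.new
  sha = redis.script(:load, File.read("lib/lua/dist/wafris_core.lua"))  # returns the script's SHA1
  redis.evalsha(sha, argv: ["192.168.1.1", 3232235777, (Time.now.to_f * 1000).to_i])
  # => "Allowed", "Blocked", or "Not found"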
data/lib/wafris/version.rb
CHANGED
data/lib/wafris.rb
CHANGED
@@ -31,14 +31,13 @@ module Wafris
 
     def allow_request?(request)
       configuration.connection_pool.with do |conn|
-        time = Time.now
+        time = Time.now.to_f * 1000
         status = conn.evalsha(
-          configuration.
+          configuration.core_sha,
           argv: [
             request.ip,
             IPAddr.new(request.ip).to_i,
-            time.to_i
-            "all-ips:#{time.strftime('%Y-%m-%d')}:#{time.hour}"
+            time.to_i
           ]
         )
 
@@ -49,5 +48,61 @@ module Wafris
         end
       end
     end
+
+    def add_block(ip)
+      configuration.connection_pool.with do |conn|
+        conn.zadd(
+          'blocked_ranges',
+          IPAddr.new(ip).to_i,
+          ip
+        )
+      end
+    end
+
+    def remove_block(ip)
+      configuration.connection_pool.with do |conn|
+        conn.zrem(
+          'blocked_ranges',
+          ip
+        )
+      end
+    end
+
+    def request_buckets(_now)
+      graph_data = []
+      configuration.connection_pool.with do |conn|
+        time = Time.now.to_f * 1000
+        graph_data = conn.evalsha(
+          configuration.graph_sha,
+          argv: [
+            time.to_i
+          ]
+        )
+      end
+
+      return graph_data
+    end
+
+    def ips_with_num_requests
+      configuration.connection_pool.with do |conn|
+        return conn.zunion(
+          *leader_timebuckets,
+          0, -1, with_scores: true
+        )
+      end
+    end
+
+    private
+
+    def leader_timebuckets
+      timebuckets = []
+
+      time = Time.now.utc
+      24.times do |hours|
+        timebuckets << "ip-leader-sset:#{(time - 60 * 60 * hours).strftime("%Y-%m-%d-%H")}"
+      end
+
+      return timebuckets
+    end
   end
 end
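Note: the added methods wrap the blocklist sorted set and the graphing script. Assuming they are exposed as module methods on Wafris (the enclosing definitions are outside this hunk), a console session might look like the hypothetical sketch below; actual return values depend on what is in Redis.

  Wafris.add_block("203.0.113.7")     # ZADD blocked_ranges <ip as integer> "203.0.113.7"
  Wafris.request_buckets(Time.now)    # flat [bucket, count, ...] reply from get_graph_data.lua
  Wafris.ips_with_num_requests        # ZUNION of the last 24 hourly ip-leader-sset keys, with scores
  Wafris.remove_block("203.0.113.7")  # ZREM blocked_ranges "203.0.113.7"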
metadata
CHANGED
@@ -1,15 +1,15 @@
 --- !ruby/object:Gem::Specification
 name: wafris
 version: !ruby/object:Gem::Version
-  version: 0.0.1
+  version: 0.1.2
 platform: ruby
 authors:
 - Micahel Buckbee
 - Ryan Castillo
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-
+date: 2023-04-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: connection_pool
@@ -179,23 +179,28 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: 13.0.6
-description:
-email:
+description:
+email:
 executables: []
 extensions: []
 extra_rdoc_files: []
 files:
+- lib/lua/dist/get_graph_data.lua
+- lib/lua/dist/wafris_core.lua
+- lib/lua/src/get_time_buckets.lua
+- lib/lua/src/queries.lua
+- lib/lua/src/seeds/data_load.lua
+- lib/lua/src/time_bucket.lua
 - lib/wafris.rb
 - lib/wafris/configuration.rb
 - lib/wafris/middleware.rb
 - lib/wafris/railtie.rb
 - lib/wafris/version.rb
-- lib/wafris/wafris_core.lua
-homepage:
+homepage:
 licenses:
--
+- Elastic-2.0
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -211,7 +216,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubygems_version: 3.3.26
-signing_key:
+signing_key:
 specification_version: 4
 summary: Web application firewall for Rack apps
 test_files: []
data/lib/wafris/wafris_core.lua
DELETED
@@ -1,42 +0,0 @@
-local LAST_REQUESTS_TIME = 'last_requests_time'
-local TWENTY_FOUR_HOURS = 86400
-
-local ip = ARGV[1]
-local ip_to_decimal = ARGV[2]
-local unix_time = ARGV[3]
-local expire_time = unix_time - TWENTY_FOUR_HOURS
-local ip_request_string = "ip-requests-" .. ip
-local hour_bucket = ARGV[4]
-
--- LEADERBOARD DATA COLLECTION
--- Add IP to last_requests_time key by integer timestamp
--- ZADD last_requets_time 1661356145 '192.168.1.1'
-redis.call('ZADD', LAST_REQUESTS_TIME, unix_time, ip)
--- Remove IP from last_requests_time if it has been there for 24 hours
--- ZREMRANGEBYSCORE last_requests_time 0 (1661356145 - 86400)
-redis.call('ZREMRANGEBYSCORE', LAST_REQUESTS_TIME, 0, expire_time)
--- Add IP to ip-requests-<ip> for leaderboard tracking
--- LPUSH ip-requests-192.168.1.1 1661356145
-redis.call('LPUSH', ip_request_string, unix_time)
--- Have the key expire in 24 hours
--- EXPIRE ip-requests-192.168.1.1 86400
-redis.call('EXPIRE', ip_request_string, TWENTY_FOUR_HOURS)
-
--- GRAPH DATA COLLECTION
--- Increment counter for hourly buckets
--- INC all-ips:2022-10-01:12
-redis.call('INCR', hour_bucket)
--- EXPIRE all-ips:2022-10-01:12 86400
-redis.call('EXPIRE', hour_bucket, TWENTY_FOUR_HOURS)
-
--- BLOCKING LOGIC
--- Safelist Range Check
-if next(redis.call('ZRANGEBYSCORE', 'allowed_ranges', ip_to_decimal, "+inf", "LIMIT", 0, 1)) then
-  return 'Allowed'
--- Blocklist Range Check
-elseif next(redis.call('ZRANGEBYSCORE', 'blocked_ranges', ip_to_decimal, "+inf", "LIMIT", 0, 1)) then
-  return 'Blocked'
--- No Matches
-else
-  return 'Not found'
-end