logstash-filter-threats_classifier 1.0.4
This diff shows the content of publicly available package versions as released to the supported public registries. It is provided for informational purposes only.
- checksums.yaml +7 -0
- data/CHANGELOG.md +2 -0
- data/CONTRIBUTORS +11 -0
- data/Gemfile +2 -0
- data/LICENSE +11 -0
- data/README.md +64 -0
- data/lib/logstash/filters/center-client.rb +213 -0
- data/lib/logstash/filters/classification-request.rb +17 -0
- data/lib/logstash/filters/classifier-cache.rb +51 -0
- data/lib/logstash/filters/classifier.rb +335 -0
- data/lib/logstash/filters/cognito-client.rb +48 -0
- data/lib/logstash/filters/elastic-db.rb +128 -0
- data/lib/logstash/filters/field-handler.rb +127 -0
- data/lib/logstash/filters/local-classifier.rb +94 -0
- data/lib/logstash/filters/plugin-logic.rb +166 -0
- data/lib/logstash/filters/response.rb +36 -0
- data/lib/logstash/filters/threats_classifier.rb +230 -0
- data/lib/logstash/filters/utils.rb +46 -0
- data/logstash-filter-threats_classifier.gemspec +38 -0
- data/spec/filters/bulk-processor_spec.rb +92 -0
- data/spec/filters/classifier-cache_spec.rb +44 -0
- data/spec/filters/classifier_spec.rb +78 -0
- data/spec/filters/cognito-client_spec.rb +20 -0
- data/spec/filters/field-handler_spec.rb +101 -0
- data/spec/filters/local-classifier_spec.rb +46 -0
- data/spec/filters/plugin-logic_spec.rb +127 -0
- data/spec/filters/threats-classifier_spec.rb +103 -0
- data/spec/filters/utils_spec.rb +74 -0
- data/spec/spec_helper.rb +2 -0
- metadata +256 -0
data/lib/logstash/filters/cognito-client.rb
@@ -0,0 +1,48 @@
```ruby
require 'aws-sdk'

module LogStash
  module Filters
    module Empow
      # Thin wrapper around AWS Cognito, used to obtain an id token for the
      # classification service.
      class CognitoClient
        include LogStash::Util::Loggable

        def initialize(username, password, aws_region_name, aws_client_id)
          @logger = self.logger

          @logger.debug("aws region: #{aws_region_name}")
          @logger.debug("aws client id: #{aws_client_id}")
          @logger.debug("cognito username: #{username}")

          @username = username
          @password = password
          @aws_region_name = aws_region_name
          @aws_client_id = aws_client_id

          # Placeholder credentials: USER_PASSWORD_AUTH requests are not signed,
          # but the SDK still expects credentials to be configured.
          Aws.config.update({
            region: @aws_region_name,
            credentials: Aws::Credentials.new('aaaa', 'aaaa')
          })

          @client = Aws::CognitoIdentityProvider::Client.new
        end

        def authenticate
          resp = @client.initiate_auth({
            auth_flow: "USER_PASSWORD_AUTH",
            auth_parameters: {
              'USERNAME': @username,
              'PASSWORD': @password,
            },
            client_id: @aws_client_id,
          })

          id_token = resp.authentication_result.id_token

          return id_token
        end
      end
    end
  end
end
```
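For context, a minimal usage sketch; the username, region, and app client id below are hypothetical placeholders, not values shipped with the plugin:

```ruby
# Hypothetical configuration values; a real deployment supplies its own
# Cognito username, password, AWS region, and app client id.
client = LogStash::Filters::Empow::CognitoClient.new(
  'analyst@example.com',    # username
  'secret-password',        # password
  'us-east-2',              # aws_region_name
  'example-app-client-id'   # aws_client_id
)

id_token = client.authenticate # raw Cognito id token, e.g. for an Authorization header
```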
data/lib/logstash/filters/elastic-db.rb
@@ -0,0 +1,128 @@
```ruby
require 'elasticsearch'
require 'hashie'

module LogStash; module Filters; module Empow;

class PersistentKeyValueDB
  #include LogStash::Util::Loggable

  def initialize(hosts, username, password, index)
    #@logger ||= self.logger

    #@logger.debug("opening the local classification db")

    @elastic ||= Elasticsearch::Client.new(:hosts => hosts)
    @index = index

    create_index index
  end

  def create_index(index)
    return if @elastic.indices.exists? index: index

    @elastic.indices.create index: index, body: {
      mappings: {
        _doc: {
          properties: {
            product_type: { type: 'keyword' },
            product: { type: 'keyword' },
            term_key: { type: 'keyword' },
            classification: { enabled: false }
          }
        }
      }
    }
  end

  def query(product_type, product, term)
    #@logger.debug("querying local classification db")

    # fix nil product
    product = 'nil_safe_product_key' if product.nil?

    # Match the product type, then either an exact (term, product) entry or a
    # product-agnostic entry carrying the same term and no product field.
    response = @elastic.search index: @index, type: '_doc', body: {
      query: {
        bool: {
          must: [
            { term: { product_type: product_type } },
            {
              bool: {
                should: [
                  {
                    bool: {
                      must: [
                        { term: { term_key: term } },
                        { term: { product: product } }
                      ]
                    }
                  },
                  {
                    bool: {
                      must: { term: { term_key: term } },
                      must_not: { exists: { field: 'product' } }
                    }
                  }
                ]
              }
            }
          ]
        }
      }
    }

    mash = Hashie::Mash.new response

    return nil if mash.hits.hits.first.nil?

    return mash.hits.hits.first._source.classification
  end

  def save(doc_id, product_type, product, term, classification)
    #@logger.debug("saving key to local classification db")

    @elastic.index index: @index, type: '_doc', id: doc_id, body: {
      product_type: product_type,
      product: product,
      term_key: term,
      classification: classification
    }
  end

  def close
    #@logger.debug("closing the local classification db")
  end
end

end; end; end

=begin
Scratch test code: note these save calls do not match the current
5-argument signature of save.

db = LogStash::Filters::Empow::PersistentKeyValueDB.new('192.168.3.24:9200', 'user', 'pass', 'key-val-8')

db.save("am", "p3", "dummy signature", "v1")
db.save("am", "p3", "dummy signature 2", "v1")

db.save("am", "p1", "dummy", "v1")
db.save("am", nil, "dummy", "v1")
p db.query "am", "p1", "h1"
db.save("am", "p1", "h1", "v1")
p db.query "am", "p1", "h1"
p db.query "am", "p1", "h2"
p db.query "am", "no-such-product", "h1"
p db.query "am", nil, "h1"
p db.query "am", nil, "dummy"

p db.query "am", "p3", "dummy signature 2"
=end
```
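The query method prefers an exact (term, product) entry but falls back to a product-agnostic entry with the same term and no product field. A minimal sketch, assuming a reachable Elasticsearch node; the host, index name, and keys are illustrative:

```ruby
# username/password are accepted by the constructor but the client above
# connects with hosts only.
db = LogStash::Filters::Empow::PersistentKeyValueDB.new(
  'localhost:9200', nil, nil, 'empow-classification-db'
)

db.save('IDS-snort-sid-1-2019', 'IDS', 'snort', 'sid-1-2019', { 'category' => 'recon' })

p db.query('IDS', 'snort', 'sid-1-2019')     # product-specific entry
p db.query('IDS', 'other-ids', 'sid-1-2019') # nil unless a product-agnostic entry exists
```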
data/lib/logstash/filters/field-handler.rb
@@ -0,0 +1,127 @@
```ruby
require_relative "classification-request"
require_relative "utils"

class LogStash::Filters::Empow::FieldHandler

  IDS = "IDS"
  AM = "AM"
  CUSTOM = "CUSTOM"

  public
  def initialize(product_type_field, product_name_field, threat_field, src_internal_field, dst_internal_field)
    @product_type_field = product_type_field
    @product_name_field = product_name_field

    if threat_field.nil? || threat_field.strip.length == 0
      raise ArgumentError, 'threat field cannot be empty'
    end

    @threat_field = '[' + threat_field + ']'

    @ids_signature_field = @threat_field + '[signature]'
    @malware_name_field = @threat_field + '[malware_name]'

    @src_internal_field = @threat_field + '[' + src_internal_field + ']'
    @dst_internal_field = @threat_field + '[' + dst_internal_field + ']'

    @blacklisted_fields = [src_internal_field, dst_internal_field]

    @hash_field = @threat_field + '[hash]'
  end

  public
  def event_to_classification_request(event)
    product_type = event.get(@product_type_field)
    product = event.get(@product_name_field)
    is_src_internal = event.get(@src_internal_field)
    is_dst_internal = event.get(@dst_internal_field)

    if product_type.nil?
      LogStash::Filters::Empow::Utils.add_error(event, "missing_product_type")
      return nil
    end

    is_src_internal = LogStash::Filters::Empow::Utils.convert_to_boolean(is_src_internal)

    if is_src_internal.nil?
      is_src_internal = true
      LogStash::Filters::Empow::Utils.add_warn(event, 'src_internal_wrong_value')
    end

    is_dst_internal = LogStash::Filters::Empow::Utils.convert_to_boolean(is_dst_internal)

    if is_dst_internal.nil?
      is_dst_internal = true
      LogStash::Filters::Empow::Utils.add_warn(event, 'dst_internal_wrong_value')
    end

    case product_type
    when IDS
      return nil if !is_valid_ids_request(product, event)
    when AM
      return nil if !is_valid_antimalware_request(product, event)
    else # others are resolved in the cloud
      return nil if !is_valid_product(product, event)
    end

    original_threat = event.get(@threat_field)

    threat = copy_threat(original_threat)

    if threat.nil?
      LogStash::Filters::Empow::Utils.add_error(event, "missing_threat_field")
      return nil
    end

    return LogStash::Filters::Empow::ClassificationRequest.new(product_type, product, threat, is_src_internal, is_dst_internal)
  end

  private
  def copy_threat(threat)
    return nil if threat.nil? or threat.size == 0

    res = Hash.new

    threat.each do |k, v|
      next if @blacklisted_fields.include?(k)
      res[k] = v
    end

    return res
  end

  private
  def is_valid_ids_request(product, event)
    sid = event.get(@ids_signature_field)

    if sid.nil? || sid.strip.length == 0
      LogStash::Filters::Empow::Utils.add_error(event, "missing_ids_signature")
      return false
    end

    return is_valid_product(product, event)
  end

  private
  def is_valid_product(product, event)
    if product.nil? or product.strip.length == 0
      LogStash::Filters::Empow::Utils.add_error(event, "missing_product_name")
      return false
    end

    return true
  end

  private
  def is_valid_antimalware_request(product, event)
    malware_name = event.get(@malware_name_field)
    malware_hash = event.get(@hash_field)

    if malware_hash.nil? and (malware_name.nil? or product.nil?)
      LogStash::Filters::Empow::Utils.add_error(event, "anti_malware_missing_hash_or_name")
      return false
    end

    return true
  end
end
```
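A sketch of how an event flows through the handler, assuming a running Logstash; the field names below mirror typical plugin configuration and are illustrative:

```ruby
handler = LogStash::Filters::Empow::FieldHandler.new(
  'product_type', 'product_name', 'threat', 'is_src_internal', 'is_dst_internal'
)

event = LogStash::Event.new(
  'product_type' => 'IDS',
  'product_name' => 'snort',
  'threat' => { 'signature' => '1:2019', 'is_src_internal' => 'true' }
)

# Returns a ClassificationRequest whose threat copy has the src/dst-internal
# keys stripped, or nil after recording an error on the event.
request = handler.event_to_classification_request(event)
```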
data/lib/logstash/filters/local-classifier.rb
@@ -0,0 +1,94 @@
```ruby
require "concurrent"
require_relative 'classifier-cache'

module LogStash; module Filters; module Empow;

# Two-tier local lookup: an in-memory TTL cache backed by an optional
# persistent db, which can be queried inline or on a worker thread.
class LocalClassifier
  include LogStash::Util::Loggable

  def initialize(cache_size, ttl, async_local_db, local_db)
    @logger ||= self.logger

    @logger.debug("initializing in memory cache")
    @logger.debug("cache size #{cache_size}")
    @logger.debug("cache ttl #{ttl}")

    @cache ||= LogStash::Filters::Empow::ClassifierCache.new(cache_size, ttl)
    @ttl = ttl

    @local_db ||= local_db

    @local_db_workers ||= Concurrent::ThreadPoolExecutor.new(min_threads: 1, max_threads: 1)
    @async_local_db ||= async_local_db
  end

  def close
    @logger.debug("shutting down local classifier")

    @local_db_workers.shutdown if !@local_db.nil?

    @local_db_workers.wait_for_termination(1)
    @logger.debug("local classifier shut down")
  end

  def classify(key)
    if !key.nil?
      cached_result = @cache.classify(key)
      return cached_result if !cached_result.nil?
    end

    return classify_using_local_database(key)
  end

  def add_to_cache(key, val, expiration_time)
    return if key.nil?

    @logger.debug? and @logger.info("adding #{key} to cache")

    @cache.put(key, val, expiration_time)
  end

  def save_to_cache_and_db(key, val, expiration_time)
    return if key.nil?

    @logger.debug? and @logger.info("adding #{key} to the local db and cache")

    product_type = key[:product_type]
    product = key[:product]
    term = key[:term]

    doc_id = "#{product_type}-#{product}-#{term}"

    @local_db.save(doc_id, product_type, product, term, val) if !@local_db.nil?
    add_to_cache(key, val, expiration_time)
  end

  def read_from_local_database(key)
    res = @local_db.query(key[:product_type], key[:product], key[:term])

    if !res.nil?
      @logger.debug("adding result from db to local cache")
      add_to_cache(key, res, Time.now + @ttl)
    end

    return res
  end

  def read_from_local_database_async(key)
    @local_db_workers.post do
      read_from_local_database(key)
    end
  end

  def classify_using_local_database(key)
    return nil if @local_db.nil? # if a local db wasn't configured

    # deferred lookups return nil now; a later call can hit the cache
    if @async_local_db
      read_from_local_database_async(key)
      return nil
    end

    return read_from_local_database(key)
  end
end

end; end; end
```
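A sketch of the two-tier lookup, with the key shape taken from save_to_cache_and_db; the cache size and TTL are illustrative:

```ruby
# db is a PersistentKeyValueDB as above, or nil to run cache-only.
classifier = LogStash::Filters::Empow::LocalClassifier.new(
  10_000, # cache_size
  3600,   # ttl, seconds
  false,  # async_local_db: false queries the db inline, true defers to a worker
  db
)

key = { :product_type => 'IDS', :product => 'snort', :term => 'sid-1-2019' }

res = classifier.classify(key) # cache hit, else local db (nil when deferred or unknown)
```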
data/lib/logstash/filters/plugin-logic.rb
@@ -0,0 +1,166 @@
```ruby
require 'time'
require "concurrent"
require_relative "classification-request"
require_relative "field-handler"
require_relative 'response'
require_relative 'utils'

module LogStash; module Filters; module Empow;

# Orchestrates classification: events that resolve immediately are returned,
# pending ones are parked (cancelled) and re-examined on flush.
class PluginLogic
  include LogStash::Util::Loggable

  def initialize(classifier, field_handler, max_parking_time, max_parked_events, tag_on_timeout, tag_on_error)
    @logger ||= self.logger
    #@logger.info("initializing classifier")

    @field_handler = field_handler

    @max_parking_time = max_parking_time
    @max_parked_events = max_parked_events
    @tag_on_timeout = tag_on_timeout
    @tag_on_error = tag_on_error

    @classifier = classifier
    @parked_events = Concurrent::Array.new
  end

  def close
    @classifier.close
  end

  def classify(event)
    request = @field_handler.event_to_classification_request(event)

    if request.nil?
      @tag_on_error.each { |tag| event.tag(tag) }
      return event
    end

    if classify_event(request, event)
      return event
    else
      park(event)

      # over capacity: evict (and release) the oldest parked event
      if @parked_events.length > @max_parked_events
        tuple = @parked_events.shift

        if !tuple.nil?
          unparked_event = tuple[:event]
          unparked_event.uncancel
          return unparked_event
        end
      end

      return nil
    end
  end

  # tag and release parked events
  def flush(options = {})
    events_to_flush = []

    if options[:final] # indicating a "final flush" special event, flush everything
      while tuple = @parked_events.shift do
        events_to_flush << tuple[:event]
      end
    else
      @parked_events.delete_if do |tuple|
        process_parked_event(tuple, events_to_flush)
      end
    end

    return events_to_flush
  end

  private def process_parked_event(tuple, events_to_flush)
    event = tuple[:event]
    request = @field_handler.event_to_classification_request(event)

    begin
      res = @classifier.classify(request)

      if parking_time_expired(tuple) or is_valid_classification(res)
        tag_event(res, event)

        # if we're releasing this event based on time expiration, tag it with timeout
        if res.nil? or !res.is_final
          @tag_on_timeout.each { |tag| event.tag(tag) }
        end

        events_to_flush << event
        return true
      end
    rescue StandardError => e
      @logger.error("an error occurred while processing an event, the event was flushed back to the stream", :request => request, :backtrace => e.backtrace)
      return true # so that this event will be flushed out of the plugin
    end

    return false
  end

  private
  def is_unauthorized(classification)
    return (!classification.nil? and classification.kind_of?(LogStash::Filters::Empow::UnauthorizedReponse))
  end

  private
  def classify_event(request, event)
    res = @classifier.classify(request)

    if is_valid_classification(res)
      tag_event(res, event)
      return true
    end

    return false
  end

  private
  def is_valid_classification(classification)
    return (!classification.nil? and classification.is_final())
  end

  private
  def tag_event(classification, event)
    return if classification.nil?

    response_body = classification.response

    @logger.debug("classification response", :classification => response_body)

    response = response_body["response"]

    if !response.nil? && response.size > 0
      response.each do |k, v|
        event.set("[empow_classification_response][#{k}]", v)
      end
    end

    if !classification.is_successful()
      @tag_on_error.each { |tag| event.tag(tag) }

      LogStash::Filters::Empow::Utils.add_error(event, response_body) if !response_body.nil?
    end
  end

  private
  def park(event)
    tuple = {}
    tuple[:event] = event
    tuple[:time] = Time.now

    @parked_events << tuple

    event.cancel # don't stream this event just yet ...
  end

  private
  def parking_time_expired(tuple)
    return (Time.now - tuple[:time]) > @max_parking_time
  end
end

end; end; end
```
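A sketch of the parking cycle, given a classifier and field handler as above; the timeout and tag names are illustrative, not the plugin defaults. classify returns the event when a final classification is available, otherwise it parks the event, and the periodic flush re-examines parked events until they resolve or max_parking_time passes:

```ruby
logic = LogStash::Filters::Empow::PluginLogic.new(
  classifier, field_handler,
  10,                  # max_parking_time, seconds
  5_000,               # max_parked_events
  ['empow_timeout'],   # tag_on_timeout
  ['empow_error']      # tag_on_error
)

result  = logic.classify(event)       # the event, an evicted older event, or nil if parked
flushed = logic.flush                 # periodic flush: events that resolved or timed out
all     = logic.flush(:final => true) # shutdown: release everything still parked
```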