logstash-filter-sig 0.9.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/CHANGELOG.md +2 -0
- data/CONTRIBUTORS +11 -0
- data/DEVELOPER.md +2 -0
- data/Gemfile +2 -0
- data/LICENSE +13 -0
- data/NOTICE.TXT +5 -0
- data/README.md +675 -0
- data/lib/logstash/filters/sig.rb +2511 -0
- data/logstash-filter-sig.gemspec +24 -0
- data/spec/filters/sig_spec.rb +20 -0
- data/spec/spec_helper.rb +2 -0
- metadata +106 -0
@@ -0,0 +1,2511 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
# Filter "SIG" analyze signature regexp with condition
|
3
|
+
# POC on event normalized -- experimental version
|
4
|
+
# Contact: Lionel PRAT (lionel.prat9@gmail.com)
|
5
|
+
|
6
|
+
require "logstash/filters/base"
|
7
|
+
require "logstash/namespace"
|
8
|
+
require "json"
|
9
|
+
require "simhash"
|
10
|
+
require 'digest'
|
11
|
+
require "openssl"
|
12
|
+
require 'ipaddr'
|
13
|
+
require 'time'
|
14
|
+
|
15
|
+
#TODO fix fingerprint must be used when SIG/REF/IOC detected
|
16
|
+
#plugin multi function:
|
17
|
+
# - signature
|
18
|
+
# - IOC
|
19
|
+
# - ANOMALIE
|
20
|
+
# - new value in time or no time
|
21
|
+
# - fingerprint simhash
|
22
|
+
# - filter false positive
|
23
|
+
# - first drop list on fingerprint or field -> regexp (ex: host & domain)
|
24
|
+
# Optimiz
|
25
|
+
|
26
|
+
#TODO: SIG add tag on sig for FIR
|
27
|
+
# Monkey-patch on Hash: recursive, non-destructive merge.
# When both sides hold a Hash for the same key, the two sub-hashes are merged
# recursively; otherwise the value from +second+ wins. Returns a new Hash and
# leaves the receiver untouched.
class ::Hash
  def deep_merge(second)
    resolver = lambda do |_key, left, right|
      if left.is_a?(Hash) && right.is_a?(Hash)
        left.merge(right, &resolver)
      else
        right
      end
    end
    merge(second, &resolver)
  end
end
|
33
|
+
|
34
|
+
class LogStash::Filters::Sig < LogStash::Filters::Base
|
35
|
+
config_name "sig"
|
36
|
+
milestone 1
|
37
|
+
############PLUGIN LOGSTASH SIG -- lionel.prat9@gmail.com #############
|
38
|
+
############MULTI FUNCTIONNALITY IN ORDER CALL########################
|
39
|
+
###### DROP_FIRST[FIELD(regexp_list)]->NEW_VALUE[time/notime]->BL->SIG[FIELD_IOC(misp_extract),RULES(list),FALSE_POSITIVE(drop),ANOMALIE(db_reference),DROP_END[SIMHASH(match_list)],FREQ(db_frequence -> create new event if alert) ########
|
40
|
+
###### SIG add special_tag if match #######
|
41
|
+
############### CONFIG DROP SIMPLE FIRST & FINGERPRINT SIMHASH ###################
|
42
|
+
## DESCRIPTION: use for drop noise without risk with very simple rule, if nothing detected then create fingerprint simhash, thus check if fingerprint content in db fingerprint false positive to drop.
|
43
|
+
## Fingerprint hash can use by active add information (scan active, whois, ssl check, ...) for avoid to verify multi time for same alert.
|
44
|
+
## EXAMPLE: { "domain" : "^google.(com|fr)$"}
|
45
|
+
#disable by field in event
|
46
|
+
config :no_check, :validate => :string, :default => "sig_no_apply_all"
|
47
|
+
#disable functions
|
48
|
+
config :disable_drop, :validate => :boolean, :default => false
|
49
|
+
config :disable_enr, :validate => :boolean, :default => false
|
50
|
+
config :disable_fp, :validate => :boolean, :default => false
|
51
|
+
config :disable_nv, :validate => :boolean, :default => false
|
52
|
+
config :disable_bl, :validate => :boolean, :default => false
|
53
|
+
config :disable_ioc, :validate => :boolean, :default => false
|
54
|
+
config :disable_sig, :validate => :boolean, :default => false
|
55
|
+
config :disable_ref, :validate => :boolean, :default => false
|
56
|
+
config :disable_freq, :validate => :boolean, :default => false
|
57
|
+
config :disable_note, :validate => :boolean, :default => false
|
58
|
+
|
59
|
+
#if field exist in event then no apply drop & fingerprint
|
60
|
+
config :noapply_sig_dropfp, :validate => :string, :default => "sig_no_apply_dropfp"
|
61
|
+
config :noapply_sig_dropdb, :validate => :string, :default => "sig_no_apply_dropdb"
|
62
|
+
|
63
|
+
|
64
|
+
#CONF FINGERPRINT - format: json {"type": {fields: [a,b,c], delay: 3600, hashbit: 16},}
|
65
|
+
# create simhash (on hashbit) with fields [a,b,c] for delay 3600 . The delay is used for tag first alert or complementary information. Use delay by exemple if you use dhcp and ip in fingerprint...
|
66
|
+
config :conf_fp, :validate => :path, :default => "/etc/logstash/db/fingerprint_conf.json"
|
67
|
+
|
68
|
+
#DROP RULES DB - format: json {"field": "regexp"} - don't use same field name more time
|
69
|
+
config :db_drop, :validate => :path, :default => "/etc/logstash/db/drop-db.json"
|
70
|
+
#DROP FINGERPRINT DB - format: json {"fingerprint": "raison of fp"}
|
71
|
+
config :db_dropfp, :validate => :path, :default => "/etc/logstash/db/drop-fp.json"
|
72
|
+
|
73
|
+
#Name of field for select rules fp - exemple event['type']="squid" -- in fp_conf.sig: #{"squid":{"fields":["src_ip","dst_host","dst_ip","uri_proto","sig_detected_name","ioc_detected","tags"],"hashbit":8,"delay":3600}}
|
74
|
+
# |--> ^^^^^ ^^^^^
|
75
|
+
config :select_fp, :validate => :string, :default => "type"
|
76
|
+
#Name of field to save fingerprint
|
77
|
+
config :target_fp, :validate => :string, :default => "fingerprint"
|
78
|
+
|
79
|
+
# add tage name if not match
|
80
|
+
# tag mark for difference first fingerprint see, another fingerprint identical is tagger with tag_name_after (complementary information)
|
81
|
+
config :tag_name_first, :validate => :string, :default => "first_alert"
|
82
|
+
config :tag_name_after, :validate => :string, :default => "info_comp"
|
83
|
+
#Select field for write tag information fp: first or complementary
|
84
|
+
config :target_tag_fp, :validate => :string, :default => "tags"
|
85
|
+
|
86
|
+
#interval to refresh conf fingerprint & db
|
87
|
+
config :refresh_interval_conffp, :validate => :number, :default => 3600
|
88
|
+
#interval to refresh database rules & fingerprint
|
89
|
+
config :refresh_interval_dropdb, :validate => :number, :default => 3600
|
90
|
+
|
91
|
+
############### CONFIG ENRICHMENT ###################
|
92
|
+
#Description: add info in event by enrichment active or passive
|
93
|
+
|
94
|
+
#File config - format: json
|
95
|
+
config :conf_enr, :validate => :path, :default => "/etc/logstash/db/enr.json"
|
96
|
+
|
97
|
+
#if field exist in event then no apply new value tag
|
98
|
+
config :noapply_sig_enr, :validate => :string, :default => "sig_no_apply_enr"
|
99
|
+
|
100
|
+
#interval to refresh conf new_value
|
101
|
+
config :refresh_interval_enr, :validate => :number, :default => 3600
|
102
|
+
|
103
|
+
#field name where you add request for server add information active
|
104
|
+
config :field_enr, :validate => :string, :default => "request_enrichiment"
|
105
|
+
|
106
|
+
#enr_tag_response used for identify who is origin of resquest, and send response to good server
|
107
|
+
config :enr_tag_response, :validate => :string, :required => :true, :default => "ENR_RETURN_TO_JOHN"
|
108
|
+
|
109
|
+
#When information not in db and you must to have, send event to another server (contacted by lumberjack) who ask info and add in @field_enr
|
110
|
+
# after add info in field, resend to server origin by lumberjack
|
111
|
+
|
112
|
+
############### CONFIG NEW VALUE ###################
|
113
|
+
#Description: check by rule if event is new and tag event
|
114
|
+
#Exemple: verify on field domain new value, if field domain in event content new value then add in db and tag event
|
115
|
+
|
116
|
+
#File config - format: json {"rules": ["fieldy","fieldx"]}
|
117
|
+
config :conf_nv, :validate => :path, :default => "/etc/logstash/db/new.json"
|
118
|
+
|
119
|
+
#File save db - format: json
|
120
|
+
config :db_nv, :validate => :path, :default => "/etc/logstash/db/new-save.json"
|
121
|
+
|
122
|
+
#if field exist in event then no apply new value tag
|
123
|
+
config :noapply_sig_nv, :validate => :string, :default => "sig_no_apply_nv"
|
124
|
+
|
125
|
+
#interval to refresh conf new_value
|
126
|
+
config :refresh_interval_confnv, :validate => :number, :default => 3600
|
127
|
+
|
128
|
+
#interval to save file db new value
|
129
|
+
config :save_interval_dbnv, :validate => :number, :default => 3600
|
130
|
+
|
131
|
+
#Name of prefix field to save new_value tag (prefix+field_name)
|
132
|
+
config :target_nv, :validate => :string, :default => "new_value_"
|
133
|
+
|
134
|
+
############### CONFIG BL REPUTATION ###################
|
135
|
+
#Description: check if field value is present in DB REPUTATION
|
136
|
+
#JUST IP for first time
|
137
|
+
|
138
|
+
#BL REPUT config - format: json {fieldx: {dbs: [file_name,...], note: X, id: X, category: "malware"}}
|
139
|
+
config :conf_bl, :validate => :path, :default => "/etc/logstash/db/conf_bl.json"
|
140
|
+
|
141
|
+
#File contains BL REPUTATION
|
142
|
+
#https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/firehol_level1.netset
|
143
|
+
#https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/firehol_level2.netset
|
144
|
+
#https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/firehol_level3.netset
|
145
|
+
#https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/firehol_level4.netset
|
146
|
+
#https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/firehol_webserver.netset
|
147
|
+
#https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/firehol_webclient.netset
|
148
|
+
#https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/firehol_abusers_30d.netset
|
149
|
+
#https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/firehol_anonymous.netset
|
150
|
+
#https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/firehol_proxies.netset
|
151
|
+
config :file_bl, :validate => :array, :default => ["/etc/logstash/db/firehol_level1.netset","/etc/logstash/db/firehol_level2.netset","/etc/logstash/db/firehol_level3.netset","/etc/logstash/db/firehol_level4.netset","/etc/logstash/db/firehol_webserver.netset","/etc/logstash/db/firehol_webclient.netset","/etc/logstash/db/firehol_abusers_30d.netset","/etc/logstash/db/firehol_anonymous.netset","/etc/logstash/db/firehol_proxies.netset"]
|
152
|
+
|
153
|
+
#if field exist in event then no apply new value tag
|
154
|
+
config :noapply_sig_bl, :validate => :string, :default => "sig_no_apply_bl"
|
155
|
+
|
156
|
+
#interval to refresh conf new_value
|
157
|
+
config :refresh_interval_confbl, :validate => :number, :default => 3600
|
158
|
+
|
159
|
+
#field where add information on category detected
|
160
|
+
config :targetname_bl, :validate => :string, :default => "bl_detected_category"
|
161
|
+
|
162
|
+
############### CONFIG SIG BASE ###################
|
163
|
+
#numeric test: R[1]['champs0']['numope']['egal'|'inf'|'sup'|'diff']=numeric_value
|
164
|
+
#format file SIG JSON example: {"rules":[{"field2":{"motif":["mot1","mot2"],"note":5, "name":"test", "type":1, "id": 1, "extract": {'field': 'ioc_field'}},"field3":{"false":{}},"field1":{"regexp":["/update\\\?id\=[-0-9A-Fa-f]{8}","/[0-9A-F]{8}/[0-9A-F]{8}/[0-9A-F]{8}"]}},{"fieldx6":{},"fieldx5":{},"fieldx4":{}}]}
|
165
|
+
#R[1]['champs0']['regexp']=[]
|
166
|
+
#R[1]['champs1']['notregexp']=[]
|
167
|
+
#R[1]['champs2']['motif']=[]
|
168
|
+
#R[1]['champs0']['date']['egal'|'inf'|'sup'|'diff']=x -> (time.now)-x ope value field
|
169
|
+
#R[1]['champs0']['hour']['egal'|'inf'|'sup'|'diff']=19 [interger]
|
170
|
+
#R[1]['champs0']['day']['egal'|'inf'|'sup'|'diff']=0 (0==sunday,1==monday,...) [Interger]
|
171
|
+
#R[1]['champs0']['ipaddr']['egal'|'diff']="192.168.0.0/24"
|
172
|
+
#R[1]['champs0']['sizeope']['egal'|'inf'|'sup'|'diff']=0
|
173
|
+
#R[1]['champs0']['numope']['egal'|'inf'|'sup'|'diff']=0
|
174
|
+
#R[1]['champs0']['compope']['champsX']['egal'|'inf'|'sup'|'diff'] => string(not sup & info for string)
|
175
|
+
#R[1]['champs3']['false']
|
176
|
+
#R[1]['champs3']['note'] = numeric, add one by rule/sig /* not importance for field use for note, juste one time */
|
177
|
+
#R[1]['champs3']['name'] = "SIG_DETECTED_TEST_01" /* not importance for field use for name, juste one time */
|
178
|
+
#R[1]['champs3']['type'] = 1 (primary sig -- defaut value) -- 2 (secondary sig -> add if only detect primary sig before)
|
179
|
+
#R[1]['champs3']['modeFP'] = true or false (true == delete & false or not present == detect)
|
180
|
+
#R[1]['champs3']['modeFP'] = true or false (true == delete & false or not present == detect)
|
181
|
+
# brute force & correlation sig add: "freq_field:" [field,field,field,field],"freq_delay":60s,freq_count: 3, freq_resettime: 3600s, correlate_change_fieldvalue: []
|
182
|
+
# use extract on sure alerte without false postive to add IOC in IOC check list in real time
|
183
|
+
# extract field value to insert in ioc_field; ex: extract: {'src_ip': 'ioc_ip', 'user_agent': 'ioc_user-agent'}
|
184
|
+
#order verify: FIELD, MOTIF, REGEXP
|
185
|
+
#At first detected sig, then stop search another sig!!!!
|
186
|
+
#File content rules signatures expression in json
|
187
|
+
config :conf_rules_sig, :validate => :path, :default => "/etc/logstash/db/sig.json"
|
188
|
+
config :file_save_localioc, :validate => :path, :default => "/etc/logstash/db/ioc_local.json"
|
189
|
+
#format json -- example:
|
190
|
+
#{"rules":[
|
191
|
+
# {"id":[22],"optid":[16,38],"opt_num":1,"noid":[],"note":3,"overwrite":true}
|
192
|
+
#]}
|
193
|
+
# id: list contains id of rules must present
|
194
|
+
# optid & opt_num: list contains if of rules can present with minimum of "opt_num" id present
|
195
|
+
# exemple: [16,38] with opt_num =1 then if 16 or 38 or (16 and 38) present is match
|
196
|
+
# noid: list id of rules must absent
|
197
|
+
# overwrite: if overwrite is true, it's significate than if note is more less then defined before, the value is overwrite and note is more less.
|
198
|
+
# note: it's value of new note for event match.
|
199
|
+
config :conf_rules_note, :validate => :path, :default => "/etc/logstash/db/note.json"
|
200
|
+
|
201
|
+
#Name of fields to save value if sig match: name sig, count
|
202
|
+
config :target_sig, :validate => :string, :default => "sig_detected"
|
203
|
+
config :targetnum_sig, :validate => :string, :default => "sig_detected_count"
|
204
|
+
config :targetname_sig, :validate => :string, :default => "sig_detected_name"
|
205
|
+
#Interval to refresh conf rules sig
|
206
|
+
config :refresh_interval_confrules, :validate => :number, :default => 3600
|
207
|
+
|
208
|
+
#if field exist in event then no apply rules check
|
209
|
+
config :noapply_sig_rules, :validate => :string, :default => "sig_no_apply_rules"
|
210
|
+
|
211
|
+
#stop check at one time find sig
|
212
|
+
config :check_stop, :validate => :boolean, :default => false
|
213
|
+
|
214
|
+
#LIST File content IOC - format json - exemple: {"ioc_as":["44050","55960","24961","203973"]}
|
215
|
+
config :db_ioc, :validate => :array, :default => ["/etc/logstash/db/ioc.json", "/etc/logstash/db/ioc_local.json"]
|
216
|
+
#Rules IOC Conf- format json - exemple get rules from list_field_ioc file: {"ioc_hostname":["_host"], "ioc_hostname_downcase":true, "ioc_hostname_iocnote":1, "ioc_hostname_iocid":1001}
|
217
|
+
# conf file significate for ioc_hostname search in value of field name containt string '_host'
|
218
|
+
# If ioc_hostname_downcase is true then force downcase value in field
|
219
|
+
# ioc_hostname_note give note to event if ioc match
|
220
|
+
# ioc_hostname_iocid: give id number to ioc, used in note_sig for change note by relation with another match (sig...).
|
221
|
+
# Ioc ID must be more than 1000 -> 1001..1999
|
222
|
+
config :conf_ioc, :validate => :path, :default => "/etc/logstash/db/ioc_conf.json"
|
223
|
+
|
224
|
+
#Name of fields to save value if ioc match: name ioc, count
|
225
|
+
config :target_ioc, :validate => :string, :default => "ioc_detected"
|
226
|
+
config :targetnum_ioc, :validate => :string, :default => "ioc_detected_count"
|
227
|
+
config :targetname_ioc, :validate => :string, :default => "ioc_detected_name"
|
228
|
+
|
229
|
+
#Name of field where save note of sig & ioc ...
|
230
|
+
config :targetnote, :validate => :string, :default => "sig_detected_note"
|
231
|
+
config :targetid, :validate => :string, :default => "sig_detected_id"
|
232
|
+
|
233
|
+
#Interval to refresh db ioc
|
234
|
+
config :refresh_interval_dbioc, :validate => :number, :default => 3600
|
235
|
+
|
236
|
+
#if field exist in event then no apply check ioc
|
237
|
+
config :noapply_ioc, :validate => :string, :default => "sig_no_apply_ioc"
|
238
|
+
|
239
|
+
##ANOMALIE
|
240
|
+
#Conf ref -- format json -- PIVOT -> SIG -> REF
|
241
|
+
# rules[ {"pivot_field":[field1,field2], "list_sig": [fieldx,fieldy,...]} ]
|
242
|
+
#list_sig: all field used for sig, if all field not present, it doesn't matter, use field present in event and list_sig
|
243
|
+
config :conf_ref, :validate => :path, :default => "/etc/logstash/db/conf_ref.json"
|
244
|
+
#DB reference extract of ES by script
|
245
|
+
config :db_ref, :validate => :path, :default => "/etc/logstash/db/reference.json"
|
246
|
+
#RegExp DB FILE
|
247
|
+
config :db_pattern, :validate => :path, :default => "/etc/logstash/db/pattern.db"
|
248
|
+
#Interval to refresh db reference
|
249
|
+
config :refresh_interval_dbref, :validate => :number, :default => 3600
|
250
|
+
#if field exist in event then no apply check ref
|
251
|
+
config :noapply_ref, :validate => :string, :default => "sig_no_apply_ref"
|
252
|
+
#Name of fields to save value if ref match: name ioc, count
|
253
|
+
config :target_ref, :validate => :string, :default => "ref_detected"
|
254
|
+
config :targetnum_ref, :validate => :string, :default => "ref_detected_count"
|
255
|
+
config :targetname_ref, :validate => :string, :default => "ref_detected_name"
|
256
|
+
config :ref_aroundfloat, :default => 0.5 # TODO :validate => :float
|
257
|
+
config :ref_stop_after_firstffind, :validate => :boolean, :default => true
|
258
|
+
#pivot 1 syslog_programm -> add in ref.json for add multi profil
|
259
|
+
#config :sg_extract, :validate => :string, :default => "syslog_program"
|
260
|
+
#pivot 2 syslog_pri -> add in ref.json for add multi profil
|
261
|
+
#config :spri_extract, :validate => :string, :default => "syslog_pri"
|
262
|
+
#exclude field for create sig -> add field list for create sig and add in ref.json
|
263
|
+
#config :exclude_create_sig, :validate => :array, :default => ["tags","@source_host","_type","type","@timestamp","@message","@version","_id","_index","_type","host","message","received_at","received_from","syslog_facility","syslog_facility_code","syslog_pri","syslog_pid","syslog_program","syslog_severity_code","syslog_severity"]
|
264
|
+
|
265
|
+
##FREQUENCE
|
266
|
+
#rules_freq = [ {'select_field': {'fieldx':[value_list],'fieldy':[value_list]}, 'note': X, 'refresh_time': Xseconds,'reset_time': Xseconds[1j], 'reset_hour': '00:00:00', 'wait_after_reset': 10, 'id': 30XXX},...]
|
267
|
+
config :conf_freq, :validate => :path, :default => "/etc/logstash/db/conf_freq.json"
|
268
|
+
#Interval to refresh rules frequence
|
269
|
+
config :refresh_interval_freqrules, :validate => :number, :default => 3600
|
270
|
+
#if field exist in event then no apply check freq
|
271
|
+
config :noapply_freq, :validate => :string, :default => "sig_no_apply_freq"
|
272
|
+
#rules_freq = [ {'select_field': {'fieldx':[value_list],'fieldy':[value_list]}, 'note': X, 'refresh_time': Xseconds,'reset_time': Xseconds[1j], 'reset_hour': '00:00:00', 'wait_after_reset': 10, 'id': 30XXX},...]
|
273
|
+
#select field for select => first filter
|
274
|
+
#select field value => second filter
|
275
|
+
#refresh_time for check and calculate all time result: max & variation
|
276
|
+
# note if match
|
277
|
+
# reset time in second for reset all counter value
|
278
|
+
# reset hour for begin reset with hour => use for 24h reset begin at 00:00
|
279
|
+
# wait_after_reset: dont't check before 10 times values
|
280
|
+
#db_freq = { '30XXX': {'refresh_date': date, 'old_date': date,'num_time': Xtimes, 'V_prev': x/m, 'Varia_avg': z/m, 'count_prev': Xtimes, 'count_cour': Xtimes, 'V_max': x/m, 'Varia_max': x/m, 'Varia_min': +/- x/m, 'Varia_glob': x/m}}
|
281
|
+
#check refresh db ref
|
282
|
+
public
|
283
|
+
def register
  @logger.info("Plugin SIG. Loading conf...")

  # Working databases and rule caches; the load_* helpers below fill them.
  %w[
    ioc_db ioc_rules sig_db nv_rules fp_rules fp_db drop_db fingerprint_db
    ref_db pattern_db ref_rules freq_rules db_freq bl_db bl_rules db_enr
    sig_db_freq hash_dbbl hash_dbioc hash_dbfile_enr hash_db_enr
  ].each { |ivar| instance_variable_set("@#{ivar}", {}) }

  @note_db = []
  @sig_db_array = []
  @sig_db_array_false = []
  @sig_db_array_len = 0

  # Local IOC store (fed by the SIG "extract" feature); only loaded when at
  # least one of SIG/IOC is enabled, stays unset otherwise.
  @ioc_db_local = JSON.parse(IO.read(@file_save_localioc, encoding: 'utf-8')) unless @disable_sig and @disable_ioc
  # Persisted "new value" database.
  @nv_db = JSON.parse(IO.read(@db_nv, encoding: 'utf-8')) unless @disable_nv

  # Digest of each conf/db file, used to detect on-disk changes at refresh time.
  %w[
    hash_conf_rules_sig hash_conf_rules_note hash_conf_freq hash_conf_fp
    hash_conf_nv hash_conf_bl hash_conf_ioc hash_dbref hash_dbpattern
    hash_conf_ref hash_dropdb hash_dropfp hash_conf_enr
  ].each { |ivar| instance_variable_set("@#{ivar}", "") }

  # Initial load of every enabled configuration / database.
  @load_statut = false
  load_conf_rules_sig unless @disable_sig
  load_conf_rules_note unless @disable_sig and @disable_ioc and @disable_ref
  load_conf_fp unless @disable_fp
  load_conf_nv unless @disable_nv
  load_conf_ioc unless @disable_ioc
  load_conf_bl unless @disable_bl
  load_db_ioc unless @disable_ioc
  load_db_drop unless @disable_drop
  load_db_dropfp unless @disable_fp
  load_db_pattern unless @disable_ref
  load_db_ref unless @disable_ref
  load_rules_freq unless @disable_freq
  load_db_enr unless @disable_enr
  @load_statut = true

  # Per-feature "ready" flags; filter() busy-waits on these while a reload or
  # save is in progress.
  %w[
    load_statut_rules load_statut_fp load_statut_nv load_statut_bl
    save_statut_nv load_statut_ioc load_statut_ref load_statut_drop
    load_statut_freqrules load_statut_note load_statut_enr
  ].each { |ivar| instance_variable_set("@#{ivar}", true) }

  @logger.info("finish")

  # Schedule the next refresh/save deadline for each conf/db file.
  now = Time.now
  @next_refresh_dbref     = now + @refresh_interval_dbref
  @next_refresh_dbioc     = now + @refresh_interval_dbioc
  @next_refresh_confrules = now + @refresh_interval_confrules
  @next_refresh_confnv    = now + @refresh_interval_confnv
  @next_refresh_confbl    = now + @refresh_interval_confbl
  @next_refresh_dbnv      = now + @save_interval_dbnv
  @next_refresh_dropdb    = now + @refresh_interval_dropdb
  @next_refresh_conffp    = now + @refresh_interval_conffp
  @next_refresh_note      = now + @refresh_interval_confrules
  @next_refresh_freqrules = now + @refresh_interval_freqrules
  @next_refresh_enr       = now + @refresh_interval_enr
end # def register
|
375
|
+
|
376
|
+
public
|
377
|
+
def filter(event)
|
378
|
+
return unless filter?(event)
|
379
|
+
#check field no_check if present stop search
|
380
|
+
return unless event.get(@no_check).nil?
|
381
|
+
#get time for refresh
|
382
|
+
tnow = Time.now
|
383
|
+
|
384
|
+
######DROP FIRST DB USE######
|
385
|
+
#refresh db
|
386
|
+
unless @disable_drop
|
387
|
+
if @next_refresh_dropdb < tnow
|
388
|
+
if @load_statut_drop == true
|
389
|
+
@load_statut_drop = false
|
390
|
+
load_db_drop
|
391
|
+
@next_refresh_dropdb = tnow + @refresh_interval_dropdb
|
392
|
+
@load_statut_drop = true
|
393
|
+
end
|
394
|
+
end
|
395
|
+
sleep(1) until @load_statut_drop
|
396
|
+
end
|
397
|
+
#check if db not empty
|
398
|
+
if not @drop_db.empty? and event.get(@noapply_sig_dropdb).nil? and not @disable_drop
|
399
|
+
@drop_db.each do |dkey,dval|
|
400
|
+
#search field with name of dkey
|
401
|
+
if not event.get(dkey).nil? and event.get(dkey).is_a?(String) and not dval.empty? and event.get(dkey) =~ /#{dval}/
|
402
|
+
#key exist and match with regexp
|
403
|
+
event.cancel
|
404
|
+
return
|
405
|
+
end
|
406
|
+
end
|
407
|
+
end
|
408
|
+
#######################
|
409
|
+
|
410
|
+
######ENRICHISSEMENT: add info db local, active info, ...######
|
411
|
+
#{"1": {file: path_db, db: {loaded by path db contains hash},"prefix": "prefix_...", filters: {field:regexp,...}, link: [["name_field_select_value_to_search_in_db",...],[WHOIS2,...]], "form_in_db": "https://$1$:$2$", if_empty: 'WHOIS'}, "filter_insert": [], "filter_noinsert": []}
|
412
|
+
# in db {'value_field_link': {name_info1: info1, name_info2: info2, ...}}
|
413
|
+
#probleme sur link lié a la construction d'un nom dans la base par exemple https://host:port (3 fields)
|
414
|
+
#2 eme cas quand l'on veut faire un whois sur plusieurs field: champs1 et champs2
|
415
|
+
#create var for add new field name (for check drop)
|
416
|
+
#choose order for link by enrichissment ex: infoclient -> IP (get MAC by db ip2mac) and second time get info by mac
|
417
|
+
new_field_enr = []
|
418
|
+
#refresh db
|
419
|
+
unless @disable_enr
|
420
|
+
if @next_refresh_enr < tnow
|
421
|
+
if @load_statut_enr == true
|
422
|
+
@load_statut_enr = false
|
423
|
+
save_dbs_enr # save db with news data response -TODO -> 1 verify if db change (md5 hash) if no change save, if change... create diff with save!
|
424
|
+
#load_db_enr #load - use save for load too
|
425
|
+
@next_refresh_enr = tnow + @refresh_interval_enr
|
426
|
+
@load_statut_enr = true
|
427
|
+
end
|
428
|
+
end
|
429
|
+
sleep(1) until @load_statut_enr
|
430
|
+
end
|
431
|
+
#check if db is not empty
|
432
|
+
if not @db_enr.empty? and event.get(@noapply_sig_enr).nil? and not @disable_enr
|
433
|
+
if event.get(@field_enr).is_a?(Array)
|
434
|
+
#check response by response
|
435
|
+
for respo in event.get(@field_enr)
|
436
|
+
respo.each do |rkey,rval| #{rval['if_empty'] => {"id": rkey.to_s, "field": lval.to_s}}
|
437
|
+
next if rval['id'].nil? or rval['field'].nil? or rval['name_in_db'].nil? or rval['response'].nil? or @db_enr[rval['id'].to_s].nil?
|
438
|
+
#add in event
|
439
|
+
rval['response'].each do |xk,xval|
|
440
|
+
next if xval.empty? or @db_enr[rval['id'].to_s]["filter_noinsert"].include?(xk) or (not @db_enr[rval['id'].to_s]["filter_insert"].include?(xk) and not @db_enr[rval['id'].to_s]["filter_insert"].empty? and @db_enr[rval['id'].to_s]["filter_noinsert"].empty?)
|
441
|
+
#!!! overwrite if exist!!!
|
442
|
+
event.set(@db_enr[rval['id'].to_s]['prefix'].to_s+xk.to_s,xval)
|
443
|
+
#add in "FIELD_TEMP_ENR_DROP_NEW"
|
444
|
+
if event.get("FIELD_TEMP_ENR_DROP_NEW").nil?
|
445
|
+
event.set("FIELD_TEMP_ENR_DROP_NEW",[(@db_enr[rval['id'].to_s]['prefix'].to_s+xk.to_s)])
|
446
|
+
else
|
447
|
+
event.set("FIELD_TEMP_ENR_DROP_NEW",event.get("FIELD_TEMP_ENR_DROP_NEW")+[(@db_enr[rval['id'].to_s]['prefix'].to_s+xk.to_s)])
|
448
|
+
end
|
449
|
+
end
|
450
|
+
#add info in db
|
451
|
+
if rval['name_in_db'].empty? and rval['response'].is_a?(Hash) and not rval['response'].empty?
|
452
|
+
@db_enr[rval['id'].to_s]['db'][rval['name_in_db'].to_s] = rval['response']
|
453
|
+
end
|
454
|
+
end
|
455
|
+
end
|
456
|
+
#clean event
|
457
|
+
event.remove(@field_enr)
|
458
|
+
else
|
459
|
+
#check rule by rule
|
460
|
+
eventK = event.to_hash.keys
|
461
|
+
@db_enr.each do |rkey,rval|
|
462
|
+
# rule by rule
|
463
|
+
#check filter
|
464
|
+
#chekc is all keys present in event
|
465
|
+
inter = rval['filters'].keys & eventK
|
466
|
+
#check if fields rule present in event
|
467
|
+
if inter.length == rval['filters'].keys.length
|
468
|
+
#field present
|
469
|
+
#check filed by field
|
470
|
+
sig_add = {}
|
471
|
+
check_sig=false
|
472
|
+
for kfield in inter
|
473
|
+
check_sig=false
|
474
|
+
# field X -- check type
|
475
|
+
if event.get(kfield).is_a?(Array)
|
476
|
+
#array type
|
477
|
+
# if rule value regexp is Array?
|
478
|
+
if rval['filters'][kfield].is_a?(Array)
|
479
|
+
for regexp in rval['filters'][kfield]
|
480
|
+
check_sig=false
|
481
|
+
for elem in event.get(kfield)
|
482
|
+
match = Regexp.new(regexp, nil, 'n').match(elem.to_s)
|
483
|
+
if not match.nil?
|
484
|
+
check_sig=true
|
485
|
+
break
|
486
|
+
end
|
487
|
+
end
|
488
|
+
break unless check_sig
|
489
|
+
end
|
490
|
+
else
|
491
|
+
#rule not array
|
492
|
+
for elem in event.get(kfield)
|
493
|
+
match = Regexp.new(rval['filters'][kfield], nil, 'n').match(elem.to_s)
|
494
|
+
if not match.nil?
|
495
|
+
check_sig=true
|
496
|
+
break
|
497
|
+
end
|
498
|
+
end
|
499
|
+
end
|
500
|
+
else
|
501
|
+
#other type
|
502
|
+
# if rule value regexp is Array?
|
503
|
+
if rval['filters'][kfield].is_a?(Array)
|
504
|
+
#array
|
505
|
+
for regexp in rval['filters'][kfield]
|
506
|
+
match = Regexp.new(regexp, nil, 'n').match(event.get(kfield).to_s)
|
507
|
+
if not match.nil?
|
508
|
+
sig_add[kfield.to_s]="Regexp found #{match}"
|
509
|
+
check_sig=true
|
510
|
+
next
|
511
|
+
end
|
512
|
+
break unless check_sig
|
513
|
+
end
|
514
|
+
else
|
515
|
+
#other
|
516
|
+
match = Regexp.new(rval['filters'][kfield], nil, 'n').match(event.get(kfield).to_s)
|
517
|
+
if not match.nil?
|
518
|
+
check_sig=true
|
519
|
+
next
|
520
|
+
end
|
521
|
+
end
|
522
|
+
end
|
523
|
+
break unless check_sig
|
524
|
+
end
|
525
|
+
#check if filters match
|
526
|
+
if check_sig
|
527
|
+
#matched
|
528
|
+
#check if link present in event & db & link not empty
|
529
|
+
next if rval['link'].empty?
|
530
|
+
next if rval['form_in_db'].empty?
|
531
|
+
lkey=rval['form_in_db'].dup
|
532
|
+
pnext=false
|
533
|
+
cnt_e=0
|
534
|
+
for lval in rval['link']
|
535
|
+
cnt_e+=1
|
536
|
+
if event.get(lval.to_s).nil?
|
537
|
+
pnext=true
|
538
|
+
break
|
539
|
+
else
|
540
|
+
#create dbkey
|
541
|
+
lkey.gsub! '$'+cnt_e.to_s+'$', event.get(lval.to_s)
|
542
|
+
end
|
543
|
+
end
|
544
|
+
next if pnext
|
545
|
+
next if cnt_e != rval['link'].length or lkey =~ /\$\d+\$/
|
546
|
+
if not rval['db'][lkey.to_s].is_a?(Hash)
|
547
|
+
#if not present and must present, then send to server active enrichissement and wait to return
|
548
|
+
#check if if_empty exist?
|
549
|
+
if rval['if_empty'].is_a?(String) and not rval['if_empty'].empty?
|
550
|
+
#send requets to server for resolve information
|
551
|
+
#Add field request_resolv: [{WHOIS: {"id": id_rule, "field": field_name}},{SSL: {"id": id_rule, "field": field_name}}]
|
552
|
+
#TODO verify number of element in form and link
|
553
|
+
request_enr = {rval['if_empty'] => {"id" => rkey.to_s, "field" => rval['link'], "name_in_db" => lkey}}
|
554
|
+
if event.get(@field_enr).nil?
|
555
|
+
event.set(@field_enr,[request_enr])
|
556
|
+
else
|
557
|
+
event.set(@field_enr,event.get(@field_enr)+[request_enr])
|
558
|
+
end
|
559
|
+
#send to server (at end, for possible add multi request by rule
|
560
|
+
end
|
561
|
+
else
|
562
|
+
#if present add
|
563
|
+
next if not rval['prefix'].is_a?(String) or rval['prefix'].empty?
|
564
|
+
#"filter_insert": [], "filter_noinsert": []
|
565
|
+
rval['db'][lkey].each do |xk,xval|
|
566
|
+
next if xval.empty? or rval["filter_noinsert"].include?(xk) or (not rval["filter_insert"].include?(xk) and not rval["filter_insert"].empty? and rval["filter_noinsert"].empty?)
|
567
|
+
#!!! overwrite if exist!!!
|
568
|
+
event.set(rval['prefix'].to_s+xk.to_s,xval)
|
569
|
+
#add in new_field_enr
|
570
|
+
new_field_enr.push(*(rval['prefix'].to_s+xk.to_s))
|
571
|
+
end
|
572
|
+
end
|
573
|
+
end
|
574
|
+
end
|
575
|
+
end
|
576
|
+
event.tag(@enr_tag_response) if not event.get(@field_enr).nil?
|
577
|
+
event.set("FIELD_TEMP_ENR_DROP_NEW",new_field_enr)
|
578
|
+
#when event.get(@field_enr) is set hten not check apply on event, in configuration add to send to server enrichment which resend after add informations
|
579
|
+
end
|
580
|
+
end
|
581
|
+
#######################
|
582
|
+
|
583
|
+
######DROP SECOND######
|
584
|
+
## JUST CHECK NEW FIELD (created by enr)
|
585
|
+
#check if db not empty
|
586
|
+
if not @drop_db.empty? and event.get(@noapply_sig_dropdb).nil? and not @disable_drop and event.get(@field_enr).nil? and not event.get("FIELD_TEMP_ENR_DROP_NEW").nil?
|
587
|
+
for nfield in event.get("FIELD_TEMP_ENR_DROP_NEW")
|
588
|
+
if @drop_db[nfield] and event.get(nfield).is_a?(String) and event.get(nfield) =~ /#{@drop_db[nfield]}/
|
589
|
+
#key exist and match with regexp
|
590
|
+
event.cancel
|
591
|
+
return
|
592
|
+
end
|
593
|
+
end
|
594
|
+
end
|
595
|
+
event.remove("FIELD_TEMP_ENR_DROP_NEW")
|
596
|
+
#######################
|
597
|
+
|
598
|
+
######New Value USE######
|
599
|
+
#reshresh conf & save db
|
600
|
+
unless @disable_nv
|
601
|
+
if @next_refresh_dbnv < tnow
|
602
|
+
if @save_statut_nv == true
|
603
|
+
@save_statut_nv = false
|
604
|
+
save_db_nv
|
605
|
+
@next_refresh_dbnv = tnow + @save_interval_dbnv
|
606
|
+
@save_statut_nv = true
|
607
|
+
end
|
608
|
+
end
|
609
|
+
if @next_refresh_confnv < tnow
|
610
|
+
if @load_statut_nv == true
|
611
|
+
@load_statut_nv = false
|
612
|
+
load_conf_nv
|
613
|
+
@next_refresh_confnv = tnow + @refresh_interval_confnv
|
614
|
+
@load_statut_nv = true
|
615
|
+
end
|
616
|
+
end
|
617
|
+
sleep(1) until @load_statut_nv
|
618
|
+
end
|
619
|
+
#check if db &conf are not empty + select_fp exist
|
620
|
+
if not @nv_rules.empty? and @nv_rules['rules'].is_a?(Array) and not @nv_db.empty? and event.get(@noapply_sig_nv).nil? and not @disable_nv and event.get(@field_enr).nil?
|
621
|
+
#check all rules
|
622
|
+
for rule in @nv_rules['rules']
|
623
|
+
#if rule exist in event?
|
624
|
+
if event.get(rule.to_s)
|
625
|
+
#yes
|
626
|
+
#event content type Array
|
627
|
+
if event.get(rule.to_s).is_a?(Array)
|
628
|
+
for elem in event.get(rule.to_s)
|
629
|
+
if elem.is_a?(String) or elem.is_a?(Numeric)
|
630
|
+
if not @nv_db[rule.to_s].include?(elem)
|
631
|
+
#new value => add
|
632
|
+
@nv_db[rule.to_s].push(*elem)
|
633
|
+
event.set(@target_nv+rule.to_s, elem.to_s)
|
634
|
+
end
|
635
|
+
end
|
636
|
+
end
|
637
|
+
#event content type String or Numeric
|
638
|
+
elsif event.get(rule.to_s).is_a?(String) or event.get(rule.to_s).is_a?(Numeric)
|
639
|
+
if not @nv_db[rule.to_s].include?(event.get(rule.to_s))
|
640
|
+
#new value => add
|
641
|
+
@nv_db[rule.to_s].push(*event.get(rule.to_s))
|
642
|
+
event.set(@target_nv+rule.to_s, event.get(rule.to_s).to_s)
|
643
|
+
end
|
644
|
+
end
|
645
|
+
end
|
646
|
+
end
|
647
|
+
end
|
648
|
+
#########################
|
649
|
+
|
650
|
+
######BL REPUTATION USE######
|
651
|
+
#reshresh conf & save db
|
652
|
+
unless @disable_bl
|
653
|
+
if @next_refresh_confbl < tnow
|
654
|
+
if @load_statut_bl == true
|
655
|
+
@load_statut_bl = false
|
656
|
+
load_conf_bl
|
657
|
+
@next_refresh_confbl = tnow + @refresh_interval_confbl
|
658
|
+
@load_statut_bl = true
|
659
|
+
end
|
660
|
+
end
|
661
|
+
sleep(1) until @load_statut_bl
|
662
|
+
end
|
663
|
+
#check if db &conf are not empty + select_fp exist
|
664
|
+
if not @bl_rules.empty? and not @bl_db.empty? and event.get(@noapply_sig_bl).nil? and not @disable_bl and event.get(@field_enr).nil?
|
665
|
+
#bl_rules: {fieldx: {dbs: [file_name,...], category: , note: X, id: X}}
|
666
|
+
#bl_db: {file_name: [IPs]}
|
667
|
+
#rule by rule
|
668
|
+
@bl_rules.each do |fkey,fval|
|
669
|
+
#veirfy field exist
|
670
|
+
if not event.get(fkey).nil?
|
671
|
+
#verify field contains IP
|
672
|
+
ip = ""
|
673
|
+
next if not ip = IPAddr.new(event.get(fkey).to_s) rescue false
|
674
|
+
for dbbl in fval['dbs']
|
675
|
+
#if @bl_db[dbbl].include?(ip)
|
676
|
+
if @bl_db[dbbl].any?{|block| block === ip}
|
677
|
+
#FIELD FIND IN DB BL REPUTATION
|
678
|
+
#ADD SCORE & ID & CAT
|
679
|
+
unless event.get(@targetnote).nil?
|
680
|
+
if event.get(@targetnote) < fval['note']
|
681
|
+
event.set(@targetnote, detected_sig_note)
|
682
|
+
end
|
683
|
+
else
|
684
|
+
event.set(@targetnote, fval['note'])
|
685
|
+
end
|
686
|
+
unless event.get(@targetname_bl).nil?
|
687
|
+
event.set(@targetname_bl, event.get(@targetname_bl) + [fval['category']])
|
688
|
+
else
|
689
|
+
event.set(@targetname_bl, [fval['category']])
|
690
|
+
end
|
691
|
+
unless event.get(@targetid).nil?
|
692
|
+
event.set(@targetid, event.get(@targetid) + [fval['id']])
|
693
|
+
else
|
694
|
+
event.set(@targetid, [fval['id']])
|
695
|
+
end
|
696
|
+
end
|
697
|
+
end
|
698
|
+
end
|
699
|
+
end
|
700
|
+
end
|
701
|
+
#########################
|
702
|
+
|
703
|
+
######IOC SEARCH######
|
704
|
+
#refresh db
|
705
|
+
unless @disable_ioc
|
706
|
+
if @next_refresh_dbioc < tnow
|
707
|
+
if @load_statut_ioc == true
|
708
|
+
@load_statut_ioc = false
|
709
|
+
load_conf_ioc
|
710
|
+
load_db_ioc
|
711
|
+
@next_refresh_dbioc = tnow + @refresh_interval_dbioc
|
712
|
+
@load_statut_ioc = true
|
713
|
+
end
|
714
|
+
end
|
715
|
+
sleep(1) until @load_statut_ioc
|
716
|
+
end
|
717
|
+
#check db not empty
|
718
|
+
if not @ioc_rules.empty? and not @ioc_db.empty? and event.get(@noapply_ioc).nil? and not @disable_ioc and event.get(@field_enr).nil?
|
719
|
+
detected_ioc = Array.new
|
720
|
+
detected_ioc_count = 0
|
721
|
+
detected_ioc_name = Array.new
|
722
|
+
detected_ioc_id = Array.new
|
723
|
+
detected_ioc_note = 0
|
724
|
+
#verify ioc by rules
|
725
|
+
@ioc_rules.each do |rkey,rval|
|
726
|
+
#rule by rule
|
727
|
+
if rval.is_a?(Array) and not rkey =~ /_downcase$|_iocnote$|_iocid$/ and @ioc_db[rkey.to_s]
|
728
|
+
list_search = []
|
729
|
+
#create list value by rule to check ioc
|
730
|
+
for elemvalue in rval
|
731
|
+
#Collect value of field name contains "elemvalue"
|
732
|
+
hash_tmp = event.to_hash.select{|k,v| (k.to_s).include? elemvalue }
|
733
|
+
if hash_tmp.values.any?
|
734
|
+
#hash not empty
|
735
|
+
if list_search.empty?
|
736
|
+
if @ioc_rules[rkey+'_downcase']
|
737
|
+
#case compare by downcase
|
738
|
+
list_search = hash_tmp.values.map!(&:downcase)
|
739
|
+
else
|
740
|
+
#case normaly compare
|
741
|
+
list_search = hash_tmp.values
|
742
|
+
end
|
743
|
+
else
|
744
|
+
if @ioc_rules[rkey+'_downcase']
|
745
|
+
#case compare by downcase
|
746
|
+
list_search = list_search + hash_tmp.values.map!(&:downcase)
|
747
|
+
else
|
748
|
+
#case normaly compare
|
749
|
+
list_search = list_search + hash_tmp.values
|
750
|
+
end
|
751
|
+
end
|
752
|
+
end
|
753
|
+
end
|
754
|
+
#compare list_value extract of event for one case of ioc and db_ioc -- intersection
|
755
|
+
inter=list_search & @ioc_db[rkey.to_s]
|
756
|
+
if inter.any?
|
757
|
+
#value(s) find
|
758
|
+
ioc_add = {rkey.to_s => inter}
|
759
|
+
detected_ioc_name.push(*rkey.to_s)
|
760
|
+
detected_ioc.push(*ioc_add)
|
761
|
+
detected_ioc_count = detected_ioc_count + 1
|
762
|
+
detected_ioc_id.push(*@ioc_rules[rkey+'_iocid'])
|
763
|
+
if detected_ioc_note < @ioc_rules[rkey+'_iocnote']
|
764
|
+
detected_ioc_note = @ioc_rules[rkey+'_iocnote']
|
765
|
+
end
|
766
|
+
ioc_add.clear
|
767
|
+
end
|
768
|
+
end
|
769
|
+
end
|
770
|
+
#check if ioc find
|
771
|
+
if detected_ioc.any?
|
772
|
+
#ioc find, add information in event (count, name, id, note)
|
773
|
+
unless event.get(@target_ioc).nil?
|
774
|
+
event.set(@target_ioc, event.get(@target_ioc) + detected_ioc)
|
775
|
+
else
|
776
|
+
event.set(@target_ioc, detected_ioc)
|
777
|
+
end
|
778
|
+
unless event.get(@targetnum_ioc).nil?
|
779
|
+
event.set(@targetnum_ioc, event.get(@targetnum_ioc) + detected_ioc_count)
|
780
|
+
else
|
781
|
+
event.set(@targetnum_ioc, detected_ioc_count)
|
782
|
+
end
|
783
|
+
unless event.get(@targetname_ioc).nil?
|
784
|
+
event.set(@targetname_ioc, event.get(@targetname_ioc) + detected_ioc_name)
|
785
|
+
else
|
786
|
+
event.set(@targetname_ioc, detected_ioc_name)
|
787
|
+
end
|
788
|
+
unless event.get(@targetid).nil?
|
789
|
+
event.set(@targetid, event.get(@targetid) + detected_ioc_id)
|
790
|
+
else
|
791
|
+
event.set(@targetid, detected_ioc_id)
|
792
|
+
end
|
793
|
+
unless event.get(@targetnote).nil?
|
794
|
+
if event.get(@targetnote) < detected_ioc_note
|
795
|
+
event.set(@targetnote, detected_ioc_note)
|
796
|
+
end
|
797
|
+
else
|
798
|
+
event.set(@targetnote, detected_ioc_note)
|
799
|
+
end
|
800
|
+
end
|
801
|
+
end
|
802
|
+
######################
|
803
|
+
|
804
|
+
######SIG SEARCH######
|
805
|
+
unless @disable_sig
|
806
|
+
if @next_refresh_confrules < tnow
|
807
|
+
if @load_statut_rules == true
|
808
|
+
@load_statut_rules = false
|
809
|
+
load_conf_rules_sig
|
810
|
+
save_db_ioclocal
|
811
|
+
clean_db_sigfreq(tnow)
|
812
|
+
@next_refresh_confrules = tnow + @refresh_interval_confrules
|
813
|
+
@load_statut_rules = true
|
814
|
+
end
|
815
|
+
end
|
816
|
+
sleep(1) until @load_statut_rules
|
817
|
+
end
|
818
|
+
if not @sig_db.empty? and event.get(@noapply_sig_rules).nil? and not @disable_sig and event.get(@field_enr).nil?
|
819
|
+
#create var local for all rules check
|
820
|
+
detected_sig = Array.new
|
821
|
+
detected_sig_name = Array.new
|
822
|
+
detected_sig_count = 0
|
823
|
+
detected_sig_note = 0
|
824
|
+
detected_sig_id = Array.new
|
825
|
+
detected_sig_id_corre = Array.new
|
826
|
+
type_sig = 0
|
827
|
+
type_obl = 0
|
828
|
+
# get list of all name field present in event
|
829
|
+
eventK = event.to_hash.keys
|
830
|
+
#check all rules
|
831
|
+
(0..@sig_db_array_len).each do |i|
|
832
|
+
#verify exist field used in rule
|
833
|
+
verif=@sig_db_array[i].length
|
834
|
+
inter=@sig_db_array[i] & eventK
|
835
|
+
if inter.length == verif
|
836
|
+
#OK all field rule are present
|
837
|
+
#verify if field name exclude by rule are present
|
838
|
+
inter=@sig_db_array_false[i] & eventK
|
839
|
+
if inter.length == 0
|
840
|
+
#OK exclude field are not present in event
|
841
|
+
#create variable local by rule
|
842
|
+
validfield=0
|
843
|
+
#length of field check contains in rule
|
844
|
+
countfield=@sig_db_array[i].length
|
845
|
+
sig_add = {"Rules" => "Detected rule at emplacement: #{i} (not id)"}
|
846
|
+
sig_add["note"] = 0
|
847
|
+
#check rule field by field in event
|
848
|
+
for kfield in @sig_db_array[i]
|
849
|
+
#CHECK SIG BY FIELD BEGIN
|
850
|
+
#check_sig used for know if check result step by step for break if not match rules
|
851
|
+
check_sig=true
|
852
|
+
#BEGIN : CHECK BY MOTIF
|
853
|
+
unless @sig_db['rules'][i][kfield]['motif'].nil?
|
854
|
+
check_sig=false
|
855
|
+
if event.get(kfield).is_a?(Array)
|
856
|
+
l_tmp = event.get(kfield).flatten(10)
|
857
|
+
inter = l_tmp & @sig_db['rules'][i][kfield]['motif']
|
858
|
+
if inter.length != 0
|
859
|
+
sig_add[kfield.to_s]="motif found: #{inter}"
|
860
|
+
check_sig=true
|
861
|
+
end
|
862
|
+
elsif @sig_db['rules'][i][kfield]['motif'].include? event.get(kfield)
|
863
|
+
sig_add[kfield.to_s]="motif found #{event.get(kfield)}"
|
864
|
+
check_sig=true
|
865
|
+
end
|
866
|
+
end
|
867
|
+
break if check_sig == false
|
868
|
+
#END : CHECK BY MOTIF
|
869
|
+
#BEGIN : CHECK BY Compare value of two fields
|
870
|
+
unless @sig_db['rules'][i][kfield]['compope'].nil?
|
871
|
+
@sig_db['rules'][i][kfield]['compope'].each do |xk,xval|
|
872
|
+
if event.get(xk)
|
873
|
+
if event.get(xk).is_a?(Numeric)
|
874
|
+
unless @sig_db['rules'][i][kfield]['compope'][xk].nil?
|
875
|
+
if event.get(kfield).is_a?(Numeric)
|
876
|
+
unless @sig_db['rules'][i][kfield]['compope'][xk]['egal'].nil?
|
877
|
+
check_sig=false
|
878
|
+
if event.get(kfield) == event.get(xk)
|
879
|
+
sig_add[kfield.to_s]="Fields Value numeric #{event.get(kfield)} == #{event.get(xk)} found"
|
880
|
+
check_sig=true
|
881
|
+
end
|
882
|
+
end
|
883
|
+
break if check_sig == false
|
884
|
+
unless @sig_db['rules'][i][kfield]['compope'][xk]['sup'].nil?
|
885
|
+
check_sig=false
|
886
|
+
if event.get(kfield) > event.get(xk)
|
887
|
+
sig_add[kfield.to_s]="Fields Value numeric #{event.get(kfield)} > #{event.get(xk)} found"
|
888
|
+
check_sig=true
|
889
|
+
end
|
890
|
+
end
|
891
|
+
break if check_sig == false
|
892
|
+
unless @sig_db['rules'][i][kfield]['compope'][xk]['inf'].nil?
|
893
|
+
check_sig=false
|
894
|
+
if event.get(kfield) < event.get(xk)
|
895
|
+
sig_add[kfield.to_s]="Fields Value numeric #{event.get(kfield)} < #{event.get(xk)} found"
|
896
|
+
check_sig=true
|
897
|
+
end
|
898
|
+
end
|
899
|
+
break if check_sig == false
|
900
|
+
unless @sig_db['rules'][i][kfield]['compope'][xk]['diff'].nil?
|
901
|
+
check_sig=false
|
902
|
+
if event.get(kfield) != event.get(xk)
|
903
|
+
sig_add[kfield.to_s]="Fields Value numeric #{event.get(kfield)} != #{event.get(xk)} found"
|
904
|
+
check_sig=true
|
905
|
+
end
|
906
|
+
end
|
907
|
+
break if check_sig == false
|
908
|
+
end
|
909
|
+
end
|
910
|
+
elsif event.get(xk).is_a?(String)
|
911
|
+
unless @sig_db['rules'][i][kfield]['compope'][xk].nil?
|
912
|
+
if event.get(kfield).is_a?(String)
|
913
|
+
unless @sig_db['rules'][i][kfield]['compope'][xk]['egal'].nil?
|
914
|
+
check_sig=false
|
915
|
+
if event.get(kfield).eql?(event.get(xk))
|
916
|
+
sig_add[kfield.to_s]="Fields Value String #{event.get(kfield)} == #{event.get(xk)} found"
|
917
|
+
check_sig=true
|
918
|
+
end
|
919
|
+
end
|
920
|
+
break if check_sig == false
|
921
|
+
unless @sig_db['rules'][i][kfield]['compope'][xk]['diff'].nil?
|
922
|
+
check_sig=false
|
923
|
+
if not event.get(kfield).eql?(event.get(xk))
|
924
|
+
sig_add[kfield.to_s]="Fields Value String #{event.get(kfield)} != #{event.get(xk)} found"
|
925
|
+
check_sig=true
|
926
|
+
end
|
927
|
+
end
|
928
|
+
break if check_sig == false
|
929
|
+
end
|
930
|
+
end
|
931
|
+
#add elsif event.get(kfield).is_a?(Array) ?
|
932
|
+
end
|
933
|
+
end
|
934
|
+
end
|
935
|
+
end
|
936
|
+
break if check_sig == false
|
937
|
+
#END : CHECK BY Compare value of two fields
|
938
|
+
#BEGIN : CHECK BY numeric operation
|
939
|
+
unless @sig_db['rules'][i][kfield]['numope'].nil?
|
940
|
+
if event.get(kfield).is_a?(Numeric)
|
941
|
+
unless @sig_db['rules'][i][kfield]['numope']['egal'].nil?
|
942
|
+
check_sig=false
|
943
|
+
if event.get(kfield) == @sig_db['rules'][i][kfield]['numope']['egal']
|
944
|
+
sig_add[kfield.to_s]="Value numeric #{event.get(kfield)} == #{@sig_db['rules'][i][kfield]['numope']['egal']} found"
|
945
|
+
check_sig=true
|
946
|
+
end
|
947
|
+
end
|
948
|
+
break if check_sig == false
|
949
|
+
unless @sig_db['rules'][i][kfield]['numope']['sup'].nil?
|
950
|
+
check_sig=false
|
951
|
+
if event.get(kfield) > @sig_db['rules'][i][kfield]['numope']['sup']
|
952
|
+
sig_add[kfield.to_s]="Value numeric #{event.get(kfield)} > #{@sig_db['rules'][i][kfield]['numope']['sup']} found"
|
953
|
+
check_sig=true
|
954
|
+
end
|
955
|
+
end
|
956
|
+
break if check_sig == false
|
957
|
+
unless @sig_db['rules'][i][kfield]['numope']['inf'].nil?
|
958
|
+
check_sig=false
|
959
|
+
if event.get(kfield) < @sig_db['rules'][i][kfield]['numope']['inf']
|
960
|
+
sig_add[kfield.to_s]="Value numeric #{event.get(kfield)} < #{@sig_db['rules'][i][kfield]['numope']['inf']} found"
|
961
|
+
check_sig=true
|
962
|
+
end
|
963
|
+
end
|
964
|
+
break if check_sig == false
|
965
|
+
unless @sig_db['rules'][i][kfield]['numope']['diff'].nil?
|
966
|
+
check_sig=false
|
967
|
+
if event.get(kfield) != @sig_db['rules'][i][kfield]['numope']['diff']
|
968
|
+
sig_add[kfield.to_s]="Value numeric #{event.get(kfield)} != #{@sig_db['rules'][i][kfield]['numope']['diff']} found"
|
969
|
+
check_sig=true
|
970
|
+
end
|
971
|
+
end
|
972
|
+
break if check_sig == false
|
973
|
+
end
|
974
|
+
end
|
975
|
+
#END : CHECK BY numeric operation
|
976
|
+
#BEGIN : CHECK BY date
|
977
|
+
unless @sig_db['rules'][i][kfield]['date'].nil?
|
978
|
+
if event.get(kfield).is_a?(String) and not event.get(kfield).nil? and event.get(kfield).length > 0
|
979
|
+
unless @sig_db['rules'][i][kfield]['date']['egal'].nil?
|
980
|
+
check_sig=false
|
981
|
+
if Time.parse(event.get(kfield)) == (tnow - @sig_db['rules'][i][kfield]['date']['egal'])
|
982
|
+
sig_add[kfield.to_s]="Value date #{event.get(kfield)} == #{@sig_db['rules'][i][kfield]['date']['egal']} found"
|
983
|
+
check_sig=true
|
984
|
+
end
|
985
|
+
end
|
986
|
+
break if check_sig == false
|
987
|
+
unless @sig_db['rules'][i][kfield]['date']['sup'].nil?
|
988
|
+
check_sig=false
|
989
|
+
if Time.parse(event.get(kfield)) > (tnow - @sig_db['rules'][i][kfield]['date']['sup'])
|
990
|
+
sig_add[kfield.to_s]="Value date #{event.get(kfield)} > #{@sig_db['rules'][i][kfield]['date']['sup']} found"
|
991
|
+
check_sig=true
|
992
|
+
end
|
993
|
+
end
|
994
|
+
break if check_sig == false
|
995
|
+
unless @sig_db['rules'][i][kfield]['date']['inf'].nil?
|
996
|
+
check_sig=false
|
997
|
+
if Time.parse(event.get(kfield)) < (tnow - @sig_db['rules'][i][kfield]['date']['inf'])
|
998
|
+
sig_add[kfield.to_s]="Value date #{event.get(kfield)} < #{@sig_db['rules'][i][kfield]['date']['inf']} found"
|
999
|
+
check_sig=true
|
1000
|
+
end
|
1001
|
+
end
|
1002
|
+
break if check_sig == false
|
1003
|
+
unless @sig_db['rules'][i][kfield]['date']['diff'].nil?
|
1004
|
+
check_sig=false
|
1005
|
+
if Time.parse(event.get(kfield)) != (tnow - @sig_db['rules'][i][kfield]['date']['diff'])
|
1006
|
+
sig_add[kfield.to_s]="Value date #{event.get(kfield)} != #{@sig_db['rules'][i][kfield]['date']['diff']} found"
|
1007
|
+
check_sig=true
|
1008
|
+
end
|
1009
|
+
end
|
1010
|
+
break if check_sig == false
|
1011
|
+
elsif event.get(kfield).is_a?(Array) and not event.get(kfield).nil? and event.get(kfield).length > 0
|
1012
|
+
for elem_list in event.get(kfield)
|
1013
|
+
if elem_list.is_a?(String)
|
1014
|
+
unless @sig_db['rules'][i][kfield]['date']['egal'].nil?
|
1015
|
+
check_sig=false
|
1016
|
+
if Time.parse(elem_list) == (tnow - @sig_db['rules'][i][kfield]['date']['egal'])
|
1017
|
+
sig_add[kfield.to_s]="Value date #{event.get(kfield)} == #{@sig_db['rules'][i][kfield]['date']['egal']} found"
|
1018
|
+
check_sig=true
|
1019
|
+
end
|
1020
|
+
end
|
1021
|
+
break if check_sig == false
|
1022
|
+
unless @sig_db['rules'][i][kfield]['date']['sup'].nil?
|
1023
|
+
check_sig=false
|
1024
|
+
if Time.parse(elem_list) > (tnow - @sig_db['rules'][i][kfield]['date']['sup'])
|
1025
|
+
sig_add[kfield.to_s]="Value date #{event.get(kfield)} > #{@sig_db['rules'][i][kfield]['date']['sup']} found"
|
1026
|
+
check_sig=true
|
1027
|
+
end
|
1028
|
+
end
|
1029
|
+
break if check_sig == false
|
1030
|
+
unless @sig_db['rules'][i][kfield]['date']['inf'].nil?
|
1031
|
+
check_sig=false
|
1032
|
+
if Time.parse(elem_list) < (tnow - @sig_db['rules'][i][kfield]['date']['inf'])
|
1033
|
+
sig_add[kfield.to_s]="Value date #{event.get(kfield)} < #{@sig_db['rules'][i][kfield]['date']['inf']} found"
|
1034
|
+
check_sig=true
|
1035
|
+
end
|
1036
|
+
end
|
1037
|
+
break if check_sig == false
|
1038
|
+
unless @sig_db['rules'][i][kfield]['date']['diff'].nil?
|
1039
|
+
check_sig=false
|
1040
|
+
if Time.parse(elem_list) != (tnow - @sig_db['rules'][i][kfield]['date']['diff'])
|
1041
|
+
sig_add[kfield.to_s]="Value date #{event.get(kfield)} != #{@sig_db['rules'][i][kfield]['date']['diff']} found"
|
1042
|
+
check_sig=true
|
1043
|
+
end
|
1044
|
+
end
|
1045
|
+
break if check_sig == false
|
1046
|
+
end
|
1047
|
+
end
|
1048
|
+
end
|
1049
|
+
end
|
1050
|
+
#END : CHECK BY date
|
1051
|
+
#BEGIN : CHECK BY hour
|
1052
|
+
unless @sig_db['rules'][i][kfield]['hour'].nil?
|
1053
|
+
if event.get(kfield).is_a?(String) and not event.get(kfield).nil?
|
1054
|
+
unless @sig_db['rules'][i][kfield]['hour']['egal'].nil?
|
1055
|
+
check_sig=false
|
1056
|
+
if Time.parse(event.get(kfield)).hour.to_i == @sig_db['rules'][i][kfield]['hour']['egal'].to_i
|
1057
|
+
sig_add[kfield.to_s]="Value hour #{event.get(kfield)} == #{@sig_db['rules'][i][kfield]['hour']['egal'].to_s} found"
|
1058
|
+
check_sig=true
|
1059
|
+
end
|
1060
|
+
end
|
1061
|
+
break if check_sig == false
|
1062
|
+
unless @sig_db['rules'][i][kfield]['hour']['sup'].nil?
|
1063
|
+
check_sig=false
|
1064
|
+
if Time.parse(event.get(kfield)).hour.to_i > @sig_db['rules'][i][kfield]['hour']['sup'].to_i
|
1065
|
+
sig_add[kfield.to_s]="Value hour #{event.get(kfield)} > #{@sig_db['rules'][i][kfield]['hour']['sup'].to_s} found"
|
1066
|
+
check_sig=true
|
1067
|
+
end
|
1068
|
+
end
|
1069
|
+
break if check_sig == false
|
1070
|
+
unless @sig_db['rules'][i][kfield]['hour']['inf'].nil?
|
1071
|
+
check_sig=false
|
1072
|
+
if Time.parse(event.get(kfield)).hour.to_i < @sig_db['rules'][i][kfield]['hour']['inf'].to_i
|
1073
|
+
sig_add[kfield.to_s]="Value hour #{event.get(kfield)} < #{@sig_db['rules'][i][kfield]['hour']['inf'].to_s} found"
|
1074
|
+
check_sig=true
|
1075
|
+
end
|
1076
|
+
end
|
1077
|
+
break if check_sig == false
|
1078
|
+
unless @sig_db['rules'][i][kfield]['hour']['diff'].nil?
|
1079
|
+
check_sig=false
|
1080
|
+
if Time.parse(event.get(kfield)).hour.to_i != @sig_db['rules'][i][kfield]['hour']['diff'].to_i
|
1081
|
+
sig_add[kfield.to_s]="Value hour #{event.get(kfield)} != #{@sig_db['rules'][i][kfield]['hour']['diff'].to_s} found"
|
1082
|
+
check_sig=true
|
1083
|
+
end
|
1084
|
+
end
|
1085
|
+
break if check_sig == false
|
1086
|
+
end
|
1087
|
+
end
|
1088
|
+
#END : CHECK BY hour
|
1089
|
+
#BEGIN : CHECK BY day
|
1090
|
+
unless @sig_db['rules'][i][kfield]['day'].nil?
|
1091
|
+
if event.get(kfield).is_a?(String) and not event.get(kfield).nil?
|
1092
|
+
unless @sig_db['rules'][i][kfield]['day']['egal'].nil?
|
1093
|
+
check_sig=false
|
1094
|
+
if Time.parse(event.get(kfield)).wday.to_i == @sig_db['rules'][i][kfield]['day']['egal'].to_i
|
1095
|
+
sig_add[kfield.to_s]="Value day #{event.get(kfield)} == #{@sig_db['rules'][i][kfield]['day']['egal'].to_s} found"
|
1096
|
+
check_sig=true
|
1097
|
+
end
|
1098
|
+
end
|
1099
|
+
break if check_sig == false
|
1100
|
+
unless @sig_db['rules'][i][kfield]['day']['sup'].nil?
|
1101
|
+
check_sig=false
|
1102
|
+
if Time.parse(event.get(kfield)).wday.to_i > @sig_db['rules'][i][kfield]['day']['sup'].to_i
|
1103
|
+
sig_add[kfield.to_s]="Value day #{event.get(kfield)} > #{@sig_db['rules'][i][kfield]['day']['sup'].to_s} found"
|
1104
|
+
check_sig=true
|
1105
|
+
end
|
1106
|
+
end
|
1107
|
+
break if check_sig == false
|
1108
|
+
unless @sig_db['rules'][i][kfield]['day']['inf'].nil?
|
1109
|
+
check_sig=false
|
1110
|
+
if Time.parse(event.get(kfield)).wday.to_i < @sig_db['rules'][i][kfield]['dat']['inf'].to_i
|
1111
|
+
sig_add[kfield.to_s]="Value day #{event.get(kfield)} < #{@sig_db['rules'][i][kfield]['day']['inf'].to_s} found"
|
1112
|
+
check_sig=true
|
1113
|
+
end
|
1114
|
+
end
|
1115
|
+
break if check_sig == false
|
1116
|
+
unless @sig_db['rules'][i][kfield]['day']['diff'].nil?
|
1117
|
+
check_sig=false
|
1118
|
+
if Time.parse(event.get(kfield)).wday.to_i != @sig_db['rules'][i][kfield]['day']['diff'].to_i
|
1119
|
+
sig_add[kfield.to_s]="Value day #{event.get(kfield)} != #{@sig_db['rules'][i][kfield]['day']['diff'].to_s} found"
|
1120
|
+
check_sig=true
|
1121
|
+
end
|
1122
|
+
end
|
1123
|
+
break if check_sig == false
|
1124
|
+
end
|
1125
|
+
end
|
1126
|
+
#END : CHECK BY day
|
1127
|
+
#BEGIN : CHECK BY ip adress
|
1128
|
+
unless @sig_db['rules'][i][kfield]['ipaddr'].nil?
|
1129
|
+
if event.get(kfield).is_a?(String) and not event.get(kfield).nil?
|
1130
|
+
unless @sig_db['rules'][i][kfield]['ipaddr']['egal'].nil?
|
1131
|
+
check_sig=false
|
1132
|
+
net = IPAddr.new(@sig_db['rules'][i][kfield]['ipaddr']['egal'])
|
1133
|
+
if net===event.get(kfield).to_s
|
1134
|
+
sig_add[kfield.to_s]="Value IP address #{event.get(kfield)} != #{@sig_db['rules'][i][kfield]['ipaddr']['egal']} found"
|
1135
|
+
check_sig=true
|
1136
|
+
end
|
1137
|
+
end
|
1138
|
+
break if check_sig == false
|
1139
|
+
unless @sig_db['rules'][i][kfield]['ipaddr']['diff'].nil?
|
1140
|
+
check_sig=false
|
1141
|
+
net = IPAddr.new(@sig_db['rules'][i][kfield]['ipaddr']['diff'])
|
1142
|
+
if not net===event.get(kfield).to_s
|
1143
|
+
sig_add[kfield.to_s]="Value IP address #{event.get(kfield)} != #{@sig_db['rules'][i][kfield]['ipaddr']['diff']} found"
|
1144
|
+
check_sig=true
|
1145
|
+
end
|
1146
|
+
end
|
1147
|
+
break if check_sig == false
|
1148
|
+
end
|
1149
|
+
end
|
1150
|
+
#END : CHECK BY ip adress
|
1151
|
+
#BEGIN : CHECK BY size field operation
|
1152
|
+
unless @sig_db['rules'][i][kfield]['sizeope'].nil?
|
1153
|
+
if event.get(kfield).is_a?(String) and not event.get(kfield).nil?
|
1154
|
+
unless @sig_db['rules'][i][kfield]['sizeope']['egal'].nil?
|
1155
|
+
check_sig=false
|
1156
|
+
if event.get(kfield).length == @sig_db['rules'][i][kfield]['sizeope']['egal']
|
1157
|
+
sig_add[kfield.to_s]="Value numeric #{event.get(kfield).length} == #{@sig_db['rules'][i][kfield]['sizeope']['egal']} found"
|
1158
|
+
check_sig=true
|
1159
|
+
end
|
1160
|
+
end
|
1161
|
+
break if check_sig == false
|
1162
|
+
unless @sig_db['rules'][i][kfield]['sizeope']['sup'].nil?
|
1163
|
+
check_sig=false
|
1164
|
+
if event.get(kfield).length > @sig_db['rules'][i][kfield]['sizeope']['sup']
|
1165
|
+
sig_add[kfield.to_s]="Value numeric #{event.get(kfield).length} > #{@sig_db['rules'][i][kfield]['sizeope']['sup']} found"
|
1166
|
+
check_sig=true
|
1167
|
+
end
|
1168
|
+
end
|
1169
|
+
break if check_sig == false
|
1170
|
+
unless @sig_db['rules'][i][kfield]['sizeope']['inf'].nil?
|
1171
|
+
check_sig=false
|
1172
|
+
if event.get(kfield).length < @sig_db['rules'][i][kfield]['sizeope']['inf']
|
1173
|
+
sig_add[kfield.to_s]="Value numeric #{event.get(kfield).length} < #{@sig_db['rules'][i][kfield]['sizeope']['inf']} found"
|
1174
|
+
check_sig=true
|
1175
|
+
end
|
1176
|
+
end
|
1177
|
+
break if check_sig == false
|
1178
|
+
unless @sig_db['rules'][i][kfield]['sizeope']['diff'].nil?
|
1179
|
+
check_sig=false
|
1180
|
+
if event.get(kfield).length != @sig_db['rules'][i][kfield]['sizeope']['diff']
|
1181
|
+
sig_add[kfield.to_s]="Value numeric #{event.get(kfield).length} != #{@sig_db['rules'][i][kfield]['sizeope']['diff']} found"
|
1182
|
+
check_sig=true
|
1183
|
+
end
|
1184
|
+
end
|
1185
|
+
break if check_sig == false
|
1186
|
+
end
|
1187
|
+
end
|
1188
|
+
#END : CHECK BY size field operation
|
1189
|
+
#BEGIN : CHECK BY regexp
|
1190
|
+
unless @sig_db['rules'][i][kfield]['regexp'].nil?
|
1191
|
+
check_sig=false
|
1192
|
+
for regexp in @sig_db['rules'][i][kfield]['regexp']
|
1193
|
+
if event.get(kfield).is_a?(String) and not event.get(kfield).nil?
|
1194
|
+
match = Regexp.new(regexp, nil, 'n').match(event.get(kfield))
|
1195
|
+
if not match.nil?
|
1196
|
+
sig_add[kfield.to_s]="Regexp found #{match}"
|
1197
|
+
check_sig=true
|
1198
|
+
break
|
1199
|
+
end
|
1200
|
+
elsif event.get(kfield).is_a?(Array)
|
1201
|
+
for elem_list in event.get(kfield)
|
1202
|
+
if elem_list.is_a?(String)
|
1203
|
+
match = Regexp.new(regexp, nil, 'n').match(elem_list)
|
1204
|
+
if not match.nil?
|
1205
|
+
sig_add[kfield.to_s]="Regexp found #{match}"
|
1206
|
+
check_sig=true
|
1207
|
+
break
|
1208
|
+
end
|
1209
|
+
end
|
1210
|
+
end
|
1211
|
+
end
|
1212
|
+
end
|
1213
|
+
end
|
1214
|
+
break if check_sig == false
|
1215
|
+
#END : CHECK BY regexp
|
1216
|
+
#BEGIN : CHECK BY regexp excluse (not present)
|
1217
|
+
unless @sig_db['rules'][i][kfield]['notregexp'].nil?
|
1218
|
+
check_sig=false
|
1219
|
+
regexplen=@sig_db['rules'][i][kfield]['notregexp'].length
|
1220
|
+
veriflen=0
|
1221
|
+
for regexp in @sig_db['rules'][i][kfield]['notregexp']
|
1222
|
+
if event.get(kfield).is_a?(String)
|
1223
|
+
match = Regexp.new(regexp, nil, 'n').match(event.get(kfield))
|
1224
|
+
if match.nil?
|
1225
|
+
veriflen=veriflen+1
|
1226
|
+
end
|
1227
|
+
elsif event.get(kfield).is_a?(Array)
|
1228
|
+
for elem_list in event.get(kfield)
|
1229
|
+
if elem_list.is_a?(String)
|
1230
|
+
match = Regexp.new(regexp, nil, 'n').match(elem_list)
|
1231
|
+
if match.nil?
|
1232
|
+
veriflen=veriflen+1
|
1233
|
+
end
|
1234
|
+
end
|
1235
|
+
end
|
1236
|
+
end
|
1237
|
+
end
|
1238
|
+
if veriflen==regexplen
|
1239
|
+
sig_add[kfield.to_s]="Not Regexp present: OK"
|
1240
|
+
check_sig=true
|
1241
|
+
end
|
1242
|
+
end
|
1243
|
+
break if check_sig == false
|
1244
|
+
#END : CHECK BY regexp excluse (not present)
|
1245
|
+
#CHECK SIG BY FIELD END
|
1246
|
+
#check SIG RESULT FIND and get information name, type, modefp, note, id
|
1247
|
+
if check_sig == true
|
1248
|
+
validfield = validfield + 1
|
1249
|
+
if @sig_db['rules'][i][kfield]['id'].is_a?(Numeric)
|
1250
|
+
sig_add["id"] = @sig_db['rules'][i][kfield]['id'].to_i
|
1251
|
+
else
|
1252
|
+
#all information must to be on same field
|
1253
|
+
next
|
1254
|
+
end
|
1255
|
+
if @sig_db['rules'][i][kfield]['name'].is_a?(String)
|
1256
|
+
if sig_add["name_sig"].nil?
|
1257
|
+
sig_add["name_sig"] = @sig_db['rules'][i][kfield]['name']
|
1258
|
+
else
|
1259
|
+
sig_add["name_sig"] = sig_add["name_sig"] + @sig_db['rules'][i][kfield]['name']
|
1260
|
+
end
|
1261
|
+
end
|
1262
|
+
if @sig_db['rules'][i][kfield]['type'].is_a?(Numeric)
|
1263
|
+
if @sig_db['rules'][i][kfield]['type'] == 2
|
1264
|
+
type_sig = type_sig + 1
|
1265
|
+
end
|
1266
|
+
if @sig_db['rules'][i][kfield]['type'] == 1
|
1267
|
+
type_obl = type_obl + 1
|
1268
|
+
end
|
1269
|
+
end
|
1270
|
+
if @sig_db['rules'][i][kfield]['modeFP'].nil?
|
1271
|
+
if @sig_db['rules'][i][kfield]['modeFP'] == true
|
1272
|
+
sig_add["modeFP"] = true
|
1273
|
+
end
|
1274
|
+
end
|
1275
|
+
if @sig_db['rules'][i][kfield]['note'].is_a?(Numeric)
|
1276
|
+
if sig_add["note"].nil?
|
1277
|
+
sig_add["note"] = @sig_db['rules'][i][kfield]['note'].to_s
|
1278
|
+
else
|
1279
|
+
sig_add["note"] = (sig_add["note"].to_i + @sig_db['rules'][i][kfield]['note'].to_i).to_s
|
1280
|
+
end
|
1281
|
+
end
|
1282
|
+
if @sig_db['rules'][i][kfield]['extract'].is_a?(Hash)
|
1283
|
+
sig_add["extract"] = @sig_db['rules'][i][kfield]['extract']
|
1284
|
+
end
|
1285
|
+
#"freq_field:" [field,field,field,field],"freq_delay":60s,freq_count: 3, freq_resettime: 3600s, correlate_change_fieldvalue: []
|
1286
|
+
#use for correlate multi event type with correlate_change_fieldvalue
|
1287
|
+
# or use for freq select, by exemple brute force without correlate_change_fieldvalue
|
1288
|
+
if @sig_db['rules'][i][kfield]['freq_field'].is_a?(Array) and @sig_db['rules'][i][kfield]['freq_delay'].is_a?(Interger) and @sig_db['rules'][i][kfield]['freq_resettime'].is_a?(Integer) and @sig_db['rules'][i][kfield]['freq_count'].is_a?(Integer)
|
1289
|
+
sig_add["freq_field"] = @sig_db['rules'][i][kfield]['freq_field']
|
1290
|
+
sig_add["freq_delay"] = @sig_db['rules'][i][kfield]['freq_delay']
|
1291
|
+
sig_add["freq_count"] = @sig_db['rules'][i][kfield]['freq_count']
|
1292
|
+
sig_add["freq_resettime"] = @sig_db['rules'][i][kfield]['freq_resettime']
|
1293
|
+
if @sig_db['rules'][i][kfield]['correlate_change_fieldvalue'].is_a?(Array) and not @sig_db['rules'][i][kfield]['correlate_change_fieldvalue'].empty?
|
1294
|
+
sig_add["correlate_change_fieldvalue"] = @sig_db['rules'][i][kfield]['correlate_change_fieldvalue']
|
1295
|
+
end
|
1296
|
+
end
|
1297
|
+
end
|
1298
|
+
#end check result find
|
1299
|
+
end
|
1300
|
+
#verify all field checked and all checks are matched
|
1301
|
+
if countfield > 0 and countfield == validfield
|
1302
|
+
#if mode FP break and delete event
|
1303
|
+
if sig_add["modeFP"] == true
|
1304
|
+
#@logger.warn("DROP EVENT FP:", :string => sig_add["name_sig"])
|
1305
|
+
sig_add.clear
|
1306
|
+
detected_sig.clear
|
1307
|
+
detected_sig_count=0
|
1308
|
+
event.cancel
|
1309
|
+
return
|
1310
|
+
end
|
1311
|
+
#detected freq & correlate
|
1312
|
+
if sig_add["freq_field"]
|
1313
|
+
#get id sig for know if you create alert or no
|
1314
|
+
#example if just id match then don't create alert
|
1315
|
+
detected_sig_id_corre.push(*sig_add["id"])
|
1316
|
+
#create hash of event
|
1317
|
+
fields_value=""
|
1318
|
+
for fx in sig_add["freq_field"]
|
1319
|
+
if event.get(fx)
|
1320
|
+
fields_value = fields_value + event.get(fx).to_s.downcase
|
1321
|
+
end
|
1322
|
+
end
|
1323
|
+
hash_field = OpenSSL::HMAC.hexdigest(OpenSSL::Digest::SHA256.new, "SIG-PLUGIN-FREQ", fields_value.to_s).force_encoding(Encoding::UTF_8)
|
1324
|
+
if @sig_db_freq[hash_field]
|
1325
|
+
#hash in db
|
1326
|
+
#verify if valid is false
|
1327
|
+
if @sig_db_freq[hash_field]['valid'] == false
|
1328
|
+
#ok hash not matched
|
1329
|
+
#verify delay
|
1330
|
+
if @sig_db_freq[hash_field]['delay'] < tnow
|
1331
|
+
#delay is out
|
1332
|
+
#restart of 0
|
1333
|
+
@sig_db_freq[hash_field]['count'] = 1
|
1334
|
+
@sig_db_freq[hash_field]['delay'] = tnow + sig_add["freq_delay"]
|
1335
|
+
if sig_add["correlate_change_fieldvalue"]
|
1336
|
+
fields_corre_value=""
|
1337
|
+
@sig_db_freq[hash_field]['corre_value'] = []
|
1338
|
+
for fy in sig_add["freq_field"]
|
1339
|
+
if event.get(fy)
|
1340
|
+
fields_corre_value = fields_corre_value + event.get(fy).to_s.downcase
|
1341
|
+
end
|
1342
|
+
end
|
1343
|
+
@sig_db_freq[hash_field]['corre_value'].push(*OpenSSL::HMAC.hexdigest(OpenSSL::Digest::SHA256.new, "SIG-PLUGIN-FREQ", fields_corre_value.to_s).force_encoding(Encoding::UTF_8))
|
1344
|
+
end
|
1345
|
+
@sig_db_freq[hash_field]['valid'] = false
|
1346
|
+
else
|
1347
|
+
#ok count, because delay is valid
|
1348
|
+
#check if sig_add["correlate_change_fieldvalue"] is present
|
1349
|
+
hash_corre_value = ""
|
1350
|
+
if sig_add["correlate_change_fieldvalue"]
|
1351
|
+
fields_corre_value=""
|
1352
|
+
for fy in sig_add["freq_field"]
|
1353
|
+
if event.get(fy)
|
1354
|
+
fields_corre_value = fields_corre_value + event.get(fy).to_s.downcase
|
1355
|
+
end
|
1356
|
+
end
|
1357
|
+
hash_corre_value = OpenSSL::HMAC.hexdigest(OpenSSL::Digest::SHA256.new, "SIG-PLUGIN-FREQ", fields_corre_value.to_s).force_encoding(Encoding::UTF_8)
|
1358
|
+
if not @sig_db_freq[hash_field]['corre_value'].include?(hash_corre_value)
|
1359
|
+
#if correlate hash not exist count ++
|
1360
|
+
@sig_db_freq[hash_field]['count'] = @sig_db_freq[hash_field]['count'] + 1
|
1361
|
+
@sig_db_freq[hash_field]['corre_value'].push(*hash_corre_value)
|
1362
|
+
end
|
1363
|
+
else
|
1364
|
+
#no correlate
|
1365
|
+
@sig_db_freq[hash_field]['count'] = @sig_db_freq[hash_field]['count'] + 1
|
1366
|
+
end
|
1367
|
+
#verify if count reach count_value rule
|
1368
|
+
if @sig_db_freq[hash_field]['count'] >= sig_add["freq_resettime"]
|
1369
|
+
#valid sig
|
1370
|
+
@sig_db_freq[hash_field]['delay'] = tnow + sig_add["freq_resettime"]
|
1371
|
+
@sig_db_freq[hash_field]['valid'] = true
|
1372
|
+
detected_sig_id_corre.clear
|
1373
|
+
end
|
1374
|
+
end
|
1375
|
+
else
|
1376
|
+
#hash matched in past, verify if resettime is passed?
|
1377
|
+
if @sig_db_freq[hash_field]['delay'] < tnow
|
1378
|
+
#delay is passed, restart to 0
|
1379
|
+
@sig_db_freq[hash_field]['count'] = 1
|
1380
|
+
if sig_add["correlate_change_fieldvalue"]
|
1381
|
+
@sig_db_freq[hash_field]['corre_value'] = []
|
1382
|
+
fields_corre_value=""
|
1383
|
+
for fy in sig_add["freq_field"]
|
1384
|
+
if event.get(fy)
|
1385
|
+
fields_corre_value = fields_corre_value + event.get(fy).to_s.downcase
|
1386
|
+
end
|
1387
|
+
end
|
1388
|
+
@sig_db_freq[hash_field]['corre_value'].push(*OpenSSL::HMAC.hexdigest(OpenSSL::Digest::SHA256.new, "SIG-PLUGIN-FREQ", fields_corre_value.to_s).force_encoding(Encoding::UTF_8))
|
1389
|
+
end
|
1390
|
+
@sig_db_freq[hash_field]['delay'] = tnow + sig_add["freq_delay"]
|
1391
|
+
@sig_db_freq[hash_field]['valid'] = false
|
1392
|
+
end
|
1393
|
+
end
|
1394
|
+
else
|
1395
|
+
#new hash
|
1396
|
+
@sig_db_freq[hash_field] = {}
|
1397
|
+
@sig_db_freq[hash_field]['count'] = 1
|
1398
|
+
if sig_add["correlate_change_fieldvalue"]
|
1399
|
+
fields_corre_value=""
|
1400
|
+
@sig_db_freq[hash_field]['corre_value'] = []
|
1401
|
+
for fy in sig_add["freq_field"]
|
1402
|
+
if event.get(fy)
|
1403
|
+
fields_corre_value = fields_corre_value + event.get(fy).to_s.downcase
|
1404
|
+
end
|
1405
|
+
end
|
1406
|
+
@sig_db_freq[hash_field]['corre_value'].push(*OpenSSL::HMAC.hexdigest(OpenSSL::Digest::SHA256.new, "SIG-PLUGIN-FREQ", fields_corre_value.to_s).force_encoding(Encoding::UTF_8))
|
1407
|
+
end
|
1408
|
+
@sig_db_freq[hash_field]['delay'] = tnow + sig_add["freq_delay"]
|
1409
|
+
@sig_db_freq[hash_field]['valid'] = false
|
1410
|
+
end
|
1411
|
+
end
|
1412
|
+
#detected_extract
|
1413
|
+
if sig_add["extract"]
|
1414
|
+
#extract field and insert in ioc local
|
1415
|
+
sig_add["extract"].each do |ekey,eval|
|
1416
|
+
if event.get(ekey) and @ioc_db_local
|
1417
|
+
if @ioc_db_local[eval]
|
1418
|
+
unless @ioc_db_local[eval].include?(event.get(ekey))
|
1419
|
+
@ioc_db_local[eval].push(*event.get(ekey))
|
1420
|
+
sleep(1) until @load_statut_ioc
|
1421
|
+
@load_statut_ioc = false
|
1422
|
+
@ioc_db = @ioc_db.merge(db_tmp) {|key, first, second| first.is_a?(Array) && second.is_a?(Array) ? first | second : second }
|
1423
|
+
@load_statut_ioc = true
|
1424
|
+
end
|
1425
|
+
else
|
1426
|
+
@ioc_db_local[eval] = []
|
1427
|
+
@ioc_db_local[eval].push(*event.get(ekey))
|
1428
|
+
sleep(1) until @load_statut_ioc
|
1429
|
+
@load_statut_ioc = false
|
1430
|
+
@ioc_db = @ioc_db.merge(db_tmp) {|key, first, second| first.is_a?(Array) && second.is_a?(Array) ? first | second : second }
|
1431
|
+
@load_statut_ioc = true
|
1432
|
+
end
|
1433
|
+
end
|
1434
|
+
end
|
1435
|
+
end
|
1436
|
+
detected_sig.push(*sig_add)
|
1437
|
+
#no continu if one rule match
|
1438
|
+
if @check_stop
|
1439
|
+
detected_sig_count = 1
|
1440
|
+
detected_sig_note = sig_add["note"].to_i
|
1441
|
+
detected_sig_id.push(*sig_add["id"])
|
1442
|
+
detected_sig_name.push(*sig_add["name_sig"])
|
1443
|
+
sig_add.clear
|
1444
|
+
break
|
1445
|
+
else
|
1446
|
+
detected_sig_count = detected_sig_count + 1
|
1447
|
+
detected_sig_name.push(*sig_add["name_sig"])
|
1448
|
+
detected_sig_id.push(*sig_add["id"])
|
1449
|
+
if detected_sig_note < sig_add["note"].to_i
|
1450
|
+
detected_sig_note = sig_add["note"].to_i
|
1451
|
+
end
|
1452
|
+
sig_add.clear
|
1453
|
+
end
|
1454
|
+
else
|
1455
|
+
sig_add.clear
|
1456
|
+
end
|
1457
|
+
end
|
1458
|
+
end
|
1459
|
+
end
|
1460
|
+
eventK.clear
|
1461
|
+
#check if sig detected, and add to @targetxxx_sig
|
1462
|
+
if detected_sig.any? and type_sig < detected_sig_count and type_obl > 0
|
1463
|
+
#verify if not juste correlate rule match
|
1464
|
+
if detected_sig_id != detected_sig_id_corre
|
1465
|
+
unless event.get(@target_sig).nil?
|
1466
|
+
event.set(@target_sig, event.get(@target_sig) + detected_sig)
|
1467
|
+
else
|
1468
|
+
event.set(@target_sig, detected_sig)
|
1469
|
+
end
|
1470
|
+
unless event.get(@targetnum_sig).nil?
|
1471
|
+
event.set(@targetnum_sig, event.get(@targetnum_sig) + detected_sig_count)
|
1472
|
+
else
|
1473
|
+
event.set(@targetnum_sig, detected_sig_count)
|
1474
|
+
end
|
1475
|
+
unless event.get(@targetnote).nil?
|
1476
|
+
if event.get(@targetnote) < detected_sig_note
|
1477
|
+
event.set(@targetnote, detected_sig_note)
|
1478
|
+
end
|
1479
|
+
else
|
1480
|
+
event.set(@targetnote, detected_sig_note)
|
1481
|
+
end
|
1482
|
+
unless event.get(@targetname_sig).nil?
|
1483
|
+
event.set(@targetname_sig, event.get(@targetname_sig) + detected_sig_name)
|
1484
|
+
else
|
1485
|
+
event.set(@targetname_sig, detected_sig_name)
|
1486
|
+
end
|
1487
|
+
unless event.get(@targetid).nil?
|
1488
|
+
event.set(@targetid, event.get(@targetid) + detected_sig_id)
|
1489
|
+
else
|
1490
|
+
event.set(@targetid, detected_sig_id)
|
1491
|
+
end
|
1492
|
+
end
|
1493
|
+
#@logger.warn("Dectected SIG", :detected_sig_name => detected_sig_name)
|
1494
|
+
end
|
1495
|
+
end
|
1496
|
+
######################
|
1497
|
+
|
1498
|
+
######REFERENCE#######
|
1499
|
+
#check refresh db ref
|
1500
|
+
unless @disable_ref
|
1501
|
+
if @next_refresh_confrules < tnow
|
1502
|
+
if @load_statut_rules == true
|
1503
|
+
@load_statut_rules = false
|
1504
|
+
load_db_ref
|
1505
|
+
@next_refresh_confrules = tnow + @refresh_interval_confrules
|
1506
|
+
@load_statut_rules = true
|
1507
|
+
end
|
1508
|
+
end
|
1509
|
+
sleep(1) until @load_statut_rules
|
1510
|
+
end
|
1511
|
+
#check if db and rule not empty
|
1512
|
+
if not @ref_rules.empty? and not @ref_db.empty? and not @pattern_db and not @disable_ref and event.get(@noapply_ref).nil? and event.get(@field_enr).nil?
|
1513
|
+
#list all rules
|
1514
|
+
#!!!! amelioration de la sig avec simhash...
|
1515
|
+
detected_ref = Array.new
|
1516
|
+
detected_ref_field = Array.new
|
1517
|
+
detected_ref_id = Array.new
|
1518
|
+
detected_ref_err_count = 0
|
1519
|
+
detected_ref_note = 0
|
1520
|
+
eventK = event.to_hash.keys
|
1521
|
+
for r_rule in @ref_rules
|
1522
|
+
#rules[ {"pivot_field":{field1:'value'},{field2:'value'}, "list_sig": [fieldx,fieldy,...], "relation_min": 10, "simhash_size": 16, "simhash_use_size": 14, "id": 200X} ]
|
1523
|
+
# if pivot containt array, !!! order is important
|
1524
|
+
# list_sig containt field possible, if one field not exist, it works too
|
1525
|
+
num_p = r_rule["pivot_field"].keys.length
|
1526
|
+
pivot = r_rule["pivot_field"].keys & eventK
|
1527
|
+
tmp_detect={}
|
1528
|
+
tmp_detect[r_rule["id"].to_s]={}
|
1529
|
+
#heck if pivot present in event
|
1530
|
+
if num_p == pivot.length
|
1531
|
+
stop = false
|
1532
|
+
for keyx in pivot
|
1533
|
+
if event.get(keyx) === r_rule["pivot_field"][keyx]
|
1534
|
+
stop = true
|
1535
|
+
break
|
1536
|
+
end
|
1537
|
+
end
|
1538
|
+
next if stop
|
1539
|
+
if @ref_db[r_rule["id"].to_s]
|
1540
|
+
#{ 'ID20XXXX': {
|
1541
|
+
# 'field': {
|
1542
|
+
# 'TYPE': 'Array|Int|String|...',
|
1543
|
+
# 'Uniq_value': true or false, #define if value is random => true
|
1544
|
+
# 'NOTE_UNIQ_REDUC': 0.1 # for reduce note if match on uniq fueld
|
1545
|
+
# 'LIST_VALUE': ['value_possible1','value_possible2','value_possibleX'],
|
1546
|
+
# 'NOTE_LISTV': 0.25 # note between 0.x and 4 default 0.25
|
1547
|
+
# 'ENCODING': true or false, # value contains than ascii caratere
|
1548
|
+
# 'NOTE_ENCODING': 0.25 # note between 0.x and 4 default 0.25
|
1549
|
+
# 'LEN_MAX': numeric_value,
|
1550
|
+
# 'NOTE_LEN': 0.25 # note between 0.x and 4 default 0.25
|
1551
|
+
# 'LEN_MIN': numeric_value,
|
1552
|
+
# 'LEN_AVG': numeric_value,
|
1553
|
+
# 'LEN_AVG_PRCT': pourcent for AVG,
|
1554
|
+
# 'NOTE_LEN_AVG': 0.1 # note between 0.x and 4 default 0.1
|
1555
|
+
# 'LEN_EVENorUNEVENnum': numeric_value, #even num = 1;uneven num = 2; unknown/undefine = 0
|
1556
|
+
# 'NOTE_LEN_EVEN': 0.25 # note between 0.x and 4 default 0.25
|
1557
|
+
# 'REGEXP_MIN': [],
|
1558
|
+
# 'NOTE_REGEXP_MIN': 0.25 # note between 0.x and 4 default 0.25
|
1559
|
+
# 'REGEXP': []
|
1560
|
+
# 'NOTE_REGEXP': 0.25 # note between 0.x and 4 default 0.25
|
1561
|
+
# } ,
|
1562
|
+
# #relation value_fix contains list of value of field not unique (random)
|
1563
|
+
# # by exemple fld1: '1'; fld2: 'blabla';fld3: '10.10.10.10'
|
1564
|
+
# # create LIST simhash value and attention to order field
|
1565
|
+
# # you can optimiz with simhash - end if earn place memory
|
1566
|
+
# # important you count SIMHASH:COUNT for use COUNT if very little score => suspect [use conf -> relation_min]
|
1567
|
+
# 'relation_value_fix": {'SIMHASH1':COUNTX,'SIMHASH2':COUNTY,'SIMHASH3':COUNTX},
|
1568
|
+
# 'NOTE_DEFAULT': 2# note between 0.x and 4 default 2
|
1569
|
+
# !!!!!!!!!!!!!!! if NOTE or relation_value_fix is name of real field == problem!!!!
|
1570
|
+
# }}}
|
1571
|
+
#create sig event
|
1572
|
+
sig_tmp = r_rule["list_sig"] & eventK
|
1573
|
+
if sig_tmp.any?
|
1574
|
+
#sif is not empty
|
1575
|
+
#CHECK FIELD by FIELD
|
1576
|
+
sig_not_uniq = []
|
1577
|
+
for field in sig_tmp
|
1578
|
+
tmp_detect[r_rule["id"].to_s][field.to_s]={}
|
1579
|
+
string_field = true
|
1580
|
+
#CHECK: TYPE -> int/string/array/hash/... not for note, juste for next step for good choice => nummber or string analysis
|
1581
|
+
if ['boolean', 'long', 'integer', 'short', 'byte', 'double', 'float'].include?(@ref_db[r_rule["id"].to_s][field]['TYPE'].to_s)
|
1582
|
+
string_field = false
|
1583
|
+
end
|
1584
|
+
#CHECK: LIST_VALUE is not empty then check if contains
|
1585
|
+
if not @ref_db[r_rule["id"].to_s][field]['LIST_VALUE'].empty? and not @ref_db[r_rule["id"].to_s][field]['LIST_VALUE'].include?(event.get(field.to_s))
|
1586
|
+
detected_ref_note = detected_ref_note + @ref_db[r_rule["id"].to_s][field]['NOTE_LISTV']
|
1587
|
+
detected_ref_err_count = detected_ref_err_count + 1
|
1588
|
+
detected_ref_id.push(*r_rule["id"]) if not detected_ref_id.include?(r_rule["id"])
|
1589
|
+
detected_ref_field.push(*field.to_s) if not detected_ref_field.include?(field.to_s)
|
1590
|
+
tmp_detect[r_rule["id"].to_s][field.to_s]['LIST_VALUE']="Value not in list: " + event.get(field.to_s)
|
1591
|
+
end
|
1592
|
+
#CHECK: ENCODING char, not check if not string
|
1593
|
+
if string_field and not @ref_db[r_rule["id"].to_s][field]['ENCODING'].include?(event.get(field.to_s).encoding.to_s)
|
1594
|
+
detected_ref_note = detected_ref_note + @ref_db[r_rule["id"].to_s][field]['NOTE_ENCODING']
|
1595
|
+
detected_ref_err_count = detected_ref_err_count + 1
|
1596
|
+
detected_ref_id.push(*r_rule["id"]) if not detected_ref_id.include?(r_rule["id"])
|
1597
|
+
detected_ref_field.push(*field.to_s) if not detected_ref_field.include?(field.to_s)
|
1598
|
+
tmp_detect[r_rule["id"].to_s][field.to_s]['ENCODING']=event.get(field.to_s).encoding.to_s
|
1599
|
+
end
|
1600
|
+
#CHECK: TYPE class of number, not check for string
|
1601
|
+
if not string_field and not @ref_db[r_rule["id"].to_s][field]['ENCODING'].include?(event.get(field.to_s).class.to_s)
|
1602
|
+
detected_ref_note = detected_ref_note + @ref_db[r_rule["id"].to_s][field]['NOTE_ENCODING']
|
1603
|
+
detected_ref_err_count = detected_ref_err_count + 1
|
1604
|
+
detected_ref_id.push(*r_rule["id"]) if not detected_ref_id.include?(r_rule["id"])
|
1605
|
+
detected_ref_field.push(*field.to_s) if not detected_ref_field.include?(field.to_s)
|
1606
|
+
tmp_detect[r_rule["id"].to_s][field.to_s]['ENCODING']=event.get(field.to_s).encoding.to_s
|
1607
|
+
end
|
1608
|
+
#CHECK: LEN for compare to MAX/MIN/AVG
|
1609
|
+
f_len=0
|
1610
|
+
#DIfferent check if field type string or number
|
1611
|
+
if string_field
|
1612
|
+
f_len=event.get(field.to_s).length
|
1613
|
+
else
|
1614
|
+
f_len=event.get(field.to_s)
|
1615
|
+
end
|
1616
|
+
prct_h = @ref_db[r_rule["id"].to_s][field]['LEN_AVG'].to_f + ( @ref_db[r_rule["id"].to_s][field]['LEN_AVG'].to_f / 100.to_f * @ref_db[r_rule["id"].to_s][field]['LEN_AVG_PRCT'].to_f )
|
1617
|
+
prct_l = @ref_db[r_rule["id"].to_s][field]['LEN_AVG'].to_f - ( @ref_db[r_rule["id"].to_s][field]['LEN_AVG'].to_f / 100.to_f * @ref_db[r_rule["id"].to_s][field]['LEN_AVG_PRCT'].to_f )
|
1618
|
+
if f_len > @ref_db[r_rule["id"].to_s][field]['LEN_MAX'] or f_len < @ref_db[r_rule["id"].to_s][field]['LEN_MIN'] or (prct_l >= f_len.to_f and f_len.to_f <= prct_h)
|
1619
|
+
detected_ref_note = detected_ref_note + @ref_db[r_rule["id"].to_s][field]['NOTE_LEN']
|
1620
|
+
detected_ref_err_count = detected_ref_err_count + 1
|
1621
|
+
detected_ref_id.push(*r_rule["id"]) if not detected_ref_id.include?(r_rule["id"])
|
1622
|
+
detected_ref_field.push(*field.to_s) if not detected_ref_field.include?(field.to_s)
|
1623
|
+
tmp_detect[r_rule["id"].to_s][field.to_s]['LEN']=f_len
|
1624
|
+
end
|
1625
|
+
#CHECK: type number (unven/uneven) if value different of 0
|
1626
|
+
f_len_even = 2
|
1627
|
+
if f_len.even?
|
1628
|
+
f_len_even = 1
|
1629
|
+
end
|
1630
|
+
if @ref_db[r_rule["id"].to_s][field]['LEN_EVENorUNEVENnum'] != 0 and f_len_even != @ref_db[r_rule["id"].to_s][field]['LEN_EVENorUNEVENnum']
|
1631
|
+
detected_ref_note = detected_ref_note + @ref_db[r_rule["id"].to_s][field]['NOTE_LEN_EVEN']
|
1632
|
+
detected_ref_err_count = detected_ref_err_count + 1
|
1633
|
+
detected_ref_id.push(*r_rule["id"]) if not detected_ref_id.include?(r_rule["id"])
|
1634
|
+
detected_ref_field.push(*field.to_s) if not detected_ref_field.include?(field.to_s)
|
1635
|
+
tmp_detect[r_rule["id"].to_s][field.to_s]['LEN_EVEN']=f_len_even
|
1636
|
+
end
|
1637
|
+
#CHECK: Regexp pattern Normaly/MInimal
|
1638
|
+
#create regexp list match of field
|
1639
|
+
rlist = []
|
1640
|
+
@pattern_db.each do |key, value|
|
1641
|
+
match = Regexp.new(value, nil, 'n').match(event.get(field.to_s).to_s)
|
1642
|
+
if not match.nil?
|
1643
|
+
rlist << key
|
1644
|
+
end
|
1645
|
+
end
|
1646
|
+
#intersection with reference
|
1647
|
+
regexp_min = @ref_db[r_rule["id"].to_s][field]['REGEXP_MIN'] & rlist
|
1648
|
+
#if all reference not present in event
|
1649
|
+
if regexp_min.length != @ref_db[r_rule["id"].to_s][field]['REGEXP_MIN'].length
|
1650
|
+
detected_ref_note = detected_ref_note + @ref_db[r_rule["id"].to_s][field]['NOTE_REGEXP_MIN']
|
1651
|
+
detected_ref_err_count = detected_ref_err_count + 1
|
1652
|
+
detected_ref_id.push(*r_rule["id"]) if not detected_ref_id.include?(r_rule["id"])
|
1653
|
+
detected_ref_field.push(*field.to_s) if not detected_ref_field.include?(field.to_s)
|
1654
|
+
tmp_detect[r_rule["id"].to_s][field.to_s]['REGEXP_MIN']=regexp_min - rlist
|
1655
|
+
end
|
1656
|
+
#create regexp sig
|
1657
|
+
srlist=rlist.join("::")
|
1658
|
+
#Search regexp sig in reference
|
1659
|
+
unless @ref_db[r_rule["id"].to_s][field]['REGEXP'].include?(srlist)
|
1660
|
+
detected_ref_note = detected_ref_note + @ref_db[r_rule["id"].to_s][field]['NOTE_REGEXP']
|
1661
|
+
detected_ref_err_count = detected_ref_err_count + 1
|
1662
|
+
detected_ref_id.push(*r_rule["id"]) if not detected_ref_id.include?(r_rule["id"])
|
1663
|
+
detected_ref_field.push(*field.to_s) if not detected_ref_field.include?(field.to_s)
|
1664
|
+
tmp_detect[r_rule["id"].to_s][field.to_s]['REGEXP']=srlist
|
1665
|
+
end
|
1666
|
+
#CHECK: Unique Value -> create SIG UNIQ
|
1667
|
+
unless @ref_db[r_rule["id"].to_s][field]['Uniq_value']
|
1668
|
+
sig_not_uniq << field.to_s
|
1669
|
+
end
|
1670
|
+
end
|
1671
|
+
#CHECK: GLOBAL relation of uniq field by simhash for PIVOT->SIG
|
1672
|
+
#create simhash of sig_not_uniq value
|
1673
|
+
sig_not_uniq = sig_not_uniq.sort
|
1674
|
+
sig_not_uniq_value = []
|
1675
|
+
for xfield in sig_not_uniq
|
1676
|
+
sig_not_uniq_value << event.get(xfield.to_s)
|
1677
|
+
end
|
1678
|
+
#create simhash
|
1679
|
+
sig_not_uniq_value = sig_not_uniq_value.to_s.force_encoding('iso-8859-1').encode('utf-8') #string
|
1680
|
+
simhash_event = sig_not_uniq_value.simhash(:hashbits => r_rule["simhash_size"]).to_s
|
1681
|
+
if @ref_db[r_rule["id"].to_s]['relation_value_fix'].key?(simhash_event)
|
1682
|
+
#present , verify count
|
1683
|
+
if @ref_db[r_rule["id"].to_s]['relation_value_fix'][simhash_event] < r_rule["relation_min"]
|
1684
|
+
# more less than count_min
|
1685
|
+
detected_ref_note = detected_ref_note + @ref_db[r_rule["id"].to_s][field]['NOTE']
|
1686
|
+
detected_ref_err_count = detected_ref_err_count + 1
|
1687
|
+
detected_ref_id.push(*r_rule["id"]) if not detected_ref_id.include?(r_rule["id"])
|
1688
|
+
detected_ref_field.push(*field.to_s) if not detected_ref_field.include?(field.to_s)
|
1689
|
+
tmp_detect[r_rule["id"].to_s][field.to_s]['RELATION_LOW']=simhash_event
|
1690
|
+
end
|
1691
|
+
else
|
1692
|
+
# not present
|
1693
|
+
detected_ref_note = detected_ref_note + @ref_db[r_rule["id"].to_s][field]['NOTE']
|
1694
|
+
detected_ref_err_count = detected_ref_err_count + 1
|
1695
|
+
detected_ref_id.push(*r_rule["id"]) if not detected_ref_id.include?(r_rule["id"])
|
1696
|
+
detected_ref_field.push(*field.to_s) if not detected_ref_field.include?(field.to_s)
|
1697
|
+
tmp_detect[r_rule["id"].to_s][field.to_s]['RELATION']=simhash_event
|
1698
|
+
end
|
1699
|
+
detected_ref.push(*tmp_detect)
|
1700
|
+
end
|
1701
|
+
end
|
1702
|
+
end
|
1703
|
+
if @ref_stop_after_firstffind and detected_ref_err_count > 0
|
1704
|
+
break
|
1705
|
+
end
|
1706
|
+
end
|
1707
|
+
#add detected to event
|
1708
|
+
if detected_ref.any? and detected_ref_err_count > 0
|
1709
|
+
unless event.get(@target_ref).nil?
|
1710
|
+
event.set(@target_ref, event.get(@target_ref) + detected_ref)
|
1711
|
+
else
|
1712
|
+
event.set(@target_ref, detected_ref)
|
1713
|
+
end
|
1714
|
+
unless event.get(@targetnum_ref).nil?
|
1715
|
+
event.set(@targetnum_ref, event.get(@targetnum_ref) + detected_ref_err_count)
|
1716
|
+
else
|
1717
|
+
event.set(@targetnum_ref, detected_ref_err_count)
|
1718
|
+
end
|
1719
|
+
detected_ref_note = ( detected_ref_note + @ref_aroundfloat ).to_i #around float to int -- default + 0.5
|
1720
|
+
if detected_ref_note > 4
|
1721
|
+
detected_ref_note = 4
|
1722
|
+
end
|
1723
|
+
unless event.get(@targetnote).nil?
|
1724
|
+
if event.get(@targetnote) < detected_ref_note
|
1725
|
+
event.set(@targetnote, detected_ref_note)
|
1726
|
+
end
|
1727
|
+
else
|
1728
|
+
event.set(@targetnote, detected_ref_note)
|
1729
|
+
end
|
1730
|
+
unless event.get(@targetname_ref).nil?
|
1731
|
+
event.set(@targetname_ref, event.get(@targetname_ref) + detected_ref_field)
|
1732
|
+
else
|
1733
|
+
event.set(@targetname_ref, detected_ref_field)
|
1734
|
+
end
|
1735
|
+
unless event.get(@targetid).nil?
|
1736
|
+
event.set(@targetid, event.get(@targetid) + detected_ref_id)
|
1737
|
+
else
|
1738
|
+
event.set(@targetid, detected_ref_id)
|
1739
|
+
end
|
1740
|
+
#@logger.warn("Dectected SIG", :detected_sig_name => detected_sig_name)
|
1741
|
+
end
|
1742
|
+
end
|
1743
|
+
######################
|
1744
|
+
|
1745
|
+
######## NOTE ########
|
1746
|
+
#check refresh db note
|
1747
|
+
if not event.get(@targetid).nil? and not @disable_note
|
1748
|
+
if @next_refresh_note < tnow
|
1749
|
+
if @load_statut_note == true
|
1750
|
+
@load_statut_note = false
|
1751
|
+
load_conf_rules_note
|
1752
|
+
@next_refresh_note = tnow + @refresh_interval_confrules
|
1753
|
+
@load_statut_note = true
|
1754
|
+
end
|
1755
|
+
end
|
1756
|
+
sleep(1) until @load_statut_note
|
1757
|
+
end
|
1758
|
+
#check if db note empty and @targetid in event exist
|
1759
|
+
if not @note_db.empty? and not event.get(@targetid).nil? and not @disable_note and event.get(@field_enr).nil?
|
1760
|
+
note_max=0
|
1761
|
+
overwrite=false
|
1762
|
+
#check all rules
|
1763
|
+
for r_note in @note_db
|
1764
|
+
#check note
|
1765
|
+
if r_note['id'] #id must present
|
1766
|
+
if r_note['id'].is_a?(Array) #id must be type Array
|
1767
|
+
verif=r_note['id'].length
|
1768
|
+
#create intersection with event id and id present in rule
|
1769
|
+
intersec = event.get(@targetid) & r_note['id']
|
1770
|
+
#verify all id present in event
|
1771
|
+
if not intersec.length == verif
|
1772
|
+
next
|
1773
|
+
end
|
1774
|
+
end
|
1775
|
+
#check if option id present in rule
|
1776
|
+
if not r_note['optid'].nil? and not r_note['opt_num'].nil? #id find with opt_num present
|
1777
|
+
intersec = event.get(@targetid) & r_note['optid'] #create intersection
|
1778
|
+
#verify minimum X (@opt_num) present in event
|
1779
|
+
if not intersec.length >= r_note['opt_num'].to_i
|
1780
|
+
next
|
1781
|
+
end
|
1782
|
+
end
|
1783
|
+
#check if not id present option in rule
|
1784
|
+
if r_note['noid'].is_a?(Array) and not r_note['noid'].empty?
|
1785
|
+
intersec = event.get(@targetid) & r_note['noid'] #create intersection
|
1786
|
+
#verify none id present in event
|
1787
|
+
if not intersec.length == 0
|
1788
|
+
next
|
1789
|
+
end
|
1790
|
+
end
|
1791
|
+
#change note if upper
|
1792
|
+
if note_max < r_note['note']
|
1793
|
+
note_max = r_note['note']
|
1794
|
+
# if option overwrite, change note even if note lower
|
1795
|
+
if r_note['overwrite']
|
1796
|
+
overwrite=true
|
1797
|
+
end
|
1798
|
+
end
|
1799
|
+
end
|
1800
|
+
end
|
1801
|
+
if note_max != 0
|
1802
|
+
if ( event.get(@targetnote) > note_max and overwrite ) or ( event.get(@targetnote) < note_max )
|
1803
|
+
event.set(@targetnote, note_max)
|
1804
|
+
end
|
1805
|
+
end
|
1806
|
+
end
|
1807
|
+
######################
|
1808
|
+
|
1809
|
+
######FINGERPRINT USE & DROP END######
|
1810
|
+
# create fingerprint at end because, you need to have sig & ioc detected for unique event
|
1811
|
+
#refresh db & conf fingerprint
|
1812
|
+
unless @disable_fp
|
1813
|
+
if @next_refresh_conffp < tnow
|
1814
|
+
if @load_statut_fp == true
|
1815
|
+
@load_statut_fp = false
|
1816
|
+
load_conf_fp
|
1817
|
+
load_db_dropfp
|
1818
|
+
@next_refresh_conffp = tnow + @refresh_interval_conffp
|
1819
|
+
@load_statut_fp = true
|
1820
|
+
end
|
1821
|
+
end
|
1822
|
+
sleep(1) until @load_statut_fp
|
1823
|
+
end
|
1824
|
+
#chekc if db &conf are not empty + select_fp exist
|
1825
|
+
if not @fp_rules.empty? and not @fp_db.nil? and not event.get(@select_fp).nil? and not @disable_fp and event.get(@field_enr).nil?
|
1826
|
+
to_string = ""
|
1827
|
+
if event.get(@select_fp).is_a?(Array)
|
1828
|
+
for elemsfp in event.get(@select_fp)
|
1829
|
+
#check if rules match with select_fp (case: Array)
|
1830
|
+
if @fp_rules.key?(elemsfp.to_s)
|
1831
|
+
if @fp_rules[elemsfp.to_s]['fields'].is_a?(Array) and @fp_rules[elemsfp.to_s]['hashbit'].is_a?(Numeric)
|
1832
|
+
#create fingerprint
|
1833
|
+
@fp_rules[elemsfp.to_s]['fields'].sort.each do |k|
|
1834
|
+
if event.get(k)
|
1835
|
+
to_string << "|#{k}|#{event.get(k)}"
|
1836
|
+
end
|
1837
|
+
end
|
1838
|
+
to_string << "|"
|
1839
|
+
to_string = to_string.force_encoding('iso-8859-1').encode('utf-8') #string
|
1840
|
+
event.set(@target_fp, to_string.simhash(:hashbits => @fp_rules[elemsfp.to_s]['hashbit']).to_s)
|
1841
|
+
#check db fp drop
|
1842
|
+
if event.get(@noapply_sig_dropfp).nil? and @fp_db[event.get(@target_fp)]
|
1843
|
+
event.cancel
|
1844
|
+
return
|
1845
|
+
end
|
1846
|
+
if @fingerprint_db[event.get(@target_fp)]
|
1847
|
+
#key existe -- event known
|
1848
|
+
if @fingerprint_db[event.get(@target_fp)] < tnow
|
1849
|
+
#date is passed
|
1850
|
+
@fingerprint_db[event.get(@target_fp)] = tnow + @fp_rules[elemsfp.to_s]['delay']
|
1851
|
+
#(event[@target_tag_fp] ||= []) << @tag_name_first
|
1852
|
+
event.set(@target_tag_fp, []) unless event.get(@target_tag_fp)
|
1853
|
+
event.set(@target_tag_fp, event.get(@target_tag_fp) + @tag_name_first)
|
1854
|
+
else
|
1855
|
+
#add tag
|
1856
|
+
#(event.get(@target_tag_fp) ||= []) << @tag_name_after
|
1857
|
+
event.set(@target_tag_fp, []) unless event.get(@target_tag_fp)
|
1858
|
+
event.set(@target_tag_fp, event.get(@target_tag_fp) + @tag_name_after)
|
1859
|
+
end
|
1860
|
+
else
|
1861
|
+
#key not exist -- new event
|
1862
|
+
@fingerprint_db[event.get(@target_fp)] = tnow + @fp_rules[elemsfp.to_s]['delay']
|
1863
|
+
#(event[@target_tag_fp] ||= []) << @tag_name_first
|
1864
|
+
event.set(@target_tag_fp, []) unless event.get(@target_tag_fp)
|
1865
|
+
event.set(@target_tag_fp, event.get(@target_tag_fp) + @tag_name_first)
|
1866
|
+
end
|
1867
|
+
end
|
1868
|
+
break
|
1869
|
+
end
|
1870
|
+
end
|
1871
|
+
#check if rules match with select_fp (case String)
|
1872
|
+
elsif event.get(@select_fp).is_a?(String) and @fp_rules.key?(event.get(@select_fp))
|
1873
|
+
if @fp_rules[event.get(@select_fp)]['fields'].is_a?(Array) and @fp_rules[event.get(@select_fp)]['hashbit'].is_a?(Integer)
|
1874
|
+
#create fingerprint
|
1875
|
+
@fp_rules[event.get(@select_fp)]['fields'].sort.each do |k|
|
1876
|
+
if event.get(k)
|
1877
|
+
to_string << "|#{k}|#{event.get(k)}"
|
1878
|
+
end
|
1879
|
+
end
|
1880
|
+
to_string << "|"
|
1881
|
+
to_string = to_string.force_encoding('iso-8859-1').encode('utf-8') #string
|
1882
|
+
event.set(@target_fp, to_string.simhash(:hashbits => @fp_rules[event.get(@select_fp)]['hashbit']).to_s)
|
1883
|
+
#check db fp drop
|
1884
|
+
if event.get(@noapply_sig_dropfp).nil? and @fp_db[event.get(@target_fp)]
|
1885
|
+
event.cancel
|
1886
|
+
return
|
1887
|
+
end
|
1888
|
+
if @fingerprint_db[event.get(@target_fp)]
|
1889
|
+
#key existe -- event known
|
1890
|
+
if @fingerprint_db[event.get(@target_fp)] < tnow
|
1891
|
+
#date is passed
|
1892
|
+
@fingerprint_db[event.get(@target_fp)] = tnow + @fp_rules[event.get(@select_fp)]['delay']
|
1893
|
+
#(event[@target_tag_fp] ||= []) << @tag_name_first
|
1894
|
+
event.set(@target_tag_fp, []) unless event.get(@target_tag_fp)
|
1895
|
+
event.set(@target_tag_fp, event.get(@target_tag_fp) + [@tag_name_first])
|
1896
|
+
else
|
1897
|
+
#add tag
|
1898
|
+
#(event[@target_tag_fp] ||= []) << @tag_name_after
|
1899
|
+
event.set(@target_tag_fp, []) unless event.get(@target_tag_fp)
|
1900
|
+
event.set(@target_tag_fp, event.get(@target_tag_fp) + [@tag_name_after])
|
1901
|
+
end
|
1902
|
+
else
|
1903
|
+
#key not exist -- new event
|
1904
|
+
@fingerprint_db[event.get(@target_fp)] = tnow + @fp_rules[event.get(@select_fp)]['delay']
|
1905
|
+
#(event[@target_tag_fp] ||= []) << @tag_name_first
|
1906
|
+
event.set(@target_tag_fp, []) unless event.get(@target_tag_fp)
|
1907
|
+
event.set(@target_tag_fp, event.get(@target_tag_fp) + [@tag_name_first])
|
1908
|
+
end
|
1909
|
+
end
|
1910
|
+
end
|
1911
|
+
end
|
1912
|
+
###########################
|
1913
|
+
|
1914
|
+
######## FREQ EVENT ########
|
1915
|
+
#rules_freq = [ {'select_field': {'fieldx':[value_list],'fieldy':[value_list]}, 'note': X, 'refresh_time': Xseconds,'reset_time': Xseconds[1j], 'reset_hour': '00:00:00', 'wait_after_reset': 10, 'id': 30XXX},...]
|
1916
|
+
#TODO: CREATE TEMPLATE FOR NEW MESSAGE
|
1917
|
+
#select field for select => first filter
|
1918
|
+
#select field value => second filter
|
1919
|
+
#refresh_time for check and calculate all time result: max & variation
|
1920
|
+
# note if match
|
1921
|
+
# reset time in second for reset all counter value
|
1922
|
+
# reset hour for begin reset with hour => use for 24h reset begin at 00:00
|
1923
|
+
# wait_after_reset: dont't check before 10 times values
|
1924
|
+
#db_freq = { '30XXX': {'status_acces':true,'reset_time': date,'refresh_date': date, 'old_date': date,'num_time': Xtimes, 'V_prev': x/m, 'Varia_avg': z/m, 'count_prev': Xtimes, 'count_cour': Xtimes, 'V_max': x/m, 'Varia_max': x/m, 'Varia_min': +/- x/m, 'Varia_glob': x/m}}
|
1925
|
+
#check refresh db ref
|
1926
|
+
#
|
1927
|
+
unless @disable_freq
|
1928
|
+
if @next_refresh_freqrules < tnow
|
1929
|
+
if @load_statut_freqrules == true
|
1930
|
+
@load_statut_freqrules = false
|
1931
|
+
load_rules_freq # load rules and create db_freq with init var
|
1932
|
+
@next_refresh_freqrules = tnow + @refresh_interval_freqrules
|
1933
|
+
@load_statut_freqrules = true
|
1934
|
+
end
|
1935
|
+
end
|
1936
|
+
sleep(1) until @load_statut_freqrules
|
1937
|
+
end
|
1938
|
+
#verify db & rules is not empty
|
1939
|
+
if not @freq_rules.empty? and not @db_freq.empty? and not @disable_freq and event.get(@noapply_freq).nil? #and event.get(@field_enr).nil?
|
1940
|
+
eventK = event.to_hash.keys
|
1941
|
+
#CHECK RULE BY RULE
|
1942
|
+
no_match = true
|
1943
|
+
for f_rule in @freq_rules
|
1944
|
+
f_rule.each do |fkey,fval|
|
1945
|
+
#VERIFY FIELD by FIELD if present and value match
|
1946
|
+
no_match = true
|
1947
|
+
if not event.get(fkey.to_s).nil? and fval.include?(event.get(fkey.to_s))
|
1948
|
+
no_match = false
|
1949
|
+
end
|
1950
|
+
break if no_match
|
1951
|
+
end
|
1952
|
+
# if rule no match then next
|
1953
|
+
next if no_match
|
1954
|
+
# if rule match increment count
|
1955
|
+
if @db_freq[f_rule['id']]
|
1956
|
+
#incrimente count
|
1957
|
+
@db_freq[f_rule['id']]['count_cour'] = @db_freq[f_rule['id']]['count_cour'] + 1
|
1958
|
+
#check if time to calculate varia & freq
|
1959
|
+
#check if first time to check
|
1960
|
+
if ( @db_freq[f_rule['id']]['num_time'] == 0 and @db_freq[f_rule['id']]['status_acces'] == true ) or ( @db_freq[f_rule['id']]['reset_time'] <= tnow and @db_freq[f_rule['id']]['status_acces'] == true )
|
1961
|
+
#first time
|
1962
|
+
@db_freq[f_rule['id']]['status_acces'] = false
|
1963
|
+
#init old_date & refresh date
|
1964
|
+
@db_freq[f_rule['id']]['reset_time'] = tnow + f_rule['reset_time']
|
1965
|
+
@db_freq[f_rule['id']]['old_date'] = tnow
|
1966
|
+
@db_freq[f_rule['id']]['refresh_date'] = tnow + f_rule['refresh_time']
|
1967
|
+
@db_freq[f_rule['id']]['count_prev'] = @db_freq[f_rule['id']]['count_cour']
|
1968
|
+
@db_freq[f_rule['id']]['status_acces']['v_max']=0
|
1969
|
+
@db_freq[f_rule['id']]['status_acces']['varia_min']=10000
|
1970
|
+
@db_freq[f_rule['id']]['status_acces']['varia_max']=0
|
1971
|
+
@db_freq[f_rule['id']]['v_prev'] = 1
|
1972
|
+
@db_freq[f_rule['id']]['varia_glob'] = 0
|
1973
|
+
@db_freq[f_rule['id']]['num_time'] = 1
|
1974
|
+
@db_freq[f_rule['id']]['status_acces'] = true
|
1975
|
+
elsif @db_freq[f_rule['id']]['num_time'] != 0
|
1976
|
+
if @db_freq[f_rule['id']]['refresh_date'] <= tnow and @db_freq[f_rule['id']]['status_acces'] == true
|
1977
|
+
@db_freq[f_rule['id']]['status_acces'] = false
|
1978
|
+
#time to re-calculate
|
1979
|
+
# put all in same unit => 60s
|
1980
|
+
#calculate diff between ald date and new date
|
1981
|
+
diff_time = tnow - @db_freq[f_rule['id']]['old_date'] #in seconds
|
1982
|
+
#reinit old_date & refresh date
|
1983
|
+
@db_freq[f_rule['id']]['old_date'] = tnow
|
1984
|
+
@db_freq[f_rule['id']]['refresh_date'] = tnow + f_rule['refresh_time']
|
1985
|
+
#calculate diff between previous count and count courant
|
1986
|
+
count_diff = @db_freq[f_rule['id']]['count_cour'] - @db_freq[f_rule['id']]['count_prev']
|
1987
|
+
#reinit count_previous
|
1988
|
+
@db_freq[f_rule['id']]['count_prev'] = @db_freq[f_rule['id']]['count_cour']
|
1989
|
+
#calculate v
|
1990
|
+
v_cour = (((count_diff / diff_time)*60)+0.5).to_i # vcour/60s to interger
|
1991
|
+
#check v_max
|
1992
|
+
if @db_freq[f_rule['id']]['v_max'] < v_cour
|
1993
|
+
@db_freq[f_rule['id']]['v_max'] = v_cour
|
1994
|
+
#CREATE ALERT
|
1995
|
+
end
|
1996
|
+
#cacl varia
|
1997
|
+
varia_cour = v_cour - db_freq[f_rule['id']]['v_prev']
|
1998
|
+
#reinit v_prev
|
1999
|
+
db_freq[f_rule['id']]['v_prev'] = v_cour
|
2000
|
+
#incriment varia_glob
|
2001
|
+
db_freq[f_rule['id']]['varia_glob'] = db_freq[f_rule['id']]['varia_glob'] + varia_cour.abs
|
2002
|
+
#check varia_max & varia_min
|
2003
|
+
if @db_freq[f_rule['id']]['varia_max'] < varia_cour
|
2004
|
+
#CREATE ALERT
|
2005
|
+
if f_rule['wait_after_reset'] < @db_freq[f_rule['id']]['num_time']
|
2006
|
+
new_event = LogStash::Event.new
|
2007
|
+
new_event.set("message", "ALERT FREQ -- rule id:" + f_rule['id'].to_s + " -- count " + v_cour.to_s + "events for 60s -- VARIA : " + varia_cour.to_s + "(varia courant) -- The value change old varia max: " + @db_freq[f_rule['id']]['varia_max'])
|
2008
|
+
new_event.set("type", "alert_freq")
|
2009
|
+
new_event.set("ruleid", f_rule['id'].to_s)
|
2010
|
+
new_event.set("time", tnow.to_s)
|
2011
|
+
end
|
2012
|
+
@db_freq[f_rule['id']]['varia_max'] = varia_cour
|
2013
|
+
end
|
2014
|
+
if @db_freq[f_rule['id']]['varia_min'] > varia_cour
|
2015
|
+
#CREATE ALERT
|
2016
|
+
if f_rule['wait_after_reset'] < @db_freq[f_rule['id']]['num_time']
|
2017
|
+
new_event = LogStash::Event.new
|
2018
|
+
new_event.set("message", "ALERT FREQ -- rule id:" + f_rule['id'].to_s + " -- count " + v_cour.to_s + "events for 60s -- VARIA : " + varia_cour.to_s + "(varia courant) -- The value change old varia min: " + @db_freq[f_rule['id']]['varia_min'])
|
2019
|
+
new_event.set("type", "alert_freq")
|
2020
|
+
new_event.set("ruleid", f_rule['id'].to_s)
|
2021
|
+
new_event.set("time", tnow.to_s)
|
2022
|
+
end
|
2023
|
+
db_freq[f_rule['id']]['varia_min'] = varia_cour
|
2024
|
+
end
|
2025
|
+
#calculate varia_avg
|
2026
|
+
@db_freq[f_rule['id']]['varia_avg'] = db_freq[f_rule['id']]['varia_glob'] / @db_freq[f_rule['id']]['num_time']
|
2027
|
+
#incremente num time of calculate
|
2028
|
+
@db_freq[f_rule['id']]['num_time'] = @db_freq[f_rule['id']]['num_time'] + 1
|
2029
|
+
#check if varia is more than v_cour
|
2030
|
+
if varia_cour > @db_freq[f_rule['id']]['varia_avg']
|
2031
|
+
#CREATE ALERT
|
2032
|
+
if f_rule['wait_after_reset'] < @db_freq[f_rule['id']]['num_time']
|
2033
|
+
new_event = LogStash::Event.new
|
2034
|
+
new_event.set("message", "ALERT FREQ -- rule id:" + f_rule['id'].to_s + " -- count " + v_cour.to_s + "events for 60s -- VARIA morest : " + varia_cour.to_s + "(varia courant) > " + @db_freq[f_rule['id']]['varia_avg'].to_s + "(varia global)")
|
2035
|
+
new_event.set("type", "alert_freq")
|
2036
|
+
new_event.set("ruleid", f_rule['id'].to_s)
|
2037
|
+
new_event.set("time", tnow.to_s)
|
2038
|
+
end
|
2039
|
+
end
|
2040
|
+
@db_freq[f_rule['id']]['status_acces'] = true
|
2041
|
+
end
|
2042
|
+
end
|
2043
|
+
end
|
2044
|
+
end
|
2045
|
+
end
|
2046
|
+
##### NOT CREATE ALERTE ON EVENT BECAUSE EVENT MAYBE NOT ORIGIN FREQ INCREASE ###
|
2047
|
+
######################
|
2048
|
+
|
2049
|
+
filter_matched(event)
|
2050
|
+
end
|
2051
|
+
########## LOAD/REFRESH/SAVE CONF & DB ################
|
2052
|
+
private
|
2053
|
+
# Load (or hot-reload) the frequency-analysis rules from @conf_freq.
# The file is a JSON document of the form {"rules": [{"id": ..., ...}, ...]}.
# Reloading is skipped when the file's SHA256 digest matches the last one
# seen (@hash_conf_freq). For every rule id not yet present in @db_freq a
# fresh counter slot is initialised (num_time/count_cour/reset_time = 0,
# status_acces = true); existing slots keep their state across reloads.
# Exits the process when the configuration file is missing.
def load_rules_freq
  # BUGFIX: File.exists? was removed in Ruby 3.2; use File.exist?.
  if !File.exist?(@conf_freq)
    @logger.warn("DB file read failure, stop loading", :path => @conf_freq)
    exit -1
  end
  tmp_hash = Digest::SHA256.hexdigest File.read @conf_freq
  if not tmp_hash == @hash_conf_freq
    @hash_conf_freq = tmp_hash
    begin
      tmp_db = JSON.parse( IO.read(@conf_freq, encoding:'utf-8') )
      unless tmp_db.nil?
        if tmp_db['rules'].is_a?(Array)
          @freq_rules = tmp_db['rules']
          # create one counter slot per rule id (keep existing state on reload)
          for rulex in @freq_rules
            if @db_freq[rulex['id']].nil?
              @db_freq[rulex['id']] = {}
              @db_freq[rulex['id']]['num_time'] = 0
              @db_freq[rulex['id']]['count_cour'] = 0
              @db_freq[rulex['id']]['reset_time'] = 0
              @db_freq[rulex['id']]['status_acces'] = true
            end
          end
        end
      end
      # BUGFIX: message previously said "REFERENCES conf rules" (copy-paste
      # from load_db_ref); this method loads the FREQ rules.
      @logger.info("loading/refreshing FREQ conf rules")
    rescue
      @logger.error("JSON CONF SIG -- FREQ RULES-- PARSE ERROR")
    end
  end
end
|
2084
|
+
# Load (or hot-reload) the pattern database from @db_pattern.
# Each line has the form "name=>>pattern"; pairs are stored in @pattern_db.
# The file's SHA256 digest (@hash_dbpattern) avoids re-parsing an unchanged
# file. Exits the process when the file is missing.
def load_db_pattern
  # BUGFIX: File.exists? was removed in Ruby 3.2; use File.exist?.
  if !File.exist?(@db_pattern)
    @logger.warn("DB file read failure, stop loading", :path => @db_pattern)
    exit -1
  end
  tmp_hash = Digest::SHA256.hexdigest File.read @db_pattern
  if not tmp_hash == @hash_dbpattern
    @hash_dbpattern = tmp_hash
    File.readlines(@db_pattern).each do |line|
      elem1, elem2 = line.split(/=>>/)
      # BUGFIX: a line without the "=>>" separator left elem2 nil and raised
      # NoMethodError on delete!; such malformed lines are now skipped.
      next if elem2.nil?
      elem2.delete!("\n")
      @pattern_db[elem1] = elem2
    end
  end
end
|
2099
|
+
# Load (or hot-reload) the anomaly REFERENCES database (@db_ref -> @ref_db)
# and its rule configuration (@conf_ref -> @ref_rules). Each file is only
# re-parsed when its SHA256 digest differs from the one previously seen
# (@hash_dbref / @hash_conf_ref). Aborts the process when either file is
# missing; a parse error keeps the previously loaded content.
def load_db_ref
  # --- reference database ---
  unless File.exists?(@db_ref)
    @logger.warn("DB file read failure, stop loading", :path => @db_ref)
    exit -1
  end
  digest = Digest::SHA256.hexdigest File.read @db_ref
  unless digest == @hash_dbref
    @hash_dbref = digest
    begin
      parsed = JSON.parse( IO.read(@db_ref, encoding:'utf-8') )
      @ref_db = parsed unless parsed.nil?
      @logger.info("loading/refreshing REFERENCES DB")
    rescue
      # parse failure: keep the previous DB content, stay silent
    end
  end
  # --- rule configuration ---
  unless File.exists?(@conf_ref)
    @logger.warn("DB file read failure, stop loading", :path => @conf_ref)
    exit -1
  end
  digest = Digest::SHA256.hexdigest File.read @conf_ref
  unless digest == @hash_conf_ref
    @hash_conf_ref = digest
    begin
      parsed = JSON.parse( IO.read(@conf_ref, encoding:'utf-8') )
      if !parsed.nil? && parsed['rules'].is_a?(Array)
        @ref_rules = parsed['rules']
      end
      @logger.info("loading/refreshing REFERENCES conf rules")
    rescue
      @logger.error("JSON CONF SIG -- DB REF -- PARSE ERROR")
    end
  end
end
|
2139
|
+
# Load (or hot-reload) the BL (black-list) reputation databases and their
# configuration.
# Every file in @file_bl is a list of IPs/CIDRs, one per line; lines that
# parse as IPAddr are collected into @bl_db[basename(file)]. @conf_bl is a
# JSON document mapping event fields to the db files to check:
#   {fieldx: {dbs: [file_name, ...], category: ..., note: X, id: X}}
# SHA256 digests (@hash_dbbl per file, @hash_conf_bl for the conf) avoid
# re-parsing unchanged files. Aborts the process on a missing file or when
# the conf references an unknown db file name.
def load_conf_bl
  #load file
  for f in @file_bl
    # BUGFIX: File.exists? was removed in Ruby 3.2; use File.exist?.
    if !File.exist?(f)
      @logger.warn("DB file read failure, stop loading", :path => f)
      exit -1
    end
    tmp_hash = Digest::SHA256.hexdigest File.read f
    # BUGFIX: the reload branch used to compare against @hash_dbioc[f]
    # (copy-paste from load_db_ioc), so an unchanged file was re-parsed on
    # every refresh cycle; compare against @hash_dbbl[f] instead. The
    # first-load and reload branches were identical apart from the initial
    # [] assignment and are unified here.
    if @hash_dbbl[f].nil? or tmp_hash != @hash_dbbl[f]
      @bl_db[File.basename(f)] = [] if @bl_db[File.basename(f)].nil?
      @bl_db[File.basename(f)].clear
      @hash_dbbl[f] = tmp_hash
      File.readlines(f).each do |line|
        line = line.strip
        ip = ""
        # keep only lines that parse as an IP address / network
        @bl_db[File.basename(f)].push(ip) if ip = IPAddr.new(line) rescue false
      end
    end
    @logger.info("loading/refreshing DB BL REPUTATION file(s): #{File.basename(f)}")
  end
  #load conf
  if !File.exist?(@conf_bl)
    @logger.warn("DB file read failure, stop loading", :path => @conf_bl)
    exit -1
  end
  tmp_hash = Digest::SHA256.hexdigest File.read @conf_bl
  if not tmp_hash == @hash_conf_bl
    @hash_conf_bl = tmp_hash
    begin
      tmp_db = JSON.parse( IO.read(@conf_bl, encoding:'utf-8') )
      unless tmp_db.nil?
        #{fieldx: {dbs: [file_name,...], catergory: , note: X, id: X}}
        # every referenced db file name must have been loaded above
        tmp_db.each do |fkey, fval|
          if fval['dbs'].is_a?(Array)
            for fn in fval['dbs']
              if @bl_db[fn].nil?
                @logger.error("You use a file name not exist in conf BL REPUTATION!!!")
                exit -1
              end
            end
          else
            @logger.error("DBS field not exist in JSON conf BL REPUTATION!!")
            exit -1
          end
        end
        @bl_rules = tmp_db
      end
      @logger.info("loading/refreshing Conf BL REPUTATION #{@bl_db}")
    rescue
      @logger.error("JSON CONF SIG -- CONF BL -- PARSE ERROR")
    end
  end
end
|
2204
|
+
|
2205
|
+
# Load (or hot-reload) the signature rules (@conf_rules_sig -> @sig_db).
# After parsing, rules containing a field with 'type' == 2 are moved to the
# END of @sig_db['rules'] (in-place reorder), then two parallel arrays are
# built, one entry per rule:
#   @sig_db_array       - field keys WITHOUT a 'false' entry (must match)
#   @sig_db_array_false - field keys WITH a 'false' entry (must not match)
# Re-parses only when the file's SHA256 digest changed; aborts the process
# when the file is missing; a parse error keeps the previous rules.
# NOTE(review): @sig_db_array_len is (re)assigned on every loop iteration;
# its final value is the last rule index — presumably intentional, confirm
# against the callers of @sig_db_array_len.
def load_conf_rules_sig
  if !File.exists?(@conf_rules_sig)
    @logger.warn("DB file read failure, stop loading", :path => @conf_rules_sig)
    exit -1
  end
  tmp_hash = Digest::SHA256.hexdigest File.read @conf_rules_sig
  if not tmp_hash == @hash_conf_rules_sig
    @hash_conf_rules_sig = tmp_hash
    begin
      @sig_db = JSON.parse( IO.read(@conf_rules_sig, encoding:'utf-8') )
      @sig_db_array.clear
      @sig_db_array_false.clear
      keyF = Array.new
      keyT = Array.new
      #order sig_db by type (1 or 2)
      if @sig_db['rules'].is_a?(Array)
        tmp = *@sig_db['rules']
        # j counts rules already moved to the tail, so i-j is the current
        # position in the shrinking head of +tmp+ of the rule that was
        # originally at index i
        j=0
        (0..@sig_db['rules'].length-1).each do |i|
          @sig_db['rules'][i].each do |nkey,nval|
            if nval['type'].is_a?(Numeric)
              if nval['type'] == 2
                #puts 'find at'+i.to_s+' -> '+j.to_s+' -- '+tmp[i-j].to_s
                tmp=tmp.insert(-1,tmp.delete_at(i-j))
                j=j+1
                break
              end
            end
          end
        end
        #create Field True & false
        @sig_db['rules'] = *tmp
        for rule in tmp
          keyF.clear
          keyT.clear
          rule.each do |nkey,nval|
            # a field whose value hash carries a 'false' key is a negative
            # (must-not-match) condition
            if nval.has_key?('false')
              keyF.push(nkey)
            else
              keyT.push(nkey)
            end
          end
          @sig_db_array.push([*keyT])
          @sig_db_array_false.push([*keyF])
          @sig_db_array_len=@sig_db_array.length-1
        end
        keyF.clear
        keyT.clear
      end
      @logger.info("loading/refreshing SIG conf rules")
    rescue
      @logger.error("JSON CONF SIG -- SIG RULES -- PARSE ERROR")
    end
  end
end
|
2260
|
+
# Load (or hot-reload) the NOTE/score rules (@conf_rules_note -> @note_db).
# Re-parses the JSON file only when its SHA256 digest changed
# (@hash_conf_rules_note); aborts the process when the file is missing; a
# parse error keeps the previously loaded rules.
def load_conf_rules_note
  unless File.exists?(@conf_rules_note)
    @logger.warn("DB file read failure, stop loading", :path => @conf_rules_note)
    exit -1
  end
  digest = Digest::SHA256.hexdigest File.read @conf_rules_note
  return if digest == @hash_conf_rules_note
  @hash_conf_rules_note = digest
  begin
    parsed = JSON.parse( IO.read(@conf_rules_note, encoding:'utf-8') )
    if !parsed.nil? && parsed['rules'].is_a?(Array)
      @note_db = parsed['rules']
    end
    @logger.info("loading/refreshing NOTE conf rules")
  rescue
    @logger.error("JSON CONF SIG -- NOTE/SCORE RULES -- PARSE ERROR")
  end
end
|
2283
|
+
# Load (or hot-reload) the IOC matching configuration (@conf_ioc ->
# @ioc_rules). Re-parses only when the file's SHA256 digest changed
# (@hash_conf_ioc); aborts the process when the file is missing; a parse
# error keeps the previously loaded rules.
def load_conf_ioc
  unless File.exists?(@conf_ioc)
    @logger.warn("DB file read failure, stop loading", :path => @conf_ioc)
    exit -1
  end
  digest = Digest::SHA256.hexdigest File.read @conf_ioc
  return if digest == @hash_conf_ioc
  @hash_conf_ioc = digest
  begin
    @ioc_rules = JSON.parse( IO.read(@conf_ioc, encoding:'utf-8') )
    @logger.info("loading/refreshing IOC conf rules")
  rescue
    @logger.error("JSON CONF SIG -- IOC DB -- PARSE ERROR")
  end
end
|
2300
|
+
# Load (or hot-reload) the IOC databases listed in @db_ioc.
# If ANY file's SHA256 digest changed (or is seen for the first time,
# tracked in @hash_dbioc), the whole @ioc_db is rebuilt by merging every
# file in sorted order: values that are arrays on both sides are unioned,
# otherwise the later file wins. Aborts the process when a file is missing.
def load_db_ioc
  #if one file change reload all file
  change = false
  @db_ioc.sort.each do |path|
    unless File.exists?(path)
      @logger.warn("DB file read failure, stop loading", :path => path)
      exit -1
    end
    digest = Digest::SHA256.hexdigest File.read path
    unless digest == @hash_dbioc[path]
      @hash_dbioc[path] = digest
      change = true
    end
  end
  return unless change
  @ioc_db = {}
  @db_ioc.sort.each do |path|
    begin
      partial = JSON.parse( IO.read(path, encoding:'utf-8') )
      @ioc_db = @ioc_db.merge(partial) {|key, first, second| first.is_a?(Array) && second.is_a?(Array) ? first | second : second }
    rescue
      @logger.error("JSON CONF SIG -- IOC DB -- PARSE ERROR")
    end
  end
  @logger.info("loading/refreshing DB IOC file(s)")
end
|
2334
|
+
# Load (or hot-reload) the "new value" detection configuration (@conf_nv ->
# @nv_rules) and make sure @nv_db has an (initially empty) entry for every
# rule listed under 'rules'. Re-parses only when the file's SHA256 digest
# changed (@hash_conf_nv); aborts the process when the file is missing; a
# parse error keeps the previous configuration.
def load_conf_nv
  unless File.exists?(@conf_nv)
    @logger.warn("DB file read failure, stop loading", :path => @conf_nv)
    exit -1
  end
  digest = Digest::SHA256.hexdigest File.read @conf_nv
  return if digest == @hash_conf_nv
  @hash_conf_nv = digest
  begin
    @nv_rules = JSON.parse( IO.read(@conf_nv, encoding:'utf-8') )
    if @nv_rules['rules']
      @nv_rules['rules'].each do |rule|
        # initialise the seen-values list for rules never seen before
        @nv_db[rule.to_s] ||= []
      end
    end
    @logger.info("refreshing DB NewValue file")
  rescue
    @logger.error("JSON CONF SIG -- CONF NV -- PARSE ERROR")
  end
end
|
2358
|
+
# Persist the "new value" database (@nv_db) to @db_nv as pretty-printed
# JSON. Any serialisation or write failure is logged and swallowed.
def save_db_nv
  File.open(@db_nv, "w+") { |out| out.write(JSON.pretty_generate(@nv_db)) }
rescue
  @logger.error("JSON SAVE SIG -- SAVE NV-- PARSE/WRITE ERROR")
end
|
2367
|
+
# Persist the locally collected IOC database (@ioc_db_local) to
# @file_save_localioc as pretty-printed JSON. Any serialisation or write
# failure is logged and swallowed.
def save_db_ioclocal
  File.open(@file_save_localioc, "w+") { |out| out.write(JSON.pretty_generate(@ioc_db_local)) }
rescue
  @logger.error("JSON SAVE SIG -- SAVE IOC LOCAL -- PARSE/WRITE ERROR")
end
|
2376
|
+
|
2377
|
+
# Load (or hot-reload) the enrichment configuration (@conf_enr -> @db_enr)
# and, for every enrichment entry, the JSON db file it points to
# (eval["file"] -> @db_enr[ekey]["db"]).
# Three digest caches cooperate here:
#   @hash_conf_enr   - SHA256 of the conf file (skip an unchanged conf)
#   @hash_dbfile_enr - SHA256 of each db file on disk (skip unchanged dbs)
#   @hash_db_enr     - SHA1 of the in-memory db content, used by
#                      save_dbs_enr to detect in-memory modifications
# Aborts the process when a file is missing; a parse error is logged and
# keeps the previous state.
def load_db_enr
  if !File.exists?(@conf_enr)
    @logger.warn("DB file read failure, stop loading", :path => @conf_enr)
    exit -1
  end
  tmp_hash = Digest::SHA256.hexdigest File.read @conf_enr
  if not tmp_hash == @hash_conf_enr
    @hash_conf_enr = tmp_hash
    begin
      tmp_db = JSON.parse( IO.read(@conf_enr, encoding:'utf-8') )
      @db_enr = tmp_db
      #open all db file
      @db_enr.each do |ekey,eval|
        ofile=eval["file"]
        if !File.exists?(ofile)
          @logger.warn("DB file read failure, stop loading", :path => ofile)
          exit -1
        end
        tmp_hash = Digest::SHA256.hexdigest File.read ofile
        # only re-read a db file whose on-disk content changed
        if not tmp_hash == @hash_dbfile_enr[ofile]
          @hash_dbfile_enr[ofile] = tmp_hash
          @db_enr[ekey]["db"] = JSON.parse( IO.read(ofile, encoding:'utf-8') )
          # remember the in-memory digest so save_dbs_enr can tell whether
          # the db was modified at runtime
          sha1db=Digest::SHA1.hexdigest @db_enr[ekey]["db"].to_s
          @hash_db_enr[ofile] = sha1db
        end
      end
    rescue
      @logger.error("JSON CONF SIG -- DB ENR -- PARSE ERROR")
    end
  end
end
|
2408
|
+
# Persist every in-memory enrichment db (@db_enr[ekey]["db"]) back to its
# file, but only when its content changed since load (SHA1 of the
# in-memory db differs from @hash_db_enr[file]). After a write, both digest
# caches (@hash_db_enr and @hash_dbfile_enr) are refreshed so load_db_enr
# does not needlessly re-read the file we just wrote.
# Aborts the process when a db file is missing; write/serialisation errors
# are logged and swallowed (note: the digest caches are still updated in
# that case).
def save_dbs_enr
  #load db conf fp
  @db_enr.each do |ekey,eval|
    ofile=eval["file"]
    if !File.exists?(ofile)
      @logger.warn("DB file read failure, stop loading", :path => ofile)
      exit -1
    end
    tmp_sha1db = Digest::SHA1.hexdigest @db_enr[ekey]["db"].to_s
    if not tmp_sha1db == @hash_db_enr[ofile]
      File.open(ofile,"w+") do |f|
        begin
          f.write(JSON.pretty_generate(@db_enr[ekey]["db"]))
        rescue
          @logger.error("JSON SAVE SIG -- SAVE ENR -- PARSE/WRITE ERROR")
        end
      end
      @hash_db_enr[ofile] = tmp_sha1db
      tmp_hash = Digest::SHA256.hexdigest File.read ofile
      @hash_dbfile_enr[ofile] = tmp_hash
    end
  end
# disabled legacy code kept verbatim (merged conf write-back, marked
# "TODO verify if it works" by the original author)
=begin
tmp_hash = Digest::SHA256.hexdigest File.read @conf_enr
if not tmp_hash == @hash_conf_enr
@hash_conf_enr = tmp_hash
db_enr_tmp = JSON.parse( IO.read(@conf_enr, encoding:'utf-8') )
#TODO verify if it works
@db_enr=@db_enr.deep_merge(db_enr_tmp)
File.open(@conf_enr,"w+") do |f|
f.write(JSON.pretty_generate(@db_enr))
end
tmp_hash = Digest::SHA256.hexdigest File.read @conf_enr
@hash_conf_enr = tmp_hash
else
File.open(@conf_enr,"w+") do |f|
f.write(JSON.pretty_generate(@db_enr))
end
tmp_hash = Digest::SHA256.hexdigest File.read @conf_enr
@hash_conf_enr = tmp_hash
end
=end
end
|
2451
|
+
|
2452
|
+
# Load (or hot-reload) the false-positive filtering rules (@conf_fp ->
# @fp_rules). Re-parses only when the file's SHA256 digest changed
# (@hash_conf_fp); aborts the process when the file is missing; a parse
# error keeps the previously loaded rules.
def load_conf_fp
  #load db conf fp
  unless File.exists?(@conf_fp)
    @logger.warn("DB file read failure, stop loading", :path => @conf_fp)
    exit -1
  end
  digest = Digest::SHA256.hexdigest File.read @conf_fp
  return if digest == @hash_conf_fp
  @hash_conf_fp = digest
  begin
    @fp_rules = JSON.parse( IO.read(@conf_fp, encoding:'utf-8') )
  rescue
    @logger.error("JSON CONF SIG -- CONF FP -- PARSE ERROR")
  end
end
|
2469
|
+
# Load (or hot-reload) the false-positive fingerprint drop database
# (@db_dropfp -> @fp_db). Re-parses only when the file's SHA256 digest
# changed (@hash_dropfp); aborts the process when the file is missing; a
# parse error keeps the previously loaded database.
def load_db_dropfp
  #load fp
  unless File.exists?(@db_dropfp)
    @logger.warn("DB file read failure, stop loading", :path => @db_dropfp)
    exit -1
  end
  digest = Digest::SHA256.hexdigest File.read @db_dropfp
  return if digest == @hash_dropfp
  @hash_dropfp = digest
  begin
    @fp_db = JSON.parse( IO.read(@db_dropfp, encoding:'utf-8') )
  rescue
    @logger.error("JSON CONF SIG -- DROPFP -- PARSE ERROR")
  end
end
|
2486
|
+
# Load (or hot-reload) the first-drop database (@db_drop -> @drop_db).
# Re-parses only when the file's SHA256 digest changed (@hash_dropdb);
# aborts the process when the file is missing; a parse error keeps the
# previously loaded database.
def load_db_drop
  #load drop db
  unless File.exists?(@db_drop)
    @logger.warn("DB file read failure, stop loading", :path => @db_drop)
    exit -1
  end
  digest = Digest::SHA256.hexdigest File.read @db_drop
  return if digest == @hash_dropdb
  @hash_dropdb = digest
  begin
    @drop_db = JSON.parse( IO.read(@db_drop, encoding:'utf-8') )
  rescue
    @logger.error("JSON CONF SIG -- DROPDB -- PARSE ERROR")
  end
end
|
2503
|
+
#clean db special
|
2504
|
+
# Remove expired entries from the signature-frequency database
# (@sig_db_freq). An entry is expired when its 'delay' timestamp is older
# than +date+.
#
# date - the expiry threshold; entries whose entry['delay'] < date are
#        removed.
def clean_db_sigfreq(date)
  # BUGFIX: the original read nval[delay] — `delay` is an undefined local
  # variable, so the first expired check raised NameError — and it deleted
  # keys from the hash while iterating it with #each. Use the 'delay'
  # string key and Hash#delete_if, which supports deletion during
  # traversal.
  @sig_db_freq.delete_if { |_id, entry| entry['delay'] < date }
end
|
2511
|
+
end
|