logstash-input-akamai-siem 1.0.0

lib/logstash/inputs/akamai_siem.rb ADDED
@@ -0,0 +1,410 @@
+ # encoding: utf-8
+ require "logstash/inputs/base"
+ require "logstash/namespace"
+ require "logstash/json"
+ require "logstash/event"
+ require "socket" # for Socket.gethostname
+ require "base64" # for Base64.decode64 in #decode
+ require "addressable/template"
+ require "manticore"
+ require "logstash/plugin_mixins/http_client"
+ require "logstash/plugin_mixins/ecs_compatibility_support"
+ require 'logstash/plugin_mixins/ecs_compatibility_support/target_check'
+ require 'logstash/plugin_mixins/validator_support/field_reference_validation_adapter'
+ require 'logstash/plugin_mixins/event_support/event_factory_adapter'
+ require 'logstash/plugin_mixins/event_support/from_json_helper'
+ require 'logstash/plugin_mixins/scheduler'
+
+ class LogStash::Inputs::AkamaiSiem < LogStash::Inputs::Base
+   require 'logstash/inputs/akamai_siem/headers'
+   require 'logstash/inputs/akamai_siem/middleware_registry'
+   require 'logstash/inputs/akamai_siem/exception'
+   require 'logstash/inputs/akamai_siem/request'
+   require 'logstash/inputs/akamai_siem/base'
+
+   include LogStash::Inputs::AkamaiSiem::Base[with_deprecated: true, with_akamai_siem: true]
+   include LogStash::PluginMixins::ECSCompatibilitySupport(:disabled, :v1, v8: :v1)
+   include LogStash::PluginMixins::ECSCompatibilitySupport::TargetCheck
+   include LogStash::PluginMixins::EventSupport::EventFactoryAdapter
+
+   extend LogStash::PluginMixins::ValidatorSupport::FieldReferenceValidationAdapter
+
+   include LogStash::PluginMixins::EventSupport::FromJsonHelper
+
+   include LogStash::PluginMixins::Scheduler
+
+   config_name "akamai_siem"
+
+   QUERY_PARAMETERS = [
+     %w[offset],
+     %w[offset limit],
+     %w[from],
+     %w[from limit],
+     %w[from to],
+     %w[from to limit],
+   ]
+   # Schedule of when to periodically poll the URL
+   # Format: a hash with
+   #   + key: "cron" | "every" | "in" | "at"
+   #   + value: string
+   # Examples:
+   #   a) { "every" => "1h" }
+   #   b) { "cron" => "* * * * * UTC" }
+   # See rufus/scheduler for details about the different schedule options and value string formats
+   config :schedule, validate: :hash, default: { "every" => "5s" }
+
+   # Define the target field for placing the received data. If this setting is omitted, the data will be stored at the root (top level) of the event.
+   config :target, validate: :field_reference
+
+   ###### settings for akamai siem ######
+   # Security configuration IDs to pull events for.
+   # Example:
+   #   { "configs_ids" => ['123456','32164','4567'] }
+   config :configs_ids, validate: :array, required: true
+
+   # Mode used to fetch security events from your security configurations.
+   # https://techdocs.akamai.com/siem-integration/reference/query-parameter-combinations
+   # Examples:
+   #   a) { "query_mode" => ['offset'] }
+   #   b) { "query_mode" => ['offset', 'limit'] }
+   #   c) { "query_mode" => ['from', 'limit'] }
+   config :query_mode, validate: :array, default: ['offset']
+
+   # If you'd like to work with the request/response metadata, set this value
+   # to the name of the field in which you'd like to store a nested hash of
+   # metadata.
+   config :metadata_target, validate: :string, default: '@metadata'
+
+   config :stub, validate: :boolean, default: false
+   config :stub_response, validate: :string
+
+   attr_reader :template
+   public
+
+   def register
+     @host = Socket.gethostname.force_encoding(Encoding::UTF_8)
+     @offset = nil
+     @from = nil
+     @to = nil
+     @limit = nil
+
+     @template = Addressable::Template.new(File.join(base_url, "siem/v1/configs", "{configs_ids}", "{?query*}"))
+
+     setup_ecs_field!
+     query_validation!
+     LogStash::Logging::Logger::configure_logging('debug')
+   end
+
+   # @overload
+   def stop
+     close_client
+   end
+
+   # @overload
+   def close
+     close_client
+   end
+
+   def close_client
+     @logger.debug("closing http client", client: client)
+     begin
+       client.close # since Manticore 0.9.0 this shuts-down/closes all resources
+     rescue => e
+       details = { exception: e.class, message: e.message }
+       details[:backtrace] = e.backtrace if @logger.debug?
+       @logger.warn "failed closing http client", details
+     end
+   end
+   private :close_client
+
+   private
+
+   def query_validation!
+     query_validation = false
+     QUERY_PARAMETERS.each do |query|
+       query_validation = true if (query - query_mode).empty?
+     end
+     raise LogStash::ConfigurationError, "query mode (#{query_mode.join(',')}) is not valid, see https://techdocs.akamai.com/siem-integration/reference/query-parameter-combinations" unless query_validation
+   end
+
+   private
+
+   # In the context of ECS, there are two types of events in this plugin: a valid HTTP response and a failure.
+   # For a valid HTTP response, `url`, `request_method` and `host` are metadata of the request.
+   # The call may retrieve events that already contain `[url]`, `[http][request][method]` and `[host][hostname]` data,
+   # so metadata must not be written to those fields.
+   # For a failure, `url`, `request_method` and `host` are primary data of the event because the plugin owns this event,
+   # so it writes to url.*, http.* and host.*
+   def setup_ecs_field!
+     @request_host_field = ecs_select[disabled: "[#{metadata_target}][host]", v1: "[#{metadata_target}][input][akamai_siem][request][host][hostname]"]
+     @response_code_field = ecs_select[disabled: "[#{metadata_target}][code]", v1: "[#{metadata_target}][input][akamai_siem][response][status_code]"]
+     @response_headers_field = ecs_select[disabled: "[#{metadata_target}][response_headers]", v1: "[#{metadata_target}][input][akamai_siem][response][headers]"]
+     @response_message_field = ecs_select[disabled: "[#{metadata_target}][response_message]", v1: "[#{metadata_target}][input][akamai_siem][response][status_message]"]
+     @response_time_s_field = ecs_select[disabled: "[#{metadata_target}][runtime_seconds]", v1: nil]
+     @response_time_ns_field = ecs_select[disabled: nil, v1: "[#{metadata_target}][input][akamai_siem][response][elapsed_time_ns]"]
+     @request_retry_count_field = ecs_select[disabled: "[#{metadata_target}][times_retried]", v1: "[#{metadata_target}][input][akamai_siem][request][retry_count]"]
+     @original_request_field = ecs_select[disabled: "[#{metadata_target}][request]", v1: "[#{metadata_target}][input][akamai_siem][request][original]"]
+
+     @error_msg_field = ecs_select[disabled: "[http_request_failure][error]", v1: "[error][message]"]
+     @stack_trace_field = ecs_select[disabled: "[http_request_failure][backtrace]", v1: "[error][stack_trace]"]
+     @fail_original_request_field = ecs_select[disabled: "[http_request_failure][request]", v1: nil]
+     @fail_response_time_s_field = ecs_select[disabled: "[http_request_failure][runtime_seconds]", v1: nil]
+     @fail_response_time_ns_field = ecs_select[disabled: nil, v1: "[event][duration]"]
+     @fail_request_url_field = ecs_select[disabled: nil, v1: "[url][full]"]
+     @fail_request_method_field = ecs_select[disabled: nil, v1: "[http][request][method]"]
+     @fail_request_host_field = ecs_select[disabled: nil, v1: "[host][hostname]"]
+   end
+
+   public
+
+   def run(queue)
+     setup_schedule(queue)
+   end
+
+   def setup_schedule(queue)
+     # schedule hash must contain exactly one of the allowed keys
+     msg_invalid_schedule = "Invalid config. schedule hash must contain " +
+       "exactly one of the following keys - cron, at, every or in"
+     raise LogStash::ConfigurationError, msg_invalid_schedule if @schedule.keys.length != 1
+     schedule_type = @schedule.keys.first
+     schedule_value = @schedule[schedule_type]
+     raise LogStash::ConfigurationError, msg_invalid_schedule unless %w(cron every at in).include?(schedule_type)
+
+     opts = schedule_type == "every" ? { first_in: 0.01 } : {}
+     scheduler.public_send(schedule_type, schedule_value, opts) { run_once(queue) }
+     scheduler.join
+   end
+
+   def run_once(queue)
+     url = template.expand({
+       "configs_ids" => configs_ids.join(';'),
+       "query" => normalize_query,
+     })
+
+     timestamp = eg_timestamp
+     nonce = new_nonce
+     headers.update({
+       "accept" => "application/json",
+     })
+     options = {}
+     method = 'get'
+
+     request = build_request(method) do |req|
+       req.update_uri(url) if url
+       req.headers.update(headers) if headers
+       yield(req) if block_given?
+     end
+
+     request[KEY] = make_auth_header(request, timestamp, nonce)
+
+     request_async(queue, request)
+     client.execute! unless stop?
+   end
+
+   private
+   def build_request(method)
+     Request.create(method) do |req|
+       req.headers = headers.dup
+       yield(req) if block_given?
+     end
+   end
+
+   def normalize_query
+     query = Hash.new
+     query_mode.each do |query_name|
+       value = instance_variable_get("@#{query_name}")
+       query[query_name] = value.nil? ? 'null' : value
+     end
+     query
+   end
+
+   def request_async(queue, request)
+     @logger.debug? && @logger.debug("async queueing fetching url", url: request)
+     started = Time.now
+
+     method, *request_opts = request.to_a
+     client.async.send(method, *request_opts).
+       on_success { |response| handle_success(queue, request, response, Time.now - started) }.
+       on_failure { |exception| handle_failure(queue, request, exception, Time.now - started) }
+   end
+
+   # time diff in float seconds to nanoseconds
+   def to_nanoseconds(time_diff)
+     (time_diff * 1_000_000_000).to_i
+   end
+
+   def handle_success(queue, request, response, execution_time)
+     @logger.debug? && @logger.debug("success fetching url", url: request)
+     code = {
+       '416': ->(queue, request, response, execution_time) { status_416(queue, request, response, execution_time) },
+       '200': ->(queue, request, response, execution_time) { status_200(queue, request, response, execution_time) },
+     }
+     code.default = ->(queue, request, response, execution_time) { status_default(queue, request, response, execution_time) }
+     code[response.code.to_s.to_sym].call(queue, request, response, execution_time)
+   end
+
+   def status_200(queue, request, response, execution_time)
+     body = response.body
+     if body && body.size > 0
+       events = body.split("\n")
+       offset_event = events.pop
+       events.each do |data|
+         decode(data) do |decoded|
+           @logger.debug? && @logger.debug("decoded event", event: decoded)
+           event = targeted_event_factory.new_event(decoded)
+           handle_decoded_event(queue, request, response, event, execution_time)
+         end
+       end
+       decode_offset(offset_event) do |decoded|
+         @offset = decoded['offset']
+         @logger.debug? && @logger.debug("decoded offset", offset: @offset)
+       end
+     else
+       event = event_factory.new_event
+       handle_decoded_event(queue, request, response, event, execution_time)
+     end
+   end
+
+   def status_416(queue, request, response, execution_time)
+     @offset = nil
+     exception = build_exception(response.body)
+     handle_failure(queue, request, exception, execution_time)
+   end
+
+   def status_default(queue, request, response, execution_time)
+     exception = build_exception(response.body)
+     handle_failure(queue, request, exception, execution_time)
+   end
+
+   def parse_json(body)
+     ::LogStash::Json.jruby_load(body)
+   end
+
+   def decode_offset(body)
+     yield parse_json(body)
+   end
+
+   def decode(body)
+     event = parse_json(body)
+     if (attack_section = event['attackData'])
+       rules_array = []
+       attack_section.each do |member_name, value|
+         next unless member_name.start_with?('rule')
+         member_as_singular = member_name.gsub(/s$/, '')
+         url_decoded = Addressable::URI.unencode(value)
+         member_array = url_decoded.split(";")
+         if rules_array.empty?
+           rules_array = Array.new(member_array.count) { Hash.new }
+         end
+         if member_array.empty?
+           rules_array.each { |rule| rule[member_as_singular] = '' }
+         else
+           member_array.each_with_index do |member, index|
+             bits = Base64.decode64(member)
+             rules_array[index][member_as_singular] = bits
+           end
+         end
+       end
+       attack_section.delete('ruleMessages')
+       attack_section.delete('ruleSelectors')
+       attack_section.delete('rules')
+       attack_section.delete('ruleActions')
+       attack_section.delete('ruleVersions')
+       attack_section.delete('ruleData')
+       attack_section.delete('ruleTags')
+       attack_section['rules'] = rules_array
+       event['attackData'] = attack_section
+     else
+       event['attackData'] = {}
+     end
+     yield event
+   end
+
+   def handle_decoded_event(queue, request, response, event, execution_time)
+     apply_metadata(event, request, response, execution_time)
+     decorate(event)
+     queue << event
+   rescue StandardError, java.lang.Exception => e
+     @logger.error? && @logger.error("Error eventifying response!",
+       :exception => e,
+       :exception_message => e.message,
+       :url => request,
+       :response => response
+     )
+   end
+
+   def build_exception(body)
+     Exception.create(body) do |exception|
+       yield(exception) if block_given?
+     end
+   end
+
+   # Beware, on old versions of manticore some uncommon failures are not handled
+   def handle_failure(queue, request, exception, execution_time)
+     @logger.debug? && @logger.debug("failed fetching url", url: request)
+     event = event_factory.new_event
+     event.tag("_http_request_failure")
+     apply_metadata(event, request, nil, execution_time)
+     apply_failure_fields(event, request, exception, execution_time)
+
+     queue << event
+   rescue StandardError, java.lang.Exception => e
+     @logger.error? && @logger.error("Cannot read URL or send the error as an event!",
+       :exception => e,
+       :exception_message => e.message,
+       :exception_backtrace => e.backtrace)
+
+     # If we are running in debug mode we can display more information about the
+     # specific request which could give more details about the connection.
+     @logger.debug? && @logger.debug("Cannot read URL or send the error as an event!",
+       :exception => e,
+       :exception_message => e.message,
+       :exception_backtrace => e.backtrace,
+       :url => request)
+   end
+
+   def apply_metadata(event, request, response, execution_time)
+     return unless @metadata_target
+
+     event.set(@request_host_field, @host)
+     event.set(@response_time_s_field, execution_time) if @response_time_s_field
+     event.set(@response_time_ns_field, to_nanoseconds(execution_time)) if @response_time_ns_field
+     event.set(@original_request_field, structure_request(request))
+
+     if response
+       event.set(@response_code_field, response.code)
+       event.set(@response_headers_field, response.headers)
+       event.set(@response_message_field, response.message)
+       event.set(@request_retry_count_field, response.times_retried)
+     end
+   end
+
+   def apply_failure_fields(event, request, exception, execution_time)
+     # This is also in the metadata, but we send it anyway because we want this
+     # persisted by default, whereas metadata isn't. People don't like mysterious errors.
+     event.set(@fail_original_request_field, structure_request(request)) if @fail_original_request_field
+
+     method, url, _ = request
+     event.set(@fail_request_url_field, url) if @fail_request_url_field
+     event.set(@fail_request_method_field, method.to_s) if @fail_request_method_field
+     event.set(@fail_request_host_field, @host) if @fail_request_host_field
+
+     event.set(@fail_response_time_s_field, execution_time) if @fail_response_time_s_field
+     event.set(@fail_response_time_ns_field, to_nanoseconds(execution_time)) if @fail_response_time_ns_field
+     event.set(@error_msg_field, exception.to_s)
+     event.set(@stack_trace_field, exception.backtrace)
+   end
+
+   # Turn [method, url, spec] requests into a hash for friendlier logging / ES indexing
+   def structure_request(request)
+     method, url, spec = request
+     # Flatten everything into the 'spec' hash, also stringify any keys to normalize
+     Hash[(spec || {}).merge({
+       "method" => method.to_s,
+       "url" => url,
+     }).map { |k, v| [k.to_s, v] }]
+   end
+ end
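
For reference, the `decode` method above flattens Akamai's attackData structure: each `rule*` member (`rules`, `ruleMessages`, `ruleTags`, ...) arrives as a URL-encoded, semicolon-separated list of Base64-encoded values, and the plugin regroups them into one hash per triggered rule. Below is a minimal standalone sketch of that transformation on an assumed sample payload; it uses `CGI.unescape` from the standard library in place of `Addressable::URI.unencode`, and the sample rule IDs and messages are illustrative only.

require "base64"
require "cgi"

# Assumed sample: two rules triggered, so each rule* member carries two
# URL-encoded, semicolon-separated, Base64-encoded values.
attack_data = {
  "rules"        => CGI.escape([Base64.strict_encode64("950004"), Base64.strict_encode64("950006")].join(";")),
  "ruleMessages" => CGI.escape([Base64.strict_encode64("Cross-site Scripting"), Base64.strict_encode64("SQL Injection")].join(";")),
}

rules_array = []
attack_data.each do |member_name, value|
  next unless member_name.start_with?("rule")
  member_as_singular = member_name.gsub(/s$/, "")
  member_array = CGI.unescape(value).split(";")
  rules_array = Array.new(member_array.count) { Hash.new } if rules_array.empty?
  member_array.each_with_index do |member, index|
    rules_array[index][member_as_singular] = Base64.decode64(member)
  end
end

p rules_array
# => [{"rule"=>"950004", "ruleMessage"=>"Cross-site Scripting"},
#     {"rule"=>"950006", "ruleMessage"=>"SQL Injection"}]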
logstash-input-akamai-siem.gemspec ADDED
@@ -0,0 +1,36 @@
+ # frozen_string_literal: true
+
+ Gem::Specification.new do |s|
+   s.name = 'logstash-input-akamai-siem'
+   s.version = '1.0.0'
+   s.licenses = ['Apache-2.0']
+   s.summary = "akamai siem input streams logs at a definable interval."
+   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install logstash-input-akamai-siem. This gem is not a stand-alone program"
+   s.authors = ["Hans Moulron"]
+   s.email = 'hans.moulron@francetv.fr'
+   s.homepage = "https://github.com/francetv/logstash-input-akamai-siem"
+   s.require_paths = ["lib"]
+
+   # Files
+   # s.files = Dir['lib/**/*', 'spec/**/*', 'vendor/**/*', '*.gemspec', '*.md', 'CONTRIBUTORS', 'Gemfile', 'LICENSE', 'NOTICE.TXT']
+   s.files = Dir['lib/**/*', 'vendor/**/*', '*.gemspec', '*.md', 'CONTRIBUTORS', 'Gemfile', 'LICENSE', 'NOTICE.TXT']
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }
+
+   # Gem dependencies
+   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+   s.add_runtime_dependency "logstash-mixin-http_client", ">= 7.4.0", "< 8.0.0"
+   s.add_runtime_dependency 'logstash-mixin-scheduler', '~> 1.0'
+   s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~> 1.3'
+   s.add_runtime_dependency 'logstash-mixin-event_support', '~> 1.0', '>= 1.0.1'
+   s.add_runtime_dependency 'logstash-mixin-validator_support', '~> 1.0'
+   s.add_runtime_dependency 'akamai-edgegrid', '~> 1.0', '>= 1.0.7'
+   s.add_runtime_dependency 'addressable', '= 2.7.0'
+
+   s.add_development_dependency 'logstash-devutils'
+   s.add_development_dependency 'flores'
+   s.add_development_dependency 'timecop'
+ end
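
The gemspec pins addressable to 2.7.0; the plugin uses it both for `Addressable::URI.unencode` in `decode` and for the `Addressable::Template` built in `register` and expanded in `run_once`. Below is a rough sketch of that URL construction, with an assumed `base_url` (in the plugin the host comes from the Akamai EdgeGrid credentials handled by the `Base` mixin, which is not part of this diff):

require "addressable/template"

# base_url is assumed here; in the plugin it comes from the Akamai EdgeGrid
# host configured via the Base mixin (not shown in this diff).
base_url = "https://akab-example-host.cloudsecurity.akamaiapis.net"

template = Addressable::Template.new(
  File.join(base_url, "siem/v1/configs", "{configs_ids}", "{?query*}")
)

# First poll in the default 'offset' query mode: @offset is still nil,
# so normalize_query substitutes the literal string 'null'.
url = template.expand(
  "configs_ids" => ["123456", "32164"].join(";"),
  "query"       => { "offset" => "null" }
)

puts url
# e.g. https://akab-example-host.cloudsecurity.akamaiapis.net/siem/v1/configs/123456%3B32164/?offset=null
# (simple {configs_ids} expansion percent-encodes the ';' separator)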
metadata ADDED
@@ -0,0 +1,238 @@
+ --- !ruby/object:Gem::Specification
+ name: logstash-input-akamai-siem
+ version: !ruby/object:Gem::Version
+   version: 1.0.0
+ platform: ruby
+ authors:
+ - Hans Moulron
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2024-10-06 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+   name: logstash-core-plugin-api
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 7.4.0
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: 8.0.0
+   name: logstash-mixin-http_client
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 7.4.0
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: 8.0.0
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   name: logstash-mixin-scheduler
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.3'
+   name: logstash-mixin-ecs_compatibility_support
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.3'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 1.0.1
+   name: logstash-mixin-event_support
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 1.0.1
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   name: logstash-mixin-validator_support
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 1.0.7
+   name: akamai-edgegrid
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 1.0.7
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 2.7.0
+   name: addressable
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 2.7.0
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: logstash-devutils
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: flores
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: timecop
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: This gem is a Logstash plugin required to be installed on top of the
+   Logstash core pipeline using $LS_HOME/bin/logstash-plugin install logstash-input-akamai-siem.
+   This gem is not a stand-alone program
+ email: hans.moulron@francetv.fr
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - CHANGELOG.md
+ - DEVELOPER.md
+ - Gemfile
+ - LICENSE
+ - NOTICE.TXT
+ - README.md
+ - lib/logstash/inputs/akamai_siem.rb
+ - lib/logstash/inputs/akamai_siem/base.rb
+ - lib/logstash/inputs/akamai_siem/edge_grid.rb
+ - lib/logstash/inputs/akamai_siem/exception.rb
+ - lib/logstash/inputs/akamai_siem/headers.rb
+ - lib/logstash/inputs/akamai_siem/middleware_registry.rb
+ - lib/logstash/inputs/akamai_siem/request.rb
+ - logstash-input-akamai-siem.gemspec
+ homepage: https://github.com/francetv/logstash-input-akamai-siem
+ licenses:
+ - Apache-2.0
+ metadata:
+   logstash_plugin: 'true'
+   logstash_group: input
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.3.26
+ signing_key:
+ specification_version: 4
+ summary: akamai siem input streams logs at a definable interval.
+ test_files: []