logstash-logger 0.23.0 → 0.24.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: f7b8e0765b2b048a5ef9dcb2c42d4e31f3805029
4
- data.tar.gz: c6cbcdefe330fe9aa8da5c93c78bab7bb2ee7751
3
+ metadata.gz: 6a53c4f86f44f72b31d055db56d35e6e374cfe30
4
+ data.tar.gz: de52e5fcefe9adf476a5b104d92ee2b6d9f1b1e4
5
5
  SHA512:
6
- metadata.gz: e137e5bae19157b9f716faa987766fb19dfc96791e11a5813ab6b3274599ae21a957a0069fc411d1fe84ef32a0dac82a96b696657ffdb1d587ca31f8199a1f1a
7
- data.tar.gz: 8bb2ce9e6305d789087923d6c3bbf382aedb4e2562a139786d157bbe14c40ab492c58b23e2c7a9ad5bfeefaef911babf0b88c969f0702a90ed7763198139dcbc
6
+ metadata.gz: 861e72a87a6230f386f15b9b14d5df7f303a294f843cc841352693a57fbdc17f0e55e5b8f04ba19ed6a13a72d1737ede43323b3786226cb4621487a70a68b886
7
+ data.tar.gz: 2f1e95420c8e2a5bb69a2a36ab4f8e4f40c4500d834fb16be66c113f5b3645e188434ab04fbded1b805cd110a52754e82b71e6927269abf1a4afbfeac038d451
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
1
+ ## 0.24.0
2
+
3
+ - Adds support for AWS Firehose. [#121](https://github.com/dwbutler/logstash-logger/pull/121)
4
+
1
5
  ## 0.23.0
2
6
 
3
7
  - Adds support for SSL host verification to the TCP device. [#114](https://github.com/dwbutler/logstash-logger/pull/114)
data/README.md CHANGED
@@ -9,7 +9,7 @@ writing to a file or syslog since Logstash can receive the structured data direc
9
9
  ## Features
10
10
 
11
11
  * Can write directly to a logstash listener over a UDP or TCP/SSL connection.
12
- * Can write to a file, Redis, Kafka, Kinesis, a unix socket, syslog, stdout, or stderr.
12
+ * Can write to a file, Redis, Kafka, Kinesis, Firehose, a unix socket, syslog, stdout, or stderr.
13
13
  * Logger can take a string message, a hash, a `LogStash::Event`, an object, or a JSON string as input.
14
14
  * Events are automatically populated with message, timestamp, host, and severity.
15
15
  * Writes in logstash JSON format, but supports other formats as well.
@@ -572,6 +572,30 @@ config.logstash.aws_secret_access_key = 'ASKASKHLD1234123412341234'
572
572
 
573
573
  ```
574
574
 
575
+ #### Firehose
576
+
577
+ Add the aws-sdk gem to your Gemfile:
578
+
579
+ gem 'aws-sdk'
580
+
581
+ ```ruby
582
+ # Required
583
+ config.logstash.type = :firehose
584
+
585
+ # Optional, will default to the 'logstash' delivery stream
586
+ config.logstash.stream = 'my-stream-name'
587
+
588
+ # Optional, will default to 'us-east-1'
589
+ config.logstash.aws_region = 'us-west-2'
590
+
591
+ # Optional, will default to the AWS_ACCESS_KEY_ID environment variable
592
+ config.logstash.aws_access_key_id = 'ASKASKHLD12341'
593
+
594
+ # Optional, will default to the AWS_SECRET_ACCESS_KEY environment variable
595
+ config.logstash.aws_secret_access_key = 'ASKASKHLD1234123412341234'
596
+
597
+ ```
598
+
575
599
  #### File
576
600
 
577
601
  ```ruby
@@ -13,6 +13,7 @@ module LogStashLogger
13
13
  autoload :Redis, 'logstash-logger/device/redis'
14
14
  autoload :Kafka, 'logstash-logger/device/kafka'
15
15
  autoload :Kinesis, 'logstash-logger/device/kinesis'
16
+ autoload :Firehose, 'logstash-logger/device/firehose'
16
17
  autoload :File, 'logstash-logger/device/file'
17
18
  autoload :IO, 'logstash-logger/device/io'
18
19
  autoload :Stdout, 'logstash-logger/device/stdout'
@@ -53,6 +54,7 @@ module LogStashLogger
53
54
  when :redis then Redis
54
55
  when :kafka then Kafka
55
56
  when :kinesis then Kinesis
57
+ when :firehose then Firehose
56
58
  when :io then IO
57
59
  when :stdout then Stdout
58
60
  when :stderr then Stderr
@@ -0,0 +1,91 @@
1
+ require 'aws-sdk'
2
+
3
+ module LogStashLogger
4
+ module Device
5
+ class AwsStream < Connectable
6
+
7
+ DEFAULT_REGION = 'us-east-1'
8
+ DEFAULT_STREAM = 'logstash'
9
+
10
+ @stream_class = nil
11
+ @recoverable_error_codes = []
12
+
13
+ class << self
14
+ attr_accessor :stream_class, :recoverable_error_codes
15
+ end
16
+
17
+ attr_accessor :aws_region, :stream
18
+
19
+ def initialize(opts)
20
+ super
21
+ @access_key_id = opts[:aws_access_key_id] || ENV['AWS_ACCESS_KEY_ID']
22
+ @secret_access_key = opts[:aws_secret_access_key] || ENV['AWS_SECRET_ACCESS_KEY']
23
+ @aws_region = opts[:aws_region] || DEFAULT_REGION
24
+ @stream = opts[:stream] || DEFAULT_STREAM
25
+ end
26
+
27
+ def transform_message(message)
28
+ fail NotImplementedError
29
+ end
30
+
31
+ def put_records(records)
32
+ fail NotImplementedError
33
+ end
34
+
35
+ def is_successful_response(resp)
36
+ fail NotImplementedError
37
+ end
38
+
39
+ def get_response_records(resp)
40
+ fail NotImplementedError
41
+ end
42
+
43
+ def connect
44
+ @io = self.class.stream_class.new(
45
+ region: @aws_region,
46
+ credentials: ::Aws::Credentials.new(@access_key_id, @secret_access_key)
47
+ )
48
+ end
49
+
50
+ def with_connection
51
+ connect unless connected?
52
+ yield
53
+ rescue => e
54
+ log_error(e)
55
+ log_warning("giving up")
56
+ close(flush: false)
57
+ end
58
+
59
+ def write_batch(messages, group = nil)
60
+ records = messages.map{ |m| transform_message(m) }
61
+
62
+ with_connection do
63
+ resp = put_records(records)
64
+
65
+ # Put any failed records back into the buffer
66
+ if !is_successful_response(resp)
67
+ get_response_records(resp).each_with_index do |record, index|
68
+ if self.class.recoverable_error_codes.include?(record.error_code)
69
+ log_warning("Failed to post record using #{self.class.stream_class.name} with error: #{record.error_code} #{record.error_message}")
70
+ log_warning("Retrying")
71
+ write(records[index][:data])
72
+ elsif !record.error_code.nil? && record.error_code != ''
73
+ log_error("Failed to post record using #{self.class.stream_class.name} with error: #{record.error_code} #{record.error_message}")
74
+ end
75
+ end
76
+ end
77
+ end
78
+ end
79
+
80
+ def write_one(message)
81
+ write_batch([message])
82
+ end
83
+
84
+ def close!
85
+ @io = nil
86
+ end
87
+
88
+ end
89
+ end
90
+ end
91
+
@@ -0,0 +1,37 @@
1
+ require 'aws-sdk'
2
+ require 'logstash-logger/device/aws_stream'
3
+
4
+ module LogStashLogger
5
+ module Device
6
+ class Firehose < AwsStream
7
+ @stream_class = ::Aws::Firehose::Client
8
+ @recoverable_error_codes = [
9
+ "ServiceUnavailable",
10
+ "InternalFailure",
11
+ "ServiceUnavailableException"
12
+ ].freeze
13
+
14
+ def transform_message(message)
15
+ {
16
+ data: message
17
+ }
18
+ end
19
+
20
+ def put_records(records)
21
+ @io.put_record_batch({
22
+ records: records,
23
+ delivery_stream_name: @stream
24
+ })
25
+ end
26
+
27
+ def is_successful_response(resp)
28
+ resp.failed_put_count == 0
29
+ end
30
+
31
+ def get_response_records(resp)
32
+ resp.request_responses
33
+ end
34
+
35
+ end
36
+ end
37
+ end
@@ -1,80 +1,39 @@
1
1
  require 'aws-sdk'
2
+ require 'logstash-logger/device/aws_stream'
2
3
 
3
4
  module LogStashLogger
4
5
  module Device
5
- class Kinesis < Connectable
6
-
7
- DEFAULT_REGION = 'us-east-1'
8
- DEFAULT_STREAM = 'logstash'
9
- RECOVERABLE_ERROR_CODES = [
6
+ class Kinesis < AwsStream
7
+ @stream_class = ::Aws::Kinesis::Client
8
+ @recoverable_error_codes = [
10
9
  "ServiceUnavailable",
11
10
  "Throttling",
12
11
  "RequestExpired",
13
12
  "ProvisionedThroughputExceededException"
14
- ]
15
-
16
- attr_accessor :aws_region, :stream
13
+ ].freeze
17
14
 
18
- def initialize(opts)
19
- super
20
- @access_key_id = opts[:aws_access_key_id] || ENV['AWS_ACCESS_KEY_ID']
21
- @secret_access_key = opts[:aws_secret_access_key] || ENV['AWS_SECRET_ACCESS_KEY']
22
- @aws_region = opts[:aws_region] || DEFAULT_REGION
23
- @stream = opts[:stream] || DEFAULT_STREAM
15
+ def transform_message(message)
16
+ {
17
+ data: message,
18
+ partition_key: SecureRandom.uuid
19
+ }
24
20
  end
25
21
 
26
- def connect
27
- @io = ::Aws::Kinesis::Client.new(
28
- region: @aws_region,
29
- credentials: ::Aws::Credentials.new(@access_key_id, @secret_access_key)
30
- )
22
+ def put_records(records)
23
+ @io.put_records({
24
+ records: records,
25
+ stream_name: @stream
26
+ })
31
27
  end
32
28
 
33
- def with_connection
34
- connect unless connected?
35
- yield
36
- rescue => e
37
- log_error(e)
38
- log_warning("giving up")
39
- close(flush: false)
29
+ def is_successful_response(resp)
30
+ resp.failed_record_count == 0
40
31
  end
41
32
 
42
- def write_batch(messages, group = nil)
43
- kinesis_records = messages.map do |message|
44
- {
45
- data: message,
46
- partition_key: SecureRandom.uuid
47
- }
48
- end
49
-
50
- with_connection do
51
- resp = @io.put_records({
52
- records: kinesis_records,
53
- stream_name: @stream
54
- })
55
-
56
- # Put any failed records back into the buffer
57
- if resp.failed_record_count != 0
58
- resp.records.each_with_index do |record, index|
59
- if RECOVERABLE_ERROR_CODES.include?(record.error_code)
60
- log_warning("Failed to post record to kinesis with error: #{record.error_code} #{record.error_message}")
61
- log_warning("Retrying")
62
- write(kinesis_records[index][:data])
63
- elsif !record.error_code.nil? && record.error_code != ''
64
- log_error("Failed to post record to kinesis with error: #{record.error_code} #{record.error_message}")
65
- end
66
- end
67
- end
68
- end
33
+ def get_response_records(resp)
34
+ resp.records
69
35
  end
70
36
 
71
- def write_one(message)
72
- write_batch([message])
73
- end
74
-
75
- def close!
76
- @io = nil
77
- end
78
37
  end
79
38
  end
80
39
  end
@@ -1,3 +1,3 @@
1
1
  module LogStashLogger
2
- VERSION = "0.23.0"
2
+ VERSION = "0.24.0"
3
3
  end
@@ -0,0 +1,45 @@
1
+ require 'logstash-logger'
2
+
3
+ describe LogStashLogger::Device::Firehose do
4
+ include_context 'device'
5
+
6
+ let(:client) { double("Aws::Firehose::Client") }
7
+
8
+ before(:each) do
9
+ allow(Aws::Firehose::Client).to receive(:new) { client }
10
+ end
11
+
12
+ it "writes to a Firehose stream" do
13
+ response = ::Aws::Firehose::Types::PutRecordBatchOutput.new
14
+ response.failed_put_count = 0
15
+ response.request_responses = []
16
+ expect(client).to receive(:put_record_batch) { response }
17
+ firehose_device.write "foo"
18
+
19
+ expect(firehose_device).to be_connected
20
+ firehose_device.close!
21
+ expect(firehose_device).not_to be_connected
22
+ end
23
+
24
+ it "puts records with recoverable errors back in the buffer" do
25
+ failed_record = ::Aws::Firehose::Types::PutRecordBatchResponseEntry.new
26
+ failed_record.error_code = "InternalFailure"
27
+ failed_record.error_message = "InternalFailure"
28
+ response = ::Aws::Firehose::Types::PutRecordBatchOutput.new
29
+ response.failed_put_count = 1
30
+ response.request_responses = [failed_record]
31
+
32
+ expect(client).to receive(:put_record_batch) { response }
33
+ expect(firehose_device).to receive(:write).with("foo")
34
+
35
+ firehose_device.write_one "foo"
36
+ end
37
+
38
+ it "defaults the AWS region to us-east-1" do
39
+ expect(firehose_device.aws_region).to eq('us-east-1')
40
+ end
41
+
42
+ it "defaults the Firehose stream to logstash" do
43
+ expect(firehose_device.stream).to eq('logstash')
44
+ end
45
+ end
data/spec/spec_helper.rb CHANGED
@@ -62,6 +62,7 @@ RSpec.shared_context 'device' do
62
62
  let(:redis_device) { LogStashLogger::Device.new(type: :redis, sync: true) }
63
63
  let(:kafka_device) { LogStashLogger::Device.new(type: :kafka, sync: true) }
64
64
  let(:kinesis_device) { LogStashLogger::Device.new(type: :kinesis, sync: true) }
65
+ let(:firehose_device) { LogStashLogger::Device.new(type: :firehose, sync: true) }
65
66
 
66
67
  let(:outputs) { [{type: :stdout}, {type: :io, io: io}] }
67
68
  let(:multi_delegator_device) { LogStashLogger::Device.new(type: :multi_delegator, outputs: outputs) }
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-logger
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.23.0
4
+ version: 0.24.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - David Butler
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2017-04-14 00:00:00.000000000 Z
11
+ date: 2017-04-28 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -191,10 +191,12 @@ files:
191
191
  - lib/logstash-logger/buffer.rb
192
192
  - lib/logstash-logger/configuration.rb
193
193
  - lib/logstash-logger/device.rb
194
+ - lib/logstash-logger/device/aws_stream.rb
194
195
  - lib/logstash-logger/device/balancer.rb
195
196
  - lib/logstash-logger/device/base.rb
196
197
  - lib/logstash-logger/device/connectable.rb
197
198
  - lib/logstash-logger/device/file.rb
199
+ - lib/logstash-logger/device/firehose.rb
198
200
  - lib/logstash-logger/device/io.rb
199
201
  - lib/logstash-logger/device/kafka.rb
200
202
  - lib/logstash-logger/device/kinesis.rb
@@ -234,6 +236,7 @@ files:
234
236
  - spec/device/balancer_spec.rb
235
237
  - spec/device/connectable_spec.rb
236
238
  - spec/device/file_spec.rb
239
+ - spec/device/firehose_spec.rb
237
240
  - spec/device/io_spec.rb
238
241
  - spec/device/kafka_spec.rb
239
242
  - spec/device/kinesis_spec.rb
@@ -290,6 +293,7 @@ test_files:
290
293
  - spec/device/balancer_spec.rb
291
294
  - spec/device/connectable_spec.rb
292
295
  - spec/device/file_spec.rb
296
+ - spec/device/firehose_spec.rb
293
297
  - spec/device/io_spec.rb
294
298
  - spec/device/kafka_spec.rb
295
299
  - spec/device/kinesis_spec.rb