fluent-plugin-kinesis 1.3.0 → 2.0.0
This diff compares the contents of two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.travis.yml +2 -23
- data/CHANGELOG.md +13 -0
- data/Gemfile +9 -9
- data/LICENSE.txt +201 -40
- data/Makefile +24 -31
- data/README.md +179 -308
- data/Rakefile +9 -13
- data/benchmark/task.rake +96 -58
- data/fluent-plugin-kinesis.gemspec +15 -19
- data/gemfiles/Gemfile.fluentd-0.12 +10 -10
- data/lib/fluent/plugin/kinesis.rb +166 -0
- data/lib/fluent/plugin/kinesis_helper/aggregator.rb +99 -0
- data/lib/fluent/plugin/kinesis_helper/api.rb +152 -121
- data/lib/fluent/plugin/kinesis_helper/client.rb +125 -12
- data/lib/fluent/plugin/out_kinesis_firehose.rb +40 -27
- data/lib/fluent/plugin/out_kinesis_streams.rb +51 -30
- data/lib/fluent/plugin/out_kinesis_streams_aggregated.rb +76 -0
- data/lib/fluent_plugin_kinesis/version.rb +10 -10
- metadata +18 -70
- data/README-v0.4.md +0 -348
- data/benchmark/dummy.conf +0 -0
- data/gemfiles/Gemfile.aws-sdk-2.4 +0 -20
- data/gemfiles/Gemfile.fluentd-0.10.58 +0 -20
- data/gemfiles/Gemfile.fluentd-0.14.11 +0 -20
- data/gemfiles/Gemfile.ruby-2.0 +0 -21
- data/gemfiles/Gemfile.ruby-2.1 +0 -21
- data/lib/fluent/plugin/kinesis_helper.rb +0 -36
- data/lib/fluent/plugin/kinesis_helper/class_methods.rb +0 -123
- data/lib/fluent/plugin/kinesis_helper/credentials.rb +0 -51
- data/lib/fluent/plugin/kinesis_helper/error.rb +0 -43
- data/lib/fluent/plugin/kinesis_helper/format.rb +0 -85
- data/lib/fluent/plugin/kinesis_helper/initialize.rb +0 -59
- data/lib/fluent/plugin/kinesis_helper/kpl.rb +0 -82
- data/lib/fluent/plugin/out_kinesis.rb +0 -323
- data/lib/fluent/plugin/out_kinesis_producer.rb +0 -48
- data/lib/fluent/plugin/patched_detach_process_impl.rb +0 -103
- data/lib/kinesis_producer.rb +0 -24
- data/lib/kinesis_producer/binary.rb +0 -10
- data/lib/kinesis_producer/daemon.rb +0 -270
- data/lib/kinesis_producer/library.rb +0 -122
- data/lib/kinesis_producer/protobuf/config.pb.rb +0 -66
- data/lib/kinesis_producer/protobuf/messages.pb.rb +0 -151
- data/lib/kinesis_producer/tasks/binary.rake +0 -73
data/lib/fluent/plugin/kinesis_helper/aggregator.rb (new file)
@@ -0,0 +1,99 @@
+#
+# Copyright 2014-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+require 'fluent/configurable'
+require 'google/protobuf'
+
+Google::Protobuf::DescriptorPool.generated_pool.build do
+  add_message "AggregatedRecord" do
+    repeated :partition_key_table, :string, 1
+    repeated :explicit_hash_key_table, :string, 2
+    repeated :records, :message, 3, "Record"
+  end
+  add_message "Tag" do
+    optional :key, :string, 1
+    optional :value, :string, 2
+  end
+  add_message "Record" do
+    optional :partition_key_index, :uint64, 1
+    optional :explicit_hash_key_index, :uint64, 2
+    optional :data, :bytes, 3
+    repeated :tags, :message, 4, "Tag"
+  end
+end
+
+module Fluent
+  module KinesisHelper
+    class Aggregator
+      AggregatedRecord = Google::Protobuf::DescriptorPool.generated_pool.lookup("AggregatedRecord").msgclass
+      Tag = Google::Protobuf::DescriptorPool.generated_pool.lookup("Tag").msgclass
+      Record = Google::Protobuf::DescriptorPool.generated_pool.lookup("Record").msgclass
+
+      class InvalidEncodingError < ::StandardError; end
+
+      MagicNumber = ['F3899AC2'].pack('H*')
+
+      def aggregate(records, partition_key)
+        message = AggregatedRecord.encode(AggregatedRecord.new(
+          partition_key_table: ['a', partition_key],
+          records: records.map{|data|
+            Record.new(partition_key_index: 1, data: data)
+          },
+        ))
+        [MagicNumber, message, Digest::MD5.digest(message)].pack("A4A*A16")
+      end
+
+      def deaggregate(encoded)
+        unless aggregated?(encoded)
+          raise InvalidEncodingError, "Invalid MagicNumber #{encoded[0..3]}}"
+        end
+        message, digest = encoded[4..encoded.length-17], encoded[encoded.length-16..-1]
+        if Digest::MD5.digest(message) != digest
+          raise InvalidEncodingError, "Digest mismatch #{digest}"
+        end
+        decoded = AggregatedRecord.decode(message)
+        records = decoded.records.map(&:data)
+        partition_key = decoded.partition_key_table[1]
+        [records, partition_key]
+      end
+
+      def aggregated?(encoded)
+        encoded[0..3] == MagicNumber
+      end
+
+      def aggregated_size_offset(partition_key)
+        data = 'd'
+        encoded = aggregate([record(data)], partition_key)
+        finalize(encoded).size - data.size
+      end
+
+      module Mixin
+        AggregateOffset = 25
+        RecordOffset = 10
+
+        module Params
+          include Fluent::Configurable
+        end
+
+        def self.included(mod)
+          mod.include Params
+        end
+
+        def aggregator
+          @aggregator ||= Aggregator.new
+        end
+      end
+    end
+  end
+end
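The Aggregator added above implements the KPL-compatible record aggregation format: a 4-byte magic number (F3899AC2), the protobuf-encoded AggregatedRecord, and a trailing 16-byte MD5 digest of that message. A minimal standalone Ruby sketch of just that envelope, assuming only what the diff shows (frame and unframe are illustrative names, not plugin methods):

require 'digest'

MAGIC = ['F3899AC2'].pack('H*')  # same 4-byte marker as MagicNumber in the diff

# Frame a protobuf payload the way Aggregator#aggregate does:
# magic number, payload, then a 16-byte MD5 of the payload.
def frame(message)
  [MAGIC, message, Digest::MD5.digest(message)].pack('A4A*A16')
end

# Reverse the framing, with the same two sanity checks deaggregate performs.
def unframe(encoded)
  raise 'not an aggregated record' unless encoded[0..3] == MAGIC
  message, digest = encoded[4..-17], encoded[-16..-1]
  raise 'digest mismatch' unless Digest::MD5.digest(message) == digest
  message
end

payload = frame('protobuf bytes would go here')
puts unframe(payload)   # => protobuf bytes would go here

deaggregate applies the same magic-number and digest checks before handing the message to AggregatedRecord.decode.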
data/lib/fluent/plugin/kinesis_helper/api.rb
@@ -1,163 +1,194 @@
 #
-#
+# Copyright 2014-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 #
-#
-#
-#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
 #
-#
+#     http://www.apache.org/licenses/LICENSE-2.0
 #
-#
-#
-#
-#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.

 require 'fluent_plugin_kinesis/version'
+require 'fluent/configurable'

 module Fluent
   module KinesisHelper
     module API
+      MaxRecordSize = 1024 * 1024 # 1 MB
+
+      module APIParams
+        include Fluent::Configurable
+        config_param :max_record_size, :integer, default: MaxRecordSize
+      end
+
+      def self.included(mod)
+        mod.include APIParams
+      end
+
       def configure(conf)
         super
-        if @
-          raise ConfigError, "
-        end
-        if @batch_request_max_size > self.class::BatchRequestLimitSize
-          raise ConfigError, "batch_request_max_size can't be grater than #{self.class::BatchRequestLimitSize}."
+        if @max_record_size > MaxRecordSize
+          raise ConfigError, "max_record_size can't be grater than #{MaxRecordSize/1024} KB."
         end
-        @region = client.config.region if @region.nil?
       end

-
-
-
+      module BatchRequest
+        module BatchRequestParams
+          include Fluent::Configurable
+          config_param :retries_on_batch_request, :integer, default: 8
+          config_param :reset_backoff_if_success, :bool, default: true
+          config_param :batch_request_max_count, :integer, default: nil
+          config_param :batch_request_max_size, :integer, default: nil
         end
-      end

-
-
-      def client_options
-        options = {
-          user_agent_suffix: "fluent-plugin-kinesis/#{request_type}/#{FluentPluginKinesis::VERSION}",
-          credentials: credentials,
-        }
-        options.update(region: @region) unless @region.nil?
-        options.update(http_proxy: @http_proxy) unless @http_proxy.nil?
-        options.update(endpoint: @endpoint) unless @endpoint.nil?
-        options.update(ssl_verify_peer: @ssl_verify_peer) unless @ssl_verify_peer.nil?
-        if @debug
-          options.update(logger: Logger.new(log.out))
-          options.update(log_level: :debug)
+        def self.included(mod)
+          mod.include BatchRequestParams
         end
-        options
-      end
-
-      def split_to_batches(records)
-        batch_by_limit(records, @batch_request_max_count, @batch_request_max_size)
-      end

-
-
-
-
-
-
-          size = 0
+        def configure(conf)
+          super
+          if @batch_request_max_count.nil?
+            @batch_request_max_count = self.class::BatchRequestLimitCount
+          elsif @batch_request_max_count > self.class::BatchRequestLimitCount
+            raise ConfigError, "batch_request_max_count can't be grater than #{self.class::BatchRequestLimitCount}."
          end
-
-
-
-
-
-
+          if @batch_request_max_size.nil?
+            @batch_request_max_size = self.class::BatchRequestLimitSize
+          elsif @batch_request_max_size > self.class::BatchRequestLimitSize
+            raise ConfigError, "batch_request_max_size can't be grater than #{self.class::BatchRequestLimitSize}."
+          end
+        end

-
-
-
+        def size_of_values(record)
+          record.compact.map(&:size).inject(:+) || 0
+        end

-
-
-
-
-
-
-
-
-
-
-
-
-
+        private
+
+        def split_to_batches(records, &block)
+          batch = []
+          size = 0
+          records.each do |record|
+            record_size = size_of_values(record)
+            if batch.size+1 > @batch_request_max_count or size+record_size > @batch_request_max_size
+              yield(batch, size)
+              batch = []
+              size = 0
+            end
+            batch << record
+            size += record_size
          end
+          yield(batch, size) if batch.size > 0
        end
-      end
-
-      def any_records_shipped?(res)
-        results(res).size > failed_count(res)
-      end

-
-
-
-
-
-
-
-
-
+        def batch_request_with_retry(batch, retry_count=0, backoff: nil, &block)
+          backoff ||= Backoff.new
+          res = yield(batch)
+          if failed_count(res) > 0
+            failed_records = collect_failed_records(batch, res)
+            if retry_count < @retries_on_batch_request
+              backoff.reset if @reset_backoff_if_success and any_records_shipped?(res)
+              wait_second = backoff.next
+              msg = 'Retrying to request batch. Retry count: %3d, Retry records: %3d, Wait seconds %3.2f' % [retry_count+1, failed_records.size, wait_second]
+              log.warn(truncate msg)
+              # TODO: sleep() doesn't wait the given seconds sometime.
+              # The root cause is unknown so far, so I'd like to add debug print only. It should be fixed in the future.
+              log.debug("#{Thread.current.object_id} sleep start")
+              sleep(wait_second)
+              log.debug("#{Thread.current.object_id} sleep finish")
+              batch_request_with_retry(retry_records(failed_records), retry_count+1, backoff: backoff, &block)
+            else
+              give_up_retries(failed_records)
+            end
+          end
         end
-        failed_records
-      end

-
-
-
-        when :firehose; :failed_put_count
-        end
-        res[failed_field]
-      end
+        def any_records_shipped?(res)
+          results(res).size > failed_count(res)
+        end

-
-
-
-
+        def collect_failed_records(records, res)
+          failed_records = []
+          results(res).each_with_index do |record, index|
+            next unless record[:error_code]
+            original = case request_type
+                       when :streams, :firehose; records[index]
+                       when :streams_aggregated; records
                        end
-
-
+            failed_records.push(
+              original: original,
+              error_code: record[:error_code],
+              error_message: record[:error_message]
+            )
+          end
+          failed_records
+        end

-
-
-
-
-
-
-
-
-      end
+        def retry_records(failed_records)
+          case request_type
+          when :streams, :firehose
+            failed_records.map{|r| r[:original] }
+          when :streams_aggregated
+            failed_records.first[:original]
+          end
+        end

-
-
-
+        def failed_count(res)
+          failed_field = case request_type
+                         when :streams; :failed_record_count
+                         when :streams_aggregated; :failed_record_count
+                         when :firehose; :failed_put_count
+                         end
+          res[failed_field]
         end

-      def
-
-
-
+        def results(res)
+          result_field = case request_type
+                         when :streams; :records
+                         when :streams_aggregated; :records
+                         when :firehose; :request_responses
+                         end
+          res[result_field]
         end

-      def
-
+        def give_up_retries(failed_records)
+          failed_records.each {|record|
+            log.error(truncate 'Could not put record, Error: %s/%s, Record: %s' % [
+              record[:error_code],
+              record[:error_message],
+              record[:original]
+            ])
+          }
         end

-
+        class Backoff
+          def initialize
+            @count = 0
+          end

-
-
-
+          def next
+            value = calc(@count)
+            @count += 1
+            value
+          end
+
+          def reset
+            @count = 0
+          end
+
+          private

-
-
+          def calc(count)
+            (2 ** count) * scaling_factor
+          end
+
+          def scaling_factor
+            0.3 + (0.5-rand) * 0.1
+          end
         end
       end
     end
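The new BatchRequest module splits records into batches bounded by batch_request_max_count and batch_request_max_size, then retries failed records up to retries_on_batch_request times (default 8), sleeping between attempts according to the Backoff class: roughly (2 ** attempt) * 0.3 seconds, with a small jitter on the scaling factor. A standalone sketch of that schedule, illustrative only and not the plugin's classes:

# Rough reproduction of the Backoff schedule from the diff above.
def backoff_wait(attempt)
  scaling_factor = 0.3 + (0.5 - rand) * 0.1   # jittered, roughly 0.25..0.35
  (2 ** attempt) * scaling_factor
end

8.times do |attempt|
  printf("retry %d: wait ~%.2f s\n", attempt + 1, backoff_wait(attempt))
end

With the defaults, the first retry waits about 0.3 s and the eighth about 38 s; reset_backoff_if_success restarts the schedule whenever a retry ships at least one record.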
data/lib/fluent/plugin/kinesis_helper/client.rb
@@ -1,20 +1,75 @@
 #
-#
+# Copyright 2014-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 #
-#
-#
-#
+# Licensed under the Apache License, Version 2.0 (the "License"). You
+# may not use this file except in compliance with the License. A copy of
+# the License is located at
 #
-#
+#     http://www.apache.org/licenses/LICENSE-2.0
 #
-#
-#
-#
-#
+# or in the "license" file accompanying this file. This file is
+# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
+# ANY KIND, either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+require 'fluent/configurable'

 module Fluent
   module KinesisHelper
     module Client
+      module ClientParams
+        include Fluent::Configurable
+        config_param :region, :string, default: nil
+
+        config_param :http_proxy, :string, default: nil, secret: true
+        config_param :endpoint, :string, default: nil
+        config_param :ssl_verify_peer, :bool, default: true
+
+        config_param :aws_key_id, :string, default: nil, secret: true
+        config_param :aws_sec_key, :string, default: nil, secret: true
+        config_section :assume_role_credentials, multi: false do
+          desc "The Amazon Resource Name (ARN) of the role to assume"
+          config_param :role_arn, :string, secret: true
+          desc "An identifier for the assumed role session"
+          config_param :role_session_name, :string
+          desc "An IAM policy in JSON format"
+          config_param :policy, :string, default: nil
+          desc "The duration, in seconds, of the role session (900-3600)"
+          config_param :duration_seconds, :integer, default: nil
+          desc "A unique identifier that is used by third parties when assuming roles in their customers' accounts."
+          config_param :external_id, :string, default: nil, secret: true
+        end
+        config_section :instance_profile_credentials, multi: false do
+          desc "Number of times to retry when retrieving credentials"
+          config_param :retries, :integer, default: nil
+          desc "IP address (default:169.254.169.254)"
+          config_param :ip_address, :string, default: nil
+          desc "Port number (default:80)"
+          config_param :port, :integer, default: nil
+          desc "Number of seconds to wait for the connection to open"
+          config_param :http_open_timeout, :float, default: nil
+          desc "Number of seconds to wait for one block to be read"
+          config_param :http_read_timeout, :float, default: nil
+          # config_param :delay, :integer or :proc, :default => nil
+          # config_param :http_degub_output, :io, :default => nil
+        end
+        config_section :shared_credentials, multi: false do
+          desc "Path to the shared file. (default: $HOME/.aws/credentials)"
+          config_param :path, :string, default: nil
+          desc "Profile name. Default to 'default' or ENV['AWS_PROFILE']"
+          config_param :profile_name, :string, default: nil
+        end
+      end
+
+      def self.included(mod)
+        mod.include ClientParams
+      end
+
+      def configure(conf)
+        super
+        @region = client.config.region if @region.nil?
+      end
+
       def client
         @client ||= client_class.new(client_options)
       end
@@ -23,14 +78,72 @@ module Fluent

       def client_class
         case request_type
-        when :streams
+        when :streams, :streams_aggregated
+          require 'aws-sdk-kinesis'
           Aws::Kinesis::Client
         when :firehose
+          require 'aws-sdk-firehose'
           Aws::Firehose::Client
-        when :producer
-          KinesisProducer::Library
         end
       end
+
+      def client_options
+        options = setup_credentials
+        options.update(
+          user_agent_suffix: "fluent-plugin-kinesis/#{request_type}/#{FluentPluginKinesis::VERSION}"
+        )
+        options.update(region: @region) unless @region.nil?
+        options.update(http_proxy: @http_proxy) unless @http_proxy.nil?
+        options.update(endpoint: @endpoint) unless @endpoint.nil?
+        options.update(ssl_verify_peer: @ssl_verify_peer) unless @ssl_verify_peer.nil?
+        if @debug
+          options.update(logger: Logger.new(log.out))
+          options.update(log_level: :debug)
+        end
+        options
+      end
+
+      def setup_credentials
+        options = {}
+        credentials_options = {}
+        case
+        when @aws_key_id && @aws_sec_key
+          options[:access_key_id] = @aws_key_id
+          options[:secret_access_key] = @aws_sec_key
+        when @assume_role_credentials
+          c = @assume_role_credentials
+          credentials_options[:role_arn] = c.role_arn
+          credentials_options[:role_session_name] = c.role_session_name
+          credentials_options[:policy] = c.policy if c.policy
+          credentials_options[:duration_seconds] = c.duration_seconds if c.duration_seconds
+          credentials_options[:external_id] = c.external_id if c.external_id
+          if @region
+            credentials_options[:client] = Aws::STS::Client.new(region: @region)
+          end
+          options[:credentials] = Aws::AssumeRoleCredentials.new(credentials_options)
+        when @instance_profile_credentials
+          c = @instance_profile_credentials
+          credentials_options[:retries] = c.retries if c.retries
+          credentials_options[:ip_address] = c.ip_address if c.ip_address
+          credentials_options[:port] = c.port if c.port
+          credentials_options[:http_open_timeout] = c.http_open_timeout if c.http_open_timeout
+          credentials_options[:http_read_timeout] = c.http_read_timeout if c.http_read_timeout
+          if ENV["AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"]
+            options[:credentials] = Aws::ECSCredentials.new(credentials_options)
+          else
+            options[:credentials] = Aws::InstanceProfileCredentials.new(credentials_options)
+          end
+        when @shared_credentials
+          c = @shared_credentials
+          credentials_options[:path] = c.path if c.path
+          credentials_options[:profile_name] = c.profile_name if c.profile_name
+          options[:credentials] = Aws::SharedCredentials.new(credentials_options)
+        else
+          # Use default credentials
+          # See http://docs.aws.amazon.com/sdkforruby/api/Aws/S3/Client.html
+        end
+        options
+      end
     end
   end
 end
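The ClientParams module and its credential sections above map directly onto plugin configuration. A hypothetical Fluentd match block for the 2.0.0 kinesis_streams output using assume_role_credentials (the tag pattern, stream_name, and ARN below are illustrative values, not taken from this diff):

<match kinesis.**>
  @type kinesis_streams
  region us-east-1
  # stream_name is illustrative here; it is not shown in this diff
  stream_name example-stream
  <assume_role_credentials>
    role_arn arn:aws:iam::123456789012:role/example-kinesis-writer
    role_session_name fluentd
  </assume_role_credentials>
</match>

If neither a key pair nor a credential section is given, setup_credentials leaves the options empty and the AWS SDK default provider chain is used, as the else branch notes.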