fluent-plugin-cloudwatch-logs-foxtrot9 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/Rakefile ADDED
@@ -0,0 +1,10 @@
+ require "bundler/gem_tasks"
+
+ require 'rake/testtask'
+
+ Rake::TestTask.new(:test) do |test|
+   test.libs << 'test'
+   test.test_files = FileList['test/plugin/*.rb']
+ end
+
+ task :default => :test
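A brief usage note: because :default points at :test, running `rake` with no arguments (typically via `bundle exec rake`) would presumably execute the plugin tests matched by test/plugin/*.rb.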
data/example/fluentd.conf ADDED
@@ -0,0 +1,23 @@
+ <source>
+   @type forward
+ </source>
+
+ <source>
+   @type cloudwatch_logs
+   tag test.cloudwatch_logs.in
+   log_group_name fluent-plugin-cloudwatch-example
+   log_stream_name fluent-plugin-cloudwatch-example
+   state_file /tmp/fluent-plugin-cloudwatch-example.state
+ </source>
+
+ <match test.cloudwatch_logs.out>
+   @type cloudwatch_logs
+   log_group_name fluent-plugin-cloudwatch-example
+   log_stream_name fluent-plugin-cloudwatch-example
+   auto_create_stream true
+ </match>
+
+ <match test.cloudwatch_logs.in>
+   @type stdout
+ </match>
+
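With this example config loaded, the round trip can be exercised by posting a record to the forward input and watching it come back on stdout via the `test.cloudwatch_logs.in` source. A minimal sketch, assuming the separate `fluent-logger` gem and a local Fluentd listening on the forward input's default port 24224:

  require 'fluent-logger'

  # Connects to the <source> @type forward block above (default port 24224).
  Fluent::Logger::FluentLogger.open(nil, host: 'localhost', port: 24224)

  # Matched by <match test.cloudwatch_logs.out> and written to CloudWatch Logs;
  # the cloudwatch_logs <source> then reads the same stream back and prints it.
  Fluent::Logger.post('test.cloudwatch_logs.out', 'message' => 'hello CloudWatch')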
data/fluent-plugin-cloudwatch-logs-foxtrot9.gemspec ADDED
@@ -0,0 +1,28 @@
+ # coding: utf-8
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'fluent/plugin/cloudwatch/logs/version'
+
+ Gem::Specification.new do |spec|
+   spec.name          = "fluent-plugin-cloudwatch-logs-foxtrot9"
+   spec.version       = Fluent::Plugin::Cloudwatch::Logs::VERSION
+   spec.authors       = ["Mit Naria"]
+   spec.email         = ["mit4dev@gmail.com"]
+   spec.summary       = %q{CloudWatch Logs Plugin for Fluentd}
+   spec.homepage      = "https://github.com/foxtrot9/fluent-plugin-cloudwatch-logs"
+   spec.license       = "MIT"
+
+   spec.files         = `git ls-files -z`.split("\x0")
+   spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+   spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
+   spec.require_paths = ["lib"]
+
+   spec.add_dependency 'fluentd', '>= 0.14.15'
+   spec.add_dependency 'aws-sdk-cloudwatchlogs', '~> 1.0'
+
+   spec.add_development_dependency "bundler"
+   spec.add_development_dependency "rake"
+   spec.add_development_dependency "test-unit"
+   spec.add_development_dependency "test-unit-rr"
+   spec.add_development_dependency "mocha"
+ end
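For anyone consuming the gem, a minimal Gemfile sketch matching the runtime dependencies declared above (the version pins are illustrative, not prescribed by the package):

  source 'https://rubygems.org'

  gem 'fluentd', '>= 0.14.15'
  gem 'fluent-plugin-cloudwatch-logs-foxtrot9', '0.0.1'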
data/lib/fluent/plugin/cloudwatch/logs.rb ADDED
@@ -0,0 +1,11 @@
+ require "fluent/plugin/cloudwatch/logs/version"
+
+ module Fluent
+   module Plugin
+     module Cloudwatch
+       module Logs
+         # Your code goes here...
+       end
+     end
+   end
+ end
data/lib/fluent/plugin/cloudwatch/logs/version.rb ADDED
@@ -0,0 +1,9 @@
+ module Fluent
+   module Plugin
+     module Cloudwatch
+       module Logs
+         VERSION = "0.0.1"
+       end
+     end
+   end
+ end
data/lib/fluent/plugin/in_cloudwatch_logs.rb ADDED
@@ -0,0 +1,203 @@
+ require 'date'
+ require 'fluent/plugin/input'
+ require 'fluent/plugin/parser'
+ require 'yajl'
+
+ module Fluent::Plugin
+   class CloudwatchLogsInput < Input
+     Fluent::Plugin.register_input('cloudwatch_logs', self)
+
+     helpers :parser, :thread, :compat_parameters
+
+     config_param :aws_key_id, :string, default: nil, secret: true
+     config_param :aws_sec_key, :string, default: nil, secret: true
+     config_param :aws_use_sts, :bool, default: false
+     config_param :aws_sts_role_arn, :string, default: nil
+     config_param :aws_sts_session_name, :string, default: 'fluentd'
+     config_param :region, :string, default: nil
+     config_param :endpoint, :string, default: nil
+     config_param :tag, :string
+     config_param :log_group_name, :string
+     config_param :log_stream_name, :string, default: nil
+     config_param :use_log_stream_name_prefix, :bool, default: false
+     config_param :state_file, :string
+     config_param :fetch_interval, :time, default: 60
+     config_param :http_proxy, :string, default: nil
+     config_param :json_handler, :enum, list: [:yajl, :json], default: :yajl
+     config_param :use_todays_log_stream, :bool, default: false
+     config_param :use_aws_timestamp, :bool, default: false
+
+     config_section :parse do
+       config_set_default :@type, 'none'
+     end
+
+     def initialize
+       super
+
+       require 'aws-sdk-cloudwatchlogs'
+     end
+
+     def configure(conf)
+       compat_parameters_convert(conf, :parser)
+       super
+       configure_parser(conf)
+     end
+
+     def start
+       super
+       options = {}
+       options[:region] = @region if @region
+       options[:endpoint] = @endpoint if @endpoint
+       options[:http_proxy] = @http_proxy if @http_proxy
+
+       if @aws_use_sts
+         Aws.config[:region] = options[:region]
+         options[:credentials] = Aws::AssumeRoleCredentials.new(
+           role_arn: @aws_sts_role_arn,
+           role_session_name: @aws_sts_session_name
+         )
+       else
+         options[:credentials] = Aws::Credentials.new(@aws_key_id, @aws_sec_key) if @aws_key_id && @aws_sec_key
+       end
+
+       @logs = Aws::CloudWatchLogs::Client.new(options)
+
+       @finished = false
+       thread_create(:in_cloudwatch_logs_runner, &method(:run))
+
+       @json_handler = case @json_handler
+                       when :yajl
+                         Yajl
+                       when :json
+                         JSON
+                       end
+     end
+
+     def shutdown
+       @finished = true
+       super
+     end
+
+     private
+     def configure_parser(conf)
+       if conf['format']
+         @parser = parser_create
+       end
+     end
+
+     def state_file_for(log_stream_name)
+       return "#{@state_file}_#{log_stream_name.gsub(File::SEPARATOR, '-')}" if log_stream_name
+       return @state_file
+     end
+
+     def next_token(log_stream_name)
+       return nil unless File.exist?(state_file_for(log_stream_name))
+       File.read(state_file_for(log_stream_name)).chomp
+     end
+
+     def store_next_token(token, log_stream_name = nil)
+       File.open(state_file_for(log_stream_name), 'w') do |f|
+         f.write token
+       end
+     end
+
+     def run
+       @next_fetch_time = Time.now
+
+       until @finished
+         if Time.now > @next_fetch_time
+           @next_fetch_time += @fetch_interval
+
+           if @use_log_stream_name_prefix || @use_todays_log_stream
+             log_stream_name_prefix = @use_todays_log_stream ? get_todays_date : @log_stream_name
+             begin
+               log_streams = describe_log_streams(log_stream_name_prefix)
+               log_streams.concat(describe_log_streams(get_yesterdays_date)) if @use_todays_log_stream
+               log_streams.each do |log_stream|
+                 log_stream_name = log_stream.log_stream_name
+                 events = get_events(log_stream_name)
+                 events.each do |event|
+                   emit(log_stream_name, event)
+                 end
+               end
+             rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException
+               log.warn "'#{@log_stream_name}' prefixed log stream(s) are not found"
+               next
+             end
+           else
+             events = get_events(@log_stream_name)
+             events.each do |event|
+               emit(@log_stream_name, event)
+             end
+           end
+         end
+         sleep 1
+       end
+     end
+
+     def emit(stream, event)
+       if @parser
+         @parser.parse(event.message) {|time, record|
+           if @use_aws_timestamp
+             time = (event.timestamp / 1000).floor
+           end
+           router.emit(@tag, time, record)
+         }
+       else
+         time = (event.timestamp / 1000).floor
+         begin
+           record = @json_handler.load(event.message)
+         rescue JSON::ParserError, Yajl::ParseError # Catch parser errors
+           log.debug "Non-JSON message encountered"
+           record = { message: event.message }
+         end
+         router.emit(@tag, time, record)
+       end
+     end
+
+     def get_events(log_stream_name)
+       request = {
+         log_group_name: @log_group_name,
+         log_stream_name: log_stream_name
+       }
+       log_next_token = next_token(log_stream_name)
+       request[:next_token] = log_next_token if !log_next_token.nil? && !log_next_token.empty?
+       response = @logs.get_log_events(request)
+       if valid_next_token(log_next_token, response.next_forward_token)
+         store_next_token(response.next_forward_token, log_stream_name)
+       end
+
+       response.events
+     end
+
+     def describe_log_streams(log_stream_name_prefix, log_streams = nil, next_token = nil)
+       request = {
+         log_group_name: @log_group_name
+       }
+       request[:next_token] = next_token if next_token
+       request[:log_stream_name_prefix] = log_stream_name_prefix
+       response = @logs.describe_log_streams(request)
+       if log_streams
+         log_streams.concat(response.log_streams)
+       else
+         log_streams = response.log_streams
+       end
+       if response.next_token
+         log_streams = describe_log_streams(log_stream_name_prefix, log_streams, response.next_token)
+       end
+       log_streams
+     end
+
+     def valid_next_token(prev_token, next_token)
+       next_token && prev_token != next_token.chomp
+     end
+
+     def get_todays_date
+       Date.today.strftime("%Y/%m/%d")
+     end
+
+     def get_yesterdays_date
+       (Date.today - 1).strftime("%Y/%m/%d")
+     end
+   end
+ end
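Pulling the input's parameters together: it polls get_log_events every fetch_interval seconds, persists the forward token in state_file, and with use_log_stream_name_prefix treats log_stream_name as a prefix matching many streams. A configuration sketch under those assumptions (the region, group, stream prefix, and state path below are hypothetical placeholders); note that this version only instantiates a parser when the legacy `format` key is present, since configure_parser checks conf['format']:

  <source>
    @type cloudwatch_logs
    tag example.cloudwatch.in
    region us-east-1
    log_group_name my-app-logs
    use_log_stream_name_prefix true
    log_stream_name web-
    state_file /var/lib/fluentd/cloudwatch.state
    fetch_interval 60
    use_aws_timestamp true
    format json
  </source>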
data/lib/fluent/plugin/out_cloudwatch_logs.rb ADDED
@@ -0,0 +1,473 @@
+ require 'fluent/plugin/output'
+ require 'thread'
+ require 'yajl'
+
+ module Fluent::Plugin
+   class CloudwatchLogsOutput < Output
+     include Fluent::MessagePackFactory::Mixin
+     Fluent::Plugin.register_output('cloudwatch_logs', self)
+
+     helpers :compat_parameters, :inject
+
+     DEFAULT_BUFFER_TYPE = "memory"
+
+     config_param :aws_key_id, :string, default: nil, secret: true
+     config_param :aws_sec_key, :string, default: nil, secret: true
+     config_param :aws_instance_profile_credentials_retries, :integer, default: nil
+     config_param :aws_use_sts, :bool, default: false
+     config_param :aws_sts_role_arn, :string, default: nil
+     config_param :aws_sts_session_name, :string, default: 'fluentd'
+     config_param :region, :string, default: nil
+     config_param :endpoint, :string, default: nil
+     config_param :log_group_name, :string, default: nil
+     config_param :log_stream_name, :string, default: nil
+     config_param :auto_create_stream, :bool, default: false
+     config_param :message_keys, :array, default: [], value_type: :string
+     config_param :max_message_length, :integer, default: nil
+     config_param :max_events_per_batch, :integer, default: 10000
+     config_param :use_tag_as_group, :bool, default: false # TODO: Rename to use_tag_as_group_name ?
+     config_param :use_tag_as_stream, :bool, default: false # TODO: Rename to use_tag_as_stream_name ?
+     config_param :log_group_name_key, :string, default: nil
+     config_param :log_stream_name_key, :string, default: nil
+     config_param :remove_log_group_name_key, :bool, default: false
+     config_param :remove_log_stream_name_key, :bool, default: false
+     config_param :http_proxy, :string, default: nil
+     config_param :put_log_events_retry_wait, :time, default: 1.0
+     config_param :put_log_events_retry_limit, :integer, default: 17
+     config_param :put_log_events_disable_retry_limit, :bool, default: false
+     config_param :concurrency, :integer, default: 1
+     config_param :log_group_aws_tags, :hash, default: nil
+     config_param :log_group_aws_tags_key, :string, default: nil
+     config_param :remove_log_group_aws_tags_key, :bool, default: false
+     config_param :retention_in_days, :integer, default: nil
+     config_param :retention_in_days_key, :string, default: nil
+     config_param :remove_retention_in_days_key, :bool, default: false
+     config_param :json_handler, :enum, list: [:yajl, :json], default: :yajl
+     config_param :log_rejected_request, :bool, default: false
+
+     config_section :buffer do
+       config_set_default :@type, DEFAULT_BUFFER_TYPE
+     end
+
+     MAX_EVENTS_SIZE = 1_048_576
+     MAX_EVENT_SIZE = 256 * 1024
+     EVENT_HEADER_SIZE = 26
+
+     def initialize
+       super
+
+       require 'aws-sdk-cloudwatchlogs'
+     end
+
+     def configure(conf)
+       compat_parameters_convert(conf, :buffer, :inject)
+       super
+
+       unless [conf['log_group_name'], conf['use_tag_as_group'], conf['log_group_name_key']].compact.size == 1
+         raise Fluent::ConfigError, "Set only one of log_group_name, use_tag_as_group and log_group_name_key"
+       end
+
+       unless [conf['log_stream_name'], conf['use_tag_as_stream'], conf['log_stream_name_key']].compact.size == 1
+         raise Fluent::ConfigError, "Set only one of log_stream_name, use_tag_as_stream and log_stream_name_key"
+       end
+
+       if [conf['log_group_aws_tags'], conf['log_group_aws_tags_key']].compact.size > 1
+         raise Fluent::ConfigError, "Set only one of log_group_aws_tags, log_group_aws_tags_key"
+       end
+
+       if [conf['retention_in_days'], conf['retention_in_days_key']].compact.size > 1
+         raise Fluent::ConfigError, "Set only one of retention_in_days, retention_in_days_key"
+       end
+     end
+
+     def start
+       super
+
+       options = {}
+       options[:region] = @region if @region
+       options[:endpoint] = @endpoint if @endpoint
+       options[:instance_profile_credentials_retries] = @aws_instance_profile_credentials_retries if @aws_instance_profile_credentials_retries
+
+       if @aws_use_sts
+         Aws.config[:region] = options[:region]
+         options[:credentials] = Aws::AssumeRoleCredentials.new(
+           role_arn: @aws_sts_role_arn,
+           role_session_name: @aws_sts_session_name
+         )
+       else
+         options[:credentials] = Aws::Credentials.new(@aws_key_id, @aws_sec_key) if @aws_key_id && @aws_sec_key
+       end
+       options[:http_proxy] = @http_proxy if @http_proxy
+       @logs ||= Aws::CloudWatchLogs::Client.new(options)
+       @sequence_tokens = {}
+       @store_next_sequence_token_mutex = Mutex.new
+
+       @json_handler = case @json_handler
+                       when :yajl
+                         Yajl
+                       when :json
+                         JSON
+                       end
+     end
+
+     def format(tag, time, record)
+       record = inject_values_to_record(tag, time, record)
+       msgpack_packer.pack([tag, time, record]).to_s
+     end
+
+     def formatted_to_msgpack_binary?
+       true
+     end
+
+     def multi_workers_ready?
+       true
+     end
+
+     def write(chunk)
+       log_group_name = extract_placeholders(@log_group_name, chunk) if @log_group_name
+       log_stream_name = extract_placeholders(@log_stream_name, chunk) if @log_stream_name
+
+       queue = Thread::Queue.new
+
+       chunk.enum_for(:msgpack_each).select {|tag, time, record|
+         if record.nil?
+           log.warn "record is nil (tag=#{tag})"
+           false
+         else
+           true
+         end
+       }.group_by {|tag, time, record|
+         group = case
+                 when @use_tag_as_group
+                   tag
+                 when @log_group_name_key
+                   if @remove_log_group_name_key
+                     record.delete(@log_group_name_key)
+                   else
+                     record[@log_group_name_key]
+                   end
+                 else
+                   log_group_name
+                 end
+
+         stream = case
+                  when @use_tag_as_stream
+                    tag
+                  when @log_stream_name_key
+                    if @remove_log_stream_name_key
+                      record.delete(@log_stream_name_key)
+                    else
+                      record[@log_stream_name_key]
+                    end
+                  else
+                    log_stream_name
+                  end
+
+         [group, stream]
+       }.each {|group_stream, rs|
+         group_name, stream_name = group_stream
+
+         if stream_name.nil?
+           log.warn "stream_name is nil (group_name=#{group_name})"
+           next
+         end
+
+         unless log_group_exists?(group_name)
+           # rs = [[name, timestamp, record], [name, timestamp, record]]
+           # Get tags and retention from the first record;
+           # as we create the log group only once, values from the first record will persist.
+           record = rs[0][2]
+
+           awstags = @log_group_aws_tags
+           unless @log_group_aws_tags_key.nil?
+             if @remove_log_group_aws_tags_key
+               awstags = record.delete(@log_group_aws_tags_key)
+             else
+               awstags = record[@log_group_aws_tags_key]
+             end
+           end
+
+           retention_in_days = @retention_in_days
+           unless @retention_in_days_key.nil?
+             if @remove_retention_in_days_key
+               retention_in_days = record.delete(@retention_in_days_key)
+             else
+               retention_in_days = record[@retention_in_days_key]
+             end
+           end
+
+           if @auto_create_stream
+             create_log_group(group_name, awstags, retention_in_days)
+           else
+             log.warn "Log group '#{group_name}' does not exist"
+             next
+           end
+         end
+
+         unless log_stream_exists?(group_name, stream_name)
+           if @auto_create_stream
+             create_log_stream(group_name, stream_name)
+           else
+             log.warn "Log stream '#{stream_name}' does not exist"
+             next
+           end
+         end
+
+         events = []
+         rs.each do |t, time, record|
+           time_ms = (time.to_f * 1000).floor
+
+           scrub_record!(record)
+           if @message_keys.empty?
+             message = @json_handler.dump(record)
+           else
+             message = @message_keys.map {|k| record[k].to_s }.join(' ')
+           end
+
+           if @max_message_length
+             message = message.slice(0, @max_message_length)
+           end
+
+           events << {timestamp: time_ms, message: message}
+         end
+         # The log events in the batch must be in chronological order by their timestamp.
+         # http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html
+         events = events.sort_by {|e| e[:timestamp] }
+
+         queue << [group_name, stream_name, events]
+       }
+
+       @concurrency.times do
+         queue << nil
+       end
+       threads = @concurrency.times.map do |i|
+         Thread.start do
+           while job = queue.shift
+             group_name, stream_name, events = job
+             put_events_by_chunk(group_name, stream_name, events)
+           end
+         end
+       end
+       threads.each(&:join)
+     end
+
+     private
+     def scrub_record!(record)
+       case record
+       when Hash
+         record.each_value {|v| scrub_record!(v) }
+       when Array
+         record.each {|v| scrub_record!(v) }
+       when String
+         # The AWS API requires UTF-8 encoding
+         # https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/CloudWatchLogsConcepts.html
+         record.force_encoding('UTF-8')
+         record.scrub!
+       end
+     end
+
+     def delete_sequence_token(group_name, stream_name)
+       @sequence_tokens[group_name].delete(stream_name)
+     end
+
+     def next_sequence_token(group_name, stream_name)
+       @sequence_tokens[group_name][stream_name]
+     end
+
+     def store_next_sequence_token(group_name, stream_name, token)
+       @store_next_sequence_token_mutex.synchronize do
+         @sequence_tokens[group_name][stream_name] = token
+       end
+     end
+
+     def put_events_by_chunk(group_name, stream_name, events)
+       chunk = []
+
+       # The maximum batch size is 1,048,576 bytes, and this size is calculated as the sum of all event messages in UTF-8, plus 26 bytes for each log event.
+       # http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html
+       total_bytesize = 0
+       while event = events.shift
+         event_bytesize = event[:message].bytesize + EVENT_HEADER_SIZE
+         if MAX_EVENT_SIZE < event_bytesize
+           log.warn "Log event is discarded because it is too large: #{event_bytesize} bytes exceeds limit of #{MAX_EVENT_SIZE}"
+           break
+         end
+
+         new_chunk = chunk + [event]
+
+         chunk_span_too_big = new_chunk.size > 1 && new_chunk[-1][:timestamp] - new_chunk[0][:timestamp] >= 1000 * 60 * 60 * 24
+         chunk_too_big = total_bytesize + event_bytesize > MAX_EVENTS_SIZE
+         chunk_too_long = @max_events_per_batch && chunk.size >= @max_events_per_batch
+         if chunk_too_big or chunk_span_too_big or chunk_too_long
+           put_events(group_name, stream_name, chunk, total_bytesize)
+           chunk = [event]
+           total_bytesize = event_bytesize
+         else
+           chunk << event
+           total_bytesize += event_bytesize
+         end
+       end
+
+       unless chunk.empty?
+         put_events(group_name, stream_name, chunk, total_bytesize)
+       end
+     end
+
+     def put_events(group_name, stream_name, events, events_bytesize)
+       response = nil
+       retry_count = 0
+
+       until response
+         args = {
+           log_events: events,
+           log_group_name: group_name,
+           log_stream_name: stream_name,
+         }
+
+         token = next_sequence_token(group_name, stream_name)
+         args[:sequence_token] = token if token
+
+         begin
+           t = Time.now
+           response = @logs.put_log_events(args)
+           request = {
+             "group" => group_name,
+             "stream" => stream_name,
+             "events_count" => events.size,
+             "events_bytesize" => events_bytesize,
+             "sequence_token" => token,
+             "thread" => Thread.current.object_id,
+             "request_sec" => Time.now - t,
+           }
+           if response.rejected_log_events_info != nil && @log_rejected_request
+             log.warn response.rejected_log_events_info
+             log.warn "Called PutLogEvents API", request
+           else
+             log.debug "Called PutLogEvents API", request
+           end
+         rescue Aws::CloudWatchLogs::Errors::InvalidSequenceTokenException, Aws::CloudWatchLogs::Errors::DataAlreadyAcceptedException => err
+           sleep 1 # to avoid too many API calls
+           log_stream = find_log_stream(group_name, stream_name)
+           store_next_sequence_token(group_name, stream_name, log_stream.upload_sequence_token)
+ log.warn "updating upload sequence token forcefully because unrecoverable error occured", {
353
+ "error" => err,
354
+ "log_group" => group_name,
355
+ "log_stream" => stream_name,
356
+ "new_sequence_token" => token,
357
+ }
358
+ retry_count += 1
359
+ rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => err
360
+ if @auto_create_stream && err.message == 'The specified log stream does not exist.'
361
+             log.warn 'Creating log stream because a "The specified log stream does not exist." error was returned', {
+ "error" => err,
363
+ "log_group" => group_name,
364
+ "log_stream" => stream_name,
365
+ }
366
+ create_log_stream(group_name, stream_name)
367
+ delete_sequence_token(group_name, stream_name)
368
+ retry_count += 1
369
+ else
370
+ raise err
371
+ end
372
+ rescue Aws::CloudWatchLogs::Errors::ThrottlingException => err
373
+ if !@put_log_events_disable_retry_limit && @put_log_events_retry_limit < retry_count
374
+ log.error "failed to PutLogEvents and discard logs because retry count exceeded put_log_events_retry_limit", {
375
+ "error_class" => err.class.to_s,
376
+ "error" => err.message,
377
+ }
378
+ return
379
+ else
380
+ sleep_sec = @put_log_events_retry_wait * (2 ** retry_count)
381
+ sleep_sec += sleep_sec * (0.25 * (rand - 0.5))
382
+ log.warn "failed to PutLogEvents", {
383
+ "next_retry" => Time.now + sleep_sec,
384
+ "error_class" => err.class.to_s,
385
+ "error" => err.message,
386
+ }
387
+ sleep(sleep_sec)
388
+ retry_count += 1
389
+ end
390
+ end
391
+ end
392
+
393
+ if 0 < retry_count
394
+ log.warn "retry succeeded"
395
+ end
396
+
397
+ store_next_sequence_token(group_name, stream_name, response.next_sequence_token)
398
+ end
399
+
400
+ def create_log_group(group_name, log_group_aws_tags = nil, retention_in_days = nil)
401
+ begin
402
+ @logs.create_log_group(log_group_name: group_name, tags: log_group_aws_tags)
403
+ unless retention_in_days.nil?
404
+ put_retention_policy(group_name, retention_in_days)
405
+ end
406
+ @sequence_tokens[group_name] = {}
407
+ rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException
408
+ log.debug "Log group '#{group_name}' already exists"
409
+ end
410
+ end
411
+
412
+ def put_retention_policy(group_name, retention_in_days)
413
+ begin
414
+ @logs.put_retention_policy({
415
+ log_group_name: group_name,
416
+ retention_in_days: retention_in_days
417
+ })
418
+ rescue Aws::CloudWatchLogs::Errors::InvalidParameterException => error
419
+ log.warn "failed to set retention policy for Log group '#{group_name}' with error #{error.backtrace}"
420
+ end
421
+ end
422
+
423
+ def create_log_stream(group_name, stream_name)
424
+ begin
425
+ @logs.create_log_stream(log_group_name: group_name, log_stream_name: stream_name)
426
+ @sequence_tokens[group_name] ||= {}
427
+ @sequence_tokens[group_name][stream_name] = nil
428
+ rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException
429
+ log.debug "Log stream '#{stream_name}' already exists"
430
+ end
431
+ end
432
+
433
+ def log_group_exists?(group_name)
434
+ if @sequence_tokens[group_name]
435
+ true
436
+ elsif @logs.describe_log_groups.any? {|page| page.log_groups.any? {|i| i.log_group_name == group_name } }
437
+ @sequence_tokens[group_name] = {}
438
+ true
439
+ else
440
+ false
441
+ end
442
+ end
443
+
444
+ def log_stream_exists?(group_name, stream_name)
445
+ if not @sequence_tokens[group_name]
446
+ false
447
+ elsif @sequence_tokens[group_name].has_key?(stream_name)
448
+ true
449
+ elsif (log_stream = find_log_stream(group_name, stream_name))
450
+ @sequence_tokens[group_name][stream_name] = log_stream.upload_sequence_token
451
+ true
452
+ else
453
+ false
454
+ end
455
+ end
456
+
457
+ def find_log_stream(group_name, stream_name)
458
+ next_token = nil
459
+ loop do
460
+ response = @logs.describe_log_streams(log_group_name: group_name, log_stream_name_prefix: stream_name, next_token: next_token)
461
+ if (log_stream = response.log_streams.find {|i| i.log_stream_name == stream_name })
462
+ return log_stream
463
+ end
464
+ if response.next_token.nil?
465
+ break
466
+ end
467
+ next_token = response.next_token
468
+ sleep 0.1
469
+ end
470
+ nil
471
+ end
472
+ end
473
+ end
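As a closing usage sketch for the output side: since write groups records by (log group, log stream), per-record routing via log_group_name_key/log_stream_name_key is a natural fit, and configure requires exactly one group selector and one stream selector. All names below are hypothetical placeholders:

  <match app.**>
    @type cloudwatch_logs
    region us-east-1
    log_group_name_key group
    remove_log_group_name_key true
    log_stream_name_key stream
    remove_log_stream_name_key true
    auto_create_stream true
    retention_in_days 14
    concurrency 4
    <buffer>
      flush_interval 5s
    </buffer>
  </match>

Note that retention_in_days only takes effect when the plugin itself creates the log group (auto_create_stream true), per create_log_group above.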