fluent-plugin-cloudwatch-logs 0.9.1 → 0.10.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 849137579f60a74de04a845cdad60ec5c31a7903b8af5248c1122a06a4d66b3a
4
- data.tar.gz: 4911ef8c0a16b88501f08cfe2dd217e458025d68496b1f279662bef606a53d2d
3
+ metadata.gz: 105e96fa516c4f972fc2b01f83c1daa30d39ff4b827b46b811b9f30514231516
4
+ data.tar.gz: 85741a1b0ccad0f9e207c837e7417b786ef68d1fc1552cbe80b73030e5b4191b
5
5
  SHA512:
6
- metadata.gz: 72233f6fe060a41813673688fe5ca00ec36de7e6e21bc52fcdb7751091a2e7b0198110e398ad5233021527496f74eee22a3c18490d2f35c847cea3587dcf5c40
7
- data.tar.gz: 13357b5705f53a84a5598132238c00fc9caf1c0b317e943e38a1ff8b3ac96d1e427a01499ce89f911291b0571ac43876528c3faaa0bd821a600d20805fe76ec7
6
+ metadata.gz: 84a8458e9704102609b382b9cb8fe4c332a1c1b9c9562b89b959961bb3624958cbffc1f965eba2a502c10a0e45f1ceb427a987681d5420c35663bc6df88295ed
7
+ data.tar.gz: d6e22ad22c4eef5cb6c8fab7690ca75585e7ad99756926181f50c43fef70df0e52e12722e14809b793947e56ed8199fc393a318f42308f05fd6d23d6aa9a4b13
data/README.md CHANGED
@@ -149,7 +149,7 @@ Fetch sample log from CloudWatch Logs:
149
149
  * `remove_log_group_aws_tags_key`: remove field specified by `log_group_aws_tags_key`
150
150
  * `remove_log_group_name_key`: remove field specified by `log_group_name_key`
151
151
  * `remove_log_stream_name_key`: remove field specified by `log_stream_name_key`
152
- * `remove_retention_in_days`: remove field specified by `retention_in_days`
152
+ * `remove_retention_in_days_key`: remove field specified by `retention_in_days_key`
153
153
  * `retention_in_days`: use to set the expiry time for log group when created with `auto_create_stream`. (default to no expiry)
154
154
  * `retention_in_days_key`: use specified field of records as retention period
155
155
  * `use_tag_as_group`: to use tag as a group name
@@ -178,6 +178,9 @@ Please refer to [the PutRetentionPolicy column in documentation](https://docs.aw
178
178
  #<parse>
179
179
  # @type none # or csv, tsv, regexp etc.
180
180
  #</parse>
181
+ #<storage>
182
+ # @type local # or redis, memcached, etc.
183
+ #</storage>
181
184
  </source>
182
185
  ```
183
186
 
@@ -193,7 +196,9 @@ Please refer to [the PutRetentionPolicy column in documentation](https://docs.aw
193
196
  * `log_group_name`: name of log group to fetch logs
194
197
  * `log_stream_name`: name of log stream to fetch logs
195
198
  * `region`: AWS Region. See [Authentication](#authentication) for more information.
196
- * `state_file`: file to store current state (e.g. next\_forward\_token)
199
+ * `throttling_retry_seconds`: time period in seconds to retry a request when the AWS CloudWatch rate limit is exceeded (default: nil)
200
+ * `include_metadata`: include metadata such as `log_group_name` and `log_stream_name`. (default: false)
201
+ * `state_file`: file to store current state (e.g. next\_forward\_token). This parameter is deprecated. Use `<storage>` instead.
197
202
  * `tag`: fluentd tag
198
203
  * `use_log_stream_name_prefix`: to use `log_stream_name` as log stream name prefix (default false)
199
204
  * `use_todays_log_stream`: use todays and yesterdays date as log stream name prefix (formatted YYYY/MM/DD). (default: `false`)
@@ -203,6 +208,7 @@ Please refer to [the PutRetentionPolicy column in documentation](https://docs.aw
203
208
  * `time_range_format`: specify time format for time range. (default: `%Y-%m-%d %H:%M:%S`)
204
209
  * `format`: specify CloudWatchLogs' log format. (default `nil`)
205
210
  * `<parse>`: specify parser plugin configuration. see also: https://docs.fluentd.org/v/1.0/parser#how-to-use
211
+ * `<storage>`: specify storage plugin configuration. see also: https://docs.fluentd.org/v/1.0/storage#how-to-use
206
212
 
207
213
  ## Test
208
214
 
@@ -2,7 +2,7 @@ module Fluent
2
2
  module Plugin
3
3
  module Cloudwatch
4
4
  module Logs
5
- VERSION = "0.9.1"
5
+ VERSION = "0.10.0"
6
6
  end
7
7
  end
8
8
  end
@@ -8,7 +8,9 @@ module Fluent::Plugin
8
8
  class CloudwatchLogsInput < Input
9
9
  Fluent::Plugin.register_input('cloudwatch_logs', self)
10
10
 
11
- helpers :parser, :thread, :compat_parameters
11
+ helpers :parser, :thread, :compat_parameters, :storage
12
+
13
+ DEFAULT_STORAGE_TYPE = 'local'
12
14
 
13
15
  config_param :aws_key_id, :string, default: nil, secret: true
14
16
  config_param :aws_sec_key, :string, default: nil, secret: true
@@ -21,7 +23,8 @@ module Fluent::Plugin
21
23
  config_param :log_group_name, :string
22
24
  config_param :log_stream_name, :string, default: nil
23
25
  config_param :use_log_stream_name_prefix, :bool, default: false
24
- config_param :state_file, :string
26
+ config_param :state_file, :string, default: nil,
27
+ deprecated: "Use <storage> instead."
25
28
  config_param :fetch_interval, :time, default: 60
26
29
  config_param :http_proxy, :string, default: nil
27
30
  config_param :json_handler, :enum, list: [:yajl, :json], default: :yajl
@@ -30,11 +33,19 @@ module Fluent::Plugin
30
33
  config_param :start_time, :string, default: nil
31
34
  config_param :end_time, :string, default: nil
32
35
  config_param :time_range_format, :string, default: "%Y-%m-%d %H:%M:%S"
36
+ config_param :throttling_retry_seconds, :time, default: nil
37
+ config_param :include_metadata, :bool, default: false
33
38
 
34
39
  config_section :parse do
35
40
  config_set_default :@type, 'none'
36
41
  end
37
42
 
43
+ config_section :storage do
44
+ config_set_default :usage, 'store_next_tokens'
45
+ config_set_default :@type, DEFAULT_STORAGE_TYPE
46
+ config_set_default :persistent, false
47
+ end
48
+
38
49
  def initialize
39
50
  super
40
51
 
@@ -52,6 +63,7 @@ module Fluent::Plugin
52
63
  if @start_time && @end_time && (@end_time < @start_time)
53
64
  raise Fluent::ConfigError, "end_time(#{@end_time}) should be greater than start_time(#{@start_time})."
54
65
  end
66
+ @next_token_storage = storage_create(usage: 'store_next_tokens', conf: config, default_type: DEFAULT_STORAGE_TYPE)
55
67
  end
56
68
 
57
69
  def start
@@ -98,20 +110,28 @@ module Fluent::Plugin
98
110
  end
99
111
  end
100
112
 
101
- def state_file_for(log_stream_name)
102
- return "#{@state_file}_#{log_stream_name.gsub(File::SEPARATOR, '-')}" if log_stream_name
103
- return @state_file
113
+ def state_key_for(log_stream_name)
114
+ if log_stream_name
115
+ "#{@state_file}_#{log_stream_name.gsub(File::SEPARATOR, '-')}"
116
+ else
117
+ @state_file
118
+ end
119
+ end
120
+
121
+ def migrate_state_file_to_storage(log_stream_name)
122
+ @next_token_storage.put(:"#{state_key_for(log_stream_name)}", File.read(state_key_for(log_stream_name)).chomp)
123
+ File.delete(state_key_for(log_stream_name))
104
124
  end
105
125
 
106
126
  def next_token(log_stream_name)
107
- return nil unless File.exist?(state_file_for(log_stream_name))
108
- File.read(state_file_for(log_stream_name)).chomp
127
+ if @next_token_storage.persistent && File.exist?(state_key_for(log_stream_name))
128
+ migrate_state_file_to_storage(log_stream_name)
129
+ end
130
+ @next_token_storage.get(:"#{state_key_for(log_stream_name)}")
109
131
  end
110
132
 
111
133
  def store_next_token(token, log_stream_name = nil)
112
- File.open(state_file_for(log_stream_name), 'w') do |f|
113
- f.write token
114
- end
134
+ @next_token_storage.put(:"#{state_key_for(log_stream_name)}", token)
115
135
  end
116
136
 
117
137
  def run
@@ -129,8 +149,16 @@ module Fluent::Plugin
129
149
  log_streams.each do |log_stream|
130
150
  log_stream_name = log_stream.log_stream_name
131
151
  events = get_events(log_stream_name)
152
+ metadata = if @include_metadata
153
+ {
154
+ "log_stream_name" => log_stream_name,
155
+ "log_group_name" => @log_group_name
156
+ }
157
+ else
158
+ {}
159
+ end
132
160
  events.each do |event|
133
- emit(log_stream_name, event)
161
+ emit(log_stream_name, event, metadata)
134
162
  end
135
163
  end
136
164
  rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException
@@ -139,8 +167,16 @@ module Fluent::Plugin
139
167
  end
140
168
  else
141
169
  events = get_events(@log_stream_name)
170
+ metadata = if @include_metadata
171
+ {
172
+ "log_stream_name" => @log_stream_name,
173
+ "log_group_name" => @log_group_name
174
+ }
175
+ else
176
+ {}
177
+ end
142
178
  events.each do |event|
143
- emit(log_stream_name, event)
179
+ emit(log_stream_name, event, metadata)
144
180
  end
145
181
  end
146
182
  end
@@ -148,18 +184,24 @@ module Fluent::Plugin
148
184
  end
149
185
  end
150
186
 
151
- def emit(stream, event)
187
+ def emit(stream, event, metadata)
152
188
  if @parser
153
189
  @parser.parse(event.message) {|time,record|
154
190
  if @use_aws_timestamp
155
191
  time = (event.timestamp / 1000).floor
156
192
  end
193
+ unless metadata.empty?
194
+ record.merge!("metadata" => metadata)
195
+ end
157
196
  router.emit(@tag, time, record)
158
197
  }
159
198
  else
160
199
  time = (event.timestamp / 1000).floor
161
200
  begin
162
201
  record = @json_handler.load(event.message)
202
+ unless metadata.empty?
203
+ record.merge!("metadata" => metadata)
204
+ end
163
205
  router.emit(@tag, time, record)
164
206
  rescue JSON::ParserError, Yajl::ParseError => error # Catch parser errors
165
207
  log.error "Invalid JSON encountered while parsing event.message"
@@ -169,38 +211,55 @@ module Fluent::Plugin
169
211
  end
170
212
 
171
213
  def get_events(log_stream_name)
172
- request = {
173
- log_group_name: @log_group_name,
174
- log_stream_name: log_stream_name
175
- }
176
- request.merge!(start_time: @start_time) if @start_time
177
- request.merge!(end_time: @end_time) if @end_time
178
- log_next_token = next_token(log_stream_name)
179
- request[:next_token] = log_next_token if !log_next_token.nil? && !log_next_token.empty?
180
- response = @logs.get_log_events(request)
181
- if valid_next_token(log_next_token, response.next_forward_token)
182
- store_next_token(response.next_forward_token, log_stream_name)
183
- end
214
+ throttling_handler('get_log_events') do
215
+ request = {
216
+ log_group_name: @log_group_name,
217
+ log_stream_name: log_stream_name
218
+ }
219
+ request.merge!(start_time: @start_time) if @start_time
220
+ request.merge!(end_time: @end_time) if @end_time
221
+ log_next_token = next_token(log_stream_name)
222
+ request[:next_token] = log_next_token if !log_next_token.nil? && !log_next_token.empty?
223
+ response = @logs.get_log_events(request)
224
+ if valid_next_token(log_next_token, response.next_forward_token)
225
+ store_next_token(response.next_forward_token, log_stream_name)
226
+ end
184
227
 
185
- response.events
228
+ response.events
229
+ end
186
230
  end
187
231
 
188
232
  def describe_log_streams(log_stream_name_prefix, log_streams = nil, next_token = nil)
189
- request = {
190
- log_group_name: @log_group_name
191
- }
192
- request[:next_token] = next_token if next_token
193
- request[:log_stream_name_prefix] = log_stream_name_prefix
194
- response = @logs.describe_log_streams(request)
195
- if log_streams
196
- log_streams.concat(response.log_streams)
197
- else
198
- log_streams = response.log_streams
233
+ throttling_handler('describe_log_streams') do
234
+ request = {
235
+ log_group_name: @log_group_name
236
+ }
237
+ request[:next_token] = next_token if next_token
238
+ request[:log_stream_name_prefix] = log_stream_name_prefix if log_stream_name_prefix
239
+ response = @logs.describe_log_streams(request)
240
+ if log_streams
241
+ log_streams.concat(response.log_streams)
242
+ else
243
+ log_streams = response.log_streams
244
+ end
245
+ if response.next_token
246
+ log_streams = describe_log_streams(log_stream_name_prefix, log_streams, response.next_token)
247
+ end
248
+ log_streams
199
249
  end
200
- if response.next_token
201
- log_streams = describe_log_streams(log_stream_name_prefix, log_streams, response.next_token)
250
+ end
251
+
252
+ def throttling_handler(method_name)
253
+ yield
254
+ rescue Aws::CloudWatchLogs::Errors::ThrottlingException => err
255
+ if throttling_retry_seconds
256
+ log.warn "ThrottlingException #{method_name}. Waiting #{throttling_retry_seconds} seconds to retry."
257
+ sleep throttling_retry_seconds
258
+
259
+ throttling_handler(method_name) { yield }
260
+ else
261
+ raise err
202
262
  end
203
- log_streams
204
263
  end
205
264
 
206
265
  def valid_next_token(prev_token, next_token)
@@ -41,7 +41,7 @@ module Fluent::Plugin
41
41
  config_param :remove_log_group_aws_tags_key, :bool, default: false
42
42
  config_param :retention_in_days, :integer, default: nil
43
43
  config_param :retention_in_days_key, :string, default: nil
44
- config_param :remove_retention_in_days, :bool, default: false
44
+ config_param :remove_retention_in_days_key, :bool, default: false
45
45
  config_param :json_handler, :enum, list: [:yajl, :json], :default => :yajl
46
46
  config_param :log_rejected_request, :bool, :default => false
47
47
 
@@ -219,6 +219,14 @@ module Fluent::Plugin
219
219
 
220
220
  events = []
221
221
  rs.each do |t, time, record|
222
+ if @log_group_aws_tags_key && @remove_log_group_aws_tags_key
223
+ record.delete(@log_group_aws_tags_key)
224
+ end
225
+
226
+ if @retention_in_days_key && @remove_retention_in_days_key
227
+ record.delete(@retention_in_days_key)
228
+ end
229
+
222
230
  record = drop_empty_record(record)
223
231
 
224
232
  time_ms = (time.to_f * 1000).floor
@@ -369,8 +377,7 @@ module Fluent::Plugin
369
377
  end
370
378
  rescue Aws::CloudWatchLogs::Errors::InvalidSequenceTokenException, Aws::CloudWatchLogs::Errors::DataAlreadyAcceptedException => err
371
379
  sleep 1 # to avoid too many API calls
372
- log_stream = find_log_stream(group_name, stream_name)
373
- store_next_sequence_token(group_name, stream_name, log_stream.upload_sequence_token)
380
+ store_next_sequence_token(group_name, stream_name, err.expected_sequence_token)
374
381
  log.warn "updating upload sequence token forcefully because unrecoverable error occured", {
375
382
  "error" => err,
376
383
  "log_group" => group_name,
@@ -28,6 +28,7 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
28
28
  start_time "2019-06-18 00:00:00Z"
29
29
  end_time "2020-01-18 00:00:00Z"
30
30
  time_range_format "%Y-%m-%d %H:%M:%S%z"
31
+ throttling_retry_seconds 30
31
32
  EOC
32
33
 
33
34
  assert_equal('test_id', d.instance.aws_key_id)
@@ -43,6 +44,7 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
43
44
  assert_equal(1560816000000, d.instance.start_time)
44
45
  assert_equal(1579305600000, d.instance.end_time)
45
46
  assert_equal("%Y-%m-%d %H:%M:%S%z", d.instance.time_range_format)
47
+ assert_equal(30, d.instance.throttling_retry_seconds)
46
48
  end
47
49
 
48
50
  test 'invalid time range' do
@@ -97,6 +99,34 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
97
99
  assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs2'}], emits[1])
98
100
  end
99
101
 
102
+ def test_emit_with_metadata
103
+ create_log_stream
104
+
105
+ time_ms = (Time.now.to_f * 1000).floor
106
+ put_log_events([
107
+ {timestamp: time_ms, message: '{"cloudwatch":"logs1"}'},
108
+ {timestamp: time_ms, message: '{"cloudwatch":"logs2"}'},
109
+ ])
110
+
111
+ sleep 5
112
+
113
+ d = create_driver(default_config + %[include_metadata true])
114
+ d.run(expect_emits: 2, timeout: 5)
115
+
116
+ emits = d.events
117
+ assert_true(emits[0][2].has_key?("metadata"))
118
+ assert_true(emits[1][2].has_key?("metadata"))
119
+ emits[0][2].delete_if {|k, v|
120
+ k == "metadata"
121
+ }
122
+ emits[1][2].delete_if {|k, v|
123
+ k == "metadata"
124
+ }
125
+ assert_equal(2, emits.size)
126
+ assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs1'}], emits[0])
127
+ assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs2'}], emits[1])
128
+ end
129
+
100
130
  def test_emit_with_aws_timestamp
101
131
  create_log_stream
102
132
 
@@ -171,7 +201,6 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
171
201
  '@type' => 'cloudwatch_logs',
172
202
  'log_group_name' => "#{log_group_name}",
173
203
  'log_stream_name' => "#{log_stream_name}",
174
- 'state_file' => '/tmp/state',
175
204
  }
176
205
  cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_key_id)) if ENV['aws_key_id']
177
206
  cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_sec_key)) if ENV['aws_sec_key']
@@ -181,7 +210,49 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
181
210
  csv_format_config = config_element('ROOT', '', cloudwatch_config, [
182
211
  config_element('parse', '', {'@type' => 'csv',
183
212
  'keys' => 'time,message',
184
- 'time_key' => 'time'})
213
+ 'time_key' => 'time'}),
214
+ config_element('storage', '', {'@type' => 'local',
215
+ 'path' => '/tmp/state'})
216
+ ])
217
+ create_log_stream
218
+
219
+ time_ms = (Time.now.to_f * 1000).floor
220
+ log_time_ms = time_ms - 10000
221
+ put_log_events([
222
+ {timestamp: time_ms, message: Time.at(log_time_ms/1000.floor).to_s + ",Cloudwatch non json logs1"},
223
+ {timestamp: time_ms, message: Time.at(log_time_ms/1000.floor).to_s + ",Cloudwatch non json logs2"},
224
+ ])
225
+
226
+ sleep 5
227
+
228
+ d = create_driver(csv_format_config)
229
+ d.run(expect_emits: 2, timeout: 5)
230
+
231
+ emits = d.events
232
+ assert_equal(2, emits.size)
233
+ assert_equal(['test', (log_time_ms / 1000).floor, {"message"=>"Cloudwatch non json logs1"}], emits[0])
234
+ assert_equal(['test', (log_time_ms / 1000).floor, {"message"=>"Cloudwatch non json logs2"}], emits[1])
235
+ end
236
+
237
+ test "emit with <parse> csv with metadata" do
238
+ cloudwatch_config = {'tag' => "test",
239
+ '@type' => 'cloudwatch_logs',
240
+ 'log_group_name' => "#{log_group_name}",
241
+ 'log_stream_name' => "#{log_stream_name}",
242
+ 'include_metadata' => true,
243
+ }
244
+ cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_key_id)) if ENV['aws_key_id']
245
+ cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_sec_key)) if ENV['aws_sec_key']
246
+ cloudwatch_config = cloudwatch_config.merge!(config_elementify(region)) if ENV['region']
247
+ cloudwatch_config = cloudwatch_config.merge!(config_elementify(endpoint)) if ENV['endpoint']
248
+
249
+ csv_format_config = config_element('ROOT', '', cloudwatch_config, [
250
+ config_element('parse', '', {'@type' => 'csv',
251
+ 'keys' => 'time,message',
252
+ 'time_key' => 'time'}),
253
+ config_element('storage', '', {'@type' => 'local',
254
+ 'path' => '/tmp/state'})
255
+
185
256
  ])
186
257
  create_log_stream
187
258
 
@@ -198,6 +269,14 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
198
269
  d.run(expect_emits: 2, timeout: 5)
199
270
 
200
271
  emits = d.events
272
+ assert_true(emits[0][2].has_key?("metadata"))
273
+ assert_true(emits[1][2].has_key?("metadata"))
274
+ emits[0][2].delete_if {|k, v|
275
+ k == "metadata"
276
+ }
277
+ emits[1][2].delete_if {|k, v|
278
+ k == "metadata"
279
+ }
201
280
  assert_equal(2, emits.size)
202
281
  assert_equal(['test', (log_time_ms / 1000).floor, {"message"=>"Cloudwatch non json logs1"}], emits[0])
203
282
  assert_equal(['test', (log_time_ms / 1000).floor, {"message"=>"Cloudwatch non json logs2"}], emits[1])
@@ -244,7 +323,6 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
244
323
  '@type' => 'cloudwatch_logs',
245
324
  'log_group_name' => "#{log_group_name}",
246
325
  'log_stream_name' => "#{log_stream_name}",
247
- 'state_file' => '/tmp/state',
248
326
  }
249
327
  cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_key_id)) if ENV['aws_key_id']
250
328
  cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_sec_key)) if ENV['aws_sec_key']
@@ -254,7 +332,9 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
254
332
  regex_format_config = config_element('ROOT', '', cloudwatch_config, [
255
333
  config_element('parse', '', {'@type' => 'regexp',
256
334
  'expression' => "/^(?<cloudwatch>[^ ]*)?/",
257
- })
335
+ }),
336
+ config_element('storage', '', {'@type' => 'local',
337
+ 'path' => '/tmp/state'})
258
338
  ])
259
339
  create_log_stream
260
340
 
@@ -608,6 +688,59 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
608
688
  assert_equal(["test", ((time_ms + 7000) / 1000), { "cloudwatch" => "logs7" }], events[6])
609
689
  assert_equal(["test", ((time_ms + 8000) / 1000), { "cloudwatch" => "logs8" }], events[7])
610
690
  end
691
+
692
+ test "retry on Aws::CloudWatchLogs::Errors::ThrottlingException in get_log_events" do
693
+ config = <<-CONFIG
694
+ tag test
695
+ @type cloudwatch_logs
696
+ log_group_name #{log_group_name}
697
+ state_file /tmp/state
698
+ fetch_interval 0.1
699
+ throttling_retry_seconds 0.2
700
+ CONFIG
701
+
702
+ # it will raises the error 2 times
703
+ counter = 0
704
+ times = 2
705
+ stub(@client).get_log_events(anything) {
706
+ counter += 1
707
+ counter <= times ? raise(Aws::CloudWatchLogs::Errors::ThrottlingException.new(nil, "error")) : OpenStruct.new(events: [], next_forward_token: nil)
708
+ }
709
+
710
+ d = create_driver(config)
711
+
712
+ # so, it is expected to valid_next_token once
713
+ mock(d.instance).valid_next_token(nil, nil).once
714
+
715
+ d.run
716
+ assert_equal(2, d.logs.select {|l| l =~ /ThrottlingException get_log_events. Waiting 0.2 seconds to retry/ }.size)
717
+ end
718
+
719
+ test "retry on Aws::CloudWatchLogs::Errors::ThrottlingException in describe_log_streams" do
720
+ config = <<-CONFIG
721
+ tag test
722
+ @type cloudwatch_logs
723
+ log_group_name #{log_group_name}
724
+ use_log_stream_name_prefix true
725
+ state_file /tmp/state
726
+ fetch_interval 0.1
727
+ throttling_retry_seconds 0.2
728
+ CONFIG
729
+
730
+ # it will raises the error 2 times
731
+ log_stream = Aws::CloudWatchLogs::Types::LogStream.new(log_stream_name: "stream_name")
732
+ counter = 0
733
+ times = 2
734
+ stub(@client).describe_log_streams(anything) {
735
+ counter += 1
736
+ counter <= times ? raise(Aws::CloudWatchLogs::Errors::ThrottlingException.new(nil, "error")) : OpenStruct.new(log_streams: [log_stream], next_token: nil)
737
+ }
738
+
739
+ d = create_driver(config)
740
+
741
+ d.run
742
+ assert_equal(2, d.logs.select {|l| l =~ /ThrottlingException describe_log_streams. Waiting 0.2 seconds to retry/ }.size)
743
+ end
611
744
  end
612
745
 
613
746
  private
@@ -547,6 +547,37 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
547
547
  assert(d.logs.any?{|log| log.include?("failed to set retention policy for Log group")})
548
548
  end
549
549
 
550
+ def test_remove_retention_in_days_key
551
+ new_log_stream
552
+
553
+ d = create_driver(<<-EOC)
554
+ #{default_config}
555
+ log_group_name #{log_group_name}
556
+ log_stream_name #{log_stream_name}
557
+ retention_in_days_key retention_in_days
558
+ remove_retention_in_days_key true
559
+ EOC
560
+
561
+ records = [
562
+ {'cloudwatch' => 'logs1', 'message' => 'message1', 'retention_in_days' => '7'},
563
+ {'cloudwatch' => 'logs2', 'message' => 'message2', 'retention_in_days' => '7'},
564
+ ]
565
+
566
+ time = Time.now
567
+ d.run(default_tag: fluentd_tag) do
568
+ records.each_with_index do |record, i|
569
+ d.feed(time.to_i + i, record)
570
+ end
571
+ end
572
+
573
+ sleep 10
574
+
575
+ events = get_log_events
576
+ assert_equal(2, events.size)
577
+ assert_equal({'cloudwatch' => 'logs1', 'message' => 'message1'}, JSON.parse(events[0].message))
578
+ assert_equal({'cloudwatch' => 'logs2', 'message' => 'message2'}, JSON.parse(events[1].message))
579
+ end
580
+
550
581
  def test_log_group_aws_tags_key
551
582
  clear_log_group
552
583
 
@@ -576,6 +607,37 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
576
607
  assert_equal("value2", awstags.fetch("tag2"))
577
608
  end
578
609
 
610
+ def test_remove_log_group_aws_tags_key
611
+ new_log_stream
612
+
613
+ d = create_driver(<<-EOC)
614
+ #{default_config}
615
+ log_group_name #{log_group_name}
616
+ log_stream_name #{log_stream_name}
617
+ log_group_aws_tags_key log_group_tags
618
+ remove_log_group_aws_tags_key true
619
+ EOC
620
+
621
+ records = [
622
+ {'cloudwatch' => 'logs1', 'message' => 'message1', 'log_group_tags' => {"tag1" => "value1", "tag2" => "value2"}},
623
+ {'cloudwatch' => 'logs2', 'message' => 'message2', 'log_group_tags' => {"tag1" => "value1", "tag2" => "value2"}},
624
+ ]
625
+
626
+ time = Time.now
627
+ d.run(default_tag: fluentd_tag) do
628
+ records.each_with_index do |record, i|
629
+ d.feed(time.to_i + i, record)
630
+ end
631
+ end
632
+
633
+ sleep 10
634
+
635
+ events = get_log_events
636
+ assert_equal(2, events.size)
637
+ assert_equal({'cloudwatch' => 'logs1', 'message' => 'message1'}, JSON.parse(events[0].message))
638
+ assert_equal({'cloudwatch' => 'logs2', 'message' => 'message2'}, JSON.parse(events[1].message))
639
+ end
640
+
579
641
  def test_log_group_aws_tags_key_same_group_diff_tags
580
642
  clear_log_group
581
643
 
@@ -42,10 +42,7 @@ module CloudwatchLogsTestHelper
42
42
  end
43
43
 
44
44
  def log_stream_name(log_stream_name_prefix = nil)
45
- if !@log_stream_name
46
- new_log_stream(log_stream_name_prefix)
47
- end
48
- @log_stream_name
45
+ @log_stream_name ||= new_log_stream(log_stream_name_prefix)
49
46
  end
50
47
 
51
48
  def new_log_stream(log_stream_name_prefix = nil)
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluent-plugin-cloudwatch-logs
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.9.1
4
+ version: 0.10.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Ryota Arai
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2020-03-24 00:00:00.000000000 Z
11
+ date: 2020-06-23 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: fluentd