fluent-plugin-cloudwatch-logs 0.9.3 → 0.10.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: aa72486196ed38595f884945613e076fecd8f66c6d77046e118a8fc24301dc06
- data.tar.gz: 1076cb96661bf6d89d3a958989c04a9f690fae9b5ac6aae33a7d9231e5cbb8d5
+ metadata.gz: 3a66a32521964e4be0392f1d786e81750850b4f027b5ec9649a9033ab330595a
+ data.tar.gz: 6041e92eea6e3148133a78a72ad3d9702ff33f89da89216e4bff579693459725
  SHA512:
- metadata.gz: c21965fb1e9c340a92cc2d482ce5fed0590736f14df03da1d6706ae039f74f2e767f897e693fafdba644306b9ab4dde178a11abcbb23fb138b7b64f6b3ba08cc
- data.tar.gz: 93b8ec018eeffa3229b12f9daf312ec660eafb39d4159c42c92863c0150a3d29c43b93ff55dfda46e4f03917a96eb92ef4f59f358e23eda2e37facb0572ecfce
+ metadata.gz: e8c2a9720f9e309698c1ac04e51bcb1acbe141443997dde1e9daf636cc9ce5356d3f87f7fb9ea18b7c9c760e1e9a57039e9d7aafb1bbcf98d2cbe3f657e89483
+ data.tar.gz: bf1d58dd34328aedb4d18529b9f172c05b442a3040a332ee0f5d4edfd1e76b50b2df72f2edeb195f44a77ea45628627e3f4413847203581c9610bc5cee69fb75
.github/workflows/issue-auto-closer.yml ADDED
@@ -0,0 +1,12 @@
+ name: Autocloser
+ on: [issues]
+ jobs:
+   autoclose:
+     runs-on: ubuntu-latest
+     steps:
+     - name: Autoclose issues that did not follow issue template
+       uses: roots/issue-closer-action@v1.1
+       with:
+         repo-token: ${{ secrets.GITHUB_TOKEN }}
+         issue-close-message: "@${issue.user.login} this issue was automatically closed because it did not follow the issue template."
+         issue-pattern: "(.*Problem.*)|(.*Expected Behavior or What you need to ask.*)|(.*Using Fluentd and CloudWatchLogs plugin versions.*)"
data/README.md CHANGED
@@ -43,6 +43,46 @@ Create IAM user with a policy like the following:
  }
  ```
 
+ A more restricted IAM policy for `out_cloudwatch_logs` is:
+
+ ```json
+ {
+   "Version": "2012-10-17",
+   "Statement": [
+     {
+       "Action": [
+         "logs:PutLogEvents",
+         "logs:CreateLogGroup",
+         "logs:PutRetentionPolicy",
+         "logs:CreateLogStream",
+         "logs:DescribeLogGroups",
+         "logs:DescribeLogStreams"
+       ],
+       "Effect": "Allow",
+       "Resource": "*"
+     }
+   ]
+ }
+ ```
+
+ Similarly, a more restricted IAM policy for `in_cloudwatch_logs` is:
+
+ ```json
+ {
+   "Version": "2012-10-17",
+   "Statement": [
+     {
+       "Action": [
+         "logs:GetLogEvents",
+         "logs:DescribeLogStreams"
+       ],
+       "Effect": "Allow",
+       "Resource": "*"
+     }
+   ]
+ }
+ ```
+
  ## Authentication
 
  There are several methods to provide authentication credentials. Be aware that there are various tradeoffs for these methods,
@@ -178,6 +218,9 @@ Please refer to [the PutRetentionPolicy column in documentation](https://docs.aw
  #<parse>
  # @type none # or csv, tsv, regexp etc.
  #</parse>
+ #<storage>
+ # @type local # or redis, memcached, etc.
+ #</storage>
  </source>
  ```
 
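The commented `<storage>` block above replaces the old file-based state handling. For illustration, a source that persists its next-token state across restarts could be configured as below; this is a sketch, and the tag, group/stream names, region, and path are placeholder values, not taken from the diff:

```
<source>
  @type cloudwatch_logs
  tag cloudwatch.in
  log_group_name my-log-group
  log_stream_name my-log-stream
  region us-east-1
  <storage>
    @type local
    persistent true
    path /var/lib/fluentd/cloudwatch_next_tokens.json
  </storage>
</source>
```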
@@ -193,7 +236,9 @@ Please refer to [the PutRetentionPolicy column in documentation](https://docs.aw
  * `log_group_name`: name of log group to fetch logs
  * `log_stream_name`: name of log stream to fetch logs
  * `region`: AWS Region. See [Authentication](#authentication) for more information.
- * `state_file`: file to store current state (e.g. next\_forward\_token)
+ * `throttling_retry_seconds`: time period in seconds to wait before retrying a request when the AWS CloudWatch Logs rate limit is exceeded (default: nil)
+ * `include_metadata`: include metadata such as `log_group_name` and `log_stream_name` in each record. (default: false)
+ * `state_file`: file to store current state (e.g. next\_forward\_token). This parameter is deprecated. Use `<storage>` instead.
  * `tag`: fluentd tag
  * `use_log_stream_name_prefix`: to use `log_stream_name` as log stream name prefix (default false)
  * `use_todays_log_stream`: use today's and yesterday's dates as log stream name prefix (formatted YYYY/MM/DD). (default: `false`)
@@ -203,6 +248,7 @@ Please refer to [the PutRetentionPolicy column in documentation](https://docs.aw
  * `time_range_format`: specify time format for time range. (default: `%Y-%m-%d %H:%M:%S`)
  * `format`: specify CloudWatchLogs' log format. (default `nil`)
  * `<parse>`: specify parser plugin configuration. see also: https://docs.fluentd.org/v/1.0/parser#how-to-use
+ * `<storage>`: specify storage plugin configuration. see also: https://docs.fluentd.org/v/1.0/storage#how-to-use
 
  ## Test
 
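Taken together, the new input parameters can be exercised with a configuration along these lines (a sketch; the tag and the group/stream names are placeholders):

```
<source>
  @type cloudwatch_logs
  tag cloudwatch.in
  log_group_name my-log-group
  log_stream_name my-log-stream
  region us-east-1
  include_metadata true
  throttling_retry_seconds 30
</source>
```

With `include_metadata true`, each emitted record carries a `metadata` key holding `log_group_name` and `log_stream_name`; with `throttling_retry_seconds` set, a throttled API call is retried after the given wait instead of raising immediately.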
lib/fluent/plugin/cloudwatch/logs/version.rb CHANGED
@@ -2,7 +2,7 @@ module Fluent
  module Plugin
  module Cloudwatch
  module Logs
- VERSION = "0.9.3"
+ VERSION = "0.10.2"
  end
  end
  end
lib/fluent/plugin/in_cloudwatch_logs.rb CHANGED
@@ -8,7 +8,9 @@ module Fluent::Plugin
  class CloudwatchLogsInput < Input
  Fluent::Plugin.register_input('cloudwatch_logs', self)
 
- helpers :parser, :thread, :compat_parameters
+ helpers :parser, :thread, :compat_parameters, :storage
+
+ DEFAULT_STORAGE_TYPE = 'local'
 
  config_param :aws_key_id, :string, default: nil, secret: true
  config_param :aws_sec_key, :string, default: nil, secret: true
@@ -21,7 +23,8 @@ module Fluent::Plugin
  config_param :log_group_name, :string
  config_param :log_stream_name, :string, default: nil
  config_param :use_log_stream_name_prefix, :bool, default: false
- config_param :state_file, :string
+ config_param :state_file, :string, default: nil,
+              deprecated: "Use <storage> instead."
  config_param :fetch_interval, :time, default: 60
  config_param :http_proxy, :string, default: nil
  config_param :json_handler, :enum, list: [:yajl, :json], default: :yajl
@@ -30,11 +33,19 @@ module Fluent::Plugin
  config_param :start_time, :string, default: nil
  config_param :end_time, :string, default: nil
  config_param :time_range_format, :string, default: "%Y-%m-%d %H:%M:%S"
+ config_param :throttling_retry_seconds, :time, default: nil
+ config_param :include_metadata, :bool, default: false
 
  config_section :parse do
  config_set_default :@type, 'none'
  end
 
+ config_section :storage do
+ config_set_default :usage, 'store_next_tokens'
+ config_set_default :@type, DEFAULT_STORAGE_TYPE
+ config_set_default :persistent, false
+ end
+
  def initialize
  super
 
@@ -52,6 +63,7 @@ module Fluent::Plugin
  if @start_time && @end_time && (@end_time < @start_time)
  raise Fluent::ConfigError, "end_time(#{@end_time}) should be greater than start_time(#{@start_time})."
  end
+ @next_token_storage = storage_create(usage: 'store_next_tokens', conf: config, default_type: DEFAULT_STORAGE_TYPE)
  end
 
  def start
@@ -98,20 +110,28 @@ module Fluent::Plugin
  end
  end
 
- def state_file_for(log_stream_name)
- return "#{@state_file}_#{log_stream_name.gsub(File::SEPARATOR, '-')}" if log_stream_name
- return @state_file
+ def state_key_for(log_stream_name)
+ if log_stream_name
+ "#{@state_file}_#{log_stream_name.gsub(File::SEPARATOR, '-')}"
+ else
+ @state_file
+ end
+ end
+
+ def migrate_state_file_to_storage(log_stream_name)
+ @next_token_storage.put(:"#{state_key_for(log_stream_name)}", File.read(state_key_for(log_stream_name)).chomp)
+ File.delete(state_key_for(log_stream_name))
  end
 
  def next_token(log_stream_name)
- return nil unless File.exist?(state_file_for(log_stream_name))
- File.read(state_file_for(log_stream_name)).chomp
+ if @next_token_storage.persistent && File.exist?(state_key_for(log_stream_name))
+ migrate_state_file_to_storage(log_stream_name)
+ end
+ @next_token_storage.get(:"#{state_key_for(log_stream_name)}")
  end
 
  def store_next_token(token, log_stream_name = nil)
- File.open(state_file_for(log_stream_name), 'w') do |f|
- f.write token
- end
+ @next_token_storage.put(:"#{state_key_for(log_stream_name)}", token)
  end
 
  def run
@@ -129,8 +149,16 @@ module Fluent::Plugin
  log_streams.each do |log_stream|
  log_stream_name = log_stream.log_stream_name
  events = get_events(log_stream_name)
+ metadata = if @include_metadata
+ {
+ "log_stream_name" => log_stream_name,
+ "log_group_name" => @log_group_name
+ }
+ else
+ {}
+ end
  events.each do |event|
- emit(log_stream_name, event)
+ emit(log_stream_name, event, metadata)
  end
  end
  rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException
@@ -139,8 +167,16 @@ module Fluent::Plugin
  end
  else
  events = get_events(@log_stream_name)
+ metadata = if @include_metadata
+ {
+ "log_stream_name" => @log_stream_name,
+ "log_group_name" => @log_group_name
+ }
+ else
+ {}
+ end
  events.each do |event|
- emit(log_stream_name, event)
+ emit(log_stream_name, event, metadata)
  end
  end
  end
@@ -148,18 +184,24 @@ module Fluent::Plugin
  end
  end
 
- def emit(stream, event)
+ def emit(stream, event, metadata)
  if @parser
  @parser.parse(event.message) {|time,record|
  if @use_aws_timestamp
  time = (event.timestamp / 1000).floor
  end
+ unless metadata.empty?
+ record.merge!("metadata" => metadata)
+ end
  router.emit(@tag, time, record)
  }
  else
  time = (event.timestamp / 1000).floor
  begin
  record = @json_handler.load(event.message)
+ unless metadata.empty?
+ record.merge!("metadata" => metadata)
+ end
  router.emit(@tag, time, record)
  rescue JSON::ParserError, Yajl::ParseError => error # Catch parser errors
  log.error "Invalid JSON encountered while parsing event.message"
@@ -169,38 +211,55 @@ module Fluent::Plugin
  end
 
  def get_events(log_stream_name)
- request = {
- log_group_name: @log_group_name,
- log_stream_name: log_stream_name
- }
- request.merge!(start_time: @start_time) if @start_time
- request.merge!(end_time: @end_time) if @end_time
- log_next_token = next_token(log_stream_name)
- request[:next_token] = log_next_token if !log_next_token.nil? && !log_next_token.empty?
- response = @logs.get_log_events(request)
- if valid_next_token(log_next_token, response.next_forward_token)
- store_next_token(response.next_forward_token, log_stream_name)
- end
+ throttling_handler('get_log_events') do
+ request = {
+ log_group_name: @log_group_name,
+ log_stream_name: log_stream_name
+ }
+ request.merge!(start_time: @start_time) if @start_time
+ request.merge!(end_time: @end_time) if @end_time
+ log_next_token = next_token(log_stream_name)
+ request[:next_token] = log_next_token if !log_next_token.nil? && !log_next_token.empty?
+ response = @logs.get_log_events(request)
+ if valid_next_token(log_next_token, response.next_forward_token)
+ store_next_token(response.next_forward_token, log_stream_name)
+ end
 
- response.events
+ response.events
+ end
  end
 
  def describe_log_streams(log_stream_name_prefix, log_streams = nil, next_token = nil)
- request = {
- log_group_name: @log_group_name
- }
- request[:next_token] = next_token if next_token
- request[:log_stream_name_prefix] = log_stream_name_prefix if log_stream_name_prefix
- response = @logs.describe_log_streams(request)
- if log_streams
- log_streams.concat(response.log_streams)
- else
- log_streams = response.log_streams
+ throttling_handler('describe_log_streams') do
+ request = {
+ log_group_name: @log_group_name
+ }
+ request[:next_token] = next_token if next_token
+ request[:log_stream_name_prefix] = log_stream_name_prefix if log_stream_name_prefix
+ response = @logs.describe_log_streams(request)
+ if log_streams
+ log_streams.concat(response.log_streams)
+ else
+ log_streams = response.log_streams
+ end
+ if response.next_token
+ log_streams = describe_log_streams(log_stream_name_prefix, log_streams, response.next_token)
+ end
+ log_streams
  end
- if response.next_token
- log_streams = describe_log_streams(log_stream_name_prefix, log_streams, response.next_token)
+ end
+
+ def throttling_handler(method_name)
+ yield
+ rescue Aws::CloudWatchLogs::Errors::ThrottlingException => err
+ if throttling_retry_seconds
+ log.warn "ThrottlingException #{method_name}. Waiting #{throttling_retry_seconds} seconds to retry."
+ sleep throttling_retry_seconds
+
+ throttling_handler(method_name) { yield }
+ else
+ raise err
  end
- log_streams
  end
 
  def valid_next_token(prev_token, next_token)
lib/fluent/plugin/out_cloudwatch_logs.rb CHANGED
@@ -7,6 +7,8 @@ module Fluent::Plugin
  class CloudwatchLogsOutput < Output
  Fluent::Plugin.register_output('cloudwatch_logs', self)
 
+ class TooLargeEventError < Fluent::UnrecoverableError; end
+
  helpers :compat_parameters, :inject
 
  DEFAULT_BUFFER_TYPE = "memory"
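Since `TooLargeEventError` inherits from `Fluent::UnrecoverableError`, a chunk containing an over-sized event now fails the write instead of being silently truncated, and Fluentd can route the failed chunk to a `<secondary>` output if one is configured. A sketch of such a fallback (the match pattern, names, directory, and basename are illustrative):

```
<match cloudwatch.out>
  @type cloudwatch_logs
  log_group_name my-log-group
  log_stream_name my-log-stream
  region us-east-1
  <secondary>
    @type secondary_file
    directory /var/log/fluentd/error
    basename cloudwatch_rejected
  </secondary>
</match>
```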
@@ -130,6 +132,9 @@ module Fluent::Plugin
  def write(chunk)
  log_group_name = extract_placeholders(@log_group_name, chunk) if @log_group_name
  log_stream_name = extract_placeholders(@log_stream_name, chunk) if @log_stream_name
+ aws_tags = @log_group_aws_tags.each {|k, v|
+ @log_group_aws_tags[extract_placeholders(k, chunk)] = extract_placeholders(v, chunk)
+ } if @log_group_aws_tags
 
  queue = Thread::Queue.new
 
@@ -182,7 +187,7 @@ module Fluent::Plugin
  #as we create log group only once, values from first record will persist
  record = rs[0][2]
 
- awstags = @log_group_aws_tags
+ awstags = aws_tags
  unless @log_group_aws_tags_key.nil?
  if @remove_log_group_aws_tags_key
  awstags = record.delete(@log_group_aws_tags_key)
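The switch to `aws_tags` above means placeholders inside `log_group_aws_tags` are now expanded per chunk. A configuration sketch mirroring the new test added later in this diff (the match pattern and field names are placeholders; the buffer must be keyed on every field referenced):

```
<match app.**>
  @type cloudwatch_logs
  auto_create_stream true
  use_tag_as_stream true
  log_group_name_key group_name_key
  log_group_aws_tags {"tag1": "${tag}", "tag2": "${namespace_name}"}
  <buffer tag, namespace_name>
    @type memory
  </buffer>
</match>
```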
@@ -319,8 +324,7 @@ module Fluent::Plugin
  while event = events.shift
  event_bytesize = event[:message].bytesize + EVENT_HEADER_SIZE
  if MAX_EVENT_SIZE < event_bytesize
- log.warn "Log event in #{group_name} is discarded because it is too large: #{event_bytesize} bytes exceeds limit of #{MAX_EVENT_SIZE}"
- break
+ raise TooLargeEventError, "Log event in #{group_name} is discarded because it is too large: #{event_bytesize} bytes exceeds limit of #{MAX_EVENT_SIZE}"
  end
 
  new_chunk = chunk + [event]
@@ -377,8 +381,7 @@ module Fluent::Plugin
  end
  rescue Aws::CloudWatchLogs::Errors::InvalidSequenceTokenException, Aws::CloudWatchLogs::Errors::DataAlreadyAcceptedException => err
  sleep 1 # to avoid too many API calls
- log_stream = find_log_stream(group_name, stream_name)
- store_next_sequence_token(group_name, stream_name, log_stream.upload_sequence_token)
+ store_next_sequence_token(group_name, stream_name, err.expected_sequence_token)
  log.warn "updating upload sequence token forcefully because unrecoverable error occured", {
  "error" => err,
  "log_group" => group_name,
@@ -400,7 +403,13 @@ module Fluent::Plugin
  raise err
  end
  rescue Aws::CloudWatchLogs::Errors::ThrottlingException => err
- if !@put_log_events_disable_retry_limit && @put_log_events_retry_limit < retry_count
+ if @put_log_events_retry_limit < 1
+ log.warn "failed to PutLogEvents and discard logs because put_log_events_retry_limit is less than 1", {
+ "error_class" => err.class.to_s,
+ "error" => err.message,
+ }
+ return
+ elsif !@put_log_events_disable_retry_limit && @put_log_events_retry_limit < retry_count
  log.error "failed to PutLogEvents and discard logs because retry count exceeded put_log_events_retry_limit", {
  "error_class" => err.class.to_s,
  "error" => err.message,
test/plugin/test_in_cloudwatch_logs.rb CHANGED
@@ -28,6 +28,7 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
  start_time "2019-06-18 00:00:00Z"
  end_time "2020-01-18 00:00:00Z"
  time_range_format "%Y-%m-%d %H:%M:%S%z"
+ throttling_retry_seconds 30
  EOC
 
  assert_equal('test_id', d.instance.aws_key_id)
@@ -43,6 +44,7 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
  assert_equal(1560816000000, d.instance.start_time)
  assert_equal(1579305600000, d.instance.end_time)
  assert_equal("%Y-%m-%d %H:%M:%S%z", d.instance.time_range_format)
+ assert_equal(30, d.instance.throttling_retry_seconds)
  end
 
  test 'invalid time range' do
@@ -97,6 +99,34 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
  assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs2'}], emits[1])
  end
 
+ def test_emit_with_metadata
+ create_log_stream
+
+ time_ms = (Time.now.to_f * 1000).floor
+ put_log_events([
+ {timestamp: time_ms, message: '{"cloudwatch":"logs1"}'},
+ {timestamp: time_ms, message: '{"cloudwatch":"logs2"}'},
+ ])
+
+ sleep 5
+
+ d = create_driver(default_config + %[include_metadata true])
+ d.run(expect_emits: 2, timeout: 5)
+
+ emits = d.events
+ assert_true(emits[0][2].has_key?("metadata"))
+ assert_true(emits[1][2].has_key?("metadata"))
+ emits[0][2].delete_if {|k, v|
+ k == "metadata"
+ }
+ emits[1][2].delete_if {|k, v|
+ k == "metadata"
+ }
+ assert_equal(2, emits.size)
+ assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs1'}], emits[0])
+ assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs2'}], emits[1])
+ end
+
  def test_emit_with_aws_timestamp
  create_log_stream
 
@@ -171,7 +201,6 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
  '@type' => 'cloudwatch_logs',
  'log_group_name' => "#{log_group_name}",
  'log_stream_name' => "#{log_stream_name}",
- 'state_file' => '/tmp/state',
  }
  cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_key_id)) if ENV['aws_key_id']
  cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_sec_key)) if ENV['aws_sec_key']
@@ -181,7 +210,9 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
  csv_format_config = config_element('ROOT', '', cloudwatch_config, [
  config_element('parse', '', {'@type' => 'csv',
  'keys' => 'time,message',
- 'time_key' => 'time'})
+ 'time_key' => 'time'}),
+ config_element('storage', '', {'@type' => 'local',
+ 'path' => '/tmp/state'})
  ])
  create_log_stream
 
@@ -203,6 +234,54 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
  assert_equal(['test', (log_time_ms / 1000).floor, {"message"=>"Cloudwatch non json logs2"}], emits[1])
  end
 
+ test "emit with <parse> csv with metadata" do
+ cloudwatch_config = {'tag' => "test",
+ '@type' => 'cloudwatch_logs',
+ 'log_group_name' => "#{log_group_name}",
+ 'log_stream_name' => "#{log_stream_name}",
+ 'include_metadata' => true,
+ }
+ cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_key_id)) if ENV['aws_key_id']
+ cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_sec_key)) if ENV['aws_sec_key']
+ cloudwatch_config = cloudwatch_config.merge!(config_elementify(region)) if ENV['region']
+ cloudwatch_config = cloudwatch_config.merge!(config_elementify(endpoint)) if ENV['endpoint']
+
+ csv_format_config = config_element('ROOT', '', cloudwatch_config, [
+ config_element('parse', '', {'@type' => 'csv',
+ 'keys' => 'time,message',
+ 'time_key' => 'time'}),
+ config_element('storage', '', {'@type' => 'local',
+ 'path' => '/tmp/state'})
+
+ ])
+ create_log_stream
+
+ time_ms = (Time.now.to_f * 1000).floor
+ log_time_ms = time_ms - 10000
+ put_log_events([
+ {timestamp: time_ms, message: Time.at(log_time_ms/1000.floor).to_s + ",Cloudwatch non json logs1"},
+ {timestamp: time_ms, message: Time.at(log_time_ms/1000.floor).to_s + ",Cloudwatch non json logs2"},
+ ])
+
+ sleep 5
+
+ d = create_driver(csv_format_config)
+ d.run(expect_emits: 2, timeout: 5)
+
+ emits = d.events
+ assert_true(emits[0][2].has_key?("metadata"))
+ assert_true(emits[1][2].has_key?("metadata"))
+ emits[0][2].delete_if {|k, v|
+ k == "metadata"
+ }
+ emits[1][2].delete_if {|k, v|
+ k == "metadata"
+ }
+ assert_equal(2, emits.size)
+ assert_equal(['test', (log_time_ms / 1000).floor, {"message"=>"Cloudwatch non json logs1"}], emits[0])
+ assert_equal(['test', (log_time_ms / 1000).floor, {"message"=>"Cloudwatch non json logs2"}], emits[1])
+ end
+
  def test_emit_width_format
  create_log_stream
 
@@ -244,7 +323,6 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
  '@type' => 'cloudwatch_logs',
  'log_group_name' => "#{log_group_name}",
  'log_stream_name' => "#{log_stream_name}",
- 'state_file' => '/tmp/state',
  }
  cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_key_id)) if ENV['aws_key_id']
  cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_sec_key)) if ENV['aws_sec_key']
@@ -254,7 +332,9 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
  regex_format_config = config_element('ROOT', '', cloudwatch_config, [
  config_element('parse', '', {'@type' => 'regexp',
  'expression' => "/^(?<cloudwatch>[^ ]*)?/",
- })
+ }),
+ config_element('storage', '', {'@type' => 'local',
+ 'path' => '/tmp/state'})
  ])
  create_log_stream
 
@@ -550,6 +630,8 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
  end
 
  test "emit with today's log stream" do
+ omit "This testcase is unstable in CI." if ENV["CI"] == "true"
+
  config = <<-CONFIG
  tag test
  @type cloudwatch_logs
@@ -608,6 +690,59 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
  assert_equal(["test", ((time_ms + 7000) / 1000), { "cloudwatch" => "logs7" }], events[6])
  assert_equal(["test", ((time_ms + 8000) / 1000), { "cloudwatch" => "logs8" }], events[7])
  end
+
+ test "retry on Aws::CloudWatchLogs::Errors::ThrottlingException in get_log_events" do
+ config = <<-CONFIG
+ tag test
+ @type cloudwatch_logs
+ log_group_name #{log_group_name}
+ state_file /tmp/state
+ fetch_interval 0.1
+ throttling_retry_seconds 0.2
+ CONFIG
+
+ # it raises the error 2 times
+ counter = 0
+ times = 2
+ stub(@client).get_log_events(anything) {
+ counter += 1
+ counter <= times ? raise(Aws::CloudWatchLogs::Errors::ThrottlingException.new(nil, "error")) : OpenStruct.new(events: [], next_forward_token: nil)
+ }
+
+ d = create_driver(config)
+
+ # so valid_next_token is expected to be called once
+ mock(d.instance).valid_next_token(nil, nil).once
+
+ d.run
+ assert_equal(2, d.logs.select {|l| l =~ /ThrottlingException get_log_events. Waiting 0.2 seconds to retry/ }.size)
+ end
+
+ test "retry on Aws::CloudWatchLogs::Errors::ThrottlingException in describe_log_streams" do
+ config = <<-CONFIG
+ tag test
+ @type cloudwatch_logs
+ log_group_name #{log_group_name}
+ use_log_stream_name_prefix true
+ state_file /tmp/state
+ fetch_interval 0.1
+ throttling_retry_seconds 0.2
+ CONFIG
+
+ # it raises the error 2 times
+ log_stream = Aws::CloudWatchLogs::Types::LogStream.new(log_stream_name: "stream_name")
+ counter = 0
+ times = 2
+ stub(@client).describe_log_streams(anything) {
+ counter += 1
+ counter <= times ? raise(Aws::CloudWatchLogs::Errors::ThrottlingException.new(nil, "error")) : OpenStruct.new(log_streams: [log_stream], next_token: nil)
+ }
+
+ d = create_driver(config)
+
+ d.run
+ assert_equal(2, d.logs.select {|l| l =~ /ThrottlingException describe_log_streams. Waiting 0.2 seconds to retry/ }.size)
+ end
  end
 
  private
test/plugin/test_out_cloudwatch_logs.rb CHANGED
@@ -492,6 +492,47 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
  assert_equal("value2", awstags.fetch("tag2"))
  end
 
+ def test_log_group_aws_tags_with_placeholders
+ clear_log_group
+
+ config = {
+ "@type" => "cloudwatch_logs",
+ "auto_create_stream" => true,
+ "use_tag_as_stream" => true,
+ "log_group_name_key" => "group_name_key",
+ "log_group_aws_tags" => '{"tag1": "${tag}", "tag2": "${namespace_name}"}',
+ }
+ config.merge!(config_elementify(aws_key_id)) if aws_key_id
+ config.merge!(config_elementify(aws_sec_key)) if aws_sec_key
+ config.merge!(config_elementify(region)) if region
+ config.merge!(config_elementify(endpoint)) if endpoint
+
+ d = create_driver(
+ Fluent::Config::Element.new('ROOT', '', config, [
+ Fluent::Config::Element.new('buffer', 'tag, namespace_name', {
+ '@type' => 'memory',
+ }, [])
+ ])
+ )
+
+ records = [
+ {'cloudwatch' => 'logs1', 'message' => 'message1', 'group_name_key' => log_group_name, "namespace_name" => "fluentd"},
+ {'cloudwatch' => 'logs2', 'message' => 'message1', 'group_name_key' => log_group_name, "namespace_name" => "fluentd"},
+ {'cloudwatch' => 'logs3', 'message' => 'message1', 'group_name_key' => log_group_name, "namespace_name" => "fluentd"},
+ ]
+
+ time = Time.now
+ d.run(default_tag: fluentd_tag) do
+ records.each_with_index do |record, i|
+ d.feed(time.to_i + i, record)
+ end
+ end
+
+ awstags = get_log_group_tags
+ assert_equal(fluentd_tag, awstags.fetch("tag1"))
+ assert_equal("fluentd", awstags.fetch("tag2"))
+ end
+
  def test_retention_in_days
  clear_log_group
 
@@ -713,6 +754,32 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
  assert_equal({'cloudwatch' => 'logs2', 'message' => 'message2'}, JSON.parse(events[1].message))
  end
 
+ def test_retrying_on_throttling_exception_with_put_log_events_retry_limit_as_zero
+ client = Aws::CloudWatchLogs::Client.new
+ @called = false
+ stub(client).put_log_events(anything) {
+ raise(Aws::CloudWatchLogs::Errors::ThrottlingException.new(nil, "error"))
+ }.once.ordered
+
+ d = create_driver(<<-EOC)
+ #{default_config}
+ log_group_name #{log_group_name}
+ log_stream_name #{log_stream_name}
+ @log_level debug
+ put_log_events_retry_limit 0
+ EOC
+ time = event_time
+ d.instance.instance_variable_set(:@logs, client)
+ d.run(default_tag: fluentd_tag) do
+ d.feed(time, {'message' => 'message1'})
+ end
+
+ logs = d.logs
+ assert_equal(0, logs.select {|l| l =~ /Called PutLogEvents API/ }.size)
+ assert_equal(1, logs.select {|l| l =~ /failed to PutLogEvents/ }.size)
+ assert_equal(0, logs.select {|l| l =~ /retry succeeded/ }.size)
+ end
+
  def test_retrying_on_throttling_exception
  resp = Object.new
  mock(resp).rejected_log_events_info {}
test/test_helper.rb CHANGED
@@ -42,10 +42,7 @@ module CloudwatchLogsTestHelper
  end
 
  def log_stream_name(log_stream_name_prefix = nil)
- if !@log_stream_name
- new_log_stream(log_stream_name_prefix)
- end
- @log_stream_name
+ @log_stream_name ||= new_log_stream(log_stream_name_prefix)
  end
 
  def new_log_stream(log_stream_name_prefix = nil)
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-cloudwatch-logs
  version: !ruby/object:Gem::Version
- version: 0.9.3
+ version: 0.10.2
  platform: ruby
  authors:
  - Ryota Arai
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-04-03 00:00:00.000000000 Z
+ date: 2020-07-21 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd
@@ -115,6 +115,7 @@ executables: []
  extensions: []
  extra_rdoc_files: []
  files:
+ - ".github/workflows/issue-auto-closer.yml"
  - ".gitignore"
  - ".travis.yml"
  - Gemfile