fluent-plugin-cloudwatch-logs 0.12.0 → 0.13.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/README.md +6 -0
- data/lib/fluent/plugin/cloudwatch/logs/version.rb +1 -1
- data/lib/fluent/plugin/in_cloudwatch_logs.rb +94 -52
- data/test/plugin/test_in_cloudwatch_logs.rb +122 -0
- data/test/test_helper.rb +4 -0
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 17e367fb1fc1c2b58cd9a22cd9caefaf6396b3542f589b44616f160c40c3219d
|
4
|
+
data.tar.gz: 37eee2b6a48d17d86451a8eee88435fc1ddd572248b3ed181bf3d833d97a3d96
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 30807c60113fe1eed915092e005e83863a4cfc5911a859eb00925299a1a4e45dc0dd581006d0486d3ab176ed2babf7b1fb5e90f0b41e04838c855a578db31faf
|
7
|
+
data.tar.gz: a44051e6aa3f1010dd5ee17f1046dfccabd59a929ed5b8976eeda135410f681400f1f024c61a84992cc28500285148e598aad7aa2daab10a965aca0204398a28
|
data/README.md
CHANGED
@@ -222,6 +222,9 @@ Please refer to [the PutRetentionPolicy column in documentation](https://docs.aw
|
|
222
222
|
@type cloudwatch_logs
|
223
223
|
tag cloudwatch.in
|
224
224
|
log_group_name group
|
225
|
+
#add_log_group_name true
|
226
|
+
#log_group_name_key group_name_key
|
227
|
+
#use_log_group_name_prefix true
|
225
228
|
log_stream_name stream
|
226
229
|
#use_log_stream_name_prefix true
|
227
230
|
state_file /var/lib/fluent/group_stream.in.state
|
@@ -256,6 +259,9 @@ Please refer to [the PutRetentionPolicy column in documentation](https://docs.aw
|
|
256
259
|
* `http_proxy`: use to set an optional HTTP proxy
|
257
260
|
* `json_handler`: name of the library to be used to handle JSON data. For now, supported libraries are `json` (default) and `yajl`.
|
258
261
|
* `log_group_name`: name of log group to fetch logs
|
262
|
+
* `add_log_group_name`: add record into the name of log group (default `false`)
|
263
|
+
* `log_group_name_key`: specify the key where adding record into the name of log group (default `'log_group'`)
|
264
|
+
* `use_log_group_name_prefix`: to use `log_group_name` as log group name prefix (default `false`)
|
259
265
|
* `log_stream_name`: name of log stream to fetch logs
|
260
266
|
* `region`: AWS Region. See [Authentication](#authentication) for more information.
|
261
267
|
* `throttling_retry_seconds`: time period in seconds to retry a request when aws CloudWatch rate limit exceeds (default: nil)
|
@@ -22,6 +22,9 @@ module Fluent::Plugin
|
|
22
22
|
config_param :endpoint, :string, default: nil
|
23
23
|
config_param :tag, :string
|
24
24
|
config_param :log_group_name, :string
|
25
|
+
config_param :add_log_group_name, :bool, default: false
|
26
|
+
config_param :log_group_name_key, :string, default: 'log_group'
|
27
|
+
config_param :use_log_group_name_prefix, :bool, default: false
|
25
28
|
config_param :log_stream_name, :string, default: nil
|
26
29
|
config_param :use_log_stream_name_prefix, :bool, default: false
|
27
30
|
config_param :state_file, :string, default: nil,
|
@@ -128,6 +131,17 @@ module Fluent::Plugin
|
|
128
131
|
super
|
129
132
|
end
|
130
133
|
|
134
|
+
# No private for testing
|
135
|
+
def state_key_for(log_stream_name, log_group_name = nil)
|
136
|
+
if log_group_name && log_stream_name
|
137
|
+
"#{@state_file}_#{log_group_name.gsub(File::SEPARATOR, '-')}_#{log_stream_name.gsub(File::SEPARATOR, '-')}"
|
138
|
+
elsif log_stream_name
|
139
|
+
"#{@state_file}_#{log_stream_name.gsub(File::SEPARATOR, '-')}"
|
140
|
+
else
|
141
|
+
@state_file
|
142
|
+
end
|
143
|
+
end
|
144
|
+
|
131
145
|
private
|
132
146
|
def configure_parser(conf)
|
133
147
|
if conf['format']
|
@@ -137,28 +151,20 @@ module Fluent::Plugin
|
|
137
151
|
end
|
138
152
|
end
|
139
153
|
|
140
|
-
def state_key_for(log_stream_name)
|
141
|
-
if log_stream_name
|
142
|
-
"#{@state_file}_#{log_stream_name.gsub(File::SEPARATOR, '-')}"
|
143
|
-
else
|
144
|
-
@state_file
|
145
|
-
end
|
146
|
-
end
|
147
|
-
|
148
154
|
def migrate_state_file_to_storage(log_stream_name)
|
149
155
|
@next_token_storage.put(:"#{state_key_for(log_stream_name)}", File.read(state_key_for(log_stream_name)).chomp)
|
150
156
|
File.delete(state_key_for(log_stream_name))
|
151
157
|
end
|
152
158
|
|
153
|
-
def next_token(log_stream_name)
|
159
|
+
def next_token(log_stream_name, log_group_name = nil)
|
154
160
|
if @next_token_storage.persistent && File.exist?(state_key_for(log_stream_name))
|
155
161
|
migrate_state_file_to_storage(log_stream_name)
|
156
162
|
end
|
157
|
-
@next_token_storage.get(:"#{state_key_for(log_stream_name)}")
|
163
|
+
@next_token_storage.get(:"#{state_key_for(log_stream_name, log_group_name)}")
|
158
164
|
end
|
159
165
|
|
160
|
-
def store_next_token(token, log_stream_name = nil)
|
161
|
-
@next_token_storage.put(:"#{state_key_for(log_stream_name)}", token)
|
166
|
+
def store_next_token(token, log_stream_name = nil, log_group_name = nil)
|
167
|
+
@next_token_storage.put(:"#{state_key_for(log_stream_name, log_group_name)}", token)
|
162
168
|
end
|
163
169
|
|
164
170
|
def run
|
@@ -168,55 +174,67 @@ module Fluent::Plugin
|
|
168
174
|
if Time.now > @next_fetch_time
|
169
175
|
@next_fetch_time += @fetch_interval
|
170
176
|
|
171
|
-
if @use_log_stream_name_prefix || @use_todays_log_stream
|
172
|
-
|
173
|
-
|
174
|
-
|
175
|
-
|
176
|
-
|
177
|
-
|
178
|
-
|
179
|
-
|
180
|
-
|
181
|
-
|
182
|
-
|
183
|
-
|
184
|
-
|
185
|
-
|
186
|
-
|
187
|
-
|
188
|
-
|
177
|
+
if @use_log_group_name_prefix
|
178
|
+
log_group_names = describe_log_groups(@log_group_name).map{|log_group|
|
179
|
+
log_group.log_group_name
|
180
|
+
}
|
181
|
+
else
|
182
|
+
log_group_names = [@log_group_name]
|
183
|
+
end
|
184
|
+
log_group_names.each do |log_group_name|
|
185
|
+
if @use_log_stream_name_prefix || @use_todays_log_stream
|
186
|
+
log_stream_name_prefix = @use_todays_log_stream ? get_todays_date : @log_stream_name
|
187
|
+
begin
|
188
|
+
log_streams = describe_log_streams(log_stream_name_prefix)
|
189
|
+
log_streams.concat(describe_log_streams(get_yesterdays_date)) if @use_todays_log_stream
|
190
|
+
log_streams.each do |log_stream|
|
191
|
+
log_stream_name = log_stream.log_stream_name
|
192
|
+
events = get_events(log_group_name, log_stream_name)
|
193
|
+
metadata = if @include_metadata
|
194
|
+
{
|
195
|
+
"log_stream_name" => log_stream_name,
|
196
|
+
"log_group_name" => @log_group_name
|
197
|
+
}
|
198
|
+
else
|
199
|
+
{}
|
200
|
+
end
|
201
|
+
events.each do |event|
|
202
|
+
emit(log_group_name, log_stream_name, event, metadata)
|
203
|
+
end
|
189
204
|
end
|
205
|
+
rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException
|
206
|
+
log.warn "'#{@log_stream_name}' prefixed log stream(s) are not found"
|
207
|
+
next
|
208
|
+
end
|
209
|
+
else
|
210
|
+
events = get_events(log_group_name, @log_stream_name)
|
211
|
+
metadata = if @include_metadata
|
212
|
+
{
|
213
|
+
"log_stream_name" => @log_stream_name,
|
214
|
+
"log_group_name" => @log_group_name
|
215
|
+
}
|
216
|
+
else
|
217
|
+
{}
|
218
|
+
end
|
219
|
+
events.each do |event|
|
220
|
+
emit(log_group_name, log_stream_name, event, metadata)
|
190
221
|
end
|
191
|
-
rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException
|
192
|
-
log.warn "'#{@log_stream_name}' prefixed log stream(s) are not found"
|
193
|
-
next
|
194
|
-
end
|
195
|
-
else
|
196
|
-
events = get_events(@log_stream_name)
|
197
|
-
metadata = if @include_metadata
|
198
|
-
{
|
199
|
-
"log_stream_name" => @log_stream_name,
|
200
|
-
"log_group_name" => @log_group_name
|
201
|
-
}
|
202
|
-
else
|
203
|
-
{}
|
204
|
-
end
|
205
|
-
events.each do |event|
|
206
|
-
emit(log_stream_name, event, metadata)
|
207
222
|
end
|
208
223
|
end
|
209
|
-
end
|
210
224
|
sleep 1
|
225
|
+
end
|
211
226
|
end
|
212
227
|
end
|
213
228
|
|
214
|
-
def emit(stream, event, metadata)
|
229
|
+
def emit(group, stream, event, metadata)
|
215
230
|
if @parser
|
216
231
|
@parser.parse(event.message) {|time,record|
|
217
232
|
if @use_aws_timestamp
|
218
233
|
time = (event.timestamp / 1000).floor
|
219
234
|
end
|
235
|
+
if @add_log_group_name
|
236
|
+
record[@log_group_name_key] = group
|
237
|
+
end
|
220
238
|
unless metadata.empty?
|
221
239
|
record.merge!("metadata" => metadata)
|
222
240
|
end
|
@@ -226,6 +244,9 @@ module Fluent::Plugin
|
|
226
244
|
time = (event.timestamp / 1000).floor
|
227
245
|
begin
|
228
246
|
record = @json_handler.load(event.message)
|
247
|
+
if @add_log_group_name
|
248
|
+
record[@log_group_name_key] = group
|
249
|
+
end
|
229
250
|
unless metadata.empty?
|
230
251
|
record.merge!("metadata" => metadata)
|
231
252
|
end
|
@@ -237,19 +258,23 @@ module Fluent::Plugin
|
|
237
258
|
end
|
238
259
|
end
|
239
260
|
|
240
|
-
def get_events(log_stream_name)
|
261
|
+
def get_events(log_group_name, log_stream_name)
|
241
262
|
throttling_handler('get_log_events') do
|
242
263
|
request = {
|
243
|
-
log_group_name: @log_group_name,
|
264
|
+
log_group_name: log_group_name,
|
244
265
|
log_stream_name: log_stream_name
|
245
266
|
}
|
246
267
|
request.merge!(start_time: @start_time) if @start_time
|
247
268
|
request.merge!(end_time: @end_time) if @end_time
|
248
|
-
log_next_token = next_token(log_stream_name)
|
269
|
+
log_next_token = next_token(log_group_name, log_stream_name)
|
249
270
|
request[:next_token] = log_next_token if !log_next_token.nil? && !log_next_token.empty?
|
250
271
|
response = @logs.get_log_events(request)
|
251
272
|
if valid_next_token(log_next_token, response.next_forward_token)
|
252
|
-
|
273
|
+
if @use_log_group_name_prefix
|
274
|
+
store_next_token(response.next_forward_token, log_stream_name, log_group_name)
|
275
|
+
else
|
276
|
+
store_next_token(response.next_forward_token, log_stream_name)
|
277
|
+
end
|
253
278
|
end
|
254
279
|
|
255
280
|
response.events
|
@@ -289,6 +314,23 @@ module Fluent::Plugin
|
|
289
314
|
end
|
290
315
|
end
|
291
316
|
|
317
|
+
def describe_log_groups(log_group_name_prefix, log_groups = nil, next_token = nil)
|
318
|
+
request = {
|
319
|
+
log_group_name_prefix: log_group_name_prefix
|
320
|
+
}
|
321
|
+
request[:next_token] = next_token if next_token
|
322
|
+
response = @logs.describe_log_groups(request)
|
323
|
+
if log_groups
|
324
|
+
log_groups.concat(response.log_groups)
|
325
|
+
else
|
326
|
+
log_groups = response.log_groups
|
327
|
+
end
|
328
|
+
if response.next_token
|
329
|
+
log_groups = describe_log_groups(log_group_name_prefix, log_groups, response.next_token)
|
330
|
+
end
|
331
|
+
log_groups
|
332
|
+
end
|
333
|
+
|
292
334
|
def valid_next_token(prev_token, next_token)
|
293
335
|
next_token && prev_token != next_token.chomp
|
294
336
|
end
|
@@ -99,6 +99,128 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
|
|
99
99
|
assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs2'}], emits[1])
|
100
100
|
end
|
101
101
|
|
102
|
+
sub_test_case "use_log_group_name_prefix true" do
|
103
|
+
test "emit" do
|
104
|
+
set_log_group_name("fluent-plugin-cloudwatch-group-prefix-test-#{Time.now.to_f}")
|
105
|
+
create_log_stream
|
106
|
+
|
107
|
+
time_ms = (Time.now.to_f * 1000).floor
|
108
|
+
put_log_events([
|
109
|
+
{timestamp: time_ms, message: '{"cloudwatch":"logs1"}'},
|
110
|
+
{timestamp: time_ms, message: '{"cloudwatch":"logs2"}'},
|
111
|
+
])
|
112
|
+
|
113
|
+
sleep 5
|
114
|
+
|
115
|
+
config = <<-EOC
|
116
|
+
tag test
|
117
|
+
@type cloudwatch_logs
|
118
|
+
log_group_name fluent-plugin-cloudwatch-group-prefix-test
|
119
|
+
use_log_group_name_prefix true
|
120
|
+
log_stream_name #{log_stream_name}
|
121
|
+
state_file /tmp/state
|
122
|
+
fetch_interval 1
|
123
|
+
#{aws_key_id}
|
124
|
+
#{aws_sec_key}
|
125
|
+
#{region}
|
126
|
+
#{endpoint}
|
127
|
+
EOC
|
128
|
+
|
129
|
+
d = create_driver(config)
|
130
|
+
d.run(expect_emits: 2, timeout: 5)
|
131
|
+
|
132
|
+
emits = d.events
|
133
|
+
assert_equal(2, emits.size)
|
134
|
+
assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs1'}], emits[0])
|
135
|
+
assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs2'}], emits[1])
|
136
|
+
end
|
137
|
+
|
138
|
+
test "emit with add_log_group_name" do
|
139
|
+
set_log_group_name("fluent-plugin-cloudwatch-add-log-group-#{Time.now.to_f}")
|
140
|
+
create_log_stream
|
141
|
+
|
142
|
+
time_ms = (Time.now.to_f * 1000).floor
|
143
|
+
put_log_events([
|
144
|
+
{timestamp: time_ms, message: '{"cloudwatch":"logs1"}'},
|
145
|
+
{timestamp: time_ms, message: '{"cloudwatch":"logs2"}'},
|
146
|
+
])
|
147
|
+
|
148
|
+
sleep 5
|
149
|
+
|
150
|
+
log_group_name_key = 'log_group_key'
|
151
|
+
config = <<-EOC
|
152
|
+
tag test
|
153
|
+
@type cloudwatch_logs
|
154
|
+
log_group_name fluent-plugin-cloudwatch-add-log-group
|
155
|
+
use_log_group_name_prefix true
|
156
|
+
add_log_group_name true
|
157
|
+
log_group_name_key #{log_group_name_key}
|
158
|
+
log_stream_name #{log_stream_name}
|
159
|
+
state_file /tmp/state
|
160
|
+
fetch_interval 1
|
161
|
+
#{aws_key_id}
|
162
|
+
#{aws_sec_key}
|
163
|
+
#{region}
|
164
|
+
#{endpoint}
|
165
|
+
EOC
|
166
|
+
|
167
|
+
d = create_driver(config)
|
168
|
+
d.run(expect_emits: 2, timeout: 5)
|
169
|
+
|
170
|
+
emits = d.events
|
171
|
+
assert_equal(2, emits.size)
|
172
|
+
assert_true emits[0][2].has_key?(log_group_name_key)
|
173
|
+
emits[0][2].delete(log_group_name_key)
|
174
|
+
assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs1'}], emits[0])
|
175
|
+
assert_true emits[1][2].has_key?(log_group_name_key)
|
176
|
+
emits[1][2].delete(log_group_name_key)
|
177
|
+
assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs2'}], emits[1])
|
178
|
+
end
|
179
|
+
|
180
|
+
test "emit with add_log_group_name and <parse> csv" do
|
181
|
+
cloudwatch_config = {'tag' => "test",
|
182
|
+
'@type' => 'cloudwatch_logs',
|
183
|
+
'log_group_name' => "fluent-plugin-cloudwatch-with-csv-format",
|
184
|
+
'log_stream_name' => "#{log_stream_name}",
|
185
|
+
'use_log_group_name_prefix' => true,
|
186
|
+
}
|
187
|
+
cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_key_id)) if ENV['aws_key_id']
|
188
|
+
cloudwatch_config = cloudwatch_config.merge!(config_elementify(aws_sec_key)) if ENV['aws_sec_key']
|
189
|
+
cloudwatch_config = cloudwatch_config.merge!(config_elementify(region)) if ENV['region']
|
190
|
+
cloudwatch_config = cloudwatch_config.merge!(config_elementify(endpoint)) if ENV['endpoint']
|
191
|
+
|
192
|
+
csv_format_config = config_element('ROOT', '', cloudwatch_config, [
|
193
|
+
config_element('parse', '', {'@type' => 'csv',
|
194
|
+
'keys' => 'time,message',
|
195
|
+
'time_key' => 'time'}),
|
196
|
+
config_element('storage', '', {'@type' => 'local',
|
197
|
+
'path' => '/tmp/state'})
|
198
|
+
])
|
199
|
+
log_group_name = "fluent-plugin-cloudwatch-with-csv-format-#{Time.now.to_f}"
|
200
|
+
set_log_group_name(log_group_name)
|
201
|
+
create_log_stream
|
202
|
+
|
203
|
+
time_ms = (Time.now.to_f * 1000).floor
|
204
|
+
log_time_ms = time_ms - 10000
|
205
|
+
put_log_events([
|
206
|
+
{timestamp: time_ms, message: Time.at(log_time_ms/1000.floor).to_s + ",Cloudwatch non json logs1"},
|
207
|
+
{timestamp: time_ms, message: Time.at(log_time_ms/1000.floor).to_s + ",Cloudwatch non json logs2"},
|
208
|
+
])
|
209
|
+
|
210
|
+
sleep 5
|
211
|
+
|
212
|
+
d = create_driver(csv_format_config)
|
213
|
+
d.run(expect_emits: 2, timeout: 5)
|
214
|
+
next_token = d.instance.instance_variable_get(:@next_token_storage)
|
215
|
+
assert_true next_token.get(d.instance.state_key_for(log_stream_name, log_group_name)).is_a?(String)
|
216
|
+
|
217
|
+
emits = d.events
|
218
|
+
assert_equal(2, emits.size)
|
219
|
+
assert_equal(['test', (log_time_ms / 1000).floor, {"message"=>"Cloudwatch non json logs1"}], emits[0])
|
220
|
+
assert_equal(['test', (log_time_ms / 1000).floor, {"message"=>"Cloudwatch non json logs2"}], emits[1])
|
221
|
+
end
|
222
|
+
end
|
223
|
+
|
102
224
|
def test_emit_with_metadata
|
103
225
|
create_log_stream
|
104
226
|
|
data/test/test_helper.rb
CHANGED
@@ -17,6 +17,10 @@ module CloudwatchLogsTestHelper
|
|
17
17
|
@logs ||= Aws::CloudWatchLogs::Client.new(options)
|
18
18
|
end
|
19
19
|
|
20
|
+
def set_log_group_name(log_group_name)
|
21
|
+
@log_group_name = log_group_name
|
22
|
+
end
|
23
|
+
|
20
24
|
def log_group_name
|
21
25
|
@log_group_name ||= "fluent-plugin-cloudwatch-test-#{Time.now.to_f}"
|
22
26
|
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: fluent-plugin-cloudwatch-logs
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.12.0
|
4
|
+
version: 0.13.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Ryota Arai
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2020-
|
11
|
+
date: 2020-12-08 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: fluentd
|