fluent-plugin-cloudwatch-logs 0.1.1 → 0.1.2
This diff shows the changes between two publicly released versions of this package, as they appear in their public registry, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/README.md +6 -1
- data/example/fluentd.conf +0 -1
- data/lib/fluent/plugin/cloudwatch/logs/version.rb +1 -1
- data/lib/fluent/plugin/in_cloudwatch_logs.rb +49 -13
- data/lib/fluent/plugin/out_cloudwatch_logs.rb +8 -4
- data/test/plugin/test_in_cloudwatch_logs.rb +44 -0
- data/test/plugin/test_out_cloudwatch_logs.rb +63 -7
- data/test/test_helper.rb +5 -5
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7433cbc405896d7a542689ab5e23c6f199043760
+  data.tar.gz: c8f7680af5579da676e5b0379b9e911f5afa7dbe
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0bb5a93717b9cddfc9d29fd4e4dca9c84751747b6f10b3225f5a8be0542a418223525e4f03527131daf9d3bfec543fe2d06b372f391760535deeb1cf149c555a
+  data.tar.gz: 86be56949e121311e7ad8ebaf5fb41667fdede3d0a0a75f80418659df35664f4924756614e1bf174c03105f028fca9009a2f80fddb6a4f1a74e2bf8ae79a9d83
data/README.md
CHANGED
@@ -73,18 +73,21 @@ Fetch sample log from CloudWatch Logs:
   #max_message_length 32768
   #use_tag_as_group false
   #use_tag_as_stream false
+  #include_time_key true
+  #localtime true
 </match>
 ```
 
 * `log_group_name`: name of log group to store logs
 * `log_stream_name`: name of log stream to store logs
-* `sequence_token_file`: file to store next sequence token
 * `auto_create_stream`: to create log group and stream automatically
 * `message_keys`: keys to send messages as events
 * `max_message_length`: maximum length of the message
 * `max_events_per_batch`: maximum number of events to send at once (default 10000)
 * `use_tag_as_group`: to use tag as a group name
 * `use_tag_as_stream`: to use tag as a stream name
+* `include_time_key`: include time key as part of the log entry (defaults to UTC)
+* `localtime`: use localtime timezone for `include_time_key` output (overrides UTC default)
 
 ### in_cloudwatch_logs
 
@@ -94,6 +97,7 @@ Fetch sample log from CloudWatch Logs:
   tag cloudwatch.in
   log_group_name group
   log_stream_name stream
+  #use_log_stream_name_prefix true
   state_file /var/lib/fluent/group_stream.in.state
 </source>
 ```
@@ -101,6 +105,7 @@ Fetch sample log from CloudWatch Logs:
 * `tag`: fluentd tag
 * `log_group_name`: name of log group to fetch logs
 * `log_stream_name`: name of log stream to fetch logs
+* `use_log_stream_name_prefix`: to use `log_stream_name` as log stream name prefix (default false)
 * `state_file`: file to store current state (e.g. next\_forward\_token)
 
 ## Test
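The two new output options are backed by Fluentd's SetTimeKeyMixin (pulled into out_cloudwatch_logs.rb later in this diff). As a rough sketch of what the stored message looks like with `include_time_key` enabled, using the timestamp formats asserted in the new output-plugin tests (the record contents below are made up for illustration):

```ruby
require 'json'

record = { 'cloudwatch' => 'logs1' }  # hypothetical record
now = Time.now

# Default (UTC): a "time" key is appended in ISO 8601 with a trailing "Z".
utc_message = record.merge('time' => now.utc.strftime('%Y-%m-%dT%H:%M:%SZ')).to_json
# e.g. {"cloudwatch":"logs1","time":"2015-10-23T01:02:03Z"}

# With `localtime true`: the local zone offset is kept instead of "Z".
local_message = record.merge('time' => now.strftime('%Y-%m-%dT%H:%M:%S%:z')).to_json
# e.g. {"cloudwatch":"logs1","time":"2015-10-23T10:02:03+09:00"}

puts utc_message
puts local_message
```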
data/lib/fluent/plugin/in_cloudwatch_logs.rb
CHANGED
@@ -13,6 +13,7 @@ module Fluent
     config_param :tag, :string
     config_param :log_group_name, :string
     config_param :log_stream_name, :string
+    config_param :use_log_stream_name_prefix, :bool, default: false
     config_param :state_file, :string
     config_param :fetch_interval, :time, default: 60
     config_param :http_proxy, :string, default: nil
@@ -57,8 +58,10 @@ module Fluent
       File.read(@state_file).chomp
     end
 
-    def store_next_token(token)
-      open(@state_file, 'w') do |f|
+    def store_next_token(token, log_stream_name = nil)
+      state_file = @state_file
+      state_file = "#{@state_file}_#{log_stream_name}" if log_stream_name
+      open(state_file, 'w') do |f|
         f.write token
       end
     end
@@ -70,15 +73,19 @@ module Fluent
         if Time.now > @next_fetch_time
           @next_fetch_time += @fetch_interval
 
-          events = get_events
-          events.each do |event|
-            if @parser
-              record = @parser.parse(event.message)
-              router.emit(@tag, record[0], record[1])
-            else
-              time = (event.timestamp / 1000).floor
-              record = JSON.parse(event.message)
-              router.emit(@tag, time, record)
+          if @use_log_stream_name_prefix
+            log_streams = describe_log_streams
+            log_streams.each do |log_stram|
+              log_stream_name = log_stram.log_stream_name
+              events = get_events(log_stream_name)
+              events.each do |event|
+                emit(event)
+              end
+            end
+          else
+            events = get_events(@log_stream_name)
+            events.each do |event|
+              emit(event)
             end
           end
         end
@@ -86,10 +93,21 @@ module Fluent
       end
     end
 
-    def get_events
+    def emit(event)
+      if @parser
+        record = @parser.parse(event.message)
+        router.emit(@tag, record[0], record[1])
+      else
+        time = (event.timestamp / 1000).floor
+        record = JSON.parse(event.message)
+        router.emit(@tag, time, record)
+      end
+    end
+
+    def get_events(log_stream_name)
       request = {
         log_group_name: @log_group_name,
-        log_stream_name: @log_stream_name
+        log_stream_name: log_stream_name
       }
       request[:next_token] = next_token if next_token
       response = @logs.get_log_events(request)
@@ -97,5 +115,23 @@ module Fluent
 
       response.events
     end
+
+    def describe_log_streams(log_streams = nil, next_token = nil)
+      request = {
+        log_group_name: @log_group_name
+      }
+      request[:next_token] = next_token if next_token
+      request[:log_stream_name_prefix] = @log_stream_name
+      response = @logs.describe_log_streams(request)
+      if log_streams
+        log_streams << response.log_streams
+      else
+        log_streams = response.log_streams
+      end
+      if response.next_token
+        log_streams = describe_log_streams(log_streams, response.next_token)
+      end
+      log_streams
+    end
   end
 end
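The `use_log_stream_name_prefix` path above boils down to listing every stream in the group whose name starts with `log_stream_name` via the CloudWatch Logs DescribeLogStreams API, following `next_token` until the listing is exhausted, then fetching and emitting events stream by stream (with a per-stream state file). A minimal standalone sketch of that listing step, assuming the AWS SDK for Ruby v2 client the plugin builds on (region, group, and prefix values are placeholders):

```ruby
require 'aws-sdk-core'  # AWS SDK for Ruby v2, provides Aws::CloudWatchLogs::Client

# List every log stream in `group` whose name starts with `prefix`,
# following next_token across pages (an iterative take on the plugin's
# recursive describe_log_streams helper shown above).
def streams_with_prefix(logs, group, prefix)
  streams = []
  next_token = nil
  loop do
    request = { log_group_name: group, log_stream_name_prefix: prefix }
    request[:next_token] = next_token if next_token
    response = logs.describe_log_streams(request)
    streams.concat(response.log_streams)
    next_token = response.next_token
    break unless next_token
  end
  streams
end

logs = Aws::CloudWatchLogs::Client.new(region: 'us-east-1')  # placeholder region
streams_with_prefix(logs, 'group', 'stream').each do |stream|
  puts stream.log_stream_name
end
```

The sketch collects pages with `concat`; the shipped helper pushes each later page with `<<`, so for multi-page groups its return value contains nested arrays rather than a flat list.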
data/lib/fluent/plugin/out_cloudwatch_logs.rb
CHANGED
@@ -2,6 +2,8 @@ module Fluent
   class CloudwatchLogsOutput < BufferedOutput
     Plugin.register_output('cloudwatch_logs', self)
 
+    include Fluent::SetTimeKeyMixin
+
     config_param :aws_key_id, :string, :default => nil, :secret => true
     config_param :aws_sec_key, :string, :default => nil, :secret => true
     config_param :region, :string, :default => nil
@@ -54,7 +56,7 @@ module Fluent
           if @auto_create_stream
             create_log_group(group_name)
           else
-            log.warn "Log group '#{group_name}'
+            log.warn "Log group '#{group_name}' does not exist"
             next
           end
         end
@@ -63,7 +65,7 @@ module Fluent
           if @auto_create_stream
             create_log_stream(group_name, stream_name)
           else
-            log.warn "Log stream '#{stream_name}'
+            log.warn "Log stream '#{stream_name}' does not exist"
             next
           end
         end
@@ -77,7 +79,9 @@ module Fluent
            message = record.to_json
          end
 
-
+          # CloudWatchLogs API only accepts valid UTF-8 strings
+          # so we should encode the message to UTF-8
+          message.encode('UTF-8', :invalid => :replace)
 
          if @max_message_length
            message = message.slice(0, @max_message_length)
@@ -109,7 +113,7 @@ module Fluent
         while event = events.shift
           new_chunk = chunk + [event]
           chunk_span_too_big = new_chunk.size > 1 && new_chunk[-1][:timestamp] - new_chunk[0][:timestamp] >= 1000 * 60 * 60 * 24
-          chunk_too_big = new_chunk.inject(0) {|sum, e| sum + e[:message].
+          chunk_too_big = new_chunk.inject(0) {|sum, e| sum + e[:message].bytesize + 26 } > MAX_EVENTS_SIZE
          chunk_too_long = @max_events_per_batch && chunk.size >= @max_events_per_batch
          if chunk_too_big or chunk_span_too_big or chunk_too_long
            put_events(group_name, stream_name, chunk)
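The batching change above switches the size accounting from character count to `bytesize`, matching the PutLogEvents limits: a batch is capped by total payload size (each event costing its UTF-8 byte length plus 26 bytes of overhead), by a 24-hour spread between the first and last event, and now also by `max_events_per_batch`. Below is a rough standalone sketch of that splitting rule; the helper name and constants are made up for illustration, with `MAX_EVENTS_SIZE` assumed to be the roughly 1 MiB batch limit the plugin defines elsewhere, and events assumed to be sorted by timestamp.

```ruby
MAX_EVENTS_SIZE = 1_048_576  # assumption: PutLogEvents batch payload limit in bytes
PER_EVENT_OVERHEAD = 26      # bytes the API counts per event on top of the message
ONE_DAY_MS = 1000 * 60 * 60 * 24

# Group events (hashes with :timestamp in ms and :message) into batches that
# respect the size, 24-hour-span, and max-count constraints used above.
def split_into_batches(events, max_events_per_batch: 10_000)
  batches = []
  chunk = []
  events.each do |event|
    candidate = chunk + [event]
    too_big  = candidate.inject(0) { |sum, e| sum + e[:message].bytesize + PER_EVENT_OVERHEAD } > MAX_EVENTS_SIZE
    too_wide = candidate.size > 1 &&
               candidate[-1][:timestamp] - candidate[0][:timestamp] >= ONE_DAY_MS
    too_long = chunk.size >= max_events_per_batch
    if (too_big || too_wide || too_long) && !chunk.empty?
      batches << chunk
      chunk = []
    end
    chunk << event
  end
  batches << chunk unless chunk.empty?
  batches
end

# Example: two tiny events land in a single batch.
events = [
  { timestamp: 0, message: 'hello' },
  { timestamp: 1, message: 'world' },
]
p split_into_batches(events).size  # => 1
```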
data/test/plugin/test_in_cloudwatch_logs.rb
CHANGED
@@ -22,6 +22,7 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
       tag test
       log_group_name group
       log_stream_name stream
+      use_log_stream_name_prefix true
       state_file /tmp/state
     EOC
 
@@ -31,6 +32,7 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
     assert_equal('test', d.instance.tag)
     assert_equal('group', d.instance.log_group_name)
     assert_equal('stream', d.instance.log_stream_name)
+    assert_equal(true, d.instance.use_log_stream_name_prefix)
     assert_equal('/tmp/state', d.instance.state_file)
   end
 
@@ -93,6 +95,48 @@ class CloudwatchLogsInputTest < Test::Unit::TestCase
     assert_equal({'cloudwatch' => 'logs2'}, emits[1][2])
   end
 
+  def test_emit_with_prefix
+    new_log_stream("testprefix")
+    create_log_stream
+
+    time_ms = (Time.now.to_f * 1000).floor
+    put_log_events([
+      {timestamp: time_ms, message: '{"cloudwatch":"logs1"}'},
+      {timestamp: time_ms, message: '{"cloudwatch":"logs2"}'},
+    ])
+
+    new_log_stream("testprefix")
+    create_log_stream
+    put_log_events([
+      {timestamp: time_ms, message: '{"cloudwatch":"logs3"}'},
+      {timestamp: time_ms, message: '{"cloudwatch":"logs4"}'},
+    ])
+
+    sleep 5
+
+    d = create_driver(<<-EOC)
+      tag test
+      type cloudwatch_logs
+      log_group_name #{log_group_name}
+      log_stream_name testprefix
+      use_log_stream_name_prefix true
+      state_file /tmp/state
+      #{aws_key_id}
+      #{aws_sec_key}
+      #{region}
+    EOC
+    d.run do
+      sleep 5
+    end
+
+    emits = d.emits
+    assert_equal(4, emits.size)
+    assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs1'}], emits[0])
+    assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs2'}], emits[1])
+    assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs3'}], emits[2])
+    assert_equal(['test', (time_ms / 1000).floor, {'cloudwatch' => 'logs4'}], emits[3])
+  end
+
   private
   def default_config
     <<-EOC
data/test/plugin/test_out_cloudwatch_logs.rb
CHANGED
@@ -12,7 +12,6 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
 
   def teardown
     clear_log_group
-    FileUtils.rm_f(sequence_token_file)
   end
 
 
@@ -54,6 +53,22 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
     assert_equal('{"cloudwatch":"logs2"}', events[1].message)
   end
 
+  def test_write_utf8
+    new_log_stream
+
+    d = create_driver
+    time = Time.now
+    d.emit({'cloudwatch' => 'これは日本語です'.force_encoding('UTF-8')}, time.to_i)
+    d.run
+
+    sleep 20
+
+    events = get_log_events
+    assert_equal(1, events.size)
+    assert_equal(time.to_i * 1000, events[0].timestamp)
+    assert_equal('{"cloudwatch":"これは日本語です"}', events[0].message)
+  end
+
   def test_write_24h_apart
     new_log_stream
 
@@ -171,13 +186,59 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
     assert_equal('message2 logs2', events[1].message)
   end
 
+  def test_include_time_key
+    new_log_stream
+
+    d = create_driver(<<-EOC)
+      #{default_config}
+      include_time_key true
+    EOC
+
+    time = Time.now
+    d.emit({'cloudwatch' => 'logs1'}, time.to_i)
+    d.emit({'cloudwatch' => 'logs2'}, time.to_i + 1)
+    d.run
+
+    sleep 20
+
+    events = get_log_events
+    assert_equal(2, events.size)
+    assert_equal(time.to_i * 1000, events[0].timestamp)
+    assert_equal("{\"cloudwatch\":\"logs1\",\"time\":\"#{time.utc.strftime("%Y-%m-%dT%H:%M:%SZ")}\"}", events[0].message)
+    assert_equal((time.to_i + 1) * 1000, events[1].timestamp)
+    assert_equal("{\"cloudwatch\":\"logs2\",\"time\":\"#{(time+1).utc.strftime("%Y-%m-%dT%H:%M:%SZ")}\"}", events[1].message)
+  end
+
+  def test_include_time_key_localtime
+    new_log_stream
+
+    d = create_driver(<<-EOC)
+      #{default_config}
+      include_time_key true
+      localtime true
+    EOC
+
+    time = Time.now
+    d.emit({'cloudwatch' => 'logs1'}, time.to_i)
+    d.emit({'cloudwatch' => 'logs2'}, time.to_i + 1)
+    d.run
+
+    sleep 20
+
+    events = get_log_events
+    assert_equal(2, events.size)
+    assert_equal(time.to_i * 1000, events[0].timestamp)
+    assert_equal("{\"cloudwatch\":\"logs1\",\"time\":\"#{time.strftime("%Y-%m-%dT%H:%M:%S%:z")}\"}", events[0].message)
+    assert_equal((time.to_i + 1) * 1000, events[1].timestamp)
+    assert_equal("{\"cloudwatch\":\"logs2\",\"time\":\"#{(time+1).strftime("%Y-%m-%dT%H:%M:%S%:z")}\"}", events[1].message)
+  end
+
   private
   def default_config
     <<-EOC
       type cloudwatch_logs
       log_group_name #{log_group_name}
       log_stream_name #{log_stream_name}
-      sequence_token_file #{sequence_token_file}
       auto_create_stream true
       #{aws_key_id}
       #{aws_sec_key}
@@ -185,11 +246,6 @@ class CloudwatchLogsOutputTest < Test::Unit::TestCase
     EOC
   end
 
-  def sequence_token_file
-    File.expand_path('../../tmp/sequence_token', __FILE__)
-  end
-
-
   def create_driver(conf = default_config)
     Fluent::Test::BufferedOutputTestDriver.new(Fluent::CloudwatchLogsOutput, fluentd_tag).configure(conf)
   end
data/test/test_helper.rb
CHANGED
@@ -29,15 +29,15 @@ module CloudwatchLogsTestHelper
     "region #{ENV['region']}" if ENV['region']
   end
 
-  def log_stream_name
+  def log_stream_name(log_stream_name_prefix = nil)
     if !@log_stream_name
-      new_log_stream
+      new_log_stream(log_stream_name_prefix)
     end
     @log_stream_name
   end
 
-  def new_log_stream
-    @log_stream_name = Time.now.to_f.to_s
+  def new_log_stream(log_stream_name_prefix = nil)
+    @log_stream_name = log_stream_name_prefix ? log_stream_name_prefix + Time.now.to_f.to_s : Time.now.to_f.to_s
   end
 
   def clear_log_group
@@ -54,7 +54,7 @@ module CloudwatchLogsTestHelper
     @fluentd_tag ||= "fluent.plugin.cloudwatch.test.#{Time.now.to_f}"
   end
 
-  def create_log_stream
+  def create_log_stream()
     begin
       logs.create_log_group(log_group_name: log_group_name)
     rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-cloudwatch-logs
 version: !ruby/object:Gem::Version
-  version: 0.1.1
+  version: 0.1.2
 platform: ruby
 authors:
 - Ryota Arai
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-10-
+date: 2015-10-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
|