fluent-plugin-cloudwatch-logs 0.7.1.pre.1
- checksums.yaml +7 -0
- data/.gitignore +22 -0
- data/Gemfile +4 -0
- data/ISSUE_TEMPLATE.md +21 -0
- data/LICENSE.txt +22 -0
- data/README.md +314 -0
- data/Rakefile +9 -0
- data/example/fluentd.conf +23 -0
- data/fluent-plugin-cloudwatch-logs.gemspec +28 -0
- data/lib/fluent/plugin/cloudwatch/logs.rb +11 -0
- data/lib/fluent/plugin/cloudwatch/logs/version.rb +9 -0
- data/lib/fluent/plugin/in_cloudwatch_logs.rb +194 -0
- data/lib/fluent/plugin/out_cloudwatch_logs.rb +468 -0
- data/test/plugin/test_in_cloudwatch_logs.rb +241 -0
- data/test/plugin/test_out_cloudwatch_logs.rb +749 -0
- data/test/test_helper.rb +105 -0
- metadata +161 -0
data/lib/fluent/plugin/in_cloudwatch_logs.rb
@@ -0,0 +1,194 @@
require 'date'
require 'fluent/plugin/input'
require 'fluent/plugin/parser'
require 'yajl'

module Fluent::Plugin
  class CloudwatchLogsInput < Input
    Fluent::Plugin.register_input('cloudwatch_logs', self)

    helpers :parser, :thread, :compat_parameters

    config_param :aws_key_id, :string, default: nil, secret: true
    config_param :aws_sec_key, :string, default: nil, secret: true
    config_param :aws_use_sts, :bool, default: false
    config_param :aws_sts_role_arn, :string, default: nil
    config_param :aws_sts_session_name, :string, default: 'fluentd'
    config_param :region, :string, default: nil
    config_param :endpoint, :string, default: nil
    config_param :tag, :string
    config_param :log_group_name, :string
    config_param :log_stream_name, :string, default: nil
    config_param :use_log_stream_name_prefix, :bool, default: false
    config_param :state_file, :string
    config_param :fetch_interval, :time, default: 60
    config_param :http_proxy, :string, default: nil
    config_param :json_handler, :enum, list: [:yajl, :json], default: :yajl
    config_param :use_todays_log_stream, :bool, default: false

    config_section :parse do
      config_set_default :@type, 'none'
    end

    def initialize
      super

      require 'aws-sdk-cloudwatchlogs'
    end

    def configure(conf)
      compat_parameters_convert(conf, :parser)
      super
      configure_parser(conf)
    end

    def start
      super
      options = {}
      options[:region] = @region if @region
      options[:endpoint] = @endpoint if @endpoint
      options[:http_proxy] = @http_proxy if @http_proxy

      if @aws_use_sts
        Aws.config[:region] = options[:region]
        options[:credentials] = Aws::AssumeRoleCredentials.new(
          role_arn: @aws_sts_role_arn,
          role_session_name: @aws_sts_session_name
        )
      else
        options[:credentials] = Aws::Credentials.new(@aws_key_id, @aws_sec_key) if @aws_key_id && @aws_sec_key
      end

      @logs = Aws::CloudWatchLogs::Client.new(options)

      @finished = false
      thread_create(:in_cloudwatch_logs_runner, &method(:run))

      @json_handler = case @json_handler
                      when :yajl
                        Yajl
                      when :json
                        JSON
                      end
    end

    def shutdown
      @finished = true
      super
    end

    private

    def configure_parser(conf)
      @parser = parser_create if conf['format']
    end
    def state_file_for(log_stream_name)
      return "#{@state_file}_#{log_stream_name.gsub(File::SEPARATOR, '-')}" if log_stream_name
      @state_file
    end

    def next_token(log_stream_name)
      return nil unless File.exist?(state_file_for(log_stream_name))
      File.read(state_file_for(log_stream_name)).chomp
    end

    def store_next_token(token, log_stream_name = nil)
      # File.open rather than Kernel#open: the path embeds stream names, and
      # Kernel#open would treat a leading "|" as a command to spawn.
      File.open(state_file_for(log_stream_name), 'w') do |f|
        f.write token
      end
    end
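A quick illustration of how these three helpers round-trip the pagination token through the state file; the prefix, stream name, and token string below are invented example values, not plugin defaults:

# Hedged sketch: per-stream state files persist the get_log_events token.
state_file = '/tmp/cwlogs.state'
stream     = 'web/frontend/i-0123456789'

# state_file_for flattens File::SEPARATOR in stream names:
path = "#{state_file}_#{stream.gsub('/', '-')}"
# => "/tmp/cwlogs.state_web-frontend-i-0123456789"

File.open(path, 'w') {|f| f.write 'f/3141592653589793238' }
token = File.exist?(path) ? File.read(path).chomp : nil
# On the next poll, get_events resumes from this token instead of the stream head.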
    def run
      @next_fetch_time = Time.now

      until @finished
        if Time.now > @next_fetch_time
          @next_fetch_time += @fetch_interval

          if @use_log_stream_name_prefix || @use_todays_log_stream
            log_stream_name_prefix = @use_todays_log_stream ? get_todays_date : @log_stream_name
            begin
              log_streams = describe_log_streams(log_stream_name_prefix)
              log_streams.concat(describe_log_streams(get_yesterdays_date)) if @use_todays_log_stream
              log_streams.each do |log_stream|
                log_stream_name = log_stream.log_stream_name
                events = get_events(log_stream_name)
                events.each do |event|
                  emit(log_stream_name, event)
                end
              end
            rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException
              log.warn "'#{@log_stream_name}' prefixed log stream(s) are not found"
              next
            end
          else
            events = get_events(@log_stream_name)
            events.each do |event|
              # @log_stream_name, not the bare local: `log_stream_name` is never
              # assigned on this branch and would raise NameError.
              emit(@log_stream_name, event)
            end
          end
        end
        sleep 1
      end
    end
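The loop above wakes once a second and fetches only after the deadline passes; advancing @next_fetch_time by the interval, rather than resetting it to Time.now, keeps the schedule from drifting when a fetch runs long. A standalone sketch of that pacing, with a shortened interval for demonstration:

# Hedged sketch of the fixed-interval pacing in #run (interval shortened to 3s).
fetch_interval  = 3
next_fetch_time = Time.now

10.times do
  if Time.now > next_fetch_time
    next_fetch_time += fetch_interval   # not Time.now + fetch_interval
    puts "fetch at #{Time.now}; next due #{next_fetch_time}"
  end
  sleep 1
end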
    def emit(stream, event)
      if @parser
        @parser.parse(event.message) {|time, record|
          router.emit(@tag, time, record)
        }
      else
        # CloudWatch timestamps are epoch milliseconds; fluentd expects seconds.
        time = (event.timestamp / 1000).floor
        record = @json_handler.load(event.message)
        router.emit(@tag, time, record)
      end
    end
    def get_events(log_stream_name)
      request = {
        log_group_name: @log_group_name,
        log_stream_name: log_stream_name
      }
      log_next_token = next_token(log_stream_name)
      request[:next_token] = log_next_token if !log_next_token.nil? && !log_next_token.empty?
      response = @logs.get_log_events(request)
      if valid_next_token(log_next_token, response.next_forward_token)
        store_next_token(response.next_forward_token, log_stream_name)
      end

      response.events
    end

    def describe_log_streams(log_stream_name_prefix, log_streams = nil, next_token = nil)
      request = {
        log_group_name: @log_group_name
      }
      request[:next_token] = next_token if next_token
      request[:log_stream_name_prefix] = log_stream_name_prefix
      response = @logs.describe_log_streams(request)
      if log_streams
        log_streams.concat(response.log_streams)
      else
        log_streams = response.log_streams
      end
      if response.next_token
        log_streams = describe_log_streams(log_stream_name_prefix, log_streams, response.next_token)
      end
      log_streams
    end
    def valid_next_token(prev_token, next_token)
      # Check for nil before calling #chomp; the original order raised
      # NoMethodError whenever the response carried no token.
      !next_token.nil? && prev_token != next_token.chomp
    end
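CloudWatch returns the same forward token once a stream is exhausted, so this comparison is what stops the plugin from rewriting the state file on every empty poll. An illustration with invented token strings:

# Hedged sketch: why the token comparison above matters. Tokens are opaque
# strings; these values are made up.
stored = 'f/00000001'

# Poll 1: new events arrived, the API hands back a fresh forward token.
response_token = 'f/00000002'
puts 'persist token' if !response_token.nil? && stored != response_token.chomp

# Poll 2: nothing new; CloudWatch echoes the same forward token back.
stored = 'f/00000002'
response_token = 'f/00000002'
puts 'persist token' if !response_token.nil? && stored != response_token.chomp  # no output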
    def get_todays_date
      Date.today.strftime("%Y/%m/%d")
    end

    def get_yesterdays_date
      (Date.today - 1).strftime("%Y/%m/%d")
    end
  end
end
data/lib/fluent/plugin/out_cloudwatch_logs.rb
@@ -0,0 +1,468 @@
require 'fluent/plugin/output'
require 'thread'
require 'yajl'
# Debug-only dependency: this pre-release wraps every buffer flush in a
# MemoryProfiler report (see #write below).
require 'memory_profiler'

module Fluent::Plugin
  class CloudwatchLogsOutput < Output
    include Fluent::MessagePackFactory::Mixin
    Fluent::Plugin.register_output('cloudwatch_logs', self)

    helpers :compat_parameters, :inject

    DEFAULT_BUFFER_TYPE = "memory"

    config_param :aws_key_id, :string, default: nil, secret: true
    config_param :aws_sec_key, :string, default: nil, secret: true
    config_param :aws_instance_profile_credentials_retries, :integer, default: nil
    config_param :aws_use_sts, :bool, default: false
    config_param :aws_sts_role_arn, :string, default: nil
    config_param :aws_sts_session_name, :string, default: 'fluentd'
    config_param :region, :string, default: nil
    config_param :endpoint, :string, default: nil
    config_param :log_group_name, :string, default: nil
    config_param :log_stream_name, :string, default: nil
    config_param :auto_create_stream, :bool, default: false
    config_param :message_keys, :array, default: [], value_type: :string
    config_param :max_message_length, :integer, default: nil
    config_param :max_events_per_batch, :integer, default: 10000
    config_param :use_tag_as_group, :bool, default: false # TODO: Rename to use_tag_as_group_name ?
    config_param :use_tag_as_stream, :bool, default: false # TODO: Rename to use_tag_as_stream_name ?
    config_param :log_group_name_key, :string, default: nil
    config_param :log_stream_name_key, :string, default: nil
    config_param :remove_log_group_name_key, :bool, default: false
    config_param :remove_log_stream_name_key, :bool, default: false
    config_param :http_proxy, :string, default: nil
    config_param :put_log_events_retry_wait, :time, default: 1.0
    config_param :put_log_events_retry_limit, :integer, default: 17
    config_param :put_log_events_disable_retry_limit, :bool, default: false
    config_param :concurrency, :integer, default: 1
    config_param :log_group_aws_tags, :hash, default: nil
    config_param :log_group_aws_tags_key, :string, default: nil
    config_param :remove_log_group_aws_tags_key, :bool, default: false
    config_param :retention_in_days, :integer, default: nil
    config_param :retention_in_days_key, :string, default: nil
    config_param :remove_retention_in_days, :bool, default: false
    config_param :json_handler, :enum, list: [:yajl, :json], default: :yajl

    config_section :buffer do
      config_set_default :@type, DEFAULT_BUFFER_TYPE
    end

    # PutLogEvents limits: 1 MiB per batch, 256 KiB per event, plus a fixed
    # 26-byte overhead charged against the batch for every event.
    MAX_EVENTS_SIZE = 1_048_576
    MAX_EVENT_SIZE = 256 * 1024
    EVENT_HEADER_SIZE = 26
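These constants drive the batching arithmetic in put_events_by_chunk further down: each event is charged its UTF-8 message size plus a 26-byte header against the 1,048,576-byte batch budget. A small worked example with invented messages:

# Hedged sketch of the PutLogEvents size accounting.
EVENT_HEADER_SIZE = 26
MAX_EVENTS_SIZE   = 1_048_576

messages = ['{"level":"info"}', '{"level":"error","trace":"..."}']
total = messages.sum {|m| m.bytesize + EVENT_HEADER_SIZE }
# 16 + 26 + 31 + 26 = 99 bytes of the 1,048,576-byte budget
puts "#{total} / #{MAX_EVENTS_SIZE} bytes used"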
    def initialize
      super

      require 'aws-sdk-cloudwatchlogs'
    end

    def configure(conf)
      compat_parameters_convert(conf, :buffer, :inject)
      super

      unless [conf['log_group_name'], conf['use_tag_as_group'], conf['log_group_name_key']].compact.size == 1
        raise Fluent::ConfigError, "Set only one of log_group_name, use_tag_as_group and log_group_name_key"
      end

      unless [conf['log_stream_name'], conf['use_tag_as_stream'], conf['log_stream_name_key']].compact.size == 1
        raise Fluent::ConfigError, "Set only one of log_stream_name, use_tag_as_stream and log_stream_name_key"
      end

      # Fully qualified Fluent::ConfigError below: the bare ConfigError constant
      # is not resolvable from Fluent::Plugin and raised NameError instead.
      if [conf['log_group_aws_tags'], conf['log_group_aws_tags_key']].compact.size > 1
        raise Fluent::ConfigError, "Set only one of log_group_aws_tags, log_group_aws_tags_key"
      end

      if [conf['retention_in_days'], conf['retention_in_days_key']].compact.size > 1
        raise Fluent::ConfigError, "Set only one of retention_in_days, retention_in_days_key"
      end
    end
    def start
      super

      options = {}
      options[:region] = @region if @region
      options[:endpoint] = @endpoint if @endpoint
      options[:instance_profile_credentials_retries] = @aws_instance_profile_credentials_retries if @aws_instance_profile_credentials_retries

      if @aws_use_sts
        Aws.config[:region] = options[:region]
        options[:credentials] = Aws::AssumeRoleCredentials.new(
          role_arn: @aws_sts_role_arn,
          role_session_name: @aws_sts_session_name
        )
      else
        options[:credentials] = Aws::Credentials.new(@aws_key_id, @aws_sec_key) if @aws_key_id && @aws_sec_key
      end
      options[:http_proxy] = @http_proxy if @http_proxy
      @logs ||= Aws::CloudWatchLogs::Client.new(options)
      @sequence_tokens = {}
      @store_next_sequence_token_mutex = Mutex.new

      @json_handler = case @json_handler
                      when :yajl
                        Yajl
                      when :json
                        JSON
                      end
    end
    def format(tag, time, record)
      record = inject_values_to_record(tag, time, record)
      msgpack_packer.pack([tag, time, record]).to_s
    end

    def formatted_to_msgpack_binary?
      true
    end

    def multi_workers_ready?
      true
    end
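format serializes each entry as a msgpack-encoded [tag, time, record] triple, which write later replays via msgpack_each. A standalone round-trip using the msgpack gem directly instead of fluentd's msgpack_packer helper (the sample record is invented):

# Hedged sketch of the [tag, time, record] round-trip.
require 'msgpack'

packed = MessagePack.pack(['app.access', 1_500_000_000, {'message' => 'hello'}])
tag, time, record = MessagePack.unpack(packed)
# => ["app.access", 1500000000, {"message"=>"hello"}]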
    def write(chunk)
      log_group_name = extract_placeholders(@log_group_name, chunk) if @log_group_name
      log_stream_name = extract_placeholders(@log_stream_name, chunk) if @log_stream_name

      queue = Thread::Queue.new
      # Debug instrumentation left in this pre-release: every flush is profiled
      # and the report is printed (see report.pretty_print below).
      report = MemoryProfiler.report do
        chunk.enum_for(:msgpack_each).select {|tag, time, record|
          if record.nil?
            log.warn "record is nil (tag=#{tag})"
            false
          else
            true
          end
        }.group_by {|tag, time, record|
          group = case
                  when @use_tag_as_group
                    tag
                  when @log_group_name_key
                    if @remove_log_group_name_key
                      record.delete(@log_group_name_key)
                    else
                      record[@log_group_name_key]
                    end
                  else
                    log_group_name
                  end

          stream = case
                   when @use_tag_as_stream
                     tag
                   when @log_stream_name_key
                     if @remove_log_stream_name_key
                       record.delete(@log_stream_name_key)
                     else
                       record[@log_stream_name_key]
                     end
                   else
                     log_stream_name
                   end

          [group, stream]
        }.each {|group_stream, rs|
          group_name, stream_name = group_stream

          if stream_name.nil?
            log.warn "stream_name is nil (group_name=#{group_name})"
            next
          end

          unless log_group_exists?(group_name)
            # rs = [[tag, timestamp, record], [tag, timestamp, record], ...]
            # Take tags and retention from the first record: the log group is
            # created only once, so the first record's values persist.
            record = rs[0][2]

            awstags = @log_group_aws_tags
            unless @log_group_aws_tags_key.nil?
              if @remove_log_group_aws_tags_key
                awstags = record.delete(@log_group_aws_tags_key)
              else
                awstags = record[@log_group_aws_tags_key]
              end
            end

            retention_in_days = @retention_in_days
            unless @retention_in_days_key.nil?
              # @remove_retention_in_days matches the declared config_param; the
              # original tested an undefined @remove_retention_in_days_key, so
              # the key was never actually removed from records.
              if @remove_retention_in_days
                retention_in_days = record.delete(@retention_in_days_key)
              else
                retention_in_days = record[@retention_in_days_key]
              end
            end

            if @auto_create_stream
              create_log_group(group_name, awstags, retention_in_days)
            else
              log.warn "Log group '#{group_name}' does not exist"
              next
            end
          end

          unless log_stream_exists?(group_name, stream_name)
            if @auto_create_stream
              create_log_stream(group_name, stream_name)
            else
              log.warn "Log stream '#{stream_name}' does not exist"
              next
            end
          end

          events = []
          rs.each do |t, time, record|
            time_ms = (time.to_f * 1000).floor

            scrub_record!(record)
            if @message_keys.empty?
              message = @json_handler.dump(record)
            else
              message = @message_keys.map {|k| record[k].to_s }.join(' ')
            end

            message = message.slice(0, @max_message_length) if @max_message_length

            events << {timestamp: time_ms, message: message}
          end
          # The log events in a batch must be in chronological order by timestamp.
          # http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html
          events = events.sort_by {|e| e[:timestamp] }

          queue << [group_name, stream_name, events]
        }

        # One nil sentinel per worker so each thread's queue.shift loop terminates.
        @concurrency.times do
          queue << nil
        end
        threads = @concurrency.times.map do |i|
          Thread.start do
            while job = queue.shift
              group_name, stream_name, events = job
              put_events_by_chunk(group_name, stream_name, events)
            end
          end
        end
        threads.each(&:join)
      end

      report.pretty_print
    end
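The select/group_by pipeline above partitions a flush's [tag, time, record] triples by destination before any API calls are made. A standalone sketch of that grouping step, with invented entries, mimicking log_group_name_key => 'group' and the tag as stream:

# Hedged sketch of the grouping step in #write.
entries = [
  ['web',    1, {'group' => 'app',  'message' => 'a'}],
  ['web',    2, {'group' => 'app',  'message' => 'b'}],
  ['worker', 3, {'group' => 'jobs', 'message' => 'c'}],
]

batches = entries.group_by {|tag, _time, record| [record['group'], tag] }
batches.each do |(group, stream), rs|
  puts "#{group}/#{stream}: #{rs.size} event(s)"
end
# app/web: 2 event(s)
# jobs/worker: 1 event(s)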
    private

    def scrub_record!(record)
      case record
      when Hash
        record.each_value {|v| scrub_record!(v) }
      when Array
        record.each {|v| scrub_record!(v) }
      when String
        # Replace invalid UTF-8 byte sequences in place so JSON encoding cannot fail.
        record.scrub!
      end
    end

    def delete_sequence_token(group_name, stream_name)
      @sequence_tokens[group_name].delete(stream_name)
    end

    def next_sequence_token(group_name, stream_name)
      @sequence_tokens[group_name][stream_name]
    end

    def store_next_sequence_token(group_name, stream_name, token)
      @store_next_sequence_token_mutex.synchronize do
        @sequence_tokens[group_name][stream_name] = token
      end
    end
    def put_events_by_chunk(group_name, stream_name, events)
      chunk = []

      # The maximum batch size is 1,048,576 bytes, calculated as the sum of all
      # event messages in UTF-8 plus 26 bytes per log event.
      # http://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html
      total_bytesize = 0
      while event = events.shift
        event_bytesize = event[:message].bytesize + EVENT_HEADER_SIZE
        if MAX_EVENT_SIZE < event_bytesize
          log.warn "Log event is discarded because it is too large: #{event_bytesize} bytes exceeds limit of #{MAX_EVENT_SIZE}"
          # Skip only the oversized event; the original `break` silently dropped
          # every remaining event in the batch as well.
          next
        end

        new_chunk = chunk + [event]

        chunk_span_too_big = new_chunk.size > 1 && new_chunk[-1][:timestamp] - new_chunk[0][:timestamp] >= 1000 * 60 * 60 * 24
        chunk_too_big = total_bytesize + event_bytesize > MAX_EVENTS_SIZE
        chunk_too_long = @max_events_per_batch && chunk.size >= @max_events_per_batch
        if chunk_too_big || chunk_span_too_big || chunk_too_long
          put_events(group_name, stream_name, chunk, total_bytesize)
          chunk = [event]
          total_bytesize = event_bytesize
        else
          chunk << event
          total_bytesize += event_bytesize
        end
      end

      unless chunk.empty?
        put_events(group_name, stream_name, chunk, total_bytesize)
      end
    end
    def put_events(group_name, stream_name, events, events_bytesize)
      response = nil
      retry_count = 0

      until response
        args = {
          log_events: events,
          log_group_name: group_name,
          log_stream_name: stream_name,
        }

        token = next_sequence_token(group_name, stream_name)
        args[:sequence_token] = token if token

        begin
          t = Time.now
          response = @logs.put_log_events(args)
          log.warn response.rejected_log_events_info if response.rejected_log_events_info
          log.debug "Called PutLogEvents API", {
            "group" => group_name,
            "stream" => stream_name,
            "events_count" => events.size,
            "events_bytesize" => events_bytesize,
            "sequence_token" => token,
            "thread" => Thread.current.object_id,
            "request_sec" => Time.now - t,
          }
        rescue Aws::CloudWatchLogs::Errors::InvalidSequenceTokenException, Aws::CloudWatchLogs::Errors::DataAlreadyAcceptedException => err
          sleep 1 # to avoid too many API calls
          log_stream = find_log_stream(group_name, stream_name)
          store_next_sequence_token(group_name, stream_name, log_stream.upload_sequence_token)
          log.warn "updating upload sequence token forcefully because an unrecoverable error occurred", {
            "error" => err,
            "log_group" => group_name,
            "log_stream" => stream_name,
            # Log the token we just stored; the original logged the stale one.
            "new_sequence_token" => log_stream.upload_sequence_token,
          }
          retry_count += 1
        rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => err
          if @auto_create_stream && err.message == 'The specified log stream does not exist.'
            log.warn 'Creating log stream because a "The specified log stream does not exist." error was returned', {
              "error" => err,
              "log_group" => group_name,
              "log_stream" => stream_name,
            }
            create_log_stream(group_name, stream_name)
            delete_sequence_token(group_name, stream_name)
            retry_count += 1
          else
            raise err
          end
        rescue Aws::CloudWatchLogs::Errors::ThrottlingException => err
          if !@put_log_events_disable_retry_limit && @put_log_events_retry_limit < retry_count
            log.error "failed to PutLogEvents and discard logs because retry count exceeded put_log_events_retry_limit", {
              "error_class" => err.class.to_s,
              "error" => err.message,
            }
            return
          else
            # Exponential backoff with up to +/-12.5% jitter.
            sleep_sec = @put_log_events_retry_wait * (2 ** retry_count)
            sleep_sec += sleep_sec * (0.25 * (rand - 0.5))
            log.warn "failed to PutLogEvents", {
              "next_retry" => Time.now + sleep_sec,
              "error_class" => err.class.to_s,
              "error" => err.message,
            }
            sleep(sleep_sec)
            retry_count += 1
          end
        end
      end

      if 0 < retry_count
        log.warn "retry succeeded"
      end

      store_next_sequence_token(group_name, stream_name, response.next_sequence_token)
    end
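On throttling, the wait doubles per attempt from put_log_events_retry_wait, plus a small random jitter. With the defaults (wait 1.0s, limit 17) the final scheduled wait is about 2^17 = 131,072 seconds, which is why put_log_events_retry_limit matters. A quick table of the schedule, jitter omitted for readability:

# Hedged sketch of the backoff schedule in the ThrottlingException branch.
put_log_events_retry_wait = 1.0

(0..4).each do |retry_count|
  sleep_sec = put_log_events_retry_wait * (2 ** retry_count)
  puts "retry #{retry_count}: wait #{sleep_sec}s"
end
# retry 0: wait 1.0s
# retry 1: wait 2.0s
# retry 2: wait 4.0s
# retry 3: wait 8.0s
# retry 4: wait 16.0s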
    def create_log_group(group_name, log_group_aws_tags = nil, retention_in_days = nil)
      @logs.create_log_group(log_group_name: group_name, tags: log_group_aws_tags)
      unless retention_in_days.nil?
        put_retention_policy(group_name, retention_in_days)
      end
      @sequence_tokens[group_name] = {}
    rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException
      log.debug "Log group '#{group_name}' already exists"
    end

    def put_retention_policy(group_name, retention_in_days)
      @logs.put_retention_policy({
        log_group_name: group_name,
        retention_in_days: retention_in_days
      })
    rescue Aws::CloudWatchLogs::Errors::InvalidParameterException => error
      # Log the message rather than the backtrace; the original interpolated
      # error.backtrace, which buried the actual reason.
      log.warn "failed to set retention policy for Log group '#{group_name}' with error: #{error.message}"
    end
    def create_log_stream(group_name, stream_name)
      @logs.create_log_stream(log_group_name: group_name, log_stream_name: stream_name)
      @sequence_tokens[group_name] ||= {}
      @sequence_tokens[group_name][stream_name] = nil
    rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException
      log.debug "Log stream '#{stream_name}' already exists"
    end

    def log_group_exists?(group_name)
      if @sequence_tokens[group_name]
        true
      elsif @logs.describe_log_groups.any? {|page| page.log_groups.any? {|i| i.log_group_name == group_name } }
        @sequence_tokens[group_name] = {}
        true
      else
        false
      end
    end
    def log_stream_exists?(group_name, stream_name)
      if !@sequence_tokens[group_name]
        false
      elsif @sequence_tokens[group_name].has_key?(stream_name)
        true
      elsif (log_stream = find_log_stream(group_name, stream_name))
        @sequence_tokens[group_name][stream_name] = log_stream.upload_sequence_token
        true
      else
        false
      end
    end

    def find_log_stream(group_name, stream_name)
      next_token = nil
      loop do
        response = @logs.describe_log_streams(log_group_name: group_name, log_stream_name_prefix: stream_name, next_token: next_token)
        if (log_stream = response.log_streams.find {|i| i.log_stream_name == stream_name })
          return log_stream
        end
        break if response.next_token.nil?
        next_token = response.next_token
        sleep 0.1
      end
      nil
    end
  end
end
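A hedged sketch of exercising the output plugin through fluentd's bundled test driver, in the style of test/plugin/test_out_cloudwatch_logs.rb from the file list above; the region, group, and stream values are placeholders, and a real run also needs AWS credentials in the environment:

# Hedged sketch: driving CloudwatchLogsOutput through fluentd's test driver.
require 'fluent/test'
require 'fluent/test/driver/output'
require 'fluent/plugin/out_cloudwatch_logs'

conf = %[
  region us-east-1
  log_group_name example-group
  log_stream_name example-stream
  auto_create_stream true
]

d = Fluent::Test::Driver::Output.new(Fluent::Plugin::CloudwatchLogsOutput).configure(conf)
d.run(default_tag: 'test') do
  d.feed(Time.now.to_i, {'message' => 'hello from fluentd'})
end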