logstash-output-awslogs 0.1.6 → 0.1.11
- checksums.yaml +4 -4
- data/lib/logstash/outputs/awslogs.rb +70 -41
- data/logstash-output-awslogs.gemspec +1 -1
- metadata +1 -1
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d7e7913d5155ad17f7f7847328870577b6e066f1
+  data.tar.gz: 78ec8c4bfe9b9644e288d6a39a6c6390ba606f35
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6d513aa414dc6cb74f1fccd41aceed4292453aee645195bb8bcbc39436ba20f78d944913be4149303b9e671bfc571b6623da5b3da2c96dfd6c01240141ff4a30
+  data.tar.gz: 4b932c235143fb4588619226455ec9f1d69856c896a030991c42e7df13ea5cb56e1829a9f4ee1f1e0f3a343036d3ee72e5fe8f1dc247070cae358bab6f760d20
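The values above are the SHA1 and SHA512 digests of the archives packed inside the published .gem. A minimal verification sketch for a downloaded 0.1.11 gem, assuming the archive has been unpacked in the current directory (for example with tar -xf logstash-output-awslogs-0.1.11.gem followed by gunzip checksums.yaml.gz):

# Hedged sketch: compare data.tar.gz against the SHA512 recorded in checksums.yaml.
require 'digest'
require 'yaml'

expected = YAML.safe_load(File.read('checksums.yaml'))['SHA512']['data.tar.gz']
actual   = Digest::SHA512.file('data.tar.gz').hexdigest

puts(actual == expected ? 'data.tar.gz checksum OK' : 'checksum MISMATCH')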
data/lib/logstash/outputs/awslogs.rb
CHANGED

@@ -1,8 +1,9 @@
-#
-
-require
-require
-require
+# frozen_string_literal: true
+
+require 'logstash/outputs/base'
+require 'logstash/namespace'
+require 'logstash/plugin_mixins/aws_config'
+require 'aws-sdk'

 Aws.eager_autoload!

@@ -10,35 +11,74 @@ Aws.eager_autoload!
 class LogStash::Outputs::Awslogs < LogStash::Outputs::Base
   include LogStash::PluginMixins::AwsConfig::V2

-  config_name
-  default :codec,
+  config_name 'awslogs'
+  default :codec, 'line'

-  config :log_group_name, :
-  config :log_stream_name, :
+  config :log_group_name, validate: :string, required: true
+  config :log_stream_name, validate: :string, required: true

   public
+
   def register
     @client = Aws::CloudWatchLogs::Client.new(aws_options_hash)
     @next_sequence_tokens = {}
   end # def register

   public
+
   def multi_receive_encoded(events_and_encoded)
     to_send = {}
+    sequence_tokens = {}

-    events_and_encoded.each do |event,
+    events_and_encoded.each do |event, _encoded|
       event_log_stream_name = event.sprintf(log_stream_name)
       event_log_group_name = event.sprintf(log_group_name)

       next_sequence_token_key = [event_log_group_name, event_log_stream_name]
-
+      unless to_send.keys.include? next_sequence_token_key
         to_send.store(next_sequence_token_key, [])
       end
-      to_send[next_sequence_token_key].push(
+      to_send[next_sequence_token_key].push(
         timestamp: (event.timestamp.time.to_f * 1000).to_int,
-        message: event.get(
-
+        message: event.get('message')
+      )
+    end
+
+    to_send.each do |event_log_names, _events|
+      event_log_group_name = event_log_names[0]
+      event_log_stream_name = event_log_names[1]
+      next if sequence_tokens.keys.include? event_log_group_name
+
+      sequence_tokens.store(event_log_group_name, {})
+      begin
+        @client.describe_log_streams(log_group_name: event_log_group_name).each do |response|
+          response.log_streams.each do |log_stream_data|
+            sequence_tokens[event_log_group_name][log_stream_data.log_stream_name.to_s] = log_stream_data.upload_sequence_token.to_s
+          end
+        end
+      rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => e
+        @logger.info('Will create log group/stream and retry')
+        begin
+          @client.create_log_group(log_group_name: event_log_group_name)
+        rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
+          @logger.info("Log group #{event_log_group_name} already exists")
+        rescue Exception => e
+          @logger.error(e)
+        end
+        begin
+          @client.create_log_stream(log_group_name: event_log_group_name, log_stream_name: event_log_stream_name)
+        rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
+          @logger.info("Log stream #{event_log_stream_name} already exists")
+        rescue Exception => e
+          @logger.error(e)
+        end
+        retry
+      rescue Aws::CloudWatchLogs::Errors::ThrottlingException => e
+        @logger.info('Logs throttling, retry')
+        retry
+      end
     end
+
     to_send.each do |event_log_names, log_events|
       event_log_group_name = event_log_names[0]
       event_log_stream_name = event_log_names[1]
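The largest addition in 0.1.11 is the pre-fetch pass just above: before sending anything, the plugin walks describe_log_streams once per log group and caches each stream's upload_sequence_token, creating the missing group and stream on ResourceNotFoundException and retrying on throttling. A standalone sketch of that cache-building step, assuming a configured Aws::CloudWatchLogs::Client; the region and group name are placeholders, not plugin values:

# Build a { group => { stream_name => sequence_token } } cache, as the plugin does.
require 'aws-sdk'

client = Aws::CloudWatchLogs::Client.new(region: 'us-east-1') # placeholder region
sequence_tokens = { 'my-app-logs' => {} }                     # placeholder group name

# describe_log_streams returns a pageable response; iterating it yields each page.
client.describe_log_streams(log_group_name: 'my-app-logs').each do |page|
  page.log_streams.each do |stream|
    # upload_sequence_token is nil for streams that have never received events;
    # to_s turns that into an empty string, matching the plugin's cache.
    sequence_tokens['my-app-logs'][stream.log_stream_name.to_s] = stream.upload_sequence_token.to_s
  end
end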
@@ -46,41 +86,30 @@ class LogStash::Outputs::Awslogs < LogStash::Outputs::Base

       ident_opts = {
         log_group_name: event_log_group_name,
-        log_stream_name: event_log_stream_name
+        log_stream_name: event_log_stream_name
       }
-      send_opts = ident_opts.merge(
-        log_events: log_events
-
+      send_opts = ident_opts.merge(
+        log_events: log_events
+      )

       if @next_sequence_tokens.keys.include? next_sequence_token_key
         send_opts[:sequence_token] = @next_sequence_tokens[next_sequence_token_key]
+      elsif sequence_tokens[event_log_group_name].keys.include? event_log_stream_name
+        send_opts[:sequence_token] = sequence_tokens[event_log_group_name][event_log_stream_name]
       else
         begin
-
-            log_group_name: event_log_group_name,
-            log_stream_name_prefix: event_log_stream_name,
-          })
-          if resp.log_streams.length < 1
-            @client.create_log_stream(ident_opts)
-          else
-            resp.log_streams.each do |log_stream_data|
-              if log_stream_data.log_stream_name == event_log_stream_name
-                send_opts[:sequence_token] = log_stream_data.upload_sequence_token
-                break
-              end
-            end
-          end
+          @client.create_log_stream(ident_opts)
         rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => e
-          @logger.info(
+          @logger.info('Will create log group/stream and retry')
           begin
-            @client.create_log_group(:
+            @client.create_log_group(log_group_name: send_opts[:log_group_name])
           rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
             @logger.info("Log group #{send_opts[:log_group_name]} already exists")
           rescue Exception => e
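With that cache in place, the sequence token for each (group, stream) batch is chosen by the if/elsif/else just above: a token remembered from an earlier put_log_events in this process wins, then a token pre-fetched via describe_log_streams, and otherwise the stream is treated as new and created without a token. A self-contained sketch of that lookup order; all names and token values are made-up stand-ins:

# Token lookup order used by the plugin, shown with hypothetical data.
next_sequence_tokens = { ['my-app-logs', 'web-1'] => 'token-from-earlier-put' }
sequence_tokens      = { 'my-app-logs' => { 'web-2' => 'token-from-describe' } }

def pick_token(key, remembered, prefetched)
  group, stream = key
  return remembered[key] if remembered.key?(key)
  return prefetched[group][stream] if prefetched.fetch(group, {}).key?(stream)
  nil # brand-new stream: create it and send the first batch without a token
end

pick_token(['my-app-logs', 'web-1'], next_sequence_tokens, sequence_tokens) # => "token-from-earlier-put"
pick_token(['my-app-logs', 'web-2'], next_sequence_tokens, sequence_tokens) # => "token-from-describe"
pick_token(['my-app-logs', 'web-3'], next_sequence_tokens, sequence_tokens) # => nil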
@@ -88,23 +117,23 @@ class LogStash::Outputs::Awslogs < LogStash::Outputs::Base
           end
           retry
         rescue Aws::CloudWatchLogs::Errors::ThrottlingException => e
-          @logger.info(
+          @logger.info('Logs throttling, retry')
           retry
         end
       end
       begin
         resp = @client.put_log_events(send_opts)
       rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => e
-        @logger.info(
+        @logger.info('Will create log group/stream and retry')
         begin
-          @client.create_log_group(:
+          @client.create_log_group(log_group_name: send_opts[:log_group_name])
         rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
           @logger.info("Log group #{send_opts[:log_group_name]} already exists")
         rescue Exception => e
           @logger.error(e)
         end
         begin
-          @client.create_log_stream(:
+          @client.create_log_stream(log_group_name: send_opts[:log_group_name], log_stream_name: send_opts[:log_stream_name])
         rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
           @logger.info("Log stream #{send_opts[:log_stream_name]} already exists")
         rescue Exception => e
@@ -112,11 +141,11 @@ class LogStash::Outputs::Awslogs < LogStash::Outputs::Base
         end
         retry
         # TODO: handle rejected events with debug message
-        @next_sequence_tokens.store(next_sequence_token_key, resp.next_sequence_token)
       rescue Aws::CloudWatchLogs::Errors::ThrottlingException => e
-        @logger.info(
+        @logger.info('Logs throttling, retry')
         retry
       end
+      @next_sequence_tokens.store(next_sequence_token_key, resp.next_sequence_token)
     end
   end # def multi_receive_encoded
 end # class LogStash::Outputs::Awslogs
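All of this feeds put_log_events, whose protocol drives the token bookkeeping: a call for an existing stream must carry that stream's current sequence token, and the next_sequence_token in the response is the one to use on the following call, which is why the rewritten code stores it in @next_sequence_tokens only after a successful send. A minimal end-to-end sketch with placeholder names:

# Send one batch and remember the token for the next one, as the plugin does.
require 'aws-sdk'

client = Aws::CloudWatchLogs::Client.new(region: 'us-east-1') # placeholder region

resp = client.put_log_events(
  log_group_name: 'my-app-logs',   # placeholder group
  log_stream_name: 'web-1',        # placeholder stream
  # sequence_token: '...',         # required once the stream has been written to
  log_events: [
    { timestamp: (Time.now.to_f * 1000).to_i, message: 'hello from logstash' }
  ]
)

next_token = resp.next_sequence_token # pass this as :sequence_token on the next call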