logstash-output-awslogs 0.1.8 → 0.1.13
- checksums.yaml +4 -4
- data/lib/logstash/outputs/awslogs.rb +68 -56
- data/logstash-output-awslogs.gemspec +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3bbaa5d8875a0a0c0953b8f621545f0a7fc6ab40
+  data.tar.gz: b0ed0c04c50f70dc5fbb9af910e05125f634d2a2
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9de2dbffa4ad46c0ad35af85bef3a2927ebdc3228e2ca95645acba67073c322568306214c99e78342312ca8a63adc619538abd375af130badf77ca6ad725a9fb
+  data.tar.gz: 975f7760655ca0c8096b3e9408adf1692a0263b4e8b0fea48c40c343e2066bae6fa508be853ef03231936c183131ddeb3c23d3b0ffdf85411dc16376db522962
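
For reference, the values above are the SHA1 and SHA512 digests of the gem's two inner archives, metadata.gz and data.tar.gz. A minimal sketch (not part of the gem; it assumes the .gem file has already been unpacked, e.g. with tar -xf, so both archives sit in the current directory) to recompute them in Ruby:

# Recompute the checksums.yaml digests from an unpacked .gem archive.
# Assumes metadata.gz and data.tar.gz are in the current directory.
require 'digest'
require 'yaml'

files = %w[metadata.gz data.tar.gz]

checksums = {
  'SHA1'   => files.to_h { |f| [f, Digest::SHA1.file(f).hexdigest] },
  'SHA512' => files.to_h { |f| [f, Digest::SHA512.file(f).hexdigest] }
}

puts checksums.to_yaml
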
data/lib/logstash/outputs/awslogs.rb
CHANGED
@@ -1,8 +1,9 @@
-#
-
-require
-require
-require
+# frozen_string_literal: true
+
+require 'logstash/outputs/base'
+require 'logstash/namespace'
+require 'logstash/plugin_mixins/aws_config'
+require 'aws-sdk'
 
 Aws.eager_autoload!
 
@@ -10,73 +11,74 @@ Aws.eager_autoload!
 class LogStash::Outputs::Awslogs < LogStash::Outputs::Base
   include LogStash::PluginMixins::AwsConfig::V2

-  config_name
-  default :codec,
+  config_name 'awslogs'
+  default :codec, 'line'

-  config :log_group_name, :
-  config :log_stream_name, :
+  config :log_group_name, validate: :string, required: true
+  config :log_stream_name, validate: :string, required: true

   public
+
   def register
     @client = Aws::CloudWatchLogs::Client.new(aws_options_hash)
     @next_sequence_tokens = {}
   end # def register

   public
+
   def multi_receive_encoded(events_and_encoded)
     to_send = {}
     sequence_tokens = {}

-    events_and_encoded.each do |event,
+    events_and_encoded.each do |event, _encoded|
       event_log_stream_name = event.sprintf(log_stream_name)
       event_log_group_name = event.sprintf(log_group_name)

       next_sequence_token_key = [event_log_group_name, event_log_stream_name]
-
+      unless to_send.keys.include? next_sequence_token_key
         to_send.store(next_sequence_token_key, [])
       end
-      to_send[next_sequence_token_key].push(
+      to_send[next_sequence_token_key].push(
         timestamp: (event.timestamp.time.to_f * 1000).to_int,
-        message: event.get(
-
+        message: event.get('message')
+      )
     end

     to_send.each do |event_log_names, _events|
       event_log_group_name = event_log_names[0]
       event_log_stream_name = event_log_names[1]
-
-
-
-
-
-
-
-        end
-      rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => e
-        @logger.info("Will create log group/stream and retry")
-        begin
-          @client.create_log_group(:log_group_name => send_opts[:log_group_name])
-        rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
-          @logger.info("Log group #{send_opts[:log_group_name]} already exists")
-        rescue Exception => e
-          @logger.error(e)
-        end
-        begin
-          @client.create_log_stream(:log_group_name => send_opts[:log_group_name], :log_stream_name => send_opts[:log_stream_name])
-        rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
-          @logger.info("Log stream #{send_opts[:log_stream_name]} already exists")
-        rescue Exception => e
-          @logger.error(e)
+      next if sequence_tokens.keys.include? event_log_group_name
+
+      sequence_tokens.store(event_log_group_name, {})
+      begin
+        @client.describe_log_streams(log_group_name: event_log_group_name).each do |response|
+          response.log_streams.each do |log_stream_data|
+            sequence_tokens[event_log_group_name][log_stream_data.log_stream_name.to_s] = log_stream_data.upload_sequence_token.to_s
           end
-        retry
-      rescue Aws::CloudWatchLogs::Errors::ThrottlingException => e
-        @logger.info("Logs throttling, retry")
-        retry
         end
+      rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => e
+        @logger.info('Will create log group/stream and retry')
+        begin
+          @client.create_log_group(log_group_name: event_log_group_name)
+        rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
+          @logger.info("Log group #{event_log_group_name} already exists")
+        rescue Exception => e
+          @logger.error(e)
+        end
+        begin
+          @client.create_log_stream(log_group_name: event_log_group_name, log_stream_name: event_log_stream_name)
+        rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
+          @logger.info("Log stream #{event_log_stream_name} already exists")
+        rescue Exception => e
+          @logger.error(e)
+        end
+        retry
+      rescue Aws::CloudWatchLogs::Errors::ThrottlingException => e
+        @logger.info('Logs throttling, retry')
+        retry
       end
     end

-
     to_send.each do |event_log_names, log_events|
       event_log_group_name = event_log_names[0]
       event_log_stream_name = event_log_names[1]
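
The hunk above carries the main behavioural change in 0.1.13: before sending anything, the plugin now pre-fetches each stream's upload_sequence_token with describe_log_streams, and creates missing log groups and streams instead of failing. A standalone sketch of that lookup, separate from the plugin (the region and group name are placeholders; calling .each on the response iterates result pages):

# Illustrative only: collect the upload sequence token for every stream in a group.
require 'aws-sdk-cloudwatchlogs' # the plugin itself requires the monolithic 'aws-sdk'

client = Aws::CloudWatchLogs::Client.new(region: 'us-east-1') # placeholder region

tokens = {}
client.describe_log_streams(log_group_name: 'my-group').each do |page| # 'my-group' is a placeholder
  page.log_streams.each do |stream|
    # upload_sequence_token is nil for a stream that has never received events
    tokens[stream.log_stream_name] = stream.upload_sequence_token
  end
end
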
@@ -84,11 +86,11 @@ class LogStash::Outputs::Awslogs < LogStash::Outputs::Base

       ident_opts = {
         log_group_name: event_log_group_name,
-        log_stream_name: event_log_stream_name
+        log_stream_name: event_log_stream_name
       }
-      send_opts = ident_opts.merge(
-        log_events: log_events
-
+      send_opts = ident_opts.merge(
+        log_events: log_events
+      )

       if @next_sequence_tokens.keys.include? next_sequence_token_key
         send_opts[:sequence_token] = @next_sequence_tokens[next_sequence_token_key]
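
For context, the send_opts hash assembled above is the put_log_events request: the group and stream identifiers, a log_events array of timestamp/message pairs (the timestamps pushed earlier are milliseconds since the epoch, hence the * 1000), and, when known, a sequence_token. A hypothetical example of its shape (all names and values illustrative, not taken from a real event):

# Hypothetical shape of send_opts; names and values are illustrative.
send_opts = {
  log_group_name: 'my-group',
  log_stream_name: 'my-stream',
  log_events: [
    { timestamp: (Time.now.to_f * 1000).to_i, message: 'hello from logstash' }
  ],
  sequence_token: '49640183...' # illustrative; present only when already known
}
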
@@ -96,18 +98,26 @@ class LogStash::Outputs::Awslogs < LogStash::Outputs::Base
         send_opts[:sequence_token] = sequence_tokens[event_log_group_name][event_log_stream_name]
       else
         begin
-
+          @client.create_log_stream(ident_opts)
+          until sequence_tokens[event_log_group_name][event_log_stream_name] do
+            @client.describe_log_streams(log_group_name: event_log_group_name).each do |response|
+              response.log_streams.each do |log_stream_data|
+                sequence_tokens[event_log_group_name][log_stream_data.log_stream_name.to_s] = log_stream_data.upload_sequence_token.to_s
+              end
+            end
+          end
+          send_opts[:sequence_token] = sequence_tokens[event_log_group_name][event_log_stream_name]
         rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => e
-          @logger.info(
+          @logger.info('Will create log group/stream and retry')
           begin
-            @client.create_log_group(:
+            @client.create_log_group(log_group_name: send_opts[:log_group_name])
           rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
             @logger.info("Log group #{send_opts[:log_group_name]} already exists")
           rescue Exception => e
             @logger.error(e)
           end
           begin
-            @client.create_log_stream(:
+            @client.create_log_stream(log_group_name: send_opts[:log_group_name], log_stream_name: send_opts[:log_stream_name])
           rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
             @logger.info("Log stream #{send_opts[:log_stream_name]} already exists")
           rescue Exception => e
@@ -115,23 +125,23 @@ class LogStash::Outputs::Awslogs < LogStash::Outputs::Base
           end
           retry
         rescue Aws::CloudWatchLogs::Errors::ThrottlingException => e
-          @logger.info(
+          @logger.info('Logs throttling, retry')
           retry
         end
       end
       begin
         resp = @client.put_log_events(send_opts)
       rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => e
-        @logger.info(
+        @logger.info('Will create log group/stream and retry')
         begin
-          @client.create_log_group(:
+          @client.create_log_group(log_group_name: send_opts[:log_group_name])
         rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
           @logger.info("Log group #{send_opts[:log_group_name]} already exists")
         rescue Exception => e
           @logger.error(e)
         end
         begin
-          @client.create_log_stream(:
+          @client.create_log_stream(log_group_name: send_opts[:log_group_name], log_stream_name: send_opts[:log_stream_name])
         rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException => e
           @logger.info("Log stream #{send_opts[:log_stream_name]} already exists")
         rescue Exception => e
@@ -139,11 +149,13 @@ class LogStash::Outputs::Awslogs < LogStash::Outputs::Base
         end
         retry
         # TODO: handle rejected events with debug message
-        @next_sequence_tokens.store(next_sequence_token_key, resp.next_sequence_token)
       rescue Aws::CloudWatchLogs::Errors::ThrottlingException => e
-        @logger.info(
+        @logger.info('Logs throttling, retry')
         retry
+      rescue Exception => e
+        @logger.error("Err: #{e}; Opts: #{send_opts.each { |k,v| "#{k.to_s} : #{v.to_s}" }}")
       end
+      @next_sequence_tokens.store(next_sequence_token_key, resp.next_sequence_token)
     end
   end # def multi_receive_encoded
 end # class LogStash::Outputs::Awslogs
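
Taken together, the new multi_receive_encoded batches events per [group, stream] pair, then calls put_log_events, creating the group and stream and retrying on ResourceNotFoundException, retrying on ThrottlingException, and caching resp.next_sequence_token for the next batch. A condensed, standalone sketch of that send path (region, group, and stream names are placeholders; the sleep before retrying on throttling is added here for illustration, the plugin itself retries immediately):

# Condensed, standalone sketch of the 0.1.13 send path (illustrative only).
require 'aws-sdk-cloudwatchlogs'

client = Aws::CloudWatchLogs::Client.new(region: 'us-east-1') # placeholder
send_opts = {
  log_group_name: 'my-group',    # placeholder
  log_stream_name: 'my-stream',  # placeholder
  log_events: [{ timestamp: (Time.now.to_f * 1000).to_i, message: 'example' }]
}

begin
  resp = client.put_log_events(send_opts)
rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException
  # Group or stream is missing: create both, ignore "already exists", then retry.
  begin
    client.create_log_group(log_group_name: send_opts[:log_group_name])
  rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException
  end
  begin
    client.create_log_stream(log_group_name: send_opts[:log_group_name],
                             log_stream_name: send_opts[:log_stream_name])
  rescue Aws::CloudWatchLogs::Errors::ResourceAlreadyExistsException
  end
  retry
rescue Aws::CloudWatchLogs::Errors::ThrottlingException
  sleep 1 # illustration only; the plugin retries without backoff
  retry
end

# The token returned here must be cached and passed as :sequence_token
# on the next put_log_events call for the same stream.
next_token = resp.next_sequence_token
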
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-awslogs
 version: !ruby/object:Gem::Version
-  version: 0.1.8
+  version: 0.1.13
 platform: ruby
 authors:
 - Anton Klyba
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-06-
+date: 2020-06-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash-core-plugin-api
|