logstash-integration-aws 0.1.0.pre
- checksums.yaml +7 -0
- data/CHANGELOG.PRE.MERGE.md +658 -0
- data/CHANGELOG.md +15 -0
- data/CONTRIBUTORS +40 -0
- data/Gemfile +11 -0
- data/LICENSE +202 -0
- data/NOTICE.TXT +5 -0
- data/README.md +205 -0
- data/docs/codec-cloudfront.asciidoc +53 -0
- data/docs/codec-cloudtrail.asciidoc +45 -0
- data/docs/index.asciidoc +38 -0
- data/docs/input-cloudwatch.asciidoc +320 -0
- data/docs/input-s3.asciidoc +346 -0
- data/docs/input-sqs.asciidoc +287 -0
- data/docs/output-cloudwatch.asciidoc +321 -0
- data/docs/output-s3.asciidoc +442 -0
- data/docs/output-sns.asciidoc +166 -0
- data/docs/output-sqs.asciidoc +242 -0
- data/lib/logstash/codecs/cloudfront.rb +84 -0
- data/lib/logstash/codecs/cloudtrail.rb +47 -0
- data/lib/logstash/inputs/cloudwatch.rb +338 -0
- data/lib/logstash/inputs/s3.rb +466 -0
- data/lib/logstash/inputs/sqs.rb +196 -0
- data/lib/logstash/outputs/cloudwatch.rb +346 -0
- data/lib/logstash/outputs/s3/file_repository.rb +121 -0
- data/lib/logstash/outputs/s3/path_validator.rb +18 -0
- data/lib/logstash/outputs/s3/size_and_time_rotation_policy.rb +24 -0
- data/lib/logstash/outputs/s3/size_rotation_policy.rb +26 -0
- data/lib/logstash/outputs/s3/temporary_file.rb +71 -0
- data/lib/logstash/outputs/s3/temporary_file_factory.rb +129 -0
- data/lib/logstash/outputs/s3/time_rotation_policy.rb +26 -0
- data/lib/logstash/outputs/s3/uploader.rb +74 -0
- data/lib/logstash/outputs/s3/writable_directory_validator.rb +17 -0
- data/lib/logstash/outputs/s3/write_bucket_permission_validator.rb +60 -0
- data/lib/logstash/outputs/s3.rb +405 -0
- data/lib/logstash/outputs/sns.rb +133 -0
- data/lib/logstash/outputs/sqs.rb +167 -0
- data/lib/logstash/plugin_mixins/aws_config/generic.rb +54 -0
- data/lib/logstash/plugin_mixins/aws_config/v2.rb +93 -0
- data/lib/logstash/plugin_mixins/aws_config.rb +8 -0
- data/logstash-integration-aws.gemspec +52 -0
- data/spec/codecs/cloudfront_spec.rb +92 -0
- data/spec/codecs/cloudtrail_spec.rb +56 -0
- data/spec/fixtures/aws_credentials_file_sample_test.yml +2 -0
- data/spec/fixtures/aws_temporary_credentials_file_sample_test.yml +3 -0
- data/spec/fixtures/cloudfront.log +4 -0
- data/spec/fixtures/compressed.log.gee.zip +0 -0
- data/spec/fixtures/compressed.log.gz +0 -0
- data/spec/fixtures/compressed.log.gzip +0 -0
- data/spec/fixtures/invalid_utf8.gbk.log +2 -0
- data/spec/fixtures/json.log +2 -0
- data/spec/fixtures/json_with_message.log +2 -0
- data/spec/fixtures/multiline.log +6 -0
- data/spec/fixtures/multiple_compressed_streams.gz +0 -0
- data/spec/fixtures/uncompressed.log +2 -0
- data/spec/inputs/cloudwatch_spec.rb +85 -0
- data/spec/inputs/s3_spec.rb +610 -0
- data/spec/inputs/sincedb_spec.rb +17 -0
- data/spec/inputs/sqs_spec.rb +324 -0
- data/spec/integration/cloudwatch_spec.rb +25 -0
- data/spec/integration/dynamic_prefix_spec.rb +92 -0
- data/spec/integration/gzip_file_spec.rb +62 -0
- data/spec/integration/gzip_size_rotation_spec.rb +63 -0
- data/spec/integration/outputs/sqs_spec.rb +98 -0
- data/spec/integration/restore_from_crash_spec.rb +67 -0
- data/spec/integration/s3_spec.rb +66 -0
- data/spec/integration/size_rotation_spec.rb +59 -0
- data/spec/integration/sqs_spec.rb +110 -0
- data/spec/integration/stress_test_spec.rb +60 -0
- data/spec/integration/time_based_rotation_with_constant_write_spec.rb +60 -0
- data/spec/integration/time_based_rotation_with_stale_write_spec.rb +64 -0
- data/spec/integration/upload_current_file_on_shutdown_spec.rb +51 -0
- data/spec/outputs/cloudwatch_spec.rb +38 -0
- data/spec/outputs/s3/file_repository_spec.rb +143 -0
- data/spec/outputs/s3/size_and_time_rotation_policy_spec.rb +77 -0
- data/spec/outputs/s3/size_rotation_policy_spec.rb +41 -0
- data/spec/outputs/s3/temporary_file_factory_spec.rb +89 -0
- data/spec/outputs/s3/temporary_file_spec.rb +47 -0
- data/spec/outputs/s3/time_rotation_policy_spec.rb +60 -0
- data/spec/outputs/s3/uploader_spec.rb +69 -0
- data/spec/outputs/s3/writable_directory_validator_spec.rb +40 -0
- data/spec/outputs/s3/write_bucket_permission_validator_spec.rb +49 -0
- data/spec/outputs/s3_spec.rb +232 -0
- data/spec/outputs/sns_spec.rb +160 -0
- data/spec/plugin_mixin/aws_config_spec.rb +217 -0
- data/spec/spec_helper.rb +8 -0
- data/spec/support/helpers.rb +119 -0
- data/spec/unit/outputs/sqs_spec.rb +247 -0
- metadata +467 -0
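
The listing shows the integration's shape: nine plugins (two codecs, three inputs, four outputs) sharing a single AWS configuration mixin under `lib/logstash/plugin_mixins/aws_config`, with the S3 output further split into rotation policies, validators, a temporary-file layer, and an uploader. As a rough sketch of how a plugin consumes the shared mixin (following the conventions of the standalone `logstash-mixin-aws` gem; the plugin class below is hypothetical and not part of this integration):

```ruby
# Hypothetical sketch: a plugin consuming the shared AWS config mixin (v2).
# Names follow the logstash-mixin-aws convention; treat as illustrative.
require "logstash/outputs/base"
require "logstash/plugin_mixins/aws_config"

class LogStash::Outputs::ExampleAws < LogStash::Outputs::Base
  include LogStash::PluginMixins::AwsConfig::V2

  config_name "example_aws"

  def register
    # aws_options_hash is supplied by the mixin; it merges the region,
    # credentials (static keys, credentials file, or assumed role) and
    # proxy settings into one hash usable by any AWS SDK v2 client.
    @sqs = Aws::SQS::Client.new(aws_options_hash)
  end
end
```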
data/spec/support/helpers.rb (new file, @@ -0,0 +1,119 @@):

```ruby
# Support helpers shared by the unit and integration specs.

def fetch_events(settings)
  queue = []
  s3 = LogStash::Inputs::S3.new(settings)
  s3.register
  s3.process_files(queue)
  queue
end

# Upload a local fixture file to the integration-test bucket under the given key.
def upload_file(local_file, remote_name)
  bucket = s3object.bucket(ENV['AWS_LOGSTASH_TEST_BUCKET'])
  file = File.expand_path(File.join(File.dirname(__FILE__), local_file))
  bucket.object(remote_name).upload_file(file)
end

def delete_remote_files(prefix)
  bucket = s3object.bucket(ENV['AWS_LOGSTASH_TEST_BUCKET'])
  bucket.objects(:prefix => prefix).each { |object| object.delete }
end

def list_remote_files(prefix, target_bucket = ENV['AWS_LOGSTASH_TEST_BUCKET'])
  bucket = s3object.bucket(target_bucket)
  bucket.objects(:prefix => prefix).collect(&:key)
end

def create_bucket(name)
  s3object.bucket(name).create
end

def delete_bucket(name)
  s3object.bucket(name).objects.map(&:delete)
  s3object.bucket(name).delete
end

def s3object
  Aws::S3::Resource.new
end

# Fake S3 listing that yields the same object forever; used to exercise code
# paths that iterate over an unbounded collection of objects.
class TestInfiniteS3Object
  def initialize(s3_obj)
    @s3_obj = s3_obj
  end

  def each
    counter = 1

    loop do
      yield @s3_obj
      counter += 1
    end
  end
end

def push_sqs_event(message)
  client = Aws::SQS::Client.new
  queue_url = client.get_queue_url(:queue_name => ENV["SQS_QUEUE_NAME"])

  client.send_message({
    queue_url: queue_url.queue_url,
    message_body: message,
  })
end

shared_context "setup plugin" do
  let(:temporary_directory) { Stud::Temporary.pathname }

  let(:bucket) { ENV["AWS_LOGSTASH_TEST_BUCKET"] }
  let(:access_key_id) { ENV["AWS_ACCESS_KEY_ID"] }
  let(:secret_access_key) { ENV["AWS_SECRET_ACCESS_KEY"] }
  let(:size_file) { 100 }
  let(:time_file) { 100 }
  let(:tags) { [] }
  let(:prefix) { "home" }
  let(:region) { ENV['AWS_REGION'] }

  let(:main_options) do
    {
      "bucket" => bucket,
      "prefix" => prefix,
      "temporary_directory" => temporary_directory,
      "access_key_id" => access_key_id,
      "secret_access_key" => secret_access_key,
      "size_file" => size_file,
      "time_file" => time_file,
      "region" => region,
      "tags" => []
    }
  end

  let(:client_credentials) { Aws::Credentials.new(access_key_id, secret_access_key) }
  let(:bucket_resource) { Aws::S3::Bucket.new(bucket, { :credentials => client_credentials, :region => region }) }

  # Specs that include this context are expected to define `options`,
  # typically by extending `main_options`.
  subject { LogStash::Outputs::S3.new(options) }
end

def clean_remote_files(prefix = "")
  bucket_resource.objects(:prefix => prefix).each do |object|
    object.delete
  end
end

# Retrieve all messages currently available on the specified queue.
#
# Instead of making callers drive `Aws::SQS::QueuePoller` themselves, this
# method polls with an idle timeout of zero, collects every message it can
# receive, and returns them once the queue stops yielding results.
def receive_all_messages(queue_url, options = {})
  options[:idle_timeout] ||= 0
  options[:max_number_of_messages] ||= 10

  messages = []
  poller = Aws::SQS::QueuePoller.new(queue_url, options)

  poller.poll do |received_messages|
    messages.concat(received_messages)
  end

  messages
end
```
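
Taken together, these helpers give the specs a small S3/SQS toolkit: upload a fixture, run the plugin, and inspect what arrives. A hypothetical spec built on them might look like the following (the prefix, fixture path, and expectations are illustrative, not taken from the gem):

```ruby
# Hypothetical integration spec using the helpers above; the prefix and
# expectations are illustrative only.
describe "reading a log file from S3", :integration => true do
  let(:settings) do
    { "bucket" => ENV["AWS_LOGSTASH_TEST_BUCKET"], "prefix" => "helpers-demo/" }
  end

  before { upload_file("../fixtures/uncompressed.log", "helpers-demo/uncompressed.log") }
  after  { delete_remote_files("helpers-demo/") }

  it "turns each line of the object into an event" do
    events = fetch_events(settings)
    expect(events).not_to be_empty
    expect(events.first.get("message")).to be_a(String)
  end
end
```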
data/spec/unit/outputs/sqs_spec.rb (new file, @@ -0,0 +1,247 @@):

```ruby
# encoding: utf-8

require_relative '../../spec_helper'
require 'logstash/errors'
require 'logstash/event'
require 'logstash/json'

describe LogStash::Outputs::SQS do
  let(:config) do
    {
      'queue' => queue_name,
      'region' => region,
    }
  end
  let(:queue_name) { 'my-sqs-queue' }
  let(:queue_url) { "https://sqs.#{region}.amazonaws.com/123456789012/#{queue_name}" }
  let(:region) { 'us-east-1' }

  let(:sqs) { Aws::SQS::Client.new(:stub_responses => true) }
  subject { described_class.new(config) }

  describe '#register' do
    context 'with a batch size that is too large' do
      let(:config) { super().merge('batch_events' => 100) }

      before do
        allow(Aws::SQS::Client).to receive(:new).and_return(sqs)
      end

      it 'raises a configuration error' do
        expect { subject.register }.to raise_error(LogStash::ConfigurationError)
      end
    end

    context 'with a batch size that is too small' do
      let(:config) { super().merge('batch_events' => 0) }

      before do
        allow(Aws::SQS::Client).to receive(:new).and_return(sqs)
      end

      it 'raises a configuration error' do
        expect { subject.register }.to raise_error(LogStash::ConfigurationError)
      end
    end

    context 'without a queue' do
      let(:config) { Hash.new }

      it 'raises a configuration error' do
        expect { subject.register }.to raise_error(LogStash::ConfigurationError)
      end
    end

    context 'with a nonexistent queue' do
      before do
        expect(Aws::SQS::Client).to receive(:new).and_return(sqs)
        expect(sqs).to receive(:get_queue_url).with(:queue_name => queue_name) do
          raise Aws::SQS::Errors::NonExistentQueue.new(nil, 'The specified queue does not exist for this wsdl version.')
        end
      end

      it 'raises a configuration error' do
        expect { subject.register }.to raise_error(LogStash::ConfigurationError)
      end
    end

    context 'with a valid queue' do
      before do
        expect(Aws::SQS::Client).to receive(:new).and_return(sqs)
        expect(sqs).to receive(:get_queue_url).with(:queue_name => queue_name).and_return(:queue_url => queue_url)
      end

      it 'does not raise an error' do
        expect { subject.register }.not_to raise_error
      end
    end
  end

  describe '#multi_receive_encoded' do
    before do
      expect(Aws::SQS::Client).to receive(:new).and_return(sqs)
      expect(sqs).to receive(:get_queue_url).with(:queue_name => queue_name).and_return(:queue_url => queue_url)
      subject.register
    end

    after do
      subject.close
    end

    let(:sample_count) { 10 }
    let(:sample_event) { LogStash::Event.new('message' => 'This is a message') }
    let(:sample_event_encoded) { LogStash::Json.dump(sample_event) }
    let(:sample_events) do
      sample_count.times.map do
        [sample_event, sample_event_encoded]
      end
    end

    context 'with batching disabled' do
      let(:config) do
        super().merge({
          'batch_events' => 1,
        })
      end

      it 'should call send_message' do
        expect(sqs).to receive(:send_message).with(:queue_url => queue_url, :message_body => sample_event_encoded).exactly(sample_count).times
        subject.multi_receive_encoded(sample_events)
      end

      it 'should not call send_message_batch' do
        expect(sqs).not_to receive(:send_message_batch)
        subject.multi_receive_encoded(sample_events)
      end
    end

    context 'with batching enabled' do
      let(:batch_events) { 3 }
      let(:config) do
        super().merge({
          'batch_events' => batch_events,
        })
      end

      let(:sample_batches) do
        sample_events.each_slice(batch_events).each_with_index.map do |sample_batch, batch_index|
          sample_batch.each_with_index.map do |encoded_event, index|
            event, encoded = encoded_event
            {
              :id => (batch_index * batch_events + index).to_s,
              :message_body => encoded,
            }
          end
        end
      end

      it 'should call send_message_batch' do
        expect(sqs).to receive(:send_message_batch).at_least(:once)
        subject.multi_receive_encoded(sample_events)
      end

      it 'should batch events' do
        sample_batches.each do |batch_entries|
          expect(sqs).to receive(:send_message_batch).with(:queue_url => queue_url, :entries => batch_entries)
        end

        subject.multi_receive_encoded(sample_events)
      end
    end

    context 'with empty payload' do
      let(:sample_count) { 0 }

      it 'does not raise an error' do
        expect { subject.multi_receive_encoded(sample_events) }.not_to raise_error
      end

      it 'should not send any messages' do
        expect(sqs).not_to receive(:send_message)
        expect(sqs).not_to receive(:send_message_batch)
        subject.multi_receive_encoded(sample_events)
      end
    end

    context 'with event exceeding maximum size' do
      let(:config) { super().merge('message_max_size' => message_max_size) }
      let(:message_max_size) { 1024 }

      let(:sample_count) { 1 }
      let(:sample_event) { LogStash::Event.new('message' => '.' * message_max_size) }

      it 'should drop event' do
        expect(sqs).not_to receive(:send_message)
        expect(sqs).not_to receive(:send_message_batch)
        subject.multi_receive_encoded(sample_events)
      end
    end

    context 'with large batch' do
      let(:batch_events) { 4 }
      let(:config) do
        super().merge({
          'batch_events' => batch_events,
          'message_max_size' => message_max_size,
        })
      end
      let(:message_max_size) { 1024 }

      let(:sample_events) do
        # This is the overhead associated with transmitting each message:
        # metadata (such as the `message` field name and the `@timestamp`
        # field) plus the additional characters introduced by JSON-encoding
        # the event.
        overhead = 90

        events = [
          LogStash::Event.new('message' => 'a' * (0.6 * message_max_size - overhead)),
          LogStash::Event.new('message' => 'b' * (0.5 * message_max_size - overhead)),
          LogStash::Event.new('message' => 'c' * (0.5 * message_max_size - overhead)),
          LogStash::Event.new('message' => 'd' * (0.4 * message_max_size - overhead)),
        ]

        events.map do |event|
          [event, LogStash::Json.dump(event)]
        end
      end

      let(:sample_batches) do
        [
          [
            {
              :id => '0',
              :message_body => sample_events[0][1],
            },
          ],
          [
            {
              :id => '1',
              :message_body => sample_events[1][1],
            },
            {
              :id => '2',
              :message_body => sample_events[2][1],
            },
          ],
          [
            {
              :id => '3',
              :message_body => sample_events[3][1],
            },
          ],
        ]
      end

      it 'should split events into smaller batches' do
        sample_batches.each do |entries|
          expect(sqs).to receive(:send_message_batch).with(:queue_url => queue_url, :entries => entries)
        end

        subject.multi_receive_encoded(sample_events)
      end
    end
  end
end
```
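
The 'with large batch' context pins down the behaviour the output must implement: a batch is closed both when it reaches `batch_events` entries and when adding the next message would push the accumulated payload past `message_max_size`. A minimal sketch of that splitting rule, reconstructed from the expectations above (an assumption about the implementation, not the plugin's actual code):

```ruby
# Hypothetical reconstruction of the batch-splitting rule exercised by the
# 'with large batch' context: close the current batch when it is full or
# when the next message would overflow the size budget.
def split_into_batches(encoded_events, batch_events, message_max_size)
  batches = [[]]
  bytes = 0

  encoded_events.each_with_index do |encoded, index|
    if batches.last.size >= batch_events || bytes + encoded.bytesize > message_max_size
      batches << []
      bytes = 0
    end
    batches.last << { :id => index.to_s, :message_body => encoded }
    bytes += encoded.bytesize
  end

  batches.reject(&:empty?)
end
```

Tracing the spec's four events (roughly 0.6, 0.5, 0.5, and 0.4 of the 1024-byte budget once encoded) through this rule yields exactly the three expected batches: `['0']`, `['1', '2']`, and `['3']`.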