logstash-integration-aws 7.1.1-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.PRE.MERGE.md +658 -0
  3. data/CHANGELOG.md +33 -0
  4. data/CONTRIBUTORS +40 -0
  5. data/Gemfile +11 -0
  6. data/LICENSE +202 -0
  7. data/NOTICE.TXT +5 -0
  8. data/README.md +205 -0
  9. data/VERSION +1 -0
  10. data/docs/codec-cloudfront.asciidoc +53 -0
  11. data/docs/codec-cloudtrail.asciidoc +45 -0
  12. data/docs/index.asciidoc +36 -0
  13. data/docs/input-cloudwatch.asciidoc +320 -0
  14. data/docs/input-s3.asciidoc +346 -0
  15. data/docs/input-sqs.asciidoc +287 -0
  16. data/docs/output-cloudwatch.asciidoc +321 -0
  17. data/docs/output-s3.asciidoc +442 -0
  18. data/docs/output-sns.asciidoc +166 -0
  19. data/docs/output-sqs.asciidoc +242 -0
  20. data/lib/logstash/codecs/cloudfront.rb +84 -0
  21. data/lib/logstash/codecs/cloudtrail.rb +47 -0
  22. data/lib/logstash/inputs/cloudwatch.rb +338 -0
  23. data/lib/logstash/inputs/s3.rb +466 -0
  24. data/lib/logstash/inputs/sqs.rb +196 -0
  25. data/lib/logstash/outputs/cloudwatch.rb +346 -0
  26. data/lib/logstash/outputs/s3/file_repository.rb +193 -0
  27. data/lib/logstash/outputs/s3/path_validator.rb +18 -0
  28. data/lib/logstash/outputs/s3/size_and_time_rotation_policy.rb +24 -0
  29. data/lib/logstash/outputs/s3/size_rotation_policy.rb +26 -0
  30. data/lib/logstash/outputs/s3/temporary_file.rb +114 -0
  31. data/lib/logstash/outputs/s3/temporary_file_factory.rb +126 -0
  32. data/lib/logstash/outputs/s3/time_rotation_policy.rb +26 -0
  33. data/lib/logstash/outputs/s3/uploader.rb +76 -0
  34. data/lib/logstash/outputs/s3/writable_directory_validator.rb +17 -0
  35. data/lib/logstash/outputs/s3/write_bucket_permission_validator.rb +60 -0
  36. data/lib/logstash/outputs/s3.rb +442 -0
  37. data/lib/logstash/outputs/sns.rb +133 -0
  38. data/lib/logstash/outputs/sqs.rb +167 -0
  39. data/lib/logstash/plugin_mixins/aws_config/generic.rb +54 -0
  40. data/lib/logstash/plugin_mixins/aws_config/v2.rb +93 -0
  41. data/lib/logstash/plugin_mixins/aws_config.rb +8 -0
  42. data/lib/logstash-integration-aws_jars.rb +4 -0
  43. data/lib/tasks/build.rake +15 -0
  44. data/logstash-integration-aws.gemspec +55 -0
  45. data/spec/codecs/cloudfront_spec.rb +92 -0
  46. data/spec/codecs/cloudtrail_spec.rb +56 -0
  47. data/spec/fixtures/aws_credentials_file_sample_test.yml +2 -0
  48. data/spec/fixtures/aws_temporary_credentials_file_sample_test.yml +3 -0
  49. data/spec/fixtures/cloudfront.log +4 -0
  50. data/spec/fixtures/compressed.log.gee.zip +0 -0
  51. data/spec/fixtures/compressed.log.gz +0 -0
  52. data/spec/fixtures/compressed.log.gzip +0 -0
  53. data/spec/fixtures/invalid_utf8.gbk.log +2 -0
  54. data/spec/fixtures/json.log +2 -0
  55. data/spec/fixtures/json_with_message.log +2 -0
  56. data/spec/fixtures/multiline.log +6 -0
  57. data/spec/fixtures/multiple_compressed_streams.gz +0 -0
  58. data/spec/fixtures/uncompressed.log +2 -0
  59. data/spec/inputs/cloudwatch_spec.rb +85 -0
  60. data/spec/inputs/s3_spec.rb +610 -0
  61. data/spec/inputs/sincedb_spec.rb +17 -0
  62. data/spec/inputs/sqs_spec.rb +324 -0
  63. data/spec/integration/cloudwatch_spec.rb +25 -0
  64. data/spec/integration/dynamic_prefix_spec.rb +92 -0
  65. data/spec/integration/gzip_file_spec.rb +62 -0
  66. data/spec/integration/gzip_size_rotation_spec.rb +63 -0
  67. data/spec/integration/outputs/sqs_spec.rb +98 -0
  68. data/spec/integration/restore_from_crash_spec.rb +133 -0
  69. data/spec/integration/s3_spec.rb +66 -0
  70. data/spec/integration/size_rotation_spec.rb +59 -0
  71. data/spec/integration/sqs_spec.rb +110 -0
  72. data/spec/integration/stress_test_spec.rb +60 -0
  73. data/spec/integration/time_based_rotation_with_constant_write_spec.rb +60 -0
  74. data/spec/integration/time_based_rotation_with_stale_write_spec.rb +64 -0
  75. data/spec/integration/upload_current_file_on_shutdown_spec.rb +51 -0
  76. data/spec/outputs/cloudwatch_spec.rb +38 -0
  77. data/spec/outputs/s3/file_repository_spec.rb +143 -0
  78. data/spec/outputs/s3/size_and_time_rotation_policy_spec.rb +77 -0
  79. data/spec/outputs/s3/size_rotation_policy_spec.rb +41 -0
  80. data/spec/outputs/s3/temporary_file_factory_spec.rb +89 -0
  81. data/spec/outputs/s3/temporary_file_spec.rb +47 -0
  82. data/spec/outputs/s3/time_rotation_policy_spec.rb +60 -0
  83. data/spec/outputs/s3/uploader_spec.rb +69 -0
  84. data/spec/outputs/s3/writable_directory_validator_spec.rb +40 -0
  85. data/spec/outputs/s3/write_bucket_permission_validator_spec.rb +49 -0
  86. data/spec/outputs/s3_spec.rb +232 -0
  87. data/spec/outputs/sns_spec.rb +160 -0
  88. data/spec/plugin_mixin/aws_config_spec.rb +217 -0
  89. data/spec/spec_helper.rb +8 -0
  90. data/spec/support/helpers.rb +121 -0
  91. data/spec/unit/outputs/sqs_spec.rb +247 -0
  92. data/vendor/jar-dependencies/org/logstash/plugins/integration/aws/logstash-integration-aws/7.1.1/logstash-integration-aws-7.1.1.jar +0 -0
  93. metadata +472 -0
data/spec/support/helpers.rb
@@ -0,0 +1,121 @@
+ def fetch_events(settings)
+   queue = []
+   s3 = LogStash::Inputs::S3.new(settings)
+   s3.register
+   s3.process_files(queue)
+   queue
+ end
+
+ # delete_files(prefix)
+ def upload_file(local_file, remote_name)
+   bucket = s3object.bucket(ENV['AWS_LOGSTASH_TEST_BUCKET'])
+   file = File.expand_path(File.join(File.dirname(__FILE__), local_file))
+   bucket.object(remote_name).upload_file(file)
+ end
+
+ def delete_remote_files(prefix)
+   bucket = s3object.bucket(ENV['AWS_LOGSTASH_TEST_BUCKET'])
+   bucket.objects(:prefix => prefix).each { |object| object.delete }
+ end
+
+ def list_remote_files(prefix, target_bucket = ENV['AWS_LOGSTASH_TEST_BUCKET'])
+   bucket = s3object.bucket(target_bucket)
+   bucket.objects(:prefix => prefix).collect(&:key)
+ end
+
+ def create_bucket(name)
+   s3object.bucket(name).create
+ end
+
+ def delete_bucket(name)
+   s3object.bucket(name).objects.map(&:delete)
+   s3object.bucket(name).delete
+ end
+
+ def s3object
+   Aws::S3::Resource.new
+ end
+
+ class TestInfiniteS3Object
+   def initialize(s3_obj)
+     @s3_obj = s3_obj
+   end
+
+   def each
+     counter = 1
+
+     loop do
+       yield @s3_obj
+       counter += 1
+     end
+   end
+ end
+
+ def push_sqs_event(message)
+   client = Aws::SQS::Client.new
+   queue_url = client.get_queue_url(:queue_name => ENV["SQS_QUEUE_NAME"])
+
+   client.send_message({
+     queue_url: queue_url.queue_url,
+     message_body: message,
+   })
+ end
+
+ shared_context "setup plugin" do
+   let(:temporary_directory) { Stud::Temporary.pathname }
+
+   let(:bucket) { ENV["AWS_LOGSTASH_TEST_BUCKET"] }
+   let(:access_key_id) { ENV["AWS_ACCESS_KEY_ID"] }
+   let(:secret_access_key) { ENV["AWS_SECRET_ACCESS_KEY"] }
+   let(:session_token) { ENV["AWS_SESSION_TOKEN"] }
+   let(:size_file) { 100 }
+   let(:time_file) { 100 }
+   let(:tags) { [] }
+   let(:prefix) { "home" }
+   let(:region) { ENV['AWS_REGION'] }
+
+   let(:main_options) do
+     {
+       "bucket" => bucket,
+       "prefix" => prefix,
+       "temporary_directory" => temporary_directory,
+       "access_key_id" => access_key_id,
+       "secret_access_key" => secret_access_key,
+       "session_token" => session_token,
+       "size_file" => size_file,
+       "time_file" => time_file,
+       "region" => region,
+       "tags" => []
+     }
+   end
+
+   let(:client_credentials) { Aws::Credentials.new(access_key_id, secret_access_key, session_token) }
+   let(:bucket_resource) { Aws::S3::Bucket.new(bucket, { :credentials => client_credentials, :region => region }) }
+
+   subject { LogStash::Outputs::S3.new(options) }
+ end
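
Note: the shared context's `subject` reads an `options` hash that the context itself never defines, so each spec including it must supply one, typically via RSpec's `include_context`. A minimal sketch of an including spec (the describe label and the reuse of `main_options` are illustrative assumptions, not taken from this package):

    describe "S3 output integration" do
      include_context "setup plugin"

      # Each example supplies the `options` hash consumed by the shared `subject`.
      let(:options) { main_options }
    end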
+
+ def clean_remote_files(prefix = "")
+   bucket_resource.objects(:prefix => prefix).each do |object|
+     object.delete
+   end
+ end
+
+ # Retrieve all available messages from the specified queue.
+ #
+ # Rather than requiring callers to drive `Aws::SQS::QueuePoller` themselves,
+ # this method polls the specified SQS queue until no more messages are
+ # received and returns everything collected along the way.
+ def receive_all_messages(queue_url, options = {})
+   options[:idle_timeout] ||= 0
+   options[:max_number_of_messages] ||= 10
+
+   messages = []
+   poller = Aws::SQS::QueuePoller.new(queue_url, options)
+
+   poller.poll do |received_messages|
+     messages.concat(received_messages)
+   end
+
+   messages
+ end
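
Note: a rough usage sketch for the two SQS helpers above (the payload is illustrative; AWS credentials and SQS_QUEUE_NAME are assumed to be set in the environment):

    push_sqs_event('{"message": "hello"}')

    url = Aws::SQS::Client.new.get_queue_url(:queue_name => ENV['SQS_QUEUE_NAME']).queue_url
    # With :idle_timeout => 0 the poller stops as soon as the queue goes quiet,
    # so this drains the queue rather than blocking forever.
    bodies = receive_all_messages(url).map(&:body)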
data/spec/unit/outputs/sqs_spec.rb
@@ -0,0 +1,247 @@
+ # encoding: utf-8
+
+ require_relative '../../spec_helper'
+ require 'logstash/errors'
+ require 'logstash/event'
+ require 'logstash/json'
+
+ describe LogStash::Outputs::SQS do
+   let(:config) do
+     {
+       'queue' => queue_name,
+       'region' => region,
+     }
+   end
+   let(:queue_name) { 'my-sqs-queue' }
+   let(:queue_url) { "https://sqs.#{region}.amazonaws.com/123456789012/#{queue_name}" }
+   let(:region) { 'us-east-1' }
+
+   let(:sqs) { Aws::SQS::Client.new(:stub_responses => true) }
+   subject { described_class.new(config) }
+
+   describe '#register' do
+     context 'with a batch size that is too large' do
+       let(:config) { super().merge('batch_events' => 100) }
+
+       before do
+         allow(Aws::SQS::Client).to receive(:new).and_return(sqs)
+       end
+
+       it 'raises a configuration error' do
+         expect { subject.register }.to raise_error(LogStash::ConfigurationError)
+       end
+     end
+
+     context 'with a batch size that is too small' do
+       let(:config) { super().merge('batch_events' => 0) }
+
+       before do
+         allow(Aws::SQS::Client).to receive(:new).and_return(sqs)
+       end
+
+       it 'raises a configuration error' do
+         expect { subject.register }.to raise_error(LogStash::ConfigurationError)
+       end
+     end
+
+     context 'without a queue' do
+       let(:config) { Hash.new }
+
+       it 'raises a configuration error' do
+         expect { subject.register }.to raise_error(LogStash::ConfigurationError)
+       end
+     end
+
+     context 'with a nonexistent queue' do
+       before do
+         expect(Aws::SQS::Client).to receive(:new).and_return(sqs)
+         expect(sqs).to receive(:get_queue_url).with(:queue_name => queue_name) do
+           raise Aws::SQS::Errors::NonExistentQueue.new(nil, 'The specified queue does not exist for this wsdl version.')
+         end
+       end
+
+       it 'raises a configuration error' do
+         expect { subject.register }.to raise_error(LogStash::ConfigurationError)
+       end
+     end
+
+     context 'with a valid queue' do
+       before do
+         expect(Aws::SQS::Client).to receive(:new).and_return(sqs)
+         expect(sqs).to receive(:get_queue_url).with(:queue_name => queue_name).and_return(:queue_url => queue_url)
+       end
+
+       it 'does not raise an error' do
+         expect { subject.register }.not_to raise_error
+       end
+     end
+   end
+
+   describe '#multi_receive_encoded' do
+     before do
+       expect(Aws::SQS::Client).to receive(:new).and_return(sqs)
+       expect(sqs).to receive(:get_queue_url).with(:queue_name => queue_name).and_return(:queue_url => queue_url)
+       subject.register
+     end
+
+     after do
+       subject.close
+     end
+
+     let(:sample_count) { 10 }
+     let(:sample_event) { LogStash::Event.new('message' => 'This is a message') }
+     let(:sample_event_encoded) { LogStash::Json.dump(sample_event) }
+     let(:sample_events) do
+       sample_count.times.map do
+         [sample_event, sample_event_encoded]
+       end
+     end
+
+     context 'with batching disabled' do
+       let(:config) do
+         super().merge({
+           'batch_events' => 1,
+         })
+       end
+
+       it 'should call send_message' do
+         expect(sqs).to receive(:send_message).with(:queue_url => queue_url, :message_body => sample_event_encoded).exactly(sample_count).times
+         subject.multi_receive_encoded(sample_events)
+       end
+
+       it 'should not call send_message_batch' do
+         expect(sqs).not_to receive(:send_message_batch)
+         subject.multi_receive_encoded(sample_events)
+       end
+     end
+
+     context 'with batching enabled' do
+       let(:batch_events) { 3 }
+       let(:config) do
+         super().merge({
+           'batch_events' => batch_events,
+         })
+       end
+
+       let(:sample_batches) do
+         sample_events.each_slice(batch_events).each_with_index.map do |sample_batch, batch_index|
+           sample_batch.each_with_index.map do |encoded_event, index|
+             event, encoded = encoded_event
+             {
+               :id => (batch_index * batch_events + index).to_s,
+               :message_body => encoded,
+             }
+           end
+         end
+       end
+
+       it 'should call send_message_batch' do
+         expect(sqs).to receive(:send_message_batch).at_least(:once)
+         subject.multi_receive_encoded(sample_events)
+       end
+
+       it 'should batch events' do
+         sample_batches.each do |batch_entries|
+           expect(sqs).to receive(:send_message_batch).with(:queue_url => queue_url, :entries => batch_entries)
+         end
+
+         subject.multi_receive_encoded(sample_events)
+       end
+     end
+
+     context 'with empty payload' do
+       let(:sample_count) { 0 }
+
+       it 'does not raise an error' do
+         expect { subject.multi_receive_encoded(sample_events) }.not_to raise_error
+       end
+
+       it 'should not send any messages' do
+         expect(sqs).not_to receive(:send_message)
+         expect(sqs).not_to receive(:send_message_batch)
+         subject.multi_receive_encoded(sample_events)
+       end
+     end
+
+     context 'with event exceeding maximum size' do
+       let(:config) { super().merge('message_max_size' => message_max_size) }
+       let(:message_max_size) { 1024 }
+
+       let(:sample_count) { 1 }
+       let(:sample_event) { LogStash::Event.new('message' => '.' * message_max_size) }
+
+       it 'should drop event' do
+         expect(sqs).not_to receive(:send_message)
+         expect(sqs).not_to receive(:send_message_batch)
+         subject.multi_receive_encoded(sample_events)
+       end
+     end
+
+     context 'with large batch' do
+       let(:batch_events) { 4 }
+       let(:config) do
+         super().merge({
+           'batch_events' => batch_events,
+           'message_max_size' => message_max_size,
+         })
+       end
+       let(:message_max_size) { 1024 }
+
+       let(:sample_events) do
+         # This is the overhead associated with transmitting each message. The
+         # overhead is caused by metadata (such as the `message` field name and
+         # the `@timestamp` field) as well as additional characters as a result
+         # of encoding the event.
+         overhead = 90
+
+         events = [
+           LogStash::Event.new('message' => 'a' * (0.6 * message_max_size - overhead)),
+           LogStash::Event.new('message' => 'b' * (0.5 * message_max_size - overhead)),
+           LogStash::Event.new('message' => 'c' * (0.5 * message_max_size - overhead)),
+           LogStash::Event.new('message' => 'd' * (0.4 * message_max_size - overhead)),
+         ]
+
+         events.map do |event|
+           [event, LogStash::Json.dump(event)]
+         end
+       end
+
+       let(:sample_batches) do
+         [
+           [
+             {
+               :id => '0',
+               :message_body => sample_events[0][1],
+             },
+           ],
+           [
+             {
+               :id => '1',
+               :message_body => sample_events[1][1],
+             },
+             {
+               :id => '2',
+               :message_body => sample_events[2][1],
+             },
+           ],
+           [
+             {
+               :id => '3',
+               :message_body => sample_events[3][1],
+             },
+           ],
+         ]
+       end
+
+       it 'should split events into smaller batches' do
+         sample_batches.each do |entries|
+           expect(sqs).to receive(:send_message_batch).with(:queue_url => queue_url, :entries => entries)
+         end
+
+         subject.multi_receive_encoded(sample_events)
+       end
+     end
+   end
+ end
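
Note on the 'with large batch' expectations: with message_max_size = 1024 and overhead = 90, the message bodies come out at 524, 422, 422, and 319 characters (the Float lengths are truncated to integers), so the encoded events weigh roughly 614, 512, 512, and 409 bytes. 614 + 512 would overflow the 1024-byte cap, so event 0 ships alone; the two ~512-byte events still fit together; adding ~409 more would overflow again, so event 3 opens a third batch, which yields exactly the three sample_batches asserted above. A sketch of a splitter consistent with these expectations (illustrative only, not the plugin's actual internals; per the 'with event exceeding maximum size' context, single oversized events are dropped before batching):

    def split_into_batches(encoded_events, batch_events, message_max_size)
      batches = [[]]
      bytes = 0

      encoded_events.each_with_index do |(_event, encoded), index|
        # Open a new batch when the per-batch count cap is reached, or when
        # adding this message would push a non-empty batch past the size cap.
        if batches.last.size >= batch_events ||
           (batches.last.any? && bytes + encoded.bytesize > message_max_size)
          batches << []
          bytes = 0
        end

        batches.last << { :id => index.to_s, :message_body => encoded }
        bytes += encoded.bytesize
      end

      batches
    end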