logstash-integration-aws 0.1.0.pre

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.PRE.MERGE.md +658 -0
  3. data/CHANGELOG.md +15 -0
  4. data/CONTRIBUTORS +40 -0
  5. data/Gemfile +11 -0
  6. data/LICENSE +202 -0
  7. data/NOTICE.TXT +5 -0
  8. data/README.md +205 -0
  9. data/docs/codec-cloudfront.asciidoc +53 -0
  10. data/docs/codec-cloudtrail.asciidoc +45 -0
  11. data/docs/index.asciidoc +38 -0
  12. data/docs/input-cloudwatch.asciidoc +320 -0
  13. data/docs/input-s3.asciidoc +346 -0
  14. data/docs/input-sqs.asciidoc +287 -0
  15. data/docs/output-cloudwatch.asciidoc +321 -0
  16. data/docs/output-s3.asciidoc +442 -0
  17. data/docs/output-sns.asciidoc +166 -0
  18. data/docs/output-sqs.asciidoc +242 -0
  19. data/lib/logstash/codecs/cloudfront.rb +84 -0
  20. data/lib/logstash/codecs/cloudtrail.rb +47 -0
  21. data/lib/logstash/inputs/cloudwatch.rb +338 -0
  22. data/lib/logstash/inputs/s3.rb +466 -0
  23. data/lib/logstash/inputs/sqs.rb +196 -0
  24. data/lib/logstash/outputs/cloudwatch.rb +346 -0
  25. data/lib/logstash/outputs/s3/file_repository.rb +121 -0
  26. data/lib/logstash/outputs/s3/path_validator.rb +18 -0
  27. data/lib/logstash/outputs/s3/size_and_time_rotation_policy.rb +24 -0
  28. data/lib/logstash/outputs/s3/size_rotation_policy.rb +26 -0
  29. data/lib/logstash/outputs/s3/temporary_file.rb +71 -0
  30. data/lib/logstash/outputs/s3/temporary_file_factory.rb +129 -0
  31. data/lib/logstash/outputs/s3/time_rotation_policy.rb +26 -0
  32. data/lib/logstash/outputs/s3/uploader.rb +74 -0
  33. data/lib/logstash/outputs/s3/writable_directory_validator.rb +17 -0
  34. data/lib/logstash/outputs/s3/write_bucket_permission_validator.rb +60 -0
  35. data/lib/logstash/outputs/s3.rb +405 -0
  36. data/lib/logstash/outputs/sns.rb +133 -0
  37. data/lib/logstash/outputs/sqs.rb +167 -0
  38. data/lib/logstash/plugin_mixins/aws_config/generic.rb +54 -0
  39. data/lib/logstash/plugin_mixins/aws_config/v2.rb +93 -0
  40. data/lib/logstash/plugin_mixins/aws_config.rb +8 -0
  41. data/logstash-integration-aws.gemspec +52 -0
  42. data/spec/codecs/cloudfront_spec.rb +92 -0
  43. data/spec/codecs/cloudtrail_spec.rb +56 -0
  44. data/spec/fixtures/aws_credentials_file_sample_test.yml +2 -0
  45. data/spec/fixtures/aws_temporary_credentials_file_sample_test.yml +3 -0
  46. data/spec/fixtures/cloudfront.log +4 -0
  47. data/spec/fixtures/compressed.log.gee.zip +0 -0
  48. data/spec/fixtures/compressed.log.gz +0 -0
  49. data/spec/fixtures/compressed.log.gzip +0 -0
  50. data/spec/fixtures/invalid_utf8.gbk.log +2 -0
  51. data/spec/fixtures/json.log +2 -0
  52. data/spec/fixtures/json_with_message.log +2 -0
  53. data/spec/fixtures/multiline.log +6 -0
  54. data/spec/fixtures/multiple_compressed_streams.gz +0 -0
  55. data/spec/fixtures/uncompressed.log +2 -0
  56. data/spec/inputs/cloudwatch_spec.rb +85 -0
  57. data/spec/inputs/s3_spec.rb +610 -0
  58. data/spec/inputs/sincedb_spec.rb +17 -0
  59. data/spec/inputs/sqs_spec.rb +324 -0
  60. data/spec/integration/cloudwatch_spec.rb +25 -0
  61. data/spec/integration/dynamic_prefix_spec.rb +92 -0
  62. data/spec/integration/gzip_file_spec.rb +62 -0
  63. data/spec/integration/gzip_size_rotation_spec.rb +63 -0
  64. data/spec/integration/outputs/sqs_spec.rb +98 -0
  65. data/spec/integration/restore_from_crash_spec.rb +67 -0
  66. data/spec/integration/s3_spec.rb +66 -0
  67. data/spec/integration/size_rotation_spec.rb +59 -0
  68. data/spec/integration/sqs_spec.rb +110 -0
  69. data/spec/integration/stress_test_spec.rb +60 -0
  70. data/spec/integration/time_based_rotation_with_constant_write_spec.rb +60 -0
  71. data/spec/integration/time_based_rotation_with_stale_write_spec.rb +64 -0
  72. data/spec/integration/upload_current_file_on_shutdown_spec.rb +51 -0
  73. data/spec/outputs/cloudwatch_spec.rb +38 -0
  74. data/spec/outputs/s3/file_repository_spec.rb +143 -0
  75. data/spec/outputs/s3/size_and_time_rotation_policy_spec.rb +77 -0
  76. data/spec/outputs/s3/size_rotation_policy_spec.rb +41 -0
  77. data/spec/outputs/s3/temporary_file_factory_spec.rb +89 -0
  78. data/spec/outputs/s3/temporary_file_spec.rb +47 -0
  79. data/spec/outputs/s3/time_rotation_policy_spec.rb +60 -0
  80. data/spec/outputs/s3/uploader_spec.rb +69 -0
  81. data/spec/outputs/s3/writable_directory_validator_spec.rb +40 -0
  82. data/spec/outputs/s3/write_bucket_permission_validator_spec.rb +49 -0
  83. data/spec/outputs/s3_spec.rb +232 -0
  84. data/spec/outputs/sns_spec.rb +160 -0
  85. data/spec/plugin_mixin/aws_config_spec.rb +217 -0
  86. data/spec/spec_helper.rb +8 -0
  87. data/spec/support/helpers.rb +119 -0
  88. data/spec/unit/outputs/sqs_spec.rb +247 -0
  89. metadata +467 -0
data/spec/integration/restore_from_crash_spec.rb
@@ -0,0 +1,67 @@
+ # encoding: utf-8
+ require_relative "../spec_helper"
+ require "logstash/outputs/s3"
+ require "logstash/codecs/line"
+ require "stud/temporary"
+
+ describe "Restore from crash", :integration => true do
+   include_context "setup plugin"
+
+   let(:options) { main_options.merge({ "restore" => true, "canned_acl" => "public-read-write" }) }
+
+   let(:number_of_files) { 5 }
+   let(:dummy_content) { "foobar\n" * 100 }
+   let(:factory) { LogStash::Outputs::S3::TemporaryFileFactory.new(prefix, tags, "none", temporary_directory) }
+
+   before do
+     clean_remote_files(prefix)
+   end
+
+   context 'with a non-empty tempfile' do
+     before do
+       # Creating a factory always creates a file
+       factory.current.write(dummy_content)
+       factory.current.fsync
+
+       (number_of_files - 1).times do
+         factory.rotate!
+         factory.current.write(dummy_content)
+         factory.current.fsync
+       end
+     end
+
+     it "uploads the files to the bucket" do
+       subject.register
+       try(20) do
+         expect(bucket_resource.objects(:prefix => prefix).count).to eq(number_of_files)
+         expect(Dir.glob(File.join(temporary_directory, "*")).size).to eq(0)
+         expect(bucket_resource.objects(:prefix => prefix).first.acl.grants.collect(&:permission)).to include("READ", "WRITE")
+       end
+     end
+   end
+
+   context 'with an empty tempfile' do
+     before do
+       factory.current
+       factory.rotate!
+     end
+
+     it "removes the temporary file" do
+       expect(Dir.glob(::File.join(temporary_directory, "**", "*")).size).to be > 0
+       subject.register
+       expect(Dir.glob(::File.join(temporary_directory, "**", "*")).size).to eq(0)
+     end
+
+     it "does not upload the file to the bucket" do
+       expect(bucket_resource.objects(:prefix => prefix).count).to eq(0)
+       expect(Dir.glob(::File.join(temporary_directory, "**", "*")).size).to be > 0
+       subject.register
+
+       # Sleep long enough for the plugin to upload the empty temporary file, if it were ever going to
+       sleep 5
+       expect(bucket_resource.objects(:prefix => prefix).count).to eq(0)
+     end
+   end
+ end
+
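Note: the factory calls above follow a simple lifecycle, visible in the spec itself: a TemporaryFileFactory always owns a "current" temporary file, and rotate! retires it and opens a fresh one. A minimal sketch using the same constructor arguments as the spec (the prefix and directory values here are hypothetical, and this only runs inside a Logstash environment):

    # Sketch of the TemporaryFileFactory lifecycle exercised by the spec above.
    # Arguments mirror the spec's call: prefix, tags, encoding ("none"), directory.
    require "logstash/outputs/s3/temporary_file_factory"

    factory = LogStash::Outputs::S3::TemporaryFileFactory.new("crash-test/", [], "none", "/tmp/ls-s3-buffer")
    factory.current.write("foobar\n")  # write into the active temporary file
    factory.current.fsync              # flush to disk so a crash still leaves data behind
    factory.rotate!                    # retire the current file and open a new one

With restore => true, register is then expected to pick up whatever non-empty files survived in the temporary directory and upload them, which is what the first context asserts.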
data/spec/integration/s3_spec.rb
@@ -0,0 +1,66 @@
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/inputs/s3"
+ require "fileutils"
+ require_relative "../support/helpers"
+
+ describe LogStash::Inputs::S3, :integration => true, :s3 => true do
+   before do
+     Thread.abort_on_exception = true
+
+     upload_file('../fixtures/uncompressed.log', "#{prefix}uncompressed_1.log")
+     upload_file('../fixtures/compressed.log.gz', "#{prefix}compressed_1.log.gz")
+     sleep(LogStash::Inputs::S3::CUTOFF_SECOND + 1)
+   end
+
+   after do
+     delete_remote_files(prefix)
+     FileUtils.rm_rf(temporary_directory)
+     delete_remote_files(backup_prefix)
+   end
+
+   let(:temporary_directory) { Stud::Temporary.directory }
+   let(:prefix) { 'logstash-s3-input-prefix/' }
+
+   let(:minimal_settings) { { "access_key_id" => ENV['AWS_ACCESS_KEY_ID'],
+                              "secret_access_key" => ENV['AWS_SECRET_ACCESS_KEY'],
+                              "bucket" => ENV['AWS_LOGSTASH_TEST_BUCKET'],
+                              "region" => ENV["AWS_REGION"] || "us-east-1",
+                              "prefix" => prefix,
+                              "temporary_directory" => temporary_directory } }
+   let(:backup_prefix) { "backup/" }
+   let(:backup_bucket) { "logstash-s3-input-backup" }
+
+   it "supports a prefix to scope the remote files" do
+     events = fetch_events(minimal_settings)
+     expect(events.size).to eq(4)
+   end
+
+   it "adds a prefix to the backed-up files" do
+     fetch_events(minimal_settings.merge({ "backup_to_bucket" => ENV["AWS_LOGSTASH_TEST_BUCKET"],
+                                           "backup_add_prefix" => backup_prefix }))
+     expect(list_remote_files(backup_prefix).size).to eq(2)
+   end
+
+   it "allows you to back up to a local directory" do
+     Stud::Temporary.directory do |backup_dir|
+       fetch_events(minimal_settings.merge({ "backup_to_dir" => backup_dir }))
+       expect(Dir.glob(File.join(backup_dir, "*")).size).to eq(2)
+     end
+   end
+
+   context "remote backup" do
+     before do
+       create_bucket(backup_bucket)
+     end
+
+     it "backs up to another bucket" do
+       fetch_events(minimal_settings.merge({ "backup_to_bucket" => backup_bucket }))
+       expect(list_remote_files("", backup_bucket).size).to eq(2)
+     end
+
+     after do
+       delete_bucket(backup_bucket)
+     end
+   end
+ end
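This spec runs against live AWS: the sleep past LogStash::Inputs::S3::CUTOFF_SECOND in the before block appears to age the freshly uploaded fixtures past the input's freshness cutoff so they become eligible for processing. The environment it consumes is read directly by minimal_settings above (a summary, not new configuration):

    # Environment consumed by the spec above:
    ENV['AWS_ACCESS_KEY_ID']          # credentials used by minimal_settings
    ENV['AWS_SECRET_ACCESS_KEY']
    ENV['AWS_LOGSTASH_TEST_BUCKET']   # bucket holding the uploaded fixtures
    ENV['AWS_REGION']                 # falls back to "us-east-1"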
data/spec/integration/size_rotation_spec.rb
@@ -0,0 +1,59 @@
+ # encoding: utf-8
+ require_relative "../spec_helper"
+ require "logstash/outputs/s3"
+ require "logstash/codecs/line"
+ require "stud/temporary"
+
+ describe "Size rotation", :integration => true do
+   include_context "setup plugin"
+
+   let(:event_size) { "Hello world".bytesize }
+   let(:size_file) { batch_size * event_size * 2 }
+   let(:options) { main_options.merge({ "rotation_strategy" => "size", "size_file" => size_file }) }
+   let(:number_of_events) { 5000 }
+   let(:batch_size) { 125 }
+   let(:event_encoded) { "Hello world" }
+   let(:batch) do
+     b = {}
+     number_of_events.times do
+       event = LogStash::Event.new({ "message" => event_encoded })
+       b[event] = "#{event_encoded}\n"
+     end
+     b
+   end
+   let(:number_of_files) { number_of_events * event_size / size_file }
+
+   before do
+     clean_remote_files(prefix)
+     subject.register
+     batch.each_slice(batch_size) do |smaller_batch|
+       subject.multi_receive_encoded(smaller_batch)
+     end
+     subject.close
+   end
+
+   it "creates a specific quantity of files" do
+     expect(bucket_resource.objects(:prefix => prefix).count).to eq(number_of_files)
+   end
+
+   it "rotates the files based on size" do
+     bucket_resource.objects(:prefix => prefix).each do |f|
+       expect(f.size).to be_between(size_file, size_file * 2).inclusive
+     end
+   end
+
+   it "persists all events" do
+     download_directory = Stud::Temporary.pathname
+
+     FileUtils.rm_rf(download_directory)
+     FileUtils.mkdir_p(download_directory)
+
+     counter = 0
+     bucket_resource.objects(:prefix => prefix).each do |object|
+       target = File.join(download_directory, "#{counter}.txt")
+       object.get(:response_target => target)
+       counter += 1
+     end
+     expect(Dir.glob(File.join(download_directory, "**", "*.txt")).inject(0) { |sum, f| sum + IO.readlines(f).size }).to eq(number_of_events)
+   end
+ end
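The expected object count follows directly from the let values above; a worked version of the arithmetic:

    # Worked arithmetic behind the let blocks above (all values from the spec):
    event_size = "Hello world".bytesize         # => 11
    batch_size = 125
    size_file  = batch_size * event_size * 2    # => 2750 bytes per rotation
    number_of_events = 5000
    number_of_files  = number_of_events * event_size / size_file  # => 20

So the spec expects 20 objects in the bucket, each between size_file and 2 * size_file bytes.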
data/spec/integration/sqs_spec.rb
@@ -0,0 +1,110 @@
+ # encoding: utf-8
+ require "spec_helper"
+ require "logstash/inputs/sqs"
+ require "logstash/event"
+ require "logstash/json"
+ require_relative "../support/helpers"
+ require "thread"
+
+ Thread.abort_on_exception = true
+
+ describe "LogStash::Inputs::SQS integration", :integration => true do
+   let(:decoded_message) { { "drstrange" => "is-he-really-that-strange" } }
+   let(:encoded_message) { LogStash::Json.dump(decoded_message) }
+   let(:queue) { Queue.new }
+
+   let(:input) { LogStash::Inputs::SQS.new(options) }
+
+   context "with invalid credentials" do
+     let(:options) do
+       {
+         "queue" => "do-not-exist",
+         "access_key_id" => "bad_access",
+         "secret_access_key" => "bad_secret_key",
+         "region" => ENV["AWS_REGION"]
+       }
+     end
+
+     subject { input }
+
+     it "raises a ConfigurationError if the credentials are bad" do
+       expect { subject.register }.to raise_error(LogStash::ConfigurationError)
+     end
+   end
+
+   context "with valid credentials" do
+     let(:options) do
+       {
+         "queue" => ENV["SQS_QUEUE_NAME"],
+         "access_key_id" => ENV['AWS_ACCESS_KEY_ID'],
+         "secret_access_key" => ENV['AWS_SECRET_ACCESS_KEY'],
+         "region" => ENV["AWS_REGION"]
+       }
+     end
+
+     before :each do
+       push_sqs_event(encoded_message)
+       input.register
+       @server = Thread.new { input.run(queue) }
+     end
+
+     after do
+       @server.kill
+     end
+
+     subject { queue.pop }
+
+     it "creates logstash events" do
+       expect(subject["drstrange"]).to eq(decoded_message["drstrange"])
+     end
+
+     context "when the optional fields are not specified" do
+       let(:id_field) { "my_id_field" }
+       let(:md5_field) { "my_md5_field" }
+       let(:sent_timestamp_field) { "my_sent_timestamp_field" }
+
+       it "does not add the `message_id`" do
+         expect(subject[id_field]).to be_nil
+       end
+
+       it "does not add the `md5_of_body`" do
+         expect(subject[md5_field]).to be_nil
+       end
+
+       it "does not add the `sent_timestamp`" do
+         expect(subject[sent_timestamp_field]).to be_nil
+       end
+     end
+
+     context "when the optional fields are specified" do
+       let(:id_field) { "my_id_field" }
+       let(:md5_field) { "my_md5_field" }
+       let(:sent_timestamp_field) { "my_sent_timestamp_field" }
+
+       let(:options) do
+         {
+           "queue" => ENV["SQS_QUEUE_NAME"],
+           "access_key_id" => ENV['AWS_ACCESS_KEY_ID'],
+           "secret_access_key" => ENV['AWS_SECRET_ACCESS_KEY'],
+           "region" => ENV["AWS_REGION"],
+           "id_field" => id_field,
+           "md5_field" => md5_field,
+           "sent_timestamp_field" => sent_timestamp_field
+         }
+       end
+
+       it "adds the `message_id`" do
+         expect(subject[id_field]).not_to be_nil
+       end
+
+       it "adds the `md5_of_body`" do
+         expect(subject[md5_field]).not_to be_nil
+       end
+
+       it "adds the `sent_timestamp`" do
+         expect(subject[sent_timestamp_field]).not_to be_nil
+       end
+     end
+   end
+ end
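push_sqs_event comes from data/spec/support/helpers.rb, which is not shown in this diff. A hypothetical sketch of what such a helper could look like with the aws-sdk-sqs gem (the real implementation may differ):

    # Hypothetical helper, assuming the aws-sdk-sqs gem; the real push_sqs_event
    # lives in data/spec/support/helpers.rb and may be implemented differently.
    require "aws-sdk-sqs"

    def push_sqs_event(payload)
      client = Aws::SQS::Client.new(region: ENV["AWS_REGION"])
      queue_url = client.get_queue_url(queue_name: ENV["SQS_QUEUE_NAME"]).queue_url
      client.send_message(queue_url: queue_url, message_body: payload)
    end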
data/spec/integration/stress_test_spec.rb
@@ -0,0 +1,60 @@
+ # encoding: utf-8
+ require_relative "../spec_helper"
+ require "logstash/outputs/s3"
+ require "logstash/codecs/line"
+ require "stud/temporary"
+
+ describe "Upload current file on shutdown", :integration => true, :slow => true do
+   include_context "setup plugin"
+   let(:stress_time) { (ENV["RUNTIME"] || 1 * 60).to_i } # seconds; ENV values are strings, hence to_i
+   let(:options) { main_options }
+
+   let(:time_file) { 15 }
+   let(:batch_size) { 125 }
+   let(:event_encoded) { "Hello world" }
+   let(:batch) do
+     b = {}
+     batch_size.times do
+       event = LogStash::Event.new({ "message" => event_encoded })
+       b[event] = "#{event_encoded}\n"
+     end
+     b
+   end
+   let(:workers) { 3 }
+
+   it "persists all events" do
+     started_at = Time.now
+     events_sent = {}
+
+     clean_remote_files(prefix)
+     subject.register
+
+     workers.times do
+       Thread.new do
+         events_sent[Thread.current] = 0
+
+         while Time.now - started_at < stress_time
+           subject.multi_receive_encoded(batch)
+           events_sent[Thread.current] += batch_size
+         end
+       end
+     end
+
+     sleep(1) while Time.now - started_at < stress_time
+
+     subject.close
+
+     download_directory = Stud::Temporary.pathname
+
+     FileUtils.rm_rf(download_directory)
+     FileUtils.mkdir_p(download_directory)
+
+     counter = 0
+     bucket_resource.objects(:prefix => prefix).each do |object|
+       target = File.join(download_directory, "#{counter}.txt")
+       object.get(:response_target => target)
+       counter += 1
+     end
+     expect(Dir.glob(File.join(download_directory, "**", "*.txt")).inject(0) { |sum, f| sum + IO.readlines(f).size }).to eq(events_sent.values.inject(0, :+))
+   end
+ end
data/spec/integration/time_based_rotation_with_constant_write_spec.rb
@@ -0,0 +1,60 @@
+ # encoding: utf-8
+ require_relative "../spec_helper"
+ require "logstash/outputs/s3"
+ require "logstash/codecs/line"
+ require "stud/temporary"
+
+ describe "File Time rotation with constant write", :integration => true do
+   include_context "setup plugin"
+
+   let(:time_file) { 0.004 }
+   let(:options) { main_options.merge({ "rotation_strategy" => "time" }) }
+   let(:number_of_events) { 5000 }
+   let(:batch_size) { 125 }
+   let(:event_encoded) { "Hello world" }
+   let(:batch) do
+     b = {}
+     number_of_events.times do
+       event = LogStash::Event.new({ "message" => event_encoded })
+       b[event] = "#{event_encoded}\n"
+     end
+     b
+   end
+   let(:minimum_number_of_time_rotation) { 3 }
+   let(:batch_step) { (number_of_events / minimum_number_of_time_rotation).ceil }
+
+   before do
+     clean_remote_files(prefix)
+     subject.register
+
+     # simulate batched reads/writes
+     batch.each_slice(batch_step) do |batch_time|
+       batch_time.each_slice(batch_size) do |smaller_batch|
+         subject.multi_receive_encoded(smaller_batch)
+       end
+       sleep(1)
+     end
+
+     subject.close
+   end
+
+   it "creates multiple files" do
+     # close will upload the current file as well
+     expect(bucket_resource.objects(:prefix => prefix).count).to be_between(minimum_number_of_time_rotation, minimum_number_of_time_rotation + 1).inclusive
+   end
+
+   it "persists all events" do
+     download_directory = Stud::Temporary.pathname
+
+     FileUtils.rm_rf(download_directory)
+     FileUtils.mkdir_p(download_directory)
+
+     counter = 0
+     bucket_resource.objects(:prefix => prefix).each do |object|
+       target = File.join(download_directory, "#{counter}.txt")
+       object.get(:response_target => target)
+       counter += 1
+     end
+     expect(Dir.glob(File.join(download_directory, "**", "*.txt")).inject(0) { |sum, f| sum + IO.readlines(f).size }).to eq(number_of_events)
+   end
+ end
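The time_file value looks tiny because this output expresses time_file in minutes. A quick conversion shows why three to four objects are expected:

    # time_file is given in minutes in this output:
    time_file_seconds = 0.004 * 60   # => 0.24 s per rotation window
    # The before block sleeps 1 s after each batch step, comfortably exceeding
    # the window, so each step forces a rotation; close then uploads the final
    # file, giving between 3 and 4 objects in the bucket.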
data/spec/integration/time_based_rotation_with_stale_write_spec.rb
@@ -0,0 +1,64 @@
+ # encoding: utf-8
+ require_relative "../spec_helper"
+ require "logstash/outputs/s3"
+ require "logstash/codecs/line"
+ require "stud/temporary"
+
+ describe "File Time rotation with stale write", :integration => true do
+   include_context "setup plugin"
+
+   let(:time_file) { 0.0004 }
+   let(:options) { main_options.merge({ "rotation_strategy" => "time" }) }
+   let(:number_of_events) { 5000 }
+   let(:batch_size) { 125 }
+   let(:event_encoded) { "Hello world" }
+   let(:batch) do
+     b = {}
+     number_of_events.times do
+       event = LogStash::Event.new({ "message" => event_encoded })
+       b[event] = "#{event_encoded}\n"
+     end
+     b
+   end
+
+   before do
+     stub_const('LogStash::Outputs::S3::PERIODIC_CHECK_INTERVAL_IN_SECONDS', 1)
+     clean_remote_files(prefix)
+     subject.register
+     subject.multi_receive_encoded(batch)
+     sleep(5) # by now the periodic check should have kicked in
+   end
+
+   after do
+     subject.close
+   end
+
+   it "creates one file" do
+     # close will upload the current file as well
+     try(20) do
+       expect(bucket_resource.objects(:prefix => prefix).count).to eq(1)
+     end
+   end
+
+   it "persists all events" do
+     download_directory = Stud::Temporary.pathname
+
+     FileUtils.rm_rf(download_directory)
+     FileUtils.mkdir_p(download_directory)
+
+     counter = 0
+     bucket_resource.objects(:prefix => prefix).each do |object|
+       target = File.join(download_directory, "#{counter}.txt")
+       object.get(:response_target => target)
+       counter += 1
+     end
+
+     try(20) do
+       expect(Dir.glob(File.join(download_directory, "**", "*.txt")).inject(0) { |sum, f| sum + IO.readlines(f).size }).to eq(number_of_events)
+     end
+
+     try(10) do
+       expect(Dir.glob(File.join(temporary_directory, "**", "*.txt")).size).to eq(1) # only one file should be left, since we did a rotation
+     end
+   end
+ end
data/spec/integration/upload_current_file_on_shutdown_spec.rb
@@ -0,0 +1,51 @@
+ # encoding: utf-8
+ require_relative "../spec_helper"
+ require "logstash/outputs/s3"
+ require "logstash/codecs/line"
+ require "stud/temporary"
+
+ describe "Upload current file on shutdown", :integration => true do
+   include_context "setup plugin"
+   let(:options) { main_options }
+
+   let(:size_file) { 1000000 }
+   let(:time_file) { 100000 }
+   let(:number_of_events) { 5000 }
+   let(:batch_size) { 125 }
+   let(:event_encoded) { "Hello world" }
+   let(:batch) do
+     b = {}
+     number_of_events.times do
+       event = LogStash::Event.new({ "message" => event_encoded })
+       b[event] = "#{event_encoded}\n"
+     end
+     b
+   end
+
+   before do
+     clean_remote_files(prefix)
+     subject.register
+     subject.multi_receive_encoded(batch)
+     subject.close
+   end
+
+   it "creates a specific quantity of files" do
+     # time_file and size_file are both very large, so no rotation happens before close
+     expect(bucket_resource.objects(:prefix => prefix).count).to eq(1)
+   end
+
+   it "persists all events" do
+     download_directory = Stud::Temporary.pathname
+
+     FileUtils.rm_rf(download_directory)
+     FileUtils.mkdir_p(download_directory)
+
+     counter = 0
+     bucket_resource.objects(:prefix => prefix).each do |object|
+       target = File.join(download_directory, "#{counter}.txt")
+       object.get(:response_target => target)
+       counter += 1
+     end
+     expect(Dir.glob(File.join(download_directory, "**", "*.txt")).inject(0) { |sum, f| sum + IO.readlines(f).size }).to eq(number_of_events)
+   end
+ end
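The single-object expectation holds because neither rotation policy can fire with these thresholds, so close is the only upload path:

    # Neither rotation threshold is reachable in this spec:
    payload_bytes = 5000 * "Hello world\n".bytesize  # => 60_000, well under size_file (1_000_000)
    # time_file is 100_000 minutes, so time-based rotation never triggers either;
    # the lone temporary file is uploaded only by subject.close.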
data/spec/outputs/cloudwatch_spec.rb
@@ -0,0 +1,38 @@
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/outputs/cloudwatch"
+
+ describe "outputs/cloudwatch" do
+
+   let(:config) { { 'metricname' => 'foo' } }
+
+   subject(:plugin) { LogStash::Outputs::CloudWatch.new(config) }
+
+   it "should register" do
+     expect { plugin.register }.to_not raise_error
+   end
+
+   it "should respond correctly to a receive call" do
+     plugin.register
+     event = LogStash::Event.new
+     expect { plugin.receive(event) }.to_not raise_error
+   end
+
+   context 'with queue_size' do
+
+     let(:queue_size) { 100 }
+
+     let(:config) { super().merge('queue_size' => queue_size) }
+
+     it "triggers job ahead of time" do
+       plugin.register
+       event_queue = plugin.event_queue
+       allow( event_queue ).to receive(:length).and_return queue_size # emulate full queue
+       expect( plugin ).to receive(:publish)
+
+       event = LogStash::Event.new
+       plugin.receive(event)
+       sleep 1.0 # allow scheduler to kick in
+     end
+
+   end
+ end
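For reference, the queue_size behaviour under test reads as: receive enqueues events, and once the queue length reaches queue_size the plugin publishes without waiting for the scheduled flush. A minimal construction mirroring the spec's config hashes (option names taken from the spec itself; runnable only inside a Logstash environment):

    # Minimal construction mirroring the spec's config hashes:
    plugin = LogStash::Outputs::CloudWatch.new(
      'metricname' => 'foo',    # metric to emit
      'queue_size' => 100       # publish early once this many events are queued
    )
    plugin.register
    plugin.receive(LogStash::Event.new)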