logstash-input-sqs 1.0.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +7 -0
- data/lib/logstash/inputs/sqs.rb +101 -82
- data/logstash-input-sqs.gemspec +2 -3
- data/spec/inputs/sqs_spec.rb +170 -1
- data/spec/integration/sqs_spec.rb +111 -0
- data/spec/spec_helper.rb +2 -0
- data/spec/support/helpers.rb +10 -0
- metadata +12 -6
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2a6fdc994c8c6ebd9175705b27655d59b05077bf
+  data.tar.gz: 27f71b1d9c8c5b3fb3e5f0ff88e89d612dbf68a5
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 144dc569bcb78514c165eb2cc4089e2f90069b778537ad6dc501edd03a537f46fda0b9074ca24b01bf2c348b0687f4fb05c4943a6c8cd3edc87b55ba484a44ff
+  data.tar.gz: 5dab8585276679b08119fd525a02b38a428060cce3d02068a6663facf0c5c065f885c99ded701de7c92503819e30c28c6fae12c031c008c070d0455db703c6c3
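The SHA1 and SHA512 digests above cover the `metadata.gz` and `data.tar.gz` archives packed inside the published `.gem` file. As a minimal sketch (assuming the gem has already been unpacked, e.g. with `tar xf logstash-input-sqs-1.1.0.gem`, which is how a `.gem` archive is laid out), the SHA512 values can be recomputed locally with Ruby's standard `digest` library:

```ruby
require "digest"

# Assumes metadata.gz and data.tar.gz sit in the current directory,
# i.e. the .gem archive has already been unpacked with `tar xf`.
%w[metadata.gz data.tar.gz].each do |name|
  puts "#{name}: #{Digest::SHA512.file(name).hexdigest}"
end
```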
data/CHANGELOG.md CHANGED

@@ -0,0 +1,7 @@
+# 1.1.0
+  - AWS ruby SDK v2 upgrade
+  - Replaces aws-sdk dependencies with mixin-aws
+  - Removes unnecessary de-allocation
+  - Move the code into smaller methods to allow easier mocking and testing
+  - Add the option to configure polling frequency
+  - Adding a monkey patch to make sure `LogStash::ShutdownSignal` doesn't get catch by AWS RetryError.
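Two of the changelog items surface directly in the plugin's configuration: `polling_frequency` becomes a user-facing option and the AWS settings now come from logstash-mixin-aws. A minimal sketch of constructing the input with the new option (the queue name and credentials below are placeholders):

```ruby
require "logstash/inputs/sqs"

# Placeholder values; "queue" plus the usual AWS credential/region settings
# provided by logstash-mixin-aws are what the plugin expects.
input = LogStash::Inputs::SQS.new(
  "queue"             => "my-queue",
  "region"            => "us-east-1",
  "access_key_id"     => "AKIA...",
  "secret_access_key" => "...",
  "polling_frequency" => 30  # long-poll wait time in seconds; defaults to 20
)

# input.register would then resolve the queue URL and build the SQS poller
# (it contacts AWS, so it needs real credentials and an existing queue).
```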
data/lib/logstash/inputs/sqs.rb CHANGED

@@ -1,9 +1,10 @@
 # encoding: utf-8
+#
 require "logstash/inputs/threadable"
 require "logstash/namespace"
 require "logstash/timestamp"
 require "logstash/plugin_mixins/aws_config"
-require "
+require "logstash/errors"
 
 # Pull events from an Amazon Web Services Simple Queue Service (SQS) queue.
 #
@@ -58,7 +59,15 @@ require "digest/sha2"
 # See http://aws.amazon.com/iam/ for more details on setting up AWS identities.
 #
 class LogStash::Inputs::SQS < LogStash::Inputs::Threadable
-  include LogStash::PluginMixins::AwsConfig
+  include LogStash::PluginMixins::AwsConfig::V2
+
+  MAX_TIME_BEFORE_GIVING_UP = 60
+  MAX_MESSAGES_TO_FETCH = 10 # Between 1-10 in the AWS-SDK doc
+  SENT_TIMESTAMP = "SentTimestamp"
+  SQS_ATTRIBUTES = [SENT_TIMESTAMP]
+  BACKOFF_SLEEP_TIME = 1
+  BACKOFF_FACTOR = 2
+  DEFAULT_POLLING_FREQUENCY = 20
 
   config_name "sqs"
 
@@ -73,104 +82,114 @@ class LogStash::Inputs::SQS < LogStash::Inputs::Threadable
   # Name of the event field in which to store the SQS message MD5 checksum
   config :md5_field, :validate => :string
 
-  # Name of the event field in which to store the
+  # Name of the event field in which to store the SQS message Sent Timestamp
   config :sent_timestamp_field, :validate => :string
 
-
-
-
-
-  }
-  end
+  # Polling frequency, default is 20 seconds
+  config :polling_frequency, :validate => :number, :default => DEFAULT_POLLING_FREQUENCY
+
+  attr_reader :poller
 
-  public
   def register
-    @logger.info("Registering SQS input", :queue => @queue)
     require "aws-sdk"
+    @logger.info("Registering SQS input", :queue => @queue)
 
-
+    monkey_patch_aws_retryable_plugin!
+    setup_queue
+  end
 
-
-
-
-
-
-
-
-
-  end # def register
-
-  public
-  def run(output_queue)
-    @logger.debug("Polling SQS queue", :queue => @queue)
+  def setup_queue
+    aws_sqs_client = Aws::SQS::Client.new(aws_options_hash)
+    queue_url = aws_sqs_client.get_queue_url(:queue_name => @queue)[:queue_url]
+    @poller = Aws::SQS::QueuePoller.new(queue_url, :client => aws_sqs_client)
+  rescue Aws::SQS::Errors::ServiceError => e
+    @logger.error("Cannot establish connection to Amazon SQS", :error => e)
+    raise LogStash::ConfigurationError, "Verify the SQS queue name and your credentials"
+  end
 
-
-
-
-
+  def polling_options
+    {
+      :max_number_of_messages => MAX_MESSAGES_TO_FETCH,
+      :attribute_names => SQS_ATTRIBUTES,
+      :wait_time_seconds => @polling_frequency
     }
+  end
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  def decode_event(message)
+    @codec.decode(message.body) do |event|
+      return event
+    end
+  end
+
+  def add_sqs_data(event, message)
+    event[@id_field] = message.message_id if @id_field
+    event[@md5_field] = message.md5_of_body if @md5_field
+    event[@sent_timestamp_field] = convert_epoch_to_timestamp(message.attributes[SENT_TIMESTAMP]) if @sent_timestamp_field
+
+    return event
+  end
+
+  def handle_message(message)
+    event = decode_event(message)
+    add_sqs_data(event, message)
+    decorate(event)
+    return event
+  end
+
+  def run(output_queue)
+    @logger.debug("Polling SQS queue", :polling_options => polling_options)
+
+    run_with_backoff do
+      poller.poll(polling_options) do |messages, stats|
+        messages.each do |message|
+          output_queue << handle_message(message)
+        end
+
+        @logger.debug("SQS Stats:", :request_count => stats.request_count,
+                      :messages_count => stats.message_count,
+                      :last_message_received_at => stats.last_message_received_at) if @logger.debug?
+      end
+    end
+  end
 
   private
   # Runs an AWS request inside a Ruby block with an exponential backoff in case
-  # we
+  # we experience a ServiceError.
   #
   # @param [Integer] max_time maximum amount of time to sleep before giving up.
   # @param [Integer] sleep_time the initial amount of time to sleep before retrying.
   # @param [Block] block Ruby code block to execute.
-  def run_with_backoff(max_time, sleep_time, &block)
-
-    @logger.error("AWS::EC2::Errors::RequestLimitExceeded ... failed.", :queue => @queue)
-    return false
-  end # retry limit exceeded
+  def run_with_backoff(max_time = MAX_TIME_BEFORE_GIVING_UP, sleep_time = BACKOFF_SLEEP_TIME, &block)
+    next_sleep = sleep_time
 
     begin
       block.call
-
-
-
-
-
-
-
-  rescue
-
-
-
-
-
-
-
-
-  end
+      next_sleep = sleep_time
+    rescue Aws::SQS::Errors::ServiceError => e
+      @logger.warn("Aws::SQS::Errors::ServiceError ... retrying SQS request with exponential backoff", :queue => @queue, :sleep_time => sleep_time, :error => e)
+      sleep(next_sleep)
+      next_sleep = next_sleep > max_time ? sleep_time : sleep_time * BACKOFF_FACTOR
+
+      retry
+    rescue LogStash::ShutdownSignal
+      # The pipeline is currently shutting down.
+      # we can safely rescue and return, all unacked sqs messages will be resend
+      # when the pipeline is up again.
+    end
+  end
+
+  def convert_epoch_to_timestamp(time)
+    LogStash::Timestamp.at(time.to_i / 1000)
+  end
+
+  def monkey_patch_aws_retryable_plugin!
+    Aws::Plugins::RetryErrors::ErrorInspector.module_eval do
+      def networking?
+        !@error.is_a?(LogStash::ShutdownSignal) &&
+          (@error.is_a?(Seahorse::Client::NetworkingError) ||
+          Aws::Plugins::RetryErrors::ErrorInspector::NETWORKING_ERRORS.include?(@name) ||
+          @http_status_code == 0)
+      end
+    end
+  end
 end # class LogStash::Inputs::SQS
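The rewritten `run` method wraps `Aws::SQS::QueuePoller#poll` in `run_with_backoff`, which retries on `Aws::SQS::Errors::ServiceError` and sleeps between attempts. Below is a standalone sketch of that retry-with-backoff shape, simplified to a generic error class; it is not a line-for-line copy of the plugin's helper (which also rescues `LogStash::ShutdownSignal` so shutdown is never retried):

```ruby
# Simplified sketch of a retry loop with capped backoff, for illustration only.
BACKOFF_SLEEP_TIME = 1
BACKOFF_FACTOR = 2
MAX_TIME_BEFORE_GIVING_UP = 60

def run_with_backoff(max_time = MAX_TIME_BEFORE_GIVING_UP, sleep_time = BACKOFF_SLEEP_TIME)
  next_sleep = sleep_time
  begin
    yield
    next_sleep = sleep_time               # a successful call resets the backoff
  rescue StandardError => e
    warn "request failed (#{e.class}), sleeping #{next_sleep}s before retrying"
    sleep(next_sleep)
    # grow the wait until it passes the cap, then start over at the base value
    next_sleep = next_sleep >= max_time ? sleep_time : next_sleep * BACKOFF_FACTOR
    retry
  end
end

# Example usage: keep calling the block until it stops raising.
run_with_backoff { puts "polling..." }
```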
data/logstash-input-sqs.gemspec CHANGED

@@ -1,7 +1,6 @@
 Gem::Specification.new do |s|
-
   s.name = 'logstash-input-sqs'
-  s.version = '1.
+  s.version = '1.1.0'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Pull events from an Amazon Web Services Simple Queue Service (SQS) queue."
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
@@ -23,7 +22,7 @@ Gem::Specification.new do |s|
   s.add_runtime_dependency "logstash-core", '>= 1.4.0', '< 2.0.0'
 
   s.add_runtime_dependency 'logstash-codec-json'
-  s.add_runtime_dependency
+  s.add_runtime_dependency "logstash-mixin-aws", ">= 1.0.0"
 
   s.add_development_dependency 'logstash-devutils'
 end
data/spec/inputs/sqs_spec.rb CHANGED

@@ -1 +1,170 @@
-
+# encoding: utf-8
+require "logstash/inputs/sqs"
+require "logstash/errors"
+require "logstash/event"
+require "logstash/json"
+require "aws-sdk"
+require "spec_helper"
+
+describe LogStash::Inputs::SQS do
+  let(:queue_name) { "the-infinite-pandora-box" }
+  let(:queue_url) { "https://sqs.test.local/#{queue}" }
+  let(:options) do
+    {
+      "region" => "us-east-1",
+      "access_key_id" => "123",
+      "secret_access_key" => "secret",
+      "queue" => queue_name
+    }
+  end
+
+  let(:input) { LogStash::Inputs::SQS.new(options) }
+
+  let(:decoded_message) { { "bonjour" => "awesome" } }
+  let(:encoded_message) { double("sqs_message", :body => LogStash::Json::dump(decoded_message)) }
+
+  subject { input }
+
+  let(:mock_sqs) { Aws::SQS::Client.new({ :stub_responses => true }) }
+
+  context "with invalid credentials" do
+    before do
+      expect(Aws::SQS::Client).to receive(:new).and_return(mock_sqs)
+      expect(mock_sqs).to receive(:get_queue_url).with({ :queue_name => queue_name }) { raise Aws::SQS::Errors::ServiceError.new("bad-something", "bad token") }
+    end
+
+    it "raises a Configuration error if the credentials are bad" do
+      expect { subject.register }.to raise_error(LogStash::ConfigurationError)
+    end
+  end
+
+  context "valid credentials" do
+    let(:queue) { [] }
+
+    it "doesn't raise an error with valid credentials" do
+      expect(Aws::SQS::Client).to receive(:new).and_return(mock_sqs)
+      expect(mock_sqs).to receive(:get_queue_url).with({ :queue_name => queue_name }).and_return({:queue_url => queue_url })
+      expect { subject.register }.not_to raise_error
+    end
+
+    context "enrich event" do
+      let(:event) { LogStash::Event.new }
+
+      let(:message_id) { "123" }
+      let(:md5_of_body) { "dr strange" }
+      let(:sent_timestamp) { LogStash::Timestamp.new }
+      let(:epoch_timestamp) { (sent_timestamp.utc.to_f * 1000).to_i }
+
+      let(:id_field) { "my_id_field" }
+      let(:md5_field) { "my_md5_field" }
+      let(:sent_timestamp_field) { "my_sent_timestamp_field" }
+
+      let(:message) do
+        double("message", :message_id => message_id, :md5_of_body => md5_of_body, :attributes => { LogStash::Inputs::SQS::SENT_TIMESTAMP => epoch_timestamp } )
+      end
+
+      subject { input.add_sqs_data(event, message) }
+
+      context "when the option is specified" do
+        let(:options) do
+          {
+            "region" => "us-east-1",
+            "access_key_id" => "123",
+            "secret_access_key" => "secret",
+            "queue" => queue_name,
+            "id_field" => id_field,
+            "md5_field" => md5_field,
+            "sent_timestamp_field" => sent_timestamp_field
+          }
+        end
+
+        it "add the `message_id`" do
+          expect(subject[id_field]).to eq(message_id)
+        end
+
+        it "add the `md5_of_body`" do
+          expect(subject[md5_field]).to eq(md5_of_body)
+        end
+
+        it "add the `sent_timestamp`" do
+          expect(subject[sent_timestamp_field].to_i).to eq(sent_timestamp.to_i)
+        end
+      end
+
+      context "when the option isn't specified" do
+        it "doesnt add the `message_id`" do
+          expect(subject).not_to include(id_field)
+        end
+
+        it "doesnt add the `md5_of_body`" do
+          expect(subject).not_to include(md5_field)
+        end
+
+        it "doesnt add the `sent_timestamp`" do
+          expect(subject).not_to include(sent_timestamp_field)
+        end
+      end
+    end
+
+    context "when decoding body" do
+      subject { LogStash::Inputs::SQS::new(options.merge({ "codec" => "json" })) }
+
+      it "uses the specified codec" do
+        expect(subject.decode_event(encoded_message)["bonjour"]).to eq(decoded_message["bonjour"])
+      end
+    end
+
+    context "receiving messages" do
+      before do
+        expect(subject).to receive(:poller).and_return(mock_sqs).at_least(:once)
+      end
+
+      it "creates logstash event" do
+        expect(mock_sqs).to receive(:poll).with(anything()).and_yield([encoded_message], double("stats"))
+        subject.run(queue)
+        expect(queue.pop["bonjour"]).to eq(decoded_message["bonjour"])
+      end
+    end
+
+    context "on errors" do
+      let(:payload) { "Hello world" }
+
+      before do
+        expect(subject).to receive(:poller).and_return(mock_sqs).at_least(:once)
+      end
+
+      context "SQS errors" do
+        it "retry to fetch messages" do
+          # change the poller implementation to raise SQS errors.
+          had_error = false
+
+          # actually using the child of `Object` to do an expectation of `#sleep`
+          expect(subject).to receive(:sleep).with(LogStash::Inputs::SQS::BACKOFF_SLEEP_TIME)
+          expect(mock_sqs).to receive(:poll).with(anything()).at_most(2) do
+            unless had_error
+              had_error = true
+              raise Aws::SQS::Errors::ServiceError.new("testing", "testing exception")
+            end
+
+            queue << payload
+          end
+
+          subject.run(queue)
+
+          expect(queue.size).to eq(1)
+          expect(queue.pop).to eq(payload)
+        end
+      end
+
+      context "other errors" do
+        it "stops executing the code and raise the exception" do
+          expect(mock_sqs).to receive(:poll).with(anything()).at_most(2) do
+            raise RuntimeError
+          end
+
+          expect { subject.run(queue) }.to raise_error(RuntimeError)
+        end
+      end
+    end
+  end
+end
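The unit specs never reach AWS: `Aws::SQS::Client.new(:stub_responses => true)` builds a client whose API calls return canned data, and the specs then inject it in place of the real client. A small self-contained sketch of that aws-sdk v2 stubbing facility (the queue URL below is made up):

```ruby
require "aws-sdk"

# A stubbed client performs no network calls; operations return canned data.
sqs = Aws::SQS::Client.new(:stub_responses => true)

# Tell the stub what get_queue_url should answer.
sqs.stub_responses(:get_queue_url, :queue_url => "https://sqs.test.local/123456789/the-infinite-pandora-box")

resp = sqs.get_queue_url(:queue_name => "the-infinite-pandora-box")
puts resp.queue_url # => "https://sqs.test.local/123456789/the-infinite-pandora-box"
```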
data/spec/integration/sqs_spec.rb ADDED

@@ -0,0 +1,111 @@
+# encoding: utf-8
+require "logstash/inputs/sqs"
+require "logstash/event"
+require "logstash/json"
+require "aws-sdk"
+require "spec_helper"
+require_relative "../support/helpers"
+require "thread"
+
+Thread.abort_on_exception = true
+
+describe "LogStash::Inputs::SQS integration", :integration => true do
+  let(:decoded_message) { { "drstrange" => "is-he-really-that-strange" } }
+  let(:encoded_message) { LogStash::Json.dump(decoded_message) }
+  let(:queue) { Queue.new }
+
+  let(:input) { LogStash::Inputs::SQS.new(options) }
+
+  context "with invalid credentials" do
+    let(:options) do
+      {
+        "queue" => "do-not-exist",
+        "access_key_id" => "bad_access",
+        "secret_access_key" => "bad_secret_key",
+        "region" => ENV["AWS_REGION"]
+      }
+    end
+
+    subject { input }
+
+    it "raises a Configuration error if the credentials are bad" do
+      expect { subject.register }.to raise_error(LogStash::ConfigurationError)
+    end
+  end
+
+  context "with valid credentials" do
+    let(:options) do
+      {
+        "queue" => ENV["SQS_QUEUE_NAME"],
+        "access_key_id" => ENV['AWS_ACCESS_KEY_ID'],
+        "secret_access_key" => ENV['AWS_SECRET_ACCESS_KEY'],
+        "region" => ENV["AWS_REGION"]
+      }
+    end
+
+    before :each do
+      push_sqs_event(encoded_message)
+      input.register
+      @server = Thread.new { input.run(queue) }
+    end
+
+    after do
+      @server.kill
+    end
+
+    subject { queue.pop }
+
+    it "creates logstash events" do
+      expect(subject["drstrange"]).to eq(decoded_message["drstrange"])
+    end
+
+    context "when the optionals fields are not specified" do
+      let(:id_field) { "my_id_field" }
+      let(:md5_field) { "my_md5_field" }
+      let(:sent_timestamp_field) { "my_sent_timestamp_field" }
+
+      it "add the `message_id`" do
+        expect(subject[id_field]).to be_nil
+      end
+
+      it "add the `md5_of_body`" do
+        expect(subject[md5_field]).to be_nil
+      end
+
+      it "add the `sent_timestamp`" do
+        expect(subject[sent_timestamp_field]).to be_nil
+      end
+
+    end
+
+    context "when the optionals fields are specified" do
+      let(:id_field) { "my_id_field" }
+      let(:md5_field) { "my_md5_field" }
+      let(:sent_timestamp_field) { "my_sent_timestamp_field" }
+
+      let(:options) do
+        {
+          "queue" => ENV["SQS_QUEUE_NAME"],
+          "access_key_id" => ENV['AWS_ACCESS_KEY_ID'],
+          "secret_access_key" => ENV['AWS_SECRET_ACCESS_KEY'],
+          "region" => ENV["AWS_REGION"],
+          "id_field" => id_field,
+          "md5_field" => md5_field,
+          "sent_timestamp_field" => sent_timestamp_field
+        }
+      end
+
+      it "add the `message_id`" do
+        expect(subject[id_field]).not_to be_nil
+      end
+
+      it "add the `md5_of_body`" do
+        expect(subject[md5_field]).not_to be_nil
+      end
+
+      it "add the `sent_timestamp`" do
+        expect(subject[sent_timestamp_field]).not_to be_nil
+      end
+    end
+  end
+end
data/spec/spec_helper.rb ADDED

data/spec/support/helpers.rb ADDED
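`spec/support/helpers.rb` is listed as added (+10 lines) but its body is not rendered in this diff; the integration spec only calls its `push_sqs_event` helper. A hypothetical sketch of what such a helper could look like, reusing the same environment variables as the spec:

```ruby
# Hypothetical sketch only; the real spec/support/helpers.rb is not shown in this diff.
require "aws-sdk"

def push_sqs_event(payload)
  client = Aws::SQS::Client.new(
    :region            => ENV["AWS_REGION"],
    :access_key_id     => ENV["AWS_ACCESS_KEY_ID"],
    :secret_access_key => ENV["AWS_SECRET_ACCESS_KEY"]
  )

  # Resolve the queue URL, then enqueue the payload for the test run to consume.
  queue_url = client.get_queue_url(:queue_name => ENV["SQS_QUEUE_NAME"])[:queue_url]
  client.send_message(:queue_url => queue_url, :message_body => payload)
end
```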
metadata CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-sqs
 version: !ruby/object:Gem::Version
-  version: 1.
+  version: 1.1.0
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-
+date: 2015-08-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash-core
@@ -45,17 +45,17 @@ dependencies:
   prerelease: false
   type: :runtime
 - !ruby/object:Gem::Dependency
-  name: aws
+  name: logstash-mixin-aws
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - '>='
       - !ruby/object:Gem::Version
-        version:
+        version: 1.0.0
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - '>='
       - !ruby/object:Gem::Version
-        version:
+        version: 1.0.0
   prerelease: false
   type: :runtime
 - !ruby/object:Gem::Dependency
@@ -89,6 +89,9 @@ files:
 - lib/logstash/inputs/sqs.rb
 - logstash-input-sqs.gemspec
 - spec/inputs/sqs_spec.rb
+- spec/integration/sqs_spec.rb
+- spec/spec_helper.rb
+- spec/support/helpers.rb
 homepage: http://www.elastic.co/guide/en/logstash/current/index.html
 licenses:
 - Apache License (2.0)
@@ -111,9 +114,12 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.
+rubygems_version: 2.1.9
 signing_key:
 specification_version: 4
 summary: Pull events from an Amazon Web Services Simple Queue Service (SQS) queue.
 test_files:
 - spec/inputs/sqs_spec.rb
+- spec/integration/sqs_spec.rb
+- spec/spec_helper.rb
+- spec/support/helpers.rb