logstash-input-tqp 0.1.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/Gemfile +11 -0
- data/LICENSE +202 -0
- data/README.md +27 -0
- data/lib/logstash/inputs/sqs/patch.rb +21 -0
- data/lib/logstash/inputs/tqp.rb +256 -0
- data/logstash-input-tqp.gemspec +29 -0
- data/spec/inputs/tqp_spec.rb +178 -0
- data/spec/integration/tqp_spec.rb +74 -0
- data/spec/spec_helper.rb +2 -0
- data/spec/support/helpers.rb +18 -0
- metadata +137 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 22137a39127bc2546cffb562e760b17d9b5475d68d6e2452cfa67c173a4d68f1
|
4
|
+
data.tar.gz: 344ae3838834dd5e868dd0fdcfb57f647a46a3f62da73c3d56c49eb0bc76251e
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 1c4e6eb6ba7ad4de1024509efafc4fde483371dc41415732e5bea329e28f84db03c40d493f465c41b22d30699a206187a83ea6aeb2f777d94e1ffad818ebe3c9
|
7
|
+
data.tar.gz: 94e89728be7bda4ce8e6a0cb3cbdd56b17bbcca4eccf73f0781c5ecce7d38aa580e1064764372c49bafecc754aee5c35773a21577c6a542c1e8bebbe79b048bf
|
data/Gemfile
ADDED
@@ -0,0 +1,11 @@
|
|
1
|
+
source 'https://rubygems.org'

gemspec

# Allow developing against a local Logstash checkout: set
# LOGSTASH_SOURCE=1 and (optionally) LOGSTASH_PATH to the checkout.
logstash_path = ENV.fetch("LOGSTASH_PATH", "../../logstash")
use_logstash_source = ENV["LOGSTASH_SOURCE"].to_s == "1"

if use_logstash_source && Dir.exist?(logstash_path)
  gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
  gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
end
|
data/LICENSE
ADDED
@@ -0,0 +1,202 @@
|
|
1
|
+
|
2
|
+
Apache License
|
3
|
+
Version 2.0, January 2004
|
4
|
+
http://www.apache.org/licenses/
|
5
|
+
|
6
|
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
7
|
+
|
8
|
+
1. Definitions.
|
9
|
+
|
10
|
+
"License" shall mean the terms and conditions for use, reproduction,
|
11
|
+
and distribution as defined by Sections 1 through 9 of this document.
|
12
|
+
|
13
|
+
"Licensor" shall mean the copyright owner or entity authorized by
|
14
|
+
the copyright owner that is granting the License.
|
15
|
+
|
16
|
+
"Legal Entity" shall mean the union of the acting entity and all
|
17
|
+
other entities that control, are controlled by, or are under common
|
18
|
+
control with that entity. For the purposes of this definition,
|
19
|
+
"control" means (i) the power, direct or indirect, to cause the
|
20
|
+
direction or management of such entity, whether by contract or
|
21
|
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
22
|
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
23
|
+
|
24
|
+
"You" (or "Your") shall mean an individual or Legal Entity
|
25
|
+
exercising permissions granted by this License.
|
26
|
+
|
27
|
+
"Source" form shall mean the preferred form for making modifications,
|
28
|
+
including but not limited to software source code, documentation
|
29
|
+
source, and configuration files.
|
30
|
+
|
31
|
+
"Object" form shall mean any form resulting from mechanical
|
32
|
+
transformation or translation of a Source form, including but
|
33
|
+
not limited to compiled object code, generated documentation,
|
34
|
+
and conversions to other media types.
|
35
|
+
|
36
|
+
"Work" shall mean the work of authorship, whether in Source or
|
37
|
+
Object form, made available under the License, as indicated by a
|
38
|
+
copyright notice that is included in or attached to the work
|
39
|
+
(an example is provided in the Appendix below).
|
40
|
+
|
41
|
+
"Derivative Works" shall mean any work, whether in Source or Object
|
42
|
+
form, that is based on (or derived from) the Work and for which the
|
43
|
+
editorial revisions, annotations, elaborations, or other modifications
|
44
|
+
represent, as a whole, an original work of authorship. For the purposes
|
45
|
+
of this License, Derivative Works shall not include works that remain
|
46
|
+
separable from, or merely link (or bind by name) to the interfaces of,
|
47
|
+
the Work and Derivative Works thereof.
|
48
|
+
|
49
|
+
"Contribution" shall mean any work of authorship, including
|
50
|
+
the original version of the Work and any modifications or additions
|
51
|
+
to that Work or Derivative Works thereof, that is intentionally
|
52
|
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
53
|
+
or by an individual or Legal Entity authorized to submit on behalf of
|
54
|
+
the copyright owner. For the purposes of this definition, "submitted"
|
55
|
+
means any form of electronic, verbal, or written communication sent
|
56
|
+
to the Licensor or its representatives, including but not limited to
|
57
|
+
communication on electronic mailing lists, source code control systems,
|
58
|
+
and issue tracking systems that are managed by, or on behalf of, the
|
59
|
+
Licensor for the purpose of discussing and improving the Work, but
|
60
|
+
excluding communication that is conspicuously marked or otherwise
|
61
|
+
designated in writing by the copyright owner as "Not a Contribution."
|
62
|
+
|
63
|
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
64
|
+
on behalf of whom a Contribution has been received by Licensor and
|
65
|
+
subsequently incorporated within the Work.
|
66
|
+
|
67
|
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
68
|
+
this License, each Contributor hereby grants to You a perpetual,
|
69
|
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
70
|
+
copyright license to reproduce, prepare Derivative Works of,
|
71
|
+
publicly display, publicly perform, sublicense, and distribute the
|
72
|
+
Work and such Derivative Works in Source or Object form.
|
73
|
+
|
74
|
+
3. Grant of Patent License. Subject to the terms and conditions of
|
75
|
+
this License, each Contributor hereby grants to You a perpetual,
|
76
|
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
77
|
+
(except as stated in this section) patent license to make, have made,
|
78
|
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
79
|
+
where such license applies only to those patent claims licensable
|
80
|
+
by such Contributor that are necessarily infringed by their
|
81
|
+
Contribution(s) alone or by combination of their Contribution(s)
|
82
|
+
with the Work to which such Contribution(s) was submitted. If You
|
83
|
+
institute patent litigation against any entity (including a
|
84
|
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
85
|
+
or a Contribution incorporated within the Work constitutes direct
|
86
|
+
or contributory patent infringement, then any patent licenses
|
87
|
+
granted to You under this License for that Work shall terminate
|
88
|
+
as of the date such litigation is filed.
|
89
|
+
|
90
|
+
4. Redistribution. You may reproduce and distribute copies of the
|
91
|
+
Work or Derivative Works thereof in any medium, with or without
|
92
|
+
modifications, and in Source or Object form, provided that You
|
93
|
+
meet the following conditions:
|
94
|
+
|
95
|
+
(a) You must give any other recipients of the Work or
|
96
|
+
Derivative Works a copy of this License; and
|
97
|
+
|
98
|
+
(b) You must cause any modified files to carry prominent notices
|
99
|
+
stating that You changed the files; and
|
100
|
+
|
101
|
+
(c) You must retain, in the Source form of any Derivative Works
|
102
|
+
that You distribute, all copyright, patent, trademark, and
|
103
|
+
attribution notices from the Source form of the Work,
|
104
|
+
excluding those notices that do not pertain to any part of
|
105
|
+
the Derivative Works; and
|
106
|
+
|
107
|
+
(d) If the Work includes a "NOTICE" text file as part of its
|
108
|
+
distribution, then any Derivative Works that You distribute must
|
109
|
+
include a readable copy of the attribution notices contained
|
110
|
+
within such NOTICE file, excluding those notices that do not
|
111
|
+
pertain to any part of the Derivative Works, in at least one
|
112
|
+
of the following places: within a NOTICE text file distributed
|
113
|
+
as part of the Derivative Works; within the Source form or
|
114
|
+
documentation, if provided along with the Derivative Works; or,
|
115
|
+
within a display generated by the Derivative Works, if and
|
116
|
+
wherever such third-party notices normally appear. The contents
|
117
|
+
of the NOTICE file are for informational purposes only and
|
118
|
+
do not modify the License. You may add Your own attribution
|
119
|
+
notices within Derivative Works that You distribute, alongside
|
120
|
+
or as an addendum to the NOTICE text from the Work, provided
|
121
|
+
that such additional attribution notices cannot be construed
|
122
|
+
as modifying the License.
|
123
|
+
|
124
|
+
You may add Your own copyright statement to Your modifications and
|
125
|
+
may provide additional or different license terms and conditions
|
126
|
+
for use, reproduction, or distribution of Your modifications, or
|
127
|
+
for any such Derivative Works as a whole, provided Your use,
|
128
|
+
reproduction, and distribution of the Work otherwise complies with
|
129
|
+
the conditions stated in this License.
|
130
|
+
|
131
|
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
132
|
+
any Contribution intentionally submitted for inclusion in the Work
|
133
|
+
by You to the Licensor shall be under the terms and conditions of
|
134
|
+
this License, without any additional terms or conditions.
|
135
|
+
Notwithstanding the above, nothing herein shall supersede or modify
|
136
|
+
the terms of any separate license agreement you may have executed
|
137
|
+
with Licensor regarding such Contributions.
|
138
|
+
|
139
|
+
6. Trademarks. This License does not grant permission to use the trade
|
140
|
+
names, trademarks, service marks, or product names of the Licensor,
|
141
|
+
except as required for reasonable and customary use in describing the
|
142
|
+
origin of the Work and reproducing the content of the NOTICE file.
|
143
|
+
|
144
|
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
145
|
+
agreed to in writing, Licensor provides the Work (and each
|
146
|
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
147
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
148
|
+
implied, including, without limitation, any warranties or conditions
|
149
|
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
150
|
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
151
|
+
appropriateness of using or redistributing the Work and assume any
|
152
|
+
risks associated with Your exercise of permissions under this License.
|
153
|
+
|
154
|
+
8. Limitation of Liability. In no event and under no legal theory,
|
155
|
+
whether in tort (including negligence), contract, or otherwise,
|
156
|
+
unless required by applicable law (such as deliberate and grossly
|
157
|
+
negligent acts) or agreed to in writing, shall any Contributor be
|
158
|
+
liable to You for damages, including any direct, indirect, special,
|
159
|
+
incidental, or consequential damages of any character arising as a
|
160
|
+
result of this License or out of the use or inability to use the
|
161
|
+
Work (including but not limited to damages for loss of goodwill,
|
162
|
+
work stoppage, computer failure or malfunction, or any and all
|
163
|
+
other commercial damages or losses), even if such Contributor
|
164
|
+
has been advised of the possibility of such damages.
|
165
|
+
|
166
|
+
9. Accepting Warranty or Additional Liability. While redistributing
|
167
|
+
the Work or Derivative Works thereof, You may choose to offer,
|
168
|
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
169
|
+
or other liability obligations and/or rights consistent with this
|
170
|
+
License. However, in accepting such obligations, You may act only
|
171
|
+
on Your own behalf and on Your sole responsibility, not on behalf
|
172
|
+
of any other Contributor, and only if You agree to indemnify,
|
173
|
+
defend, and hold each Contributor harmless for any liability
|
174
|
+
incurred by, or claims asserted against, such Contributor by reason
|
175
|
+
of your accepting any such warranty or additional liability.
|
176
|
+
|
177
|
+
END OF TERMS AND CONDITIONS
|
178
|
+
|
179
|
+
APPENDIX: How to apply the Apache License to your work.
|
180
|
+
|
181
|
+
To apply the Apache License to your work, attach the following
|
182
|
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
183
|
+
replaced with your own identifying information. (Don't include
|
184
|
+
the brackets!) The text should be enclosed in the appropriate
|
185
|
+
comment syntax for the file format. We also recommend that a
|
186
|
+
file or class name and description of purpose be included on the
|
187
|
+
same "printed page" as the copyright notice for easier
|
188
|
+
identification within third-party archives.
|
189
|
+
|
190
|
+
Copyright 2020 Elastic and contributors
|
191
|
+
|
192
|
+
Licensed under the Apache License, Version 2.0 (the "License");
|
193
|
+
you may not use this file except in compliance with the License.
|
194
|
+
You may obtain a copy of the License at
|
195
|
+
|
196
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
197
|
+
|
198
|
+
Unless required by applicable law or agreed to in writing, software
|
199
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
200
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
201
|
+
See the License for the specific language governing permissions and
|
202
|
+
limitations under the License.
|
data/README.md
ADDED
@@ -0,0 +1,27 @@
|
|
1
|
+
# logstash-input-tqp plugin
|
2
|
+
|
3
|
+
This is a plugin for [Logstash](https://github.com/elastic/logstash).
|
4
|
+
|
5
|
+
It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way.
|
6
|
+
|
7
|
+
## Documentation
|
8
|
+
|
9
|
+
TQP is an opinionated library for pub/sub over SQS and SNS
|
10
|
+
|
11
|
+
`logstash-input-tqp` is an input plugin for logstash that implements the same tqp conventions for SQS queue construction and topic subscription.
|
12
|
+
|
13
|
+
```
|
14
|
+
input {
|
15
|
+
tqp {
|
16
|
+
queue_name => "logstash-poller"
|
17
|
+
topics => ["my-topic"]
|
18
|
+
access_key_id => ENV["AWS_ACCESS_KEY_ID"]
|
19
|
+
secret_access_key => ENV["AWS_SECRET_ACCESS_KEY"]
|
20
|
+
region => ENV["AWS_REGION"]
|
21
|
+
}
|
22
|
+
}
|
23
|
+
```
|
24
|
+
|
25
|
+
The plugin will create a SQS queue (and an associated DLQ) if it doesn't exist and subscribe
|
26
|
+
to each configured topic. SNS messages are parsed from `json`, and set as the logstash event.
|
27
|
+
The topic name is also added to the event's `@metadata`.
|
@@ -0,0 +1,21 @@
|
|
1
|
+
# This patch was stolen from logstash-plugins/logstash-output-sqs#20.
#
# It works around a JRuby issue that is fixed in JRuby 9000 but not in
# JRuby 1.7. See https://github.com/jruby/jruby/issues/3645 and
# https://github.com/jruby/jruby/issues/3920. It is needed because the
# `aws-sdk` performs tricky name discovery to generate the correct error
# class.
#
# As per https://github.com/aws/aws-sdk-ruby/issues/1301#issuecomment-261115960,
# this patch may be short-lived anyway.
require 'aws-sdk'

begin
  # Re-registering the constant makes JRuby complain on $stderr, so
  # silence $stderr for the duration of the const_set call.
  old_stderr = $stderr
  $stderr = StringIO.new

  module Aws
    const_set(:SQS, Aws::SQS)
  end
ensure
  # Always restore the real $stderr, even if const_set raised.
  $stderr = old_stderr
end
|
@@ -0,0 +1,256 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "logstash/inputs/threadable"
|
3
|
+
require "logstash/json"
|
4
|
+
require "logstash/namespace"
|
5
|
+
require "logstash/timestamp"
|
6
|
+
require "logstash/plugin_mixins/aws_config"
|
7
|
+
require "logstash/errors"
|
8
|
+
require 'logstash/inputs/sqs/patch'
|
9
|
+
|
10
|
+
# Forcibly load all modules marked to be lazily loaded.
|
11
|
+
#
|
12
|
+
# It is recommended that this is called prior to launching threads. See
|
13
|
+
# https://aws.amazon.com/blogs/developer/threading-with-the-aws-sdk-for-ruby/.
|
14
|
+
Aws.eager_autoload!
|
15
|
+
|
16
|
+
# Serialize every value of +hash+ to a JSON string, keyed by the
# stringified key. The SQS attribute APIs require string names and
# string (JSON-encoded) values.
def jsonify_hash(hash)
  hash.each_with_object({}) do |(key, value), result|
    result[key.to_s] = LogStash::Json.dump(value)
  end
end
|
19
|
+
|
20
|
+
# Create (or converge) an SQS queue with the given attributes and tags.
#
# If the queue already exists with a different definition, AWS raises
# QueueNameExists; in that case the tags are reconciled (desired tags
# applied, stale ones removed) and create_queue is re-run, which
# succeeds once the existing queue matches the requested definition.
#
# @param sqs_client [Aws::SQS::Client]
# @param queue_name [String]
# @param attributes [Hash] queue attributes; values are JSON-encoded
# @param tags [Hash] desired queue tags
# @return [Aws::SQS::Queue]
def create_queue_raw(sqs_client, queue_name, attributes, tags)
  sqs_resource = Aws::SQS::Resource.new(client: sqs_client)

  return sqs_resource.create_queue(
    queue_name: queue_name,
    attributes: jsonify_hash(attributes),
    tags: tags,
  )
rescue Aws::SQS::Errors::QueueNameExists
  sqs_resource = Aws::SQS::Resource.new(client: sqs_client)

  # BUG FIX: get_queue_url takes a params hash, not a bare string.
  queue_url = sqs_client.get_queue_url(queue_name: queue_name).queue_url

  existing_tags = sqs_client.list_queue_tags(queue_url: queue_url).tags

  tags_to_remove = existing_tags.keys - tags.keys

  sqs_client.tag_queue(queue_url: queue_url, tags: tags)

  # BUG FIX: an Array is always truthy, so the original `if tags_to_remove`
  # called untag_queue even with an empty key list. Only untag when there
  # is actually something to remove.
  unless tags_to_remove.empty?
    sqs_client.untag_queue(queue_url: queue_url, tag_keys: tags_to_remove)
  end

  # Run create again to make sure everything matches.
  return sqs_resource.create_queue(
    queue_name: queue_name,
    attributes: jsonify_hash(attributes),
    tags: tags,
  )
end
|
50
|
+
|
51
|
+
|
52
|
+
# Create an SQS queue plus an associated dead-letter queue (DLQ).
#
# The DLQ is named "<queue_name>-dead-letter", retains messages for the
# maximum 14 days, and is tagged with dlq=true. The main queue is
# created with a RedrivePolicy targeting the DLQ (maxReceiveCount
# defaults to 5, overridable via the :RedrivePolicy option).
#
# @param sqs_client [Aws::SQS::Client]
# @param queue_name [String]
# @option kwargs [Hash] :tags tags applied to both queues
# @option kwargs [Hash] :RedrivePolicy extra redrive-policy settings
# @return [Aws::SQS::Queue] the main queue
def create_queue(sqs_client, queue_name, **kwargs)
  # BUG FIX: **kwargs captures symbol keys, so the original string-key
  # lookups ('tags', "RedrivePolicy") could never match. Accept symbol
  # keys, keeping the string forms as a backward-compatible fallback.
  tags = kwargs.delete(:tags) || kwargs.delete('tags') || {}

  dead_letter_queue = create_queue_raw(
    sqs_client,
    "#{queue_name}-dead-letter",
    {"MessageRetentionPeriod" => 1209600}, # maximum (14 days)
    {"dlq" => "true", **tags},
  )

  dead_letter_queue_arn = dead_letter_queue.attributes["QueueArn"]

  redrive_policy_kwargs = kwargs.delete(:RedrivePolicy) || kwargs.delete("RedrivePolicy") || {}

  # Caller-supplied settings override the default; the DLQ target is
  # always forced to the queue created above.
  redrive_policy = {"maxReceiveCount" => 5}.merge(redrive_policy_kwargs)
  redrive_policy["deadLetterTargetArn"] = dead_letter_queue_arn

  return create_queue_raw(
    sqs_client,
    queue_name,
    {"RedrivePolicy" => redrive_policy },
    tags,
  )
end
|
76
|
+
|
77
|
+
|
78
|
+
# Logstash input implementing the TQP pub/sub conventions over SQS/SNS:
# it creates a prefixed SQS queue (with DLQ), subscribes it to the
# configured SNS topics, and emits one event per received SNS message.
class LogStash::Inputs::TQP < LogStash::Inputs::Threadable
  include LogStash::PluginMixins::AwsConfig::V2

  # Ceiling (seconds) for the exponential backoff between failed polls.
  MAX_TIME_BEFORE_GIVING_UP = 60
  MAX_MESSAGES_TO_FETCH = 10 # Between 1-10 in the AWS-SDK doc
  SQS_ATTRIBUTES = ['All']
  BACKOFF_SLEEP_TIME = 1
  BACKOFF_FACTOR = 2
  DEFAULT_POLLING_FREQUENCY = 20

  config_name "tqp"

  default :codec, "json"

  config :additional_settings, :validate => :hash, :default => {}

  # Base name of the SQS queue to create/poll (prefixed with `prefix`).
  config :queue_name, :validate => :string
  # Namespace prepended as "<prefix>--" to the queue and topic names.
  config :prefix, :validate => :string
  # SNS topics to subscribe to; entries may be strings or two-element
  # arrays, which are joined with "--".
  config :topics, :validate => :array

  # Polling frequency, default is 20 seconds
  config :polling_frequency, :validate => :number, :default => DEFAULT_POLLING_FREQUENCY

  attr_reader :poller

  # Plugin lifecycle hook: converge the queue/subscriptions and build
  # the poller.
  def register
    require "aws-sdk"
    @logger.info("Registering SQS input",
      queue_name: @queue_name,
    )

    setup_queue
  end

  # Create (or converge) the SQS queue, subscribe it to the configured
  # topics, and prepare an Aws::SQS::QueuePoller that stops polling when
  # the plugin is asked to stop.
  #
  # @raise [LogStash::ConfigurationError] when SQS cannot be reached
  def setup_queue
    sqs_client = Aws::SQS::Client.new()

    @sqs_queue = create_queue(sqs_client, "#{prefix}--#{queue_name}")

    subscribe_to_topics(sqs_client)

    poller = Aws::SQS::QueuePoller.new(@sqs_queue.url, client: sqs_client)
    # Bail out of the poll loop promptly on shutdown.
    poller.before_request { |stats| throw :stop_polling if stop? }

    @poller = poller
  rescue Aws::SQS::Errors::ServiceError, Seahorse::Client::NetworkingError => e
    @logger.error("Cannot establish connection to Amazon SQS", exception_details(e))
    raise LogStash::ConfigurationError, "Verify the SQS queue name and your credentials"
  end

  # Subscribe the queue to every configured SNS topic (creating topics
  # as needed) and install a queue policy allowing exactly those topics
  # to send messages.
  def subscribe_to_topics(sqs_client)
    sns_client = Aws::SNS::Resource.new()

    queue_arn = @sqs_queue.attributes["QueueArn"]

    topic_arns = []

    topics.each do |topic|
      # Array entries ([ns, name]) are flattened to "ns--name".
      topic = "#{topic[0]}--#{topic[1]}" if topic.kind_of?(Array)

      @logger.debug("Subscribing to topic", topic: "#{prefix}--#{topic}")

      topic = sns_client.create_topic(name: "#{prefix}--#{topic}")
      topic.subscribe(protocol: "sqs", endpoint: queue_arn)

      topic_arns.push(topic.arn)
    end

    # Grant SQS:SendMessage only to the subscribed topics (matched on
    # aws:SourceArn).
    @sqs_queue.set_attributes(
      attributes: jsonify_hash({
        "Policy" => {
          "Version" => "2012-10-17",
          "Statement" => [
            {
              "Sid" => "sns",
              "Effect" => "Allow",
              "Principal" => {"AWS" => "*"},
              "Action" => "SQS:SendMessage",
              "Resource" => queue_arn,
              "Condition" => {"ArnEquals" => {"aws:SourceArn" => topic_arns}},
            }
          ]
        }
      })
    )

  end

  # Options passed to Aws::SQS::QueuePoller#poll.
  def polling_options
    {
      :max_number_of_messages => MAX_MESSAGES_TO_FETCH,
      :attribute_names => SQS_ATTRIBUTES,
      :wait_time_seconds => @polling_frequency
    }
  end

  # Parse the SNS envelope in +message+ and copy its payload onto
  # +event+: each key of the JSON "Message" becomes an event field, the
  # topic name (with the prefix stripped) goes to [@metadata][topic],
  # and the remaining envelope to [@metadata][body].
  def add_sns_data(event, message)
    body = LogStash::Json.load(message.body)

    topic = body["TopicArn"].split(":")[-1]
    topic = topic.delete_prefix("#{prefix}--") if prefix

    message = LogStash::Json.load(body.delete("Message") || '{}')

    event.set('[@metadata][topic]', topic)
    event.set('[@metadata][body]', body)

    message.each do |key, value|
      event.set(key, value)
    end
  end

  # Turn one SQS message into a logstash event and enqueue it.
  def handle_message(message, output_queue)
    # BUG FIX: the original seeded the event with @message and @host,
    # instance variables that are never assigned anywhere in this class
    # (always nil). Start from an empty event; add_sns_data fills it in.
    event = LogStash::Event.new

    add_sns_data(event, message)

    decorate(event)
    output_queue << event
  end

  # Main input loop: poll SQS (with exponential backoff on service or
  # networking errors) and emit one event per received message.
  def run(output_queue)
    @logger.debug("Polling SQS queue", polling_options: polling_options)

    run_with_backoff do
      poller.poll(polling_options) do |messages, stats|
        break if stop?

        messages.each {|message|
          handle_message(message, output_queue)
        }

        @logger.debug(
          "SQS Stats:",
          request_count: stats.request_count,
          received_message_count: stats.received_message_count,
          last_message_received_at: stats.last_message_received_at
        ) if @logger.debug?
      end
    end
  end

  private

  # Runs an AWS request inside a Ruby block with an exponential backoff in case
  # we experience a ServiceError.
  #
  # @param [Block] block Ruby code block to execute.
  def run_with_backoff(&block)
    sleep_time = BACKOFF_SLEEP_TIME
    begin
      block.call
    rescue Aws::SQS::Errors::ServiceError, Seahorse::Client::NetworkingError => e
      @logger.warn("SQS error ... retrying with exponential backoff", exception_details(e, sleep_time))
      sleep_time = backoff_sleep(sleep_time)
      retry
    end
  end

  # Sleep for +sleep_time+ seconds and return the next delay: doubled
  # until it exceeds MAX_TIME_BEFORE_GIVING_UP, then held constant.
  def backoff_sleep(sleep_time)
    sleep(sleep_time)
    sleep_time > MAX_TIME_BEFORE_GIVING_UP ? sleep_time : sleep_time * BACKOFF_FACTOR
  end

  # Convert an SQS epoch-milliseconds attribute to a LogStash::Timestamp.
  def convert_epoch_to_timestamp(time)
    LogStash::Timestamp.at(time.to_i / 1000)
  end

  # Structured logging context for an exception; the backtrace is only
  # included at debug level.
  def exception_details(e, sleep_time = nil)
    details = { :queue_name => @queue_name, :exception => e.class, :message => e.message }
    details[:code] = e.code if e.is_a?(Aws::SQS::Errors::ServiceError) && e.code
    details[:cause] = e.original_error if e.respond_to?(:original_error) && e.original_error # Seahorse::Client::NetworkingError
    details[:sleep_time] = sleep_time if sleep_time
    details[:backtrace] = e.backtrace if @logger.debug?
    details
  end

end # class LogStash::Inputs::TQP
|
@@ -0,0 +1,29 @@
|
|
1
|
+
Gem::Specification.new do |s|
  s.name          = 'logstash-input-tqp'
  s.version       = '0.1.0'
  s.licenses      = ['Apache-2.0']
  s.summary       = "An opinionated library for pub/sub over SQS and SNS"
  s.description   = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
  s.authors       = ["jason quense"]
  # BUG FIX: the contact address had a transposed TLD ("gmailc.om").
  s.email         = 'monastic.panic@gmail.com'
  s.require_paths = ["lib"]

  # Files
  s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }

  # Gem dependencies
  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"

  s.add_runtime_dependency 'logstash-codec-json'
  s.add_runtime_dependency 'logstash-mixin-aws', '>= 5.1.0'

  s.add_development_dependency 'logstash-devutils'
  s.add_development_dependency "logstash-codec-json_lines"
end
|
29
|
+
|
@@ -0,0 +1,178 @@
|
|
1
|
+
# # encoding: utf-8
|
2
|
+
# require "logstash/devutils/rspec/spec_helper"
|
3
|
+
# require "logstash/devutils/rspec/shared_examples"
|
4
|
+
# require "logstash/inputs/tqp"
|
5
|
+
# require "logstash/errors"
|
6
|
+
# require "logstash/event"
|
7
|
+
# require "logstash/json"
|
8
|
+
# require "aws-sdk"
|
9
|
+
# require "ostruct"
|
10
|
+
|
11
|
+
# describe LogStash::Inputs::TQP do
|
12
|
+
|
13
|
+
# let(:queue_name) { "my-poller" }
|
14
|
+
# let(:queue_url) { "https://sqs.test.local/#{queue_name}" }
|
15
|
+
# let(:config) do
|
16
|
+
# {
|
17
|
+
# "region" => "us-east-1",
|
18
|
+
# "access_key_id" => "123",
|
19
|
+
# "secret_access_key" => "secret",
|
20
|
+
# "queue_name" => :queue_name,
|
21
|
+
# "prefix" => 'my-service',
|
22
|
+
# "topics" => ['topic--1', 'topic--2']
|
23
|
+
# }
|
24
|
+
# end
|
25
|
+
|
26
|
+
# let(:input) { LogStash::Inputs::TQP.new(config) }
|
27
|
+
# # let(:decoded_message) { { "bonjour" => "awesome" } }
|
28
|
+
# # let(:encoded_message) { double("sqs_message", :body => LogStash::Json::dump(decoded_message)) }
|
29
|
+
|
30
|
+
# subject { input }
|
31
|
+
|
32
|
+
# let(:mock_sqs) {
|
33
|
+
# sqs = Aws::SQS::Client.new({ :stub_responses => true })
|
34
|
+
# sqs.stub_responses(:get_queue_url, {queue_url: queue_url})
|
35
|
+
# sqs.stub_responses(:get_queue_attributes, {attributes: {"QueueArn" => 'aws::sqs:::coolthing'}})
|
36
|
+
# sqs
|
37
|
+
# }
|
38
|
+
|
39
|
+
# context "valid credentials" do
|
40
|
+
# let(:queue) { [] }
|
41
|
+
|
42
|
+
# it "doesn't raise an error with valid credentials" do
|
43
|
+
# expect(Aws::SQS::Client).to receive(:new).and_return(mock_sqs)
|
44
|
+
# # expect(mock_sqs).to receive(:get_queue_url).with(queue_name: queue_name).and_return(queue_url: 'hi')
|
45
|
+
# mock_sqs
|
46
|
+
|
47
|
+
# # puts 'hi', mock_sqs.get_queue_url(queue_name: queue_name )
|
48
|
+
# subject.register
|
49
|
+
# # expect { subject.register }.not_to raise_error
|
50
|
+
# end
|
51
|
+
|
52
|
+
# # context "when interrupting the plugin" do
|
53
|
+
# # before do
|
54
|
+
# # expect(Aws::SQS::Client).to receive(:new).and_return(mock_sqs)
|
55
|
+
# # expect(mock_sqs).to receive(:get_queue_url).with({ :queue_name => queue_name }).and_return({:queue_url => queue_url })
|
56
|
+
# # expect(subject).to receive(:poller).and_return(mock_sqs).at_least(:once)
|
57
|
+
|
58
|
+
# # # We have to make sure we create a bunch of events
|
59
|
+
# # # so we actually really try to stop the plugin.
|
60
|
+
|
61
|
+
# # #
|
62
|
+
# # # rspec's `and_yield` allow you to define a fix amount of possible
|
63
|
+
# # # yielded values and doesn't allow you to create infinite loop.
|
64
|
+
# # # And since we are actually creating thread we need to make sure
|
65
|
+
# # # we have enough work to keep the thread working until we kill it..
|
66
|
+
# # #
|
67
|
+
# # # I haven't found a way to make it rspec friendly
|
68
|
+
# # mock_sqs.instance_eval do
|
69
|
+
# # def poll(polling_options = {})
|
70
|
+
# # loop do
|
71
|
+
# # yield [OpenStruct.new(:body => LogStash::Json::dump({ "message" => "hello world"}))], OpenStruct.new
|
72
|
+
# # end
|
73
|
+
# # end
|
74
|
+
# # end
|
75
|
+
# # end
|
76
|
+
|
77
|
+
# # it_behaves_like "an interruptible input plugin"
|
78
|
+
# # end
|
79
|
+
|
80
|
+
# # context "enrich event" do
|
81
|
+
# # let(:event) { LogStash::Event.new }
|
82
|
+
|
83
|
+
# # let(:message_id) { "123" }
|
84
|
+
# # let(:md5_of_body) { "dr strange" }
|
85
|
+
# # let(:sent_timestamp) { LogStash::Timestamp.new }
|
86
|
+
# # let(:epoch_timestamp) { (sent_timestamp.utc.to_f * 1000).to_i }
|
87
|
+
|
88
|
+
# # let(:id_field) { "my_id_field" }
|
89
|
+
# # let(:md5_field) { "my_md5_field" }
|
90
|
+
# # let(:sent_timestamp_field) { "my_sent_timestamp_field" }
|
91
|
+
|
92
|
+
# # let(:message) do
|
93
|
+
# # double("message", :message_id => message_id, :md5_of_body => md5_of_body, :attributes => { LogStash::Inputs::TQP::SENT_TIMESTAMP => epoch_timestamp } )
|
94
|
+
|
95
|
+
# # end
|
96
|
+
|
97
|
+
# # subject { input.add_sqs_data(event, message) }
|
98
|
+
|
99
|
+
# # context "when the option is specified" do
|
100
|
+
# # let(:config) do
|
101
|
+
# # {
|
102
|
+
# # "region" => "us-east-1",
|
103
|
+
# # "access_key_id" => "123",
|
104
|
+
# # "secret_access_key" => "secret",
|
105
|
+
# # "queue" => queue_name,
|
106
|
+
# # "id_field" => id_field,
|
107
|
+
# # "md5_field" => md5_field,
|
108
|
+
# # "sent_timestamp_field" => sent_timestamp_field
|
109
|
+
# # }
|
110
|
+
# # end
|
111
|
+
|
112
|
+
# # it "add the `message_id`" do
|
113
|
+
# # expect(subject.get(id_field)).to eq(message_id)
|
114
|
+
# # end
|
115
|
+
|
116
|
+
# # it "add the `md5_of_body`" do
|
117
|
+
# # expect(subject.get(md5_field)).to eq(md5_of_body)
|
118
|
+
# # end
|
119
|
+
|
120
|
+
# # it "add the `sent_timestamp`" do
|
121
|
+
# # expect(subject.get(sent_timestamp_field).to_i).to eq(sent_timestamp.to_i)
|
122
|
+
# # end
|
123
|
+
# # end
|
124
|
+
|
125
|
+
# # context "when the option isn't specified" do
|
126
|
+
# # it "doesnt add the `message_id`" do
|
127
|
+
# # expect(subject).not_to include(id_field)
|
128
|
+
# # end
|
129
|
+
|
130
|
+
# # it "doesnt add the `md5_of_body`" do
|
131
|
+
# # expect(subject).not_to include(md5_field)
|
132
|
+
# # end
|
133
|
+
|
134
|
+
# # it "doesnt add the `sent_timestamp`" do
|
135
|
+
# # expect(subject).not_to include(sent_timestamp_field)
|
136
|
+
# # end
|
137
|
+
# # end
|
138
|
+
# # end
|
139
|
+
|
140
|
+
# # context "when decoding body" do
|
141
|
+
# # subject { LogStash::Inputs::TQP::new(config.merge({ "codec" => "json" })) }
|
142
|
+
|
143
|
+
# # it "uses the specified codec" do
|
144
|
+
# # subject.handle_message(encoded_message, queue)
|
145
|
+
# # expect(queue.pop.get("bonjour")).to eq(decoded_message["bonjour"])
|
146
|
+
# # end
|
147
|
+
# # end
|
148
|
+
|
149
|
+
# # context "receiving messages" do
|
150
|
+
|
151
|
+
# # before do
|
152
|
+
# # expect(subject).to receive(:poller).and_return(mock_sqs).at_least(:once)
|
153
|
+
# # end
|
154
|
+
|
155
|
+
# # it "creates logstash event" do
|
156
|
+
# # expect(mock_sqs).to receive(:poll).with(anything()).and_yield([encoded_message], double("stats"))
|
157
|
+
# # subject.run(queue)
|
158
|
+
# # expect(queue.pop.get("bonjour")).to eq(decoded_message["bonjour"])
|
159
|
+
# # end
|
160
|
+
|
161
|
+
# # context 'can create multiple events' do
|
162
|
+
# # require "logstash/codecs/json_lines"
|
163
|
+
# # let(:config) { super().merge({ "codec" => "json_lines" }) }
|
164
|
+
# # let(:first_message) { { "sequence" => "first" } }
|
165
|
+
# # let(:second_message) { { "sequence" => "second" } }
|
166
|
+
# # let(:encoded_message) { double("sqs_message", :body => "#{LogStash::Json::dump(first_message)}\n#{LogStash::Json::dump(second_message)}\n") }
|
167
|
+
|
168
|
+
# # it 'creates multiple events' do
|
169
|
+
# # expect(mock_sqs).to receive(:poll).with(anything()).and_yield([encoded_message], double("stats"))
|
170
|
+
# # subject.run(queue)
|
171
|
+
# # events = queue.map{ |e|e.get('sequence')}
|
172
|
+
# # expect(events).to match_array([first_message['sequence'], second_message['sequence']])
|
173
|
+
# # end
|
174
|
+
# # end
|
175
|
+
# # end
|
176
|
+
|
177
|
+
# end
|
178
|
+
# end
|
@@ -0,0 +1,74 @@
|
|
1
|
+
# encoding: utf-8
require "spec_helper"
require "logstash/inputs/tqp"
require "logstash/event"
require "logstash/json"
require "aws-sdk"
require_relative "../support/helpers"
require "thread"

# Surface failures from the background input thread instead of
# letting the spec hang waiting on `queue.pop`.
Thread.abort_on_exception = true

# Integration spec: exercises the TQP input against real AWS
# credentials/region taken from the environment (AWS_ACCESS_KEY_ID,
# AWS_SECRET_ACCESS_KEY, AWS_REGION).
describe "LogStash::Inputs::TQP integration" do
  let(:decoded_message) { { "drstrange" => "is-he-really-that-strange" } }
  let(:encoded_message) { LogStash::Json.dump(decoded_message) }
  # In-memory queue standing in for the Logstash pipeline queue.
  let(:queue) { Queue.new }
  let(:prefix) { "jq" }
  let(:input) { LogStash::Inputs::TQP.new(options) }

  context "with invalid credentials" do
    let(:options) do
      # Plugin config keys must be strings; `"topics": []` (the original
      # form) would create the symbol key :topics and silently drop the
      # option, so use the same string-key style as the valid-credentials
      # context below.
      {
        "queue_name" => 'test-queue-1',
        "access_key_id" => "bad_access",
        "secret_access_key" => "bad_secret_key",
        "region" => ENV["AWS_REGION"],
        "topics" => []
      }
    end

    subject { input }

    it "raises a Configuration error if the credentials are bad" do
      expect { subject.register }.to raise_error(LogStash::ConfigurationError)
    end
  end

  context "with valid credentials" do
    let(:options) do
      {
        "queue_name" => 'test-queue-1',
        "prefix" => prefix,
        "topics" => ['studies--updated'],
        "access_key_id" => ENV['AWS_ACCESS_KEY_ID'],
        "secret_access_key" => ENV['AWS_SECRET_ACCESS_KEY'],
        "region" => ENV["AWS_REGION"]
      }
    end

    before :each do
      input.register

      # Run the input in the background; `queue.pop` below blocks until
      # it has consumed and decoded a message.
      @server = Thread.new { input.run(queue) }
    end

    after do
      @server.kill
    end

    # Publishes an SNS event on the prefixed topic and waits for the
    # input to deliver the resulting Logstash event.
    subject {
      push_sns_event("#{prefix}--studies--updated", {"foo" => "bar"})
      queue.pop
    }

    it "creates logstash events" do
      expect(subject.get('[@metadata][topic]')).to eq('studies--updated')
      expect(subject.get('foo')).to eq('bar')
    end
  end
end
|
data/spec/support/helpers.rb
ADDED
@@ -0,0 +1,18 @@
|
|
1
|
+
require 'logstash/json'
|
2
|
+
|
3
|
+
# encoding: utf-8
|
4
|
+
# Seeds the integration-test queue: sends +message+ (a raw message body
# string) to the SQS queue named by the SQS_QUEUE_NAME environment
# variable, resolving the queue URL first.
def push_sqs_event(message)
  sqs = Aws::SQS::Client.new
  url_response = sqs.get_queue_url(queue_name: ENV['SQS_QUEUE_NAME'])

  sqs.send_message({ queue_url: url_response.queue_url, message_body: message })
end
|
10
|
+
|
11
|
+
# Publishes +message+ (JSON-encoded via LogStash::Json) to the SNS topic
# named +topic+. NOTE(review): relies on SNS create_topic returning the
# existing topic when the name is already taken — confirm against the
# AWS SDK behavior.
def push_sns_event(topic, message)
  sns = Aws::SNS::Resource.new

  puts "sending message #{topic}"

  sns_topic = sns.create_topic(name: topic)
  sns_topic.publish(message: LogStash::Json.dump(message))
end
|
metadata
ADDED
@@ -0,0 +1,137 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: logstash-input-tqp
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.1.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- jason quense
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2022-05-27 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
requirement: !ruby/object:Gem::Requirement
|
15
|
+
requirements:
|
16
|
+
- - ">="
|
17
|
+
- !ruby/object:Gem::Version
|
18
|
+
version: '1.60'
|
19
|
+
- - "<="
|
20
|
+
- !ruby/object:Gem::Version
|
21
|
+
version: '2.99'
|
22
|
+
name: logstash-core-plugin-api
|
23
|
+
prerelease: false
|
24
|
+
type: :runtime
|
25
|
+
version_requirements: !ruby/object:Gem::Requirement
|
26
|
+
requirements:
|
27
|
+
- - ">="
|
28
|
+
- !ruby/object:Gem::Version
|
29
|
+
version: '1.60'
|
30
|
+
- - "<="
|
31
|
+
- !ruby/object:Gem::Version
|
32
|
+
version: '2.99'
|
33
|
+
- !ruby/object:Gem::Dependency
|
34
|
+
requirement: !ruby/object:Gem::Requirement
|
35
|
+
requirements:
|
36
|
+
- - ">="
|
37
|
+
- !ruby/object:Gem::Version
|
38
|
+
version: '0'
|
39
|
+
name: logstash-codec-json
|
40
|
+
prerelease: false
|
41
|
+
type: :runtime
|
42
|
+
version_requirements: !ruby/object:Gem::Requirement
|
43
|
+
requirements:
|
44
|
+
- - ">="
|
45
|
+
- !ruby/object:Gem::Version
|
46
|
+
version: '0'
|
47
|
+
- !ruby/object:Gem::Dependency
|
48
|
+
requirement: !ruby/object:Gem::Requirement
|
49
|
+
requirements:
|
50
|
+
- - ">="
|
51
|
+
- !ruby/object:Gem::Version
|
52
|
+
version: 5.1.0
|
53
|
+
name: logstash-mixin-aws
|
54
|
+
prerelease: false
|
55
|
+
type: :runtime
|
56
|
+
version_requirements: !ruby/object:Gem::Requirement
|
57
|
+
requirements:
|
58
|
+
- - ">="
|
59
|
+
- !ruby/object:Gem::Version
|
60
|
+
version: 5.1.0
|
61
|
+
- !ruby/object:Gem::Dependency
|
62
|
+
requirement: !ruby/object:Gem::Requirement
|
63
|
+
requirements:
|
64
|
+
- - ">="
|
65
|
+
- !ruby/object:Gem::Version
|
66
|
+
version: '0'
|
67
|
+
name: logstash-devutils
|
68
|
+
prerelease: false
|
69
|
+
type: :development
|
70
|
+
version_requirements: !ruby/object:Gem::Requirement
|
71
|
+
requirements:
|
72
|
+
- - ">="
|
73
|
+
- !ruby/object:Gem::Version
|
74
|
+
version: '0'
|
75
|
+
- !ruby/object:Gem::Dependency
|
76
|
+
requirement: !ruby/object:Gem::Requirement
|
77
|
+
requirements:
|
78
|
+
- - ">="
|
79
|
+
- !ruby/object:Gem::Version
|
80
|
+
version: '0'
|
81
|
+
name: logstash-codec-json_lines
|
82
|
+
prerelease: false
|
83
|
+
type: :development
|
84
|
+
version_requirements: !ruby/object:Gem::Requirement
|
85
|
+
requirements:
|
86
|
+
- - ">="
|
87
|
+
- !ruby/object:Gem::Version
|
88
|
+
version: '0'
|
89
|
+
description: This gem is a Logstash plugin required to be installed on top of the
|
90
|
+
Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
|
91
|
+
gem is not a stand-alone program
|
92
|
+
email: monastic.panic@gmail.com
|
93
|
+
executables: []
|
94
|
+
extensions: []
|
95
|
+
extra_rdoc_files: []
|
96
|
+
files:
|
97
|
+
- Gemfile
|
98
|
+
- LICENSE
|
99
|
+
- README.md
|
100
|
+
- lib/logstash/inputs/sqs/patch.rb
|
101
|
+
- lib/logstash/inputs/tqp.rb
|
102
|
+
- logstash-input-tqp.gemspec
|
103
|
+
- spec/inputs/tqp_spec.rb
|
104
|
+
- spec/integration/tqp_spec.rb
|
105
|
+
- spec/spec_helper.rb
|
106
|
+
- spec/support/helpers.rb
|
107
|
+
homepage:
|
108
|
+
licenses:
|
109
|
+
- Apache-2.0
|
110
|
+
metadata:
|
111
|
+
logstash_plugin: 'true'
|
112
|
+
logstash_group: input
|
113
|
+
post_install_message:
|
114
|
+
rdoc_options: []
|
115
|
+
require_paths:
|
116
|
+
- lib
|
117
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
118
|
+
requirements:
|
119
|
+
- - ">="
|
120
|
+
- !ruby/object:Gem::Version
|
121
|
+
version: '0'
|
122
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
123
|
+
requirements:
|
124
|
+
- - ">="
|
125
|
+
- !ruby/object:Gem::Version
|
126
|
+
version: '0'
|
127
|
+
requirements: []
|
128
|
+
rubyforge_project:
|
129
|
+
rubygems_version: 2.7.10
|
130
|
+
signing_key:
|
131
|
+
specification_version: 4
|
132
|
+
summary: An opinionated library for pub/sub over SQS and SNS
|
133
|
+
test_files:
|
134
|
+
- spec/inputs/tqp_spec.rb
|
135
|
+
- spec/integration/tqp_spec.rb
|
136
|
+
- spec/spec_helper.rb
|
137
|
+
- spec/support/helpers.rb
|