logstash-output-charrington 0.1.0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: d9e0b78caecf406a5c26c0c433a9e9e03361ca64110e5380333ecbb1b7e14c53
4
+ data.tar.gz: 97b48b44372f9f44463c608d03d9362984ccaded87257e5938bed056ac7c6bc2
5
+ SHA512:
6
+ metadata.gz: 7a8d64b4f5cc6de22d26297477ca3561af1d9cde45dbcc32f7ad785f9485b490780832698265402ad7bb9291b164ea06ca57fd4aaa18968ce1eefe3848590bd5
7
+ data.tar.gz: 5a79965fad327dc2588ea45b4b6eae8c8a1c2393e28cc7f02e76eb6e1100688405f454e2a0d2d4966330483899bbb217cddd3cb1e0992204de8f57ca95edd430
data/CHANGELOG.md ADDED
@@ -0,0 +1,64 @@
1
+ # Change Log
2
+ All notable changes to this project will be documented in this file, starting from version 0.2.0.
3
+
4
+ ## [5.3.0] - 2017-11-08
5
+ - Adds configuration options `enable_event_as_json_keyword` and `event_as_json_keyword`
6
+ - Adds BigDecimal support
7
+ - Adds additional logging for debugging purposes (with thanks to @mlkmhd's work)
8
+
9
+ ## [5.2.1] - 2017-04-09
10
+ - Adds Array and Hash to_json support for non-sprintf syntax
11
+
12
+ ## [5.2.0] - 2017-04-01
13
+ - Upgrades HikariCP to latest
14
+ - Fixes HikariCP logging integration issues
15
+
16
+ ## [5.1.0] - 2016-12-17
17
+ - phoenix-thin fixes for issue #60
18
+
19
+ ## [5.0.0] - 2016-11-03
20
+ - logstash v5 support
21
+
22
+ ## [0.3.1] - 2016-08-28
23
+ - Adds connection_test configuration option, to prevent the connection test from occurring, allowing the error to be suppressed.
24
+ Useful for cockroachdb deployments. https://github.com/theangryangel/logstash-output-jdbc/issues/53
25
+
26
+ ## [0.3.0] - 2016-07-24
27
+ - Brings tests from v5 branch, providing greater coverage
28
+ - Removes bulk update support, due to inconsistent behaviour
29
+ - Plugin now marked as threadsafe, meaning only one instance per Logstash process
30
+ - Raises default max_pool_size to match the default number of workers (1 connection per worker)
31
+
32
+ ## [0.2.10] - 2016-07-07
33
+ - Support non-string entries in statement array
34
+ - Adds backtrace to exception logging
35
+
36
+ ## [0.2.9] - 2016-06-29
37
+ - Fix NameError exception.
38
+ - Moved log_jdbc_exception calls
39
+
40
+ ## [0.2.7] - 2016-05-29
41
+ - Backport retry exception logic from v5 branch
42
+ - Backport improved timestamp compatibility from v5 branch
43
+
44
+ ## [0.2.6] - 2016-05-02
45
+ - Fix for exception infinite loop
46
+
47
+ ## [0.2.5] - 2016-04-11
48
+ ### Added
49
+ - Basic tests running against DerbyDB
50
+ - Fix for converting Logstash::Timestamp to iso8601 from @hordijk
51
+
52
+ ## [0.2.4] - 2016-04-07
53
+ - Documentation fixes from @hordijk
54
+
55
+ ## [0.2.3] - 2016-02-16
56
+ - Bug fixes
57
+
58
+ ## [0.2.2] - 2015-12-30
59
+ - Bug fixes
60
+
61
+ ## [0.2.1] - 2015-12-22
62
+ - Support for connection pooling added through HikariCP
63
+ - Support for unsafe statement handling (allowing dynamic queries)
64
+ - Altered exception handling to count sequential flushes that throw exceptions
data/Gemfile ADDED
@@ -0,0 +1,11 @@
1
+ source 'https://rubygems.org'
2
+
3
+ gemspec
4
+
5
+ logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
6
+ use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
7
+
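+ # For local development against a Logstash source checkout you can, for example, run:
+ #   LOGSTASH_SOURCE=1 LOGSTASH_PATH=/path/to/logstash bundle install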
8
+ if Dir.exist?(logstash_path) && use_logstash_source
9
+ gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
10
+ gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
11
+ end
data/LICENSE.txt ADDED
@@ -0,0 +1,21 @@
1
+ The MIT License (MIT)
2
+
3
+ Copyright (c) 2014
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,93 @@
1
+ # logstash-output-jdbc
2
+
3
+ [![Build Status](https://travis-ci.org/theangryangel/logstash-output-jdbc.svg?branch=master)](https://travis-ci.org/theangryangel/logstash-output-jdbc) [![Flattr this git repo](http://api.flattr.com/button/flattr-badge-large.png)](https://flattr.com/submit/auto?user_id=the_angry_angel&url=https://github.com/the_angry_angel/logstash-output-jdbc&title=logstash-output-jdbc&language=&tags=github&category=software)
4
+
5
+ This plugin is provided as an external plugin and is not part of the Logstash project.
6
+
7
+ This plugin allows you to output to SQL databases, using JDBC adapters.
8
+ See below for tested adapters, and example configurations.
9
+
10
+ This has not yet been extensively tested with all JDBC drivers and may not yet work for you.
11
+
12
+ If you find this works with a JDBC driver that does not have an example yet, please let me know and, if you can, provide a small example configuration.
13
+
14
+ This plugin does not bundle any JDBC jar files, and expects them to be in a
15
+ particular location. Please ensure you read the installation steps below.
16
+
17
+ ## Support & release schedule
18
+ I no longer have time at work to maintain this plugin in step with Logstash's releases, and I am not completely immersed in the Logstash ecosystem. If something is broken for you I will do my best to help, but I cannot guarantee timeframes.
19
+
20
+ Pull requests are always welcome.
21
+
22
+ ## Changelog
23
+ See CHANGELOG.md
24
+
25
+ ## Versions
26
+ Released versions are available via RubyGems and are typically tagged.
27
+
28
+ For development:
29
+ - See the master branch for Logstash v5 & v6. :warning: This is untested under Logstash 6.3 at this time, and there has been one unverified report of an issue. Please use at your own risk until I can find the time to evaluate and test 6.3.
30
+ - See v2.x branch for logstash v2
31
+ - See v1.5 branch for logstash v1.5
32
+ - See v1.4 branch for logstash 1.4
33
+
34
+ ## Installation
35
+ - Run `bin/logstash-plugin install logstash-output-charrington` in your logstash installation directory
36
+ - Now either:
37
+ - Use driver_jar_path in your configuration to specify a path to your jar file
38
+ - Or:
39
+ - Create the directory vendor/jar/jdbc in your logstash installation (`mkdir -p vendor/jar/jdbc/`)
40
+ - Add JDBC jar files to vendor/jar/jdbc in your logstash installation
41
+ - And then configure (examples can be found in the examples directory)
42
+
43
+ ## Configuration options
44
+
45
+ | Option | Type | Description | Required? | Default |
46
+ | ------ | ---- | ----------- | --------- | ------- |
47
+ | driver_class | String | Specify a driver class if autoloading fails | No | |
48
+ | driver_auto_commit | Boolean | If the driver does not support auto commit, you should set this to false | No | True |
49
+ | driver_jar_path | String | File path to jar file containing your JDBC driver. This is optional, and all JDBC jars may be placed in $LOGSTASH_HOME/vendor/jar/jdbc instead. | No | |
50
+ | connection_string | String | JDBC connection URL | Yes | |
51
+ | connection_test | Boolean | Run a JDBC connection test. Some drivers do not function correctly, and you may need to disable the connection test to suppress an error. Cockroach with the postgres JDBC driver is such an example. | No | Yes |
52
+ | connection_test_query | String | Connection test and init query string, required for some JDBC drivers that don't support isValid(). Typically you'd set this to "SELECT 1" | No | |
53
+ | username | String | JDBC username - this is optional as it may be included in the connection string, for many drivers | No | |
54
+ | password | String | JDBC password - this is optional as it may be included in the connection string, for many drivers | No | |
55
+ | statement | Array | An array of strings representing the SQL statement to run. Index 0 is the SQL statement that is prepared, all other array entries are passed in as parameters (in order). A parameter may either be a property of the event (e.g. "@timestamp", or "host") or a formatted string (e.g. "%{host} - %{message}" or "%{message}"). If a key is passed then it will be automatically converted as required for insertion into SQL. If it's a formatted string then it will be passed in verbatim. | Yes | |
56
+ | unsafe_statement | Boolean | If yes, the statement is evaluated for event fields - this allows you to use dynamic table names, etc. **This is highly dangerous** and you should **not** use this unless you are 100% sure that the field(s) you are passing in are 100% safe. Failure to do so will result in possible SQL injections. Example statement: [ "insert into %{table_name_field} (column) values(?)", "fieldname" ] | No | False |
57
+ | max_pool_size | Number | Maximum number of connections to open to the SQL server at any one time | No | 5 |
58
+ | connection_timeout | Number | Number of milliseconds before a SQL connection is closed | No | 10000 |
59
+ | flush_size | Number | Maximum number of events to buffer before sending to SQL | No | 1000 |
60
+ | max_flush_exceptions | Number | Number of sequential flushes which cause an exception, before the set of events is discarded. Set to a value less than 1 if you never want it to stop. This should be carefully configured with respect to retry_initial_interval and retry_max_interval, if your SQL server is not highly available | No | 10 |
61
+ | retry_initial_interval | Number | Number of seconds before the initial retry in the event of a failure. On each failure it will be doubled until it reaches retry_max_interval | No | 2 |
62
+ | retry_max_interval | Number | Maximum number of seconds between each retry | No | 128 |
63
+ | retry_sql_states | Array of strings | An array of custom SQL state codes you wish to retry until `max_flush_exceptions`. Useful if you're using a JDBC driver which returns retry-able, but non-standard SQL state codes in its exceptions. | No | [] |
64
+ | event_as_json_keyword | String | The magic keyword that the plugin looks for to convert the entire event into a JSON object. As Logstash does not support this out of the box with its `sprintf` implementation, you can use whatever this field is set to in the statement parameters | No | @event |
65
+ | enable_event_as_json_keyword | Boolean | Enables the magic keyword set in the configuration option `event_as_json_keyword`. Without this enabled, the plugin will not replace the `event_as_json_keyword` with a JSON encoding of the entire event. | No | False |
66
+
67
+ ## Example configurations
68
+ Example Logstash configurations can now be found in the examples directory; a minimal sketch is also shown below. Where possible we try to link every configuration with a tested jar.
69
+
70
+ If you have a working sample configuration for a DB that's not listed, pull requests are welcome.
71
+
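+ As a starting point, here is a minimal, untested sketch based only on the options documented above; the connection string, jar path, table name (`log`), and field names are placeholders you will need to adapt to your environment:
+ 
+ ```
+ output {
+   charrington {
+     driver_jar_path => "/path/to/your-jdbc-driver.jar"
+     connection_string => "jdbc:postgresql://localhost:5432/logstash?user=logstash&password=logstash"
+     statement => [ "INSERT INTO log (created_at, message) VALUES(?, ?)", "@timestamp", "message" ]
+   }
+ }
+ ```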
72
+ ## Development and Running tests
73
+ For development, it is recommended to run the tests inside a virtual machine (a Vagrantfile is included in the repo), as they require
74
+ access to various database engines and could completely destroy any data in a live system.
75
+
76
+ If you have vagrant available (this is temporary whilst I'm hacking on v5 support. I'll make this more streamlined later):
77
+ - `vagrant up`
78
+ - `vagrant ssh`
79
+ - `cd /vagrant`
80
+ - `gem install bundler`
81
+ - `cd /vagrant && bundle install && bundle exec rake vendor && bundle exec rake install_jars`
82
+ - `./scripts/travis-before_script.sh && source ./scripts/travis-variables.sh`
83
+ - `bundle exec rspec`
84
+
85
+ ## Releasing
86
+ - Update Changelog
87
+ - Bump version in gemspec
88
+ - Commit
89
+ - Create tag `git tag v<version-number-in-gemspec>`
90
+ - `bundle exec rake install_jars`
91
+ - `bundle exec rake pre_release_checks`
92
+ - `gem build logstash-output-jdbc.gemspec`
93
+ - `gem push`
data/THANKS.md ADDED
@@ -0,0 +1,18 @@
1
+ logstash-output-jdbc is a project originally created by Karl Southern
2
+ (the_angry_angel), but there are a number of people that have contributed
3
+ or implemented key features over time. We do our best to keep this list
4
+ up-to-date, but you can also have a look at the nice contributor graphs
5
+ produced by GitHub: https://github.com/theangryangel/logstash-output-jdbc/graphs/contributors
6
+
7
+ * [hordijk](https://github.com/hordijk)
8
+ * [dmitryakadiamond](https://github.com/dmitryakadiamond)
9
+ * [MassimoSporchia](https://github.com/MassimoSporchia)
10
+ * [ebuildy](https://github.com/ebuildy)
11
+ * [kushtrimjunuzi](https://github.com/kushtrimjunuzi)
12
+ * [josemazo](https://github.com/josemazo)
13
+ * [aceoliver](https://github.com/aceoliver)
14
+ * [roflmao](https://github.com/roflmao)
15
+ * [onesuper](https://github.com/onesuper)
16
+ * [phr0gz](https://github.com/phr0gz)
17
+ * [jMonsinjon](https://github.com/jMonsinjon)
18
+ * [mlkmhd](https://github.com/mlkmhd)
data/lib/logstash-output-charrington_jars.rb ADDED
@@ -0,0 +1,5 @@
1
+ # encoding: utf-8
2
+ require 'logstash/environment'
3
+
4
+ root_dir = File.expand_path(File.join(File.dirname(__FILE__), '..'))
5
+ LogStash::Environment.load_runtime_jars! File.join(root_dir, 'vendor')
data/lib/logstash/outputs/charrington.rb ADDED
@@ -0,0 +1,422 @@
1
+ # encoding: utf-8
2
+ require 'logstash/outputs/base'
3
+ require 'logstash/namespace'
4
+ require 'concurrent'
5
+ require 'stud/interval'
6
+ require 'java'
7
+ require 'logstash-output-charrington_jars'
8
+ require 'json'
9
+ require 'bigdecimal'
10
+
11
+ # Write events to a SQL engine, using JDBC.
12
+ #
13
+ # It is up to the user of the plugin to configure it correctly. This
14
+ # includes correctly crafting the SQL statement, and matching the number of
15
+ # parameters correctly.
16
+ #
17
+ class LogStash::Outputs::Charrington < LogStash::Outputs::Base
18
+ concurrency :shared
19
+
20
+ STRFTIME_FMT = '%Y-%m-%d %T.%L'.freeze
21
+
22
+ RETRYABLE_SQLSTATE_CLASSES = [
23
+ # Classes of retryable SQLSTATE codes
24
+ # Not all in the class will be retryable. However, this is the best that
25
+ # we've got right now.
26
+ # If a custom state code is required, set it in retry_sql_states.
27
+ '08', # Connection Exception
28
+ '24', # Invalid Cursor State (Maybe retry-able in some circumstances)
29
+ '25', # Invalid Transaction State
30
+ '40', # Transaction Rollback
31
+ '53', # Insufficient Resources
32
+ '54', # Program Limit Exceeded (MAYBE)
33
+ '55', # Object Not In Prerequisite State
34
+ '57', # Operator Intervention
35
+ '58', # System Error
36
+ ].freeze
37
+
38
+ config_name 'charrington'
39
+
40
+ # Driver class - Reintroduced for https://github.com/theangryangel/logstash-output-jdbc/issues/26
41
+ config :driver_class, validate: :string
42
+
43
+ # Does the JDBC driver support autocommit?
44
+ config :driver_auto_commit, validate: :boolean, default: true, required: true
45
+
46
+ # Where to find the jar
47
+ # Defaults to not required, and to the original behaviour
48
+ config :driver_jar_path, validate: :string, required: false
49
+
50
+ # jdbc connection string
51
+ config :connection_string, validate: :string, required: true
52
+
53
+ # jdbc username - optional, may be included in the connection string
54
+ config :username, validate: :string, required: false
55
+
56
+ # jdbc password - optional, may be included in the connection string
57
+ config :password, validate: :string, required: false
58
+
59
+ # [ "insert into table (message) values(?)", "%{message}" ]
60
+ config :statement, validate: :array, required: true
61
+
62
+ # If this is an unsafe statement, use event.sprintf
63
+ # This also has potential performance penalties due to having to create a
64
+ # new statement for each event, rather than adding to the batch and issuing
65
+ # multiple inserts in 1 go
66
+ config :unsafe_statement, validate: :boolean, default: false
67
+
68
+ # Number of connections in the pool to maintain
69
+ config :max_pool_size, validate: :number, default: 5
70
+
71
+ # Connection timeout
72
+ config :connection_timeout, validate: :number, default: 10000
73
+
74
+ # We buffer a certain number of events before flushing that out to SQL.
75
+ # This setting controls how many events will be buffered before sending a
76
+ # batch of events.
77
+ config :flush_size, validate: :number, default: 1000
78
+
79
+ # Set initial interval in seconds between retries. Doubled on each retry up to `retry_max_interval`
80
+ config :retry_initial_interval, validate: :number, default: 2
81
+
82
+ # Maximum time between retries, in seconds
83
+ config :retry_max_interval, validate: :number, default: 128
84
+
85
+ # Any additional custom, retryable SQL state codes.
86
+ # Suitable for configuring retryable custom JDBC SQL state codes.
87
+ config :retry_sql_states, validate: :array, default: []
88
+
89
+ # Run a connection test on start.
90
+ config :connection_test, validate: :boolean, default: true
91
+
92
+ config :connection_test_query, validate: :string, required: false
93
+
94
+ # Maximum number of sequential failed attempts, before we stop retrying.
95
+ # If set to < 1, then it will infinitely retry.
96
+ # At the default values this is a little over 10 minutes
97
+
98
+ config :max_flush_exceptions, validate: :number, default: 10
99
+
100
+ config :max_repeat_exceptions, obsolete: 'This has been replaced by max_flush_exceptions - which behaves slightly differently. Please check the documentation.'
101
+ config :max_repeat_exceptions_time, obsolete: 'This is no longer required'
102
+ config :idle_flush_time, obsolete: 'No longer necessary under Logstash v5'
103
+
104
+ # Allows the whole event to be converted to JSON
105
+ config :enable_event_as_json_keyword, validate: :boolean, default: false
106
+
107
+ # The magic key used to convert the whole event to JSON. If you need this, and you have the default in your events, you can use this to change your magic keyword.
108
+ config :event_as_json_keyword, validate: :string, default: '@event'
109
+
110
+ def register
111
+ @logger.info('JDBC - Starting up')
112
+
113
+ load_jar_files!
114
+
115
+ @stopping = Concurrent::AtomicBoolean.new(false)
116
+
117
+ @logger.warn('JDBC - Flush size is set to > 1000') if @flush_size > 1000
118
+
119
+ if @statement.empty?
120
+ @logger.error('JDBC - No statement provided. Configuration error.')
121
+ end
122
+
123
+ if !@unsafe_statement && @statement.length < 2
124
+ @logger.error("JDBC - Statement has no parameters. No events will be inserted into SQL as you're not passing any event data. Likely configuration error.")
125
+ end
126
+
127
+ setup_and_test_pool!
128
+ end
129
+
130
+ def multi_receive(events)
131
+ events.each_slice(@flush_size) do |slice|
132
+ retrying_submit(slice)
133
+ end
134
+ end
135
+
136
+ def close
137
+ @stopping.make_true
138
+ @pool.close
139
+ super
140
+ end
141
+
142
+ private
143
+
144
+ def setup_and_test_pool!
145
+ # Setup pool
146
+ @pool = Java::ComZaxxerHikari::HikariDataSource.new
147
+
148
+ @pool.setAutoCommit(@driver_auto_commit)
149
+ @pool.setDriverClassName(@driver_class) if @driver_class
150
+
151
+ @pool.setJdbcUrl(@connection_string)
152
+
153
+ @pool.setUsername(@username) if @username
154
+ @pool.setPassword(@password) if @password
155
+
156
+ @pool.setMaximumPoolSize(@max_pool_size)
157
+ @pool.setConnectionTimeout(@connection_timeout)
158
+
159
+ validate_connection_timeout = (@connection_timeout / 1000) / 2
160
+
161
+ if !@connection_test_query.nil? and @connection_test_query.length > 1
162
+ @pool.setConnectionTestQuery(@connection_test_query)
163
+ @pool.setConnectionInitSql(@connection_test_query)
164
+ end
165
+
166
+ return unless @connection_test
167
+
168
+ # Test connection
169
+ test_connection = @pool.getConnection
170
+ unless test_connection.isValid(validate_connection_timeout)
171
+ @logger.warn('JDBC - Connection is not reporting as valid. Either the connection is invalid, or the driver is not returning the appropriate response.')
172
+ end
173
+ test_connection.close
174
+ end
175
+
176
+ def load_jar_files!
177
+ # Load jar from driver path
178
+ unless @driver_jar_path.nil?
179
+ raise LogStash::ConfigurationError, 'JDBC - Could not find jar file at given path. Check config.' unless File.exist? @driver_jar_path
180
+ require @driver_jar_path
181
+ return
182
+ end
183
+
184
+ # Revert original behaviour of loading from vendor directory
185
+ # if no path given
186
+ jarpath = if ENV['LOGSTASH_HOME']
187
+ File.join(ENV['LOGSTASH_HOME'], '/vendor/jar/jdbc/*.jar')
188
+ else
189
+ File.join(File.dirname(__FILE__), '../../../vendor/jar/jdbc/*.jar')
190
+ end
191
+
192
+ @logger.trace('JDBC - jarpath', path: jarpath)
193
+
194
+ jars = Dir[jarpath]
195
+ raise LogStash::ConfigurationError, 'JDBC - No jars found. Have you read the README?' if jars.empty?
196
+
197
+ jars.each do |jar|
198
+ @logger.trace('JDBC - Loaded jar', jar: jar)
199
+ require jar
200
+ end
201
+ end
202
+
203
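+ # Builds the INSERT statement from the event itself: top-level fields (other
+ # than @-prefixed fields, host and path) become the columns, and the table
+ # name is derived from the event's "event" field.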
+ def create_statement(event)
204
+ event = event.to_hash
205
+ hashed = event.delete_if {|k, _v| k.start_with?("@") || k == 'host' || k == 'path' }
206
+
207
+ columns = '(' + hashed.keys.join(', ') + ')'
208
+
209
+ value_placeholders = ('?' * hashed.length).split('')
210
+ values = '(' + value_placeholders.join(', ') + ')'
211
+
212
+ table_name = create_table_name(event)
213
+ return "INSERT INTO #{table_name} #{columns} VALUES #{values}", hashed.keys
214
+ end
215
+
216
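+ # Derives the table name from the event's "event" field by trimming it,
+ # replacing spaces and hyphens with underscores, and downcasing; for example,
+ # "Add To Cart" becomes "add_to_cart". Raises TableNameNil when the field is missing.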
+ def create_table_name(event)
217
+ if event.nil? || event.to_hash["event"].nil?
218
+ raise TableNameNil.new("Table name is nil", event)
219
+ end
220
+ event.to_hash["event"].to_s.strip.gsub(/[ -]+/, "_").downcase
221
+ end
222
+
223
+ def prepared_statement(keys)
224
+ keys.map do |key|
225
+ turn_into_wrapped(key)
226
+ end
227
+ end
228
+
229
+ def turn_into_wrapped(key)
230
+ "[#{key}]"
231
+ end
232
+
233
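+ # Inserts each event individually. If an INSERT raises (typically because the
+ # target table does not exist yet), a CREATE TABLE IF NOT EXISTS statement is
+ # issued and the event is pushed onto events_to_retry. Returns the events to
+ # retry plus a flag indicating whether the attempt counts towards max_flush_exceptions.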
+ def submit(events)
234
+ connection = nil
235
+ statement = nil
236
+ events_to_retry = []
237
+
238
+ begin
239
+ connection = @pool.getConnection
240
+ rescue => e
241
+ log_jdbc_exception(e, true, nil)
242
+ # If a connection is not available, then the server has gone away
243
+ # We're not counting that towards our retry count.
244
+ return events, false
245
+ end
246
+
247
+ events.each do |event|
248
+
249
+
250
+ begin
251
+ ins, columns = create_statement(event)
252
+ keys_we_care_about = prepared_statement(columns)
253
+ statement = connection.prepareStatement(
254
+ ins
255
+ )
256
+ statement = add_statement_event_params(statement, event, keys_we_care_about)
257
+ statement.execute
258
+ rescue TableNameNil => e
259
+ @logger.error("#{e.message} event=#{e.event}")
260
+ rescue => e
261
+ @logger.error "Rescue from SQLException #{e.message}"
262
+ create_statement = make_create_statement(event, columns)
263
+ @logger.debug('Charrington - attempting to create missing table')
264
+ @logger.debug(create_statement)
265
+
266
+ statement = connection.prepareStatement(
267
+ create_statement
268
+ )
269
+ statement.execute
270
+ @logger.debug('Created new table.')
271
+ events_to_retry.push(event)
272
+ ensure
273
+ statement.close unless statement.nil?
274
+ end
275
+ end
276
+
277
+ connection.close unless connection.nil?
278
+
279
+ return events_to_retry, true
280
+ end
281
+
282
+ def retrying_submit(actions)
283
+ # Initially we submit the full list of actions
284
+ submit_actions = actions
285
+ count_as_attempt = true
286
+
287
+ attempts = 1
288
+
289
+ sleep_interval = @retry_initial_interval
290
+ while @stopping.false? and (submit_actions and !submit_actions.empty?)
291
+ return if !submit_actions || submit_actions.empty? # If everything's a success we move along
292
+ # We retry whatever didn't succeed
293
+ submit_actions, count_as_attempt = submit(submit_actions)
294
+
295
+ # Everything was a success!
296
+ break if !submit_actions || submit_actions.empty?
297
+
298
+ if @max_flush_exceptions > 0 and count_as_attempt == true
299
+ attempts += 1
300
+
301
+ if attempts > @max_flush_exceptions
302
+ @logger.error("JDBC - max_flush_exceptions has been reached. #{submit_actions.length} events have been unable to be sent to SQL and are being dropped. See previously logged exceptions for details.")
303
+ break
304
+ end
305
+ end
306
+
307
+ # If we're retrying the action sleep for the recommended interval
308
+ # Double the interval for the next time through to achieve exponential backoff
309
+ Stud.stoppable_sleep(sleep_interval) { @stopping.true? }
310
+ sleep_interval = next_sleep_interval(sleep_interval)
311
+ end
312
+ end
313
+
314
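+ # Maps Ruby types to SQL column types for the generated CREATE TABLE IF NOT
+ # EXISTS statement, e.g. Time/LogStash::Timestamp -> TIMESTAMP, Integer ->
+ # BIGINT, Float -> DOUBLE PRECISION, String/Array/Hash -> VARCHAR, booleans -> BOOLEAN.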
+ def make_create_statement(event, keys_we_care_about)
315
+ columns = []
316
+
317
+ keys_we_care_about.each_with_index do |key, idx|
318
+ wrapped = turn_into_wrapped(key)
319
+
320
+ case event.get(wrapped)
321
+ when Time, LogStash::Timestamp
322
+ columns << "#{key} TIMESTAMP"
323
+ when Integer
324
+ columns << "#{key} BIGINT"
325
+ when BigDecimal
326
+ columns << "#{key} DECIMAL"
327
+ when Float
328
+ columns << "#{key} DOUBLE PRECISION"
329
+ when String, Array, Hash
330
+ columns << "#{key} VARCHAR"
331
+ when true, false
332
+ columns << "#{key} BOOLEAN"
333
+ end
334
+ end
335
+
336
+ "CREATE TABLE IF NOT EXISTS #{create_table_name(event)} (#{columns.join(', ')})"
337
+ end
338
+
339
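+ # Binds each value to the prepared statement by Ruby type; for example, a
+ # LogStash::Timestamp is bound as a "%Y-%m-%d %T.%L" string, and an Integer
+ # outside the 32-bit range is bound with setLong rather than setInt. When
+ # enable_event_as_json_keyword is set, the event_as_json_keyword key is
+ # replaced with the whole event serialised as JSON.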
+ def add_statement_event_params(statement, event, keys_we_care_about)
340
+ keys_we_care_about.each_with_index do |key, idx|
341
+ if @enable_event_as_json_keyword == true and key.is_a? String and key == @event_as_json_keyword
342
+ value = event.to_json
343
+ elsif key.is_a? String
344
+ value = event.get(key)
345
+ if value.nil? and key =~ /%\{/
346
+ value = event.sprintf(key)
347
+ end
348
+ else
349
+ value = key
350
+ end
351
+
352
+ case value
353
+ when Time
354
+ statement.setString(idx + 1, value.strftime(STRFTIME_FMT))
355
+ when LogStash::Timestamp
356
+ statement.setString(idx + 1, value.time.strftime(STRFTIME_FMT))
357
+ when Integer
358
+ if value > 2147483647 or value < -2147483648
359
+ statement.setLong(idx + 1, value)
360
+ else
361
+ statement.setInt(idx + 1, value)
362
+ end
363
+ when BigDecimal
364
+ statement.setBigDecimal(idx + 1, value.to_java)
365
+ when Float
366
+ statement.setFloat(idx + 1, value)
367
+ when String
368
+ statement.setString(idx + 1, value)
369
+ when Array, Hash
370
+ statement.setString(idx + 1, value.to_json)
371
+ when true, false
372
+ statement.setBoolean(idx + 1, value)
373
+ else
374
+ statement.setString(idx + 1, nil)
375
+ end
376
+ end
377
+
378
+ statement
379
+ end
380
+
381
+ def retry_exception?(exception, event)
382
+ retrying = (exception.respond_to? 'getSQLState' and (RETRYABLE_SQLSTATE_CLASSES.include?(exception.getSQLState.to_s[0,2]) or @retry_sql_states.include?(exception.getSQLState)))
383
+ log_jdbc_exception(exception, retrying, event)
384
+
385
+ retrying
386
+ end
387
+
388
+ def log_jdbc_exception(exception, retrying, event)
389
+ current_exception = exception
390
+ log_text = 'JDBC - Exception. ' + (retrying ? 'Retrying' : 'Not retrying')
391
+
392
+ log_method = (retrying ? 'warn' : 'error')
393
+
394
+ loop do
395
+ # TODO reformat event output so that it only shows the fields necessary.
396
+
397
+ @logger.send(log_method, log_text, :exception => current_exception, :statement => @statement[0], :event => event)
398
+
399
+ if current_exception.respond_to? 'getNextException'
400
+ current_exception = current_exception.getNextException()
401
+ else
402
+ current_exception = nil
403
+ end
404
+
405
+ break if current_exception == nil
406
+ end
407
+ end
408
+
409
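+ # Exponential backoff helper: with the defaults (retry_initial_interval = 2,
+ # retry_max_interval = 128) the successive sleeps are 2, 4, 8, ... seconds,
+ # capped at 128.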
+ def next_sleep_interval(current_interval)
410
+ doubled = current_interval * 2
411
+ doubled > @retry_max_interval ? @retry_max_interval : doubled
412
+ end
413
+ end # class LogStash::Outputs::Charrington
414
+
415
+ class TableNameNil < StandardError
416
+ attr_reader :event
417
+
418
+ def initialize(msg='Table name is nil', event={})
419
+ @event = event
420
+ super(msg)
421
+ end
422
+ end
data/logstash-output-charrington.gemspec ADDED
@@ -0,0 +1,30 @@
1
+ Gem::Specification.new do |s|
2
+ s.name = 'logstash-output-charrington'
3
+ s.version = '0.1.0'
4
+ s.licenses = ['Apache License (2.0)']
5
+ s.summary = 'This plugin allows you to output to SQL, via JDBC'
6
+ s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install 'logstash-output-charrington'. This gem is not a stand-alone program"
7
+ s.authors = ['dconger', 'brianbroderick', 'spencerdcarlson']
8
+ s.require_paths = ['lib']
9
+
10
+ # Files
11
+ s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','Gemfile','LICENSE.txt']
12
+ # Tests
13
+ s.test_files = s.files.grep(%r{^(test|spec|features)/})
14
+
15
+ # Special flag to let us know this is actually a logstash plugin
16
+ s.metadata = { 'logstash_plugin' => 'true', 'logstash_group' => 'output' }
17
+
18
+ # Gem dependencies
19
+ #
20
+ s.add_runtime_dependency 'logstash-core-plugin-api', ">= 1.60", "<= 2.99"
21
+ s.add_runtime_dependency 'logstash-codec-plain'
22
+ s.add_development_dependency 'logstash-devutils'
23
+
24
+ s.requirements << "jar 'com.zaxxer:HikariCP', '2.7.2'"
25
+ s.requirements << "jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.6.2'"
26
+
27
+ s.add_development_dependency 'jar-dependencies'
28
+ s.add_development_dependency 'ruby-maven', '~> 3.3'
29
+ s.add_development_dependency 'rubocop', '0.41.2'
30
+ end
data/spec/charrington_spec_helper.rb ADDED
@@ -0,0 +1,216 @@
1
+ require 'logstash/devutils/rspec/spec_helper'
2
+ require 'logstash/outputs/charrington'
3
+ require 'stud/temporary'
4
+ require 'java'
5
+ require 'securerandom'
6
+
7
+ RSpec::Support::ObjectFormatter.default_instance.max_formatted_output_length = 80000
8
+
9
+ RSpec.configure do |c|
10
+
11
+ def start_service(name)
12
+ cmd = "sudo /etc/init.d/#{name}* start"
13
+
14
+ `which systemctl`
15
+ if $?.success?
16
+ cmd = "sudo systemctl start #{name}"
17
+ end
18
+
19
+ `#{cmd}`
20
+ end
21
+
22
+ def stop_service(name)
23
+ cmd = "sudo /etc/init.d/#{name}* stop"
24
+
25
+ `which systemctl`
26
+ if $?.success?
27
+ cmd = "sudo systemctl stop #{name}"
28
+ end
29
+
30
+ `#{cmd}`
31
+ end
32
+
33
+ end
34
+
35
+ RSpec.shared_context 'rspec setup' do
36
+ it 'ensure jar is available' do
37
+ expect(ENV[jdbc_jar_env]).not_to be_nil, "#{jdbc_jar_env} not defined, required to run tests"
38
+ expect(File.exist?(ENV[jdbc_jar_env])).to eq(true), "#{jdbc_jar_env} defined, but not valid"
39
+ end
40
+ end
41
+
42
+ RSpec.shared_context 'when initializing' do
43
+ it 'shouldn\'t register with a missing jar file' do
44
+ jdbc_settings['driver_jar_path'] = nil
45
+ plugin = LogStash::Plugin.lookup('output', 'charrington').new(jdbc_settings)
46
+ expect { plugin.register }.to raise_error(LogStash::ConfigurationError)
47
+ end
48
+ end
49
+
50
+ RSpec.shared_context 'when outputting messages' do
51
+ let(:logger) {
52
+ double("logger")
53
+ }
54
+
55
+ let(:jdbc_test_table) do
56
+ 'logstash_output_jdbc_test'
57
+ end
58
+
59
+ let(:jdbc_drop_table) do
60
+ "DROP TABLE #{jdbc_test_table}"
61
+ end
62
+
63
+ let(:jdbc_statement_fields) do
64
+ [
65
+ {db_field: "created_at", db_type: "datetime", db_value: '?', event_field: '@timestamp'},
66
+ {db_field: "message", db_type: "varchar(512)", db_value: '?', event_field: 'message'},
67
+ {db_field: "message_sprintf", db_type: "varchar(512)", db_value: '?', event_field: 'sprintf-%{message}'},
68
+ {db_field: "static_int", db_type: "int", db_value: '?', event_field: 'int'},
69
+ {db_field: "static_bigint", db_type: "bigint", db_value: '?', event_field: 'bigint'},
70
+ {db_field: "static_float", db_type: "float", db_value: '?', event_field: 'float'},
71
+ {db_field: "static_bool", db_type: "boolean", db_value: '?', event_field: 'bool'},
72
+ {db_field: "static_bigdec", db_type: "decimal", db_value: '?', event_field: 'bigdec'}
73
+ ]
74
+ end
75
+
76
+ let(:jdbc_create_table) do
77
+ fields = jdbc_statement_fields.collect { |entry| "#{entry[:db_field]} #{entry[:db_type]} not null" }.join(", ")
78
+
79
+ "CREATE table #{jdbc_test_table} (#{fields})"
80
+ end
81
+
82
+ let(:jdbc_drop_table) do
83
+ "DROP table #{jdbc_test_table}"
84
+ end
85
+
86
+ let(:jdbc_statement) do
87
+ fields = jdbc_statement_fields.collect { |entry| "#{entry[:db_field]}" }.join(", ")
88
+ values = jdbc_statement_fields.collect { |entry| "#{entry[:db_value]}" }.join(", ")
89
+ statement = jdbc_statement_fields.collect { |entry| entry[:event_field] }
90
+
91
+ statement.insert(0, "insert into #{jdbc_test_table} (#{fields}) values(#{values})")
92
+ end
93
+
94
+ let(:systemd_database_service) do
95
+ nil
96
+ end
97
+
98
+ let(:event) do
99
+ # TODO: Auto generate fields from jdbc_statement_fields
100
+ LogStash::Event.new({
101
+ message: "test-message #{SecureRandom.uuid}",
102
+ float: 12.1,
103
+ bigint: 4000881632477184,
104
+ bool: true,
105
+ int: 1,
106
+ bigdec: BigDecimal.new("123.123")
107
+ })
108
+ end
109
+
110
+ let(:plugin) do
111
+ # Setup logger
112
+ allow(LogStash::Outputs::Charrington).to receive(:logger).and_return(logger)
113
+
114
+ # XXX: Suppress reflection logging. There has to be a better way around this.
115
+ allow(logger).to receive(:debug).with(/config LogStash::/)
116
+
117
+ # Suppress beta warnings.
118
+ allow(logger).to receive(:info).with(/Please let us know if you find bugs or have suggestions on how to improve this plugin./)
119
+
120
+ # Suppress start up messages.
121
+ expect(logger).to receive(:info).once.with(/JDBC - Starting up/)
122
+
123
+ # Setup plugin
124
+ output = LogStash::Plugin.lookup('output', 'charrington').new(jdbc_settings)
125
+ output.register
126
+
127
+ output
128
+ end
129
+
130
+ before :each do
131
+ # Setup table
132
+ c = plugin.instance_variable_get(:@pool).getConnection
133
+
134
+ # Derby doesn't support IF EXISTS.
135
+ # Seems like the quickest solution. Bleurgh.
136
+ begin
137
+ stmt = c.createStatement
138
+ stmt.executeUpdate(jdbc_drop_table)
139
+ rescue
140
+ # noop
141
+ ensure
142
+ stmt.close
143
+
144
+ stmt = c.createStatement
145
+ stmt.executeUpdate(jdbc_create_table)
146
+ stmt.close
147
+ c.close
148
+ end
149
+ end
150
+
151
+ # Delete table after each
152
+ after :each do
153
+ c = plugin.instance_variable_get(:@pool).getConnection
154
+
155
+ stmt = c.createStatement
156
+ stmt.executeUpdate(jdbc_drop_table)
157
+ stmt.close
158
+ c.close
159
+ end
160
+
161
+ it 'should save an event' do
162
+ expect { plugin.multi_receive([event]) }.to_not raise_error
163
+
164
+ # Verify the number of items in the output table
165
+ c = plugin.instance_variable_get(:@pool).getConnection
166
+
167
+ # TODO replace this simple count with a check of the actual contents
168
+
169
+ stmt = c.prepareStatement("select count(*) as total from #{jdbc_test_table} where message = ?")
170
+ stmt.setString(1, event.get('message'))
171
+ rs = stmt.executeQuery
172
+ count = 0
173
+ count = rs.getInt('total') while rs.next
174
+ stmt.close
175
+ c.close
176
+
177
+ expect(count).to eq(1)
178
+ end
179
+
180
+ it 'should not save the event, and log an unretryable exception' do
181
+ e = event
182
+ original_event = e.get('message')
183
+ e.set('message', nil)
184
+
185
+ expect(logger).to receive(:error).once.with(/JDBC - Exception. Not retrying/, Hash)
186
+ expect { plugin.multi_receive([event]) }.to_not raise_error
187
+
188
+ e.set('message', original_event)
189
+ end
190
+
191
+ it 'should retry after a connection loss, and log a warning' do
192
+ skip "does not run as a service, or known issue with test" if systemd_database_service.nil?
193
+
194
+ p = plugin
195
+
196
+ # Check that everything is fine right now
197
+ expect { p.multi_receive([event]) }.not_to raise_error
198
+
199
+ stop_service(systemd_database_service)
200
+
201
+ # Start a thread to restart the service after the fact.
202
+ t = Thread.new(systemd_database_service) { |systemd_database_service|
203
+ sleep 20
204
+
205
+ start_service(systemd_database_service)
206
+ }
207
+
208
+ t.run
209
+
210
+ expect(logger).to receive(:warn).at_least(:once).with(/JDBC - Exception. Retrying/, Hash)
211
+ expect { p.multi_receive([event]) }.to_not raise_error
212
+
213
+ # Wait for the thread to finish
214
+ t.join
215
+ end
216
+ end
data/spec/outputs/charrington_mysql_spec.rb ADDED
@@ -0,0 +1,24 @@
1
+ # require_relative '../jdbc_spec_helper'
2
+
3
+ # describe 'logstash-output-jdbc: mysql', if: ENV['JDBC_MYSQL_JAR'] do
4
+ # include_context 'rspec setup'
5
+ # include_context 'when outputting messages'
6
+ #
7
+ # let(:jdbc_jar_env) do
8
+ # 'JDBC_MYSQL_JAR'
9
+ # end
10
+ #
11
+ # let(:systemd_database_service) do
12
+ # 'mysql'
13
+ # end
14
+ #
15
+ # let(:jdbc_settings) do
16
+ # {
17
+ # 'driver_class' => 'com.mysql.jdbc.Driver',
18
+ # 'connection_string' => 'jdbc:mysql://localhost/logstash?user=logstash&password=logstash',
19
+ # 'driver_jar_path' => ENV[jdbc_jar_env],
20
+ # 'statement' => jdbc_statement,
21
+ # 'max_flush_exceptions' => 1
22
+ # }
23
+ # end
24
+ # end
data/spec/outputs/charrington_postgres_spec.rb ADDED
@@ -0,0 +1,42 @@
1
+ # require_relative '../jdbc_spec_helper'
2
+
3
+ # describe 'logstash-output-jdbc: postgres', if: ENV['JDBC_POSTGRES_JAR'] do
4
+ # include_context 'rspec setup'
5
+ # include_context 'when outputting messages'
6
+ #
7
+ # let(:jdbc_jar_env) do
8
+ # 'JDBC_POSTGRES_JAR'
9
+ # end
10
+ #
11
+ # # TODO: Postgres doesnt kill connections fast enough for the test to pass
12
+ # # Investigate options.
13
+ #
14
+ # #let(:systemd_database_service) do
15
+ # # 'postgresql'
16
+ # #end
17
+ #
18
+ # let(:jdbc_statement_fields) do
19
+ # [
20
+ # {db_field: "created_at", db_type: "timestamp", db_value: 'CAST(? as timestamp)', event_field: '@timestamp'},
21
+ # {db_field: "message", db_type: "varchar(512)", db_value: '?', event_field: 'message'},
22
+ # {db_field: "message_sprintf", db_type: "varchar(512)", db_value: '?', event_field: 'sprintf-%{message}'},
23
+ # {db_field: "static_int", db_type: "int", db_value: '?', event_field: 'int'},
24
+ # {db_field: "static_bigint", db_type: "bigint", db_value: '?', event_field: 'bigint'},
25
+ # {db_field: "static_float", db_type: "float", db_value: '?', event_field: 'float'},
26
+ # {db_field: "static_bool", db_type: "boolean", db_value: '?', event_field: 'bool'},
27
+ # {db_field: "static_bigdec", db_type: "decimal", db_value: '?', event_field: 'bigdec'}
28
+ #
29
+ # ]
30
+ # end
31
+ #
32
+ # let(:jdbc_settings) do
33
+ # {
34
+ # 'driver_class' => 'org.postgresql.Driver',
35
+ # 'connection_string' => 'jdbc:postgresql://localhost/logstash?user=logstash&password=logstash',
36
+ # 'driver_jar_path' => ENV[jdbc_jar_env],
37
+ # 'statement' => jdbc_statement,
38
+ # 'max_flush_exceptions' => 1
39
+ # }
40
+ # end
41
+ # end
42
+
data/spec/outputs/charrington_spec.rb ADDED
@@ -0,0 +1,11 @@
1
+ # require_relative '../charrington_spec_helper'
2
+ #
3
+ # describe LogStash::Outputs::Charrington do
4
+ # context 'when initializing' do
5
+ # it 'shouldn\'t register without a config' do
6
+ # expect do
7
+ # LogStash::Plugin.lookup('output', 'charrington').new
8
+ # end.to raise_error(LogStash::ConfigurationError)
9
+ # end
10
+ # end
11
+ # end
Binary file
Binary file
metadata ADDED
@@ -0,0 +1,167 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: logstash-output-charrington
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.1.0
5
+ platform: ruby
6
+ authors:
7
+ - dconger
8
+ - brianbroderick
9
+ - spencerdcarlson
10
+ autorequire:
11
+ bindir: bin
12
+ cert_chain: []
13
+ date: 2019-05-30 00:00:00.000000000 Z
14
+ dependencies:
15
+ - !ruby/object:Gem::Dependency
16
+ requirement: !ruby/object:Gem::Requirement
17
+ requirements:
18
+ - - ">="
19
+ - !ruby/object:Gem::Version
20
+ version: '1.60'
21
+ - - "<="
22
+ - !ruby/object:Gem::Version
23
+ version: '2.99'
24
+ name: logstash-core-plugin-api
25
+ prerelease: false
26
+ type: :runtime
27
+ version_requirements: !ruby/object:Gem::Requirement
28
+ requirements:
29
+ - - ">="
30
+ - !ruby/object:Gem::Version
31
+ version: '1.60'
32
+ - - "<="
33
+ - !ruby/object:Gem::Version
34
+ version: '2.99'
35
+ - !ruby/object:Gem::Dependency
36
+ requirement: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - ">="
39
+ - !ruby/object:Gem::Version
40
+ version: '0'
41
+ name: logstash-codec-plain
42
+ prerelease: false
43
+ type: :runtime
44
+ version_requirements: !ruby/object:Gem::Requirement
45
+ requirements:
46
+ - - ">="
47
+ - !ruby/object:Gem::Version
48
+ version: '0'
49
+ - !ruby/object:Gem::Dependency
50
+ requirement: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - ">="
53
+ - !ruby/object:Gem::Version
54
+ version: '0'
55
+ name: logstash-devutils
56
+ prerelease: false
57
+ type: :development
58
+ version_requirements: !ruby/object:Gem::Requirement
59
+ requirements:
60
+ - - ">="
61
+ - !ruby/object:Gem::Version
62
+ version: '0'
63
+ - !ruby/object:Gem::Dependency
64
+ requirement: !ruby/object:Gem::Requirement
65
+ requirements:
66
+ - - ">="
67
+ - !ruby/object:Gem::Version
68
+ version: '0'
69
+ name: jar-dependencies
70
+ prerelease: false
71
+ type: :development
72
+ version_requirements: !ruby/object:Gem::Requirement
73
+ requirements:
74
+ - - ">="
75
+ - !ruby/object:Gem::Version
76
+ version: '0'
77
+ - !ruby/object:Gem::Dependency
78
+ requirement: !ruby/object:Gem::Requirement
79
+ requirements:
80
+ - - "~>"
81
+ - !ruby/object:Gem::Version
82
+ version: '3.3'
83
+ name: ruby-maven
84
+ prerelease: false
85
+ type: :development
86
+ version_requirements: !ruby/object:Gem::Requirement
87
+ requirements:
88
+ - - "~>"
89
+ - !ruby/object:Gem::Version
90
+ version: '3.3'
91
+ - !ruby/object:Gem::Dependency
92
+ requirement: !ruby/object:Gem::Requirement
93
+ requirements:
94
+ - - '='
95
+ - !ruby/object:Gem::Version
96
+ version: 0.41.2
97
+ name: rubocop
98
+ prerelease: false
99
+ type: :development
100
+ version_requirements: !ruby/object:Gem::Requirement
101
+ requirements:
102
+ - - '='
103
+ - !ruby/object:Gem::Version
104
+ version: 0.41.2
105
+ description: This gem is a logstash plugin required to be installed on top of the
106
+ Logstash core pipeline using $LS_HOME/bin/logstash-plugin install 'logstash-output-charrington'.
107
+ This gem is not a stand-alone program
108
+ email:
109
+ executables: []
110
+ extensions: []
111
+ extra_rdoc_files: []
112
+ files:
113
+ - CHANGELOG.md
114
+ - Gemfile
115
+ - LICENSE.txt
116
+ - README.md
117
+ - THANKS.md
118
+ - lib/com/zaxxer/HikariCP/2.7.2/HikariCP-2.7.2.jar
119
+ - lib/logstash-output-charrington_jars.rb
120
+ - lib/logstash/outputs/charrington.rb
121
+ - lib/org/apache/logging/log4j/log4j-api/2.6.2/log4j-api-2.6.2.jar
122
+ - lib/org/apache/logging/log4j/log4j-slf4j-impl/2.6.2/log4j-slf4j-impl-2.6.2.jar
123
+ - lib/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar
124
+ - logstash-output-charrington.gemspec
125
+ - spec/charrington_spec_helper.rb
126
+ - spec/outputs/charrington_mysql_spec.rb
127
+ - spec/outputs/charrington_postgres_spec.rb
128
+ - spec/outputs/charrington_spec.rb
129
+ - vendor/jar-dependencies/runtime-jars/HikariCP-2.7.2.jar
130
+ - vendor/jar-dependencies/runtime-jars/log4j-api-2.6.2.jar
131
+ - vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.6.2.jar
132
+ - vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.25.jar
133
+ - vendor/postgresql-42.2.5.jar
134
+ - vendor/redshift.jar
135
+ homepage:
136
+ licenses:
137
+ - Apache License (2.0)
138
+ metadata:
139
+ logstash_plugin: 'true'
140
+ logstash_group: output
141
+ post_install_message:
142
+ rdoc_options: []
143
+ require_paths:
144
+ - lib
145
+ required_ruby_version: !ruby/object:Gem::Requirement
146
+ requirements:
147
+ - - ">="
148
+ - !ruby/object:Gem::Version
149
+ version: '0'
150
+ required_rubygems_version: !ruby/object:Gem::Requirement
151
+ requirements:
152
+ - - ">="
153
+ - !ruby/object:Gem::Version
154
+ version: '0'
155
+ requirements:
156
+ - jar 'com.zaxxer:HikariCP', '2.7.2'
157
+ - jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.6.2'
158
+ rubyforge_project:
159
+ rubygems_version: 2.7.6
160
+ signing_key:
161
+ specification_version: 4
162
+ summary: This plugin allows you to output to SQL, via JDBC
163
+ test_files:
164
+ - spec/charrington_spec_helper.rb
165
+ - spec/outputs/charrington_mysql_spec.rb
166
+ - spec/outputs/charrington_postgres_spec.rb
167
+ - spec/outputs/charrington_spec.rb