logstash-input-jdbc 0.1.3 → 1.0.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +2 -0
- data/NOTICE.TXT +5 -0
- data/README.md +32 -3
- data/lib/logstash/inputs/jdbc.rb +123 -26
- data/lib/logstash/plugin_mixins/jdbc.rb +48 -9
- data/logstash-input-jdbc.gemspec +1 -1
- data/spec/inputs/jdbc_spec.rb +284 -64
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9026f853d22bd2451bfe72e0b045318ec605d77b
+  data.tar.gz: df99f72e2ba6ddffc1f9d40c193189a1941c2dad
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 948d8d265d5b6158daef86e4032958b344f49d0116ca30f6f7d3c697e875b19f69eb12ef63627c6a94c4e8bbcb5c389c9e6f935853ce5552fd2967216fe1cf92
+  data.tar.gz: e77a3ee5bac991ef164db17776c5e35713c97e4d4667a3f5703d4b09d2a15f7b67d8e6a5732e42ce450cbaa71a1223a9f5f52282d78e7915eb8c234abfef12d5
data/CHANGELOG.md
CHANGED
data/NOTICE.TXT
ADDED
data/README.md
CHANGED
@@ -1,7 +1,5 @@
 # Logstash JDBC Input Plugin
 
-## WIP: Under Development, NOT FOR PRODUCTION
-
 This is a plugin for [Logstash](https://github.com/elasticsearch/logstash).
 
 It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way.
@@ -15,7 +13,7 @@ Logstash provides infrastructure to automatically generate documentation for this
 
 ## Need Help?
 
-Need help? Try #logstash on freenode IRC or the logstash
+Need help? Try #logstash on freenode IRC or the https://discuss.elastic.co/c/logstash discussion forum.
 
 ## Developing
 
@@ -86,6 +84,37 @@ bin/plugin install /your/local/plugin/logstash-input-jdbc.gem
 ```
 - Start Logstash and proceed to test the plugin
 
+## Example configuration
+
+Reading data from MySQL:
+
+    input {
+      jdbc {
+        jdbc_driver_library => "/path/to/mysql-connector-java-5.1.33-bin.jar"
+        jdbc_driver_class => "com.mysql.jdbc.Driver"
+        jdbc_connection_string => "jdbc:mysql://host:port/database"
+        jdbc_user => "user"
+        jdbc_password => "password"
+        statement => "SELECT ..."
+        jdbc_paging_enabled => "true"
+        jdbc_page_size => "50000"
+      }
+    }
+
+    filter {
+      [some filters here]
+    }
+
+    output {
+      stdout {
+        codec => rubydebug
+      }
+      elasticsearch_http {
+        host => "host"
+        index => "myindex"
+      }
+    }
+
 ## Contributing
 
 All contributions are welcome: ideas, patches, documentation, bug reports, complaints, and even something you drew up on a napkin.
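The new `sql_last_start` parameter pairs naturally with `schedule` for incremental imports. Below is a minimal sketch of such a configuration, reusing the MySQL connection settings from the example above; the `songs` table, its `updated_at` column, and the metadata path are hypothetical placeholders:

    input {
      jdbc {
        jdbc_driver_library => "/path/to/mysql-connector-java-5.1.33-bin.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        jdbc_connection_string => "jdbc:mysql://host:port/database"
        jdbc_user => "user"
        jdbc_password => "password"
        # hypothetical table and column; only rows newer than the previous run are selected
        statement => "SELECT * FROM songs WHERE updated_at > :sql_last_start"
        schedule => "* * * * *"
        last_run_metadata_path => "/path/to/.logstash_jdbc_last_run"
      }
    }

On each scheduled run the plugin substitutes the timestamp of the previous execution for `:sql_last_start`, and on shutdown it persists that value to `last_run_metadata_path` (unless `record_last_run => false`, or the file is discarded by `clean_run => true`).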
data/lib/logstash/inputs/jdbc.rb
CHANGED
@@ -2,41 +2,99 @@
 require "logstash/inputs/base"
 require "logstash/namespace"
 require "logstash/plugin_mixins/jdbc"
+require "yaml" # persistence
 
-#
+# This plugin was created as a way to ingest data in any database
+# with a JDBC interface into Logstash. You can periodically schedule ingestion
+# using a cron syntax (see `schedule` setting) or run the query one time to load
+# data into Logstash. Each row in the resultset becomes a single event.
+# Columns in the resultset are converted into fields in the event.
 #
-#
-# with a JDBC interface into Logstash.
+# ==== Drivers
 #
-#
+# This plugin does not come packaged with JDBC driver libraries. The desired
+# jdbc driver library must be explicitly passed in to the plugin using the
+# `jdbc_driver_library` configuration option.
+#
+# ==== Scheduling
 #
-#
-#
-#
+# Input from this plugin can be scheduled to run periodically according to a specific
+# schedule. This scheduling syntax is powered by [rufus-scheduler](https://github.com/jmettraux/rufus-scheduler).
+# The syntax is cron-like with some extensions specific to Rufus (e.g. timezone support).
 #
-#
-#
-# Some parameters are built-in and can be used from within your queries.
-# Here is the list:
+# Examples:
 #
 # |==========================================================
-# |
+# | * 5 * 1-3 * | will execute every minute of 5am every day of January through March.
+# | 0 * * * * | will execute on the 0th minute of every hour every day.
+# | 0 6 * * * America/Chicago | will execute at 6:00am (UTC/GMT -5) every day.
 # |==========================================================
+#
+#
+# Further documentation describing this syntax can be found [here](https://github.com/jmettraux/rufus-scheduler#parsing-cronlines-and-time-strings)
+#
+# ==== State
+#
+# The plugin will persist the `sql_last_start` parameter in the form of a
+# metadata file stored in the configured `last_run_metadata_path`. Upon shutting down,
+# this file will be updated with the current value of `sql_last_start`. Next time
+# the pipeline starts up, this value will be updated by reading from the file. If
+# `clean_run` is set to true, this value will be ignored and `sql_last_start` will be
+# set to Jan 1, 1970, as if no query has ever been executed.
+#
+# ==== Dealing With Large Result-sets
+#
+# Many JDBC drivers use the `fetch_size` parameter to limit how many
+# results are pre-fetched at a time from the cursor into the client's cache
+# before retrieving more results from the result-set. This is configured in
+# this plugin using the `jdbc_fetch_size` configuration option. No fetch size
+# is set by default in this plugin, so the specific driver's default size will
+# be used.
+#
+# ==== Usage:
+#
+# Here is an example of setting up the plugin to fetch data from a MySQL database.
+# First, we place the appropriate JDBC driver library in our current
+# path (this can be placed anywhere on your filesystem). In this example, we connect to
+# the 'mydb' database using the user: 'mysql' and wish to input all rows in the 'songs'
+# table that match a specific artist. The following examples demonstrates a possible
+# Logstash configuration for this. The `schedule` option in this example will
+# instruct the plugin to execute this input statement on the minute, every minute.
 #
-# #### Usage:
-# This is an example logstash config
 # [source,ruby]
+# ----------------------------------
 # input {
 #   jdbc {
-#
-#
-#
-#
-#
-#     parameters => { "my_id" => "231" }
+#     jdbc_driver_library => "mysql-connector-java-5.1.36-bin.jar"
+#     jdbc_driver_class => "com.mysql.jdbc.Driver"
+#     jdbc_connection_string => "jdbc:mysql://localhost:3306/mydb"
+#     jdbc_user => "mysql"
+#     parameters => { "favorite_artist" => "Beethoven" }
 #     schedule => "* * * * *"
+#     statement => "SELECT * from songs where artist = :favorite_artist"
 #   }
 # }
+# ----------------------------------
+#
+# ==== Configuring SQL statement
+#
+# A sql statement is required for this input. This can be passed-in via a
+# statement option in the form of a string, or read from a file (`statement_filepath`). File
+# option is typically used when the SQL statement is large or cumbersome to supply in the config.
+# The file option only supports one SQL statement. The plugin will only accept one of the options.
+# It cannot read a statement from a file as well as from the `statement` configuration parameter.
+#
+# ==== Predefined Parameters
+#
+# Some parameters are built-in and can be used from within your queries.
+# Here is the list:
+#
+# |==========================================================
+# |sql_last_start | The last time a statement was executed. This is set to
+# |               | Thursday, 1 January 1970 before any query is run, and updated
+# |               | accordingly after first query is run.
+# |==========================================================
+#
 class LogStash::Inputs::Jdbc < LogStash::Inputs::Base
   include LogStash::PluginMixins::Jdbc
   config_name "jdbc"
@@ -45,25 +103,59 @@ class LogStash::Inputs::Jdbc < LogStash::Inputs::Base
   default :codec, "plain"
 
   # Statement to execute
+  #
   # To use parameters, use named parameter syntax.
   # For example:
+  #
+  # [source, ruby]
+  # ----------------------------------
   # "SELECT * FROM MYTABLE WHERE id = :target_id"
-  #
+  # ----------------------------------
   #
-
+  # here, ":target_id" is a named parameter. You can configure named parameters
+  # with the `parameters` setting.
+  config :statement, :validate => :string
+
+  # Path of file containing statement to execute
+  config :statement_filepath, :validate => :path
 
   # Hash of query parameter, for example `{ "target_id" => "321" }`
   config :parameters, :validate => :hash, :default => {}
 
   # Schedule of when to periodically run statement, in Cron format
   # for example: "* * * * *" (execute query every minute, on the minute)
+  #
+  # There is no schedule by default. If no schedule is given, then the statement is run
+  # exactly once.
   config :schedule, :validate => :string
 
+  # Path to file with last run time
+  config :last_run_metadata_path, :validate => :string, :default => "#{ENV['HOME']}/.logstash_jdbc_last_run"
+
+  # Whether the previous run state should be preserved
+  config :clean_run, :validate => :boolean, :default => false
+
+  # Whether to save state or not in last_run_metadata_path
+  config :record_last_run, :validate => :boolean, :default => true
+
   public
 
   def register
     require "rufus/scheduler"
-    prepare_jdbc_connection
+    prepare_jdbc_connection
+
+    # load sql_last_start from file if exists
+    if @clean_run && File.exist?(@last_run_metadata_path)
+      File.delete(@last_run_metadata_path)
+    elsif File.exist?(@last_run_metadata_path)
+      @sql_last_start = YAML.load(File.read(@last_run_metadata_path))
+    end
+
+    unless @statement.nil? ^ @statement_filepath.nil?
+      raise(LogStash::ConfigurationError, "Must set either :statement or :statement_filepath. Only one may be set at a time.")
+    end
+
+    @statement = File.read(@statement_filepath) if @statement_filepath
   end # def register
 
   def run(queue)
@@ -79,13 +171,18 @@ class LogStash::Inputs::Jdbc < LogStash::Inputs::Base
   end # def run
 
   def teardown
-    if @scheduler
-
+    @scheduler.stop if @scheduler
+
+    # update state file for next run
+    if @record_last_run
+      File.write(@last_run_metadata_path, YAML.dump(@sql_last_start))
     end
-
+
+    close_jdbc_connection
   end # def teardown
 
   private
+
   def execute_query(queue)
     # update default parameters
     @parameters['sql_last_start'] = @sql_last_start
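For reference, the state written to `last_run_metadata_path` is just the YAML serialization of a Ruby `Time`, as the `register` and `teardown` methods above show. A minimal standalone sketch of that round trip (the path is hypothetical; the plugin's default is `~/.logstash_jdbc_last_run`):

    require "yaml"

    state_path = "/tmp/.logstash_jdbc_last_run"   # hypothetical path for illustration

    # what teardown does when record_last_run is true
    sql_last_start = Time.now.utc
    File.write(state_path, YAML.dump(sql_last_start))

    # what register does on the next pipeline start (skipped when clean_run deletes the file)
    # note: recent Psych versions may need YAML.load(..., permitted_classes: [Time])
    reloaded = YAML.load(File.read(state_path))
    puts reloaded.inspect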
data/lib/logstash/plugin_mixins/jdbc.rb
CHANGED
@@ -13,14 +13,14 @@ module LogStash::PluginMixins::Jdbc
     # Add these methods to the 'base' given.
     base.extend(self)
     base.setup_jdbc_config
-
-    @sql_last_start = Time.at(0).utc
   end
 
 
   public
   def setup_jdbc_config
     # JDBC driver library path to third party driver library.
+    #
+    # If not provided, Plugin will look for the driver class in the Logstash Java classpath.
     config :jdbc_driver_library, :validate => :path
 
     # JDBC driver class to load, for example "oracle.jdbc.OracleDriver" or "org.apache.derby.jdbc.ClientDriver"
@@ -30,18 +30,33 @@ module LogStash::PluginMixins::Jdbc
     config :jdbc_connection_string, :validate => :string, :required => true
 
     # JDBC user
-    config :jdbc_user, :validate => :string, :
+    config :jdbc_user, :validate => :string, :required => true
 
     # JDBC password
     config :jdbc_password, :validate => :password
 
+    # JDBC enable paging
+    #
+    # This will cause a sql statement to be broken up into multiple queries.
+    # Each query will use limits and offsets to collectively retrieve the full
+    # result-set. The limit size is set with `jdbc_page_size`.
+    #
+    # Be aware that ordering is not guaranteed between queries.
+    config :jdbc_paging_enabled, :validate => :boolean, :default => false
+
+    # JDBC page size
+    config :jdbc_page_size, :validate => :number, :default => 100000
+
+    # JDBC fetch size. if not provided, respective driver's default will be used
+    config :jdbc_fetch_size, :validate => :number
+
     # Connection pool configuration.
     # Validate connection before use.
     config :jdbc_validate_connection, :validate => :boolean, :default => false
 
     # Connection pool configuration.
     # How often to validate a connection (in seconds)
-    config :
+    config :jdbc_validation_timeout, :validate => :number, :default => 3600
   end
 
   public
@@ -50,18 +65,32 @@ module LogStash::PluginMixins::Jdbc
     require "sequel"
     require "sequel/adapters/jdbc"
     require @jdbc_driver_library if @jdbc_driver_library
-
+    begin
+      Sequel::JDBC.load_driver(@jdbc_driver_class)
+    rescue Sequel::AdapterNotFound => e
+      message = if @jdbc_driver_library.nil?
+                  ":jdbc_driver_library is not set, are you sure you included
+                  the proper driver client libraries in your classpath?"
+                else
+                  "Are you sure you've included the correct jdbc driver in :jdbc_driver_library?"
+                end
+      raise LogStash::ConfigurationError, "#{e}. #{message}"
+    end
     @database = Sequel.connect(@jdbc_connection_string, :user=> @jdbc_user, :password=> @jdbc_password.nil? ? nil : @jdbc_password.value)
+    @database.extension(:pagination)
     if @jdbc_validate_connection
       @database.extension(:connection_validator)
-      @database.pool.connection_validation_timeout = @
+      @database.pool.connection_validation_timeout = @jdbc_validation_timeout
     end
+    @database.fetch_size = @jdbc_fetch_size unless @jdbc_fetch_size.nil?
     begin
       @database.test_connection
     rescue Sequel::DatabaseConnectionError => e
       #TODO return false and let the plugin raise a LogStash::ConfigurationError
       raise e
     end
+
+    @sql_last_start = Time.at(0).utc
   end # def prepare_jdbc_connection
 
   public
@@ -77,9 +106,19 @@ module LogStash::PluginMixins::Jdbc
     query = @database[statement, parameters]
     @logger.debug? and @logger.debug("Executing JDBC query", :statement => statement, :parameters => parameters)
     @sql_last_start = Time.now.utc
-
-
-
+
+    if @jdbc_paging_enabled
+      query.each_page(@jdbc_page_size) do |paged_dataset|
+        paged_dataset.each do |row|
+          #Stringify row keys
+          yield Hash[row.map { |k, v| [k.to_s, v] }]
+        end
+      end
+    else
+      query.each do |row|
+        #Stringify row keys
+        yield Hash[row.map { |k, v| [k.to_s, v] }]
+      end
     end
     success = true
   rescue Sequel::DatabaseConnectionError, Sequel::DatabaseError => e
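The paging branch added above leans on Sequel's `pagination` extension: with `jdbc_paging_enabled`, the statement is run as a series of limit/offset queries of `jdbc_page_size` rows each. A standalone sketch of that mechanism follows; it uses an in-memory SQLite database purely for illustration (assuming the sqlite3 gem is available), whereas the plugin itself goes through the JDBC adapter:

    require "sequel"

    DB = Sequel.sqlite                    # in-memory database, illustration only
    DB.extension(:pagination)             # same extension the mixin loads

    DB.create_table(:songs) do
      Integer :id
      String  :artist
    end
    50.times { |i| DB[:songs].insert(:id => i, :artist => "artist_#{i}") }

    page_size = 20                        # corresponds to jdbc_page_size

    # each_page issues successive limit/offset queries over the dataset
    DB[:songs].each_page(page_size) do |paged_dataset|
      paged_dataset.each do |row|
        # rows arrive as symbol-keyed hashes; the plugin stringifies the keys
        puts Hash[row.map { |k, v| [k.to_s, v] }].inspect
      end
    end

Because each page is an independent query, row ordering is not guaranteed between pages, which is why the new option documentation carries that warning.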
data/logstash-input-jdbc.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-jdbc'
-  s.version = '0.1.3'
+  s.version = '1.0.0'
   s.licenses = ['Apache License (2.0)']
   s.summary = "This example input streams a string at a definable interval."
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
data/spec/inputs/jdbc_spec.rb
CHANGED
@@ -1,84 +1,304 @@
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/inputs/jdbc"
 require "jdbc/derby"
+require "sequel"
+require "sequel/adapters/jdbc"
 require "timecop"
+require "stud/temporary"
 
 
 describe "jdbc" do
-  let(:mixin_settings) { {"jdbc_driver_class" => "org.apache.derby.jdbc.EmbeddedDriver", "jdbc_connection_string" => "jdbc:derby:memory:testdb;create=true"} }
-
-
-
+  let(:mixin_settings) { {"jdbc_user" => ENV['USER'], "jdbc_driver_class" => "org.apache.derby.jdbc.EmbeddedDriver", "jdbc_connection_string" => "jdbc:derby:memory:testdb;create=true"} }
+  let(:settings) { {} }
+  let(:plugin) { LogStash::Inputs::Jdbc.new(mixin_settings.merge(settings)) }
+  let(:queue) { Queue.new }
+  let (:db) do
+    Sequel.connect(mixin_settings['jdbc_connection_string'], :user=> nil, :password=> nil)
   end
 
-
-    settings = {"statement" => "SELECT 1 as col1 FROM SYSIBM.SYSDUMMY1"}
-    plugin = LogStash::Plugin.lookup("input", "jdbc").new(mixin_settings.merge(settings))
-    expect { plugin.register }.to_not raise_error
-    expect { plugin.teardown }.to_not raise_error
-  end
-
-  it "should retrieve params correctly from Event" do
-    settings = {"statement" => "SELECT :num_param as num_param FROM SYSIBM.SYSDUMMY1", "parameters" => {"num_param" => 10} }
-    plugin = LogStash::Inputs::Jdbc.new(mixin_settings.merge(settings))
-    plugin.register
-    q = Queue.new
-    plugin.run(q)
-    insist { q.size } == 1
-    insist { q.pop['num_param'] } == settings['parameters']['num_param']
-    plugin.teardown
-  end
-
-  it "should properly schedule" do
-    settings = {"statement" => "SELECT 1 as num_param FROM SYSIBM.SYSDUMMY1", "schedule" => "* * * * *"}
-    plugin = LogStash::Inputs::Jdbc.new(mixin_settings.merge(settings))
-    plugin.register
-    q = Queue.new
-    Timecop.travel(Time.new(2000))
-    Timecop.scale(60)
-    runner = Thread.new do
-      plugin.run(q)
-    end
-    sleep 3
-    plugin.teardown
-    runner.kill
-    runner.join
-    insist { q.size } == 2
-    Timecop.return
-  end
-
-  it "should successfully iterate table with respect to field values" do
-    require "sequel"
-    require "sequel/adapters/jdbc"
+  before :each do
     Jdbc::Derby.load_driver
-
-    @database.create_table :test_table do
+    db.create_table :test_table do
       DateTime :created_at
       Integer :num
     end
-
-
-
-
-
+  end
+
+  after :each do
+    db.drop_table(:test_table)
+  end
+
+  context "when registering and tearing down" do
+    let(:settings) { {"statement" => "SELECT 1 as col1 FROM test_table"} }
+
+    it "should register without raising exception" do
+      expect { plugin.register }.to_not raise_error
+      plugin.teardown
+    end
+
+    it "should teardown without raising exception" do
+      plugin.register
+      expect { plugin.teardown }.to_not raise_error
+    end
+  end
+
+  context "when neither statement and statement_filepath arguments are passed" do
+    it "should fail to register" do
+      expect{ plugin.register }.to raise_error(LogStash::ConfigurationError)
+    end
+  end
+
+  context "when both statement and statement_filepath arguments are passed" do
+    let(:statement) { "SELECT * from test_table" }
+    let(:statement_file_path) { Stud::Temporary.pathname }
+    let(:settings) { { "statement_filepath" => statement_file_path, "statement" => statement } }
+
+    it "should fail to register" do
+      expect{ plugin.register }.to raise_error(LogStash::ConfigurationError)
+    end
+  end
+
+  context "when statement is passed in from a file" do
+    let(:statement) { "SELECT * from test_table" }
+    let(:statement_file_path) { Stud::Temporary.pathname }
+    let(:settings) { { "statement_filepath" => statement_file_path } }
+
+    before do
+      File.write(statement_file_path, statement)
+      plugin.register
+    end
+
+    after do
+      plugin.teardown
+    end
+
+    it "should read in statement from file" do
+      expect(plugin.statement).to eq(statement)
+    end
+  end
+
+  context "when passing parameters" do
+    let(:settings) do
+      {
+        "statement" => "SELECT :num_param as num_param FROM SYSIBM.SYSDUMMY1",
+        "parameters" => {"num_param" => 10}
+      }
+    end
+
+    before do
+      plugin.register
+    end
+
+    after do
+      plugin.teardown
+    end
+
+    it "should retrieve params correctly from Event" do
+      plugin.run(queue)
+      expect(queue.pop['num_param']).to eq(settings['parameters']['num_param'])
+    end
+  end
+
+  context "when scheduling" do
+    let(:settings) { {"statement" => "SELECT 1 as num_param FROM SYSIBM.SYSDUMMY1", "schedule" => "* * * * * UTC"} }
+
+    before do
+      plugin.register
+    end
+
+    it "should properly schedule" do
+      Timecop.travel(Time.new(2000))
+      Timecop.scale(60)
+      runner = Thread.new do
+        plugin.run(queue)
+      end
+      sleep 3
+      plugin.teardown
+      runner.kill
+      runner.join
+      expect(queue.size).to eq(2)
+      Timecop.return
+    end
+
+  end
+
+  context "when iterating result-set via paging" do
+
+    let(:settings) do
+      {
+        "statement" => "SELECT * from test_table",
+        "jdbc_paging_enabled" => true,
+        "jdbc_page_size" => 20
+      }
+    end
+
+    let(:num_rows) { 1000 }
+
+    before do
+      plugin.register
+    end
+
+    after do
+      plugin.teardown
+    end
+
+    it "should fetch all rows" do
+      num_rows.times do
+        db[:test_table].insert(:num => 1, :created_at => Time.now.utc)
+      end
+
+      plugin.run(queue)
+
+      expect(queue.size).to eq(num_rows)
+    end
+
+  end
+
+  context "when iteratively running plugin#run" do
+    let(:settings) do
+      {"statement" => "SELECT num, created_at FROM test_table WHERE created_at > :sql_last_start"}
+    end
+
+    let(:nums) { [10, 20, 30, 40, 50] }
 
-
-
-
-    test_table.insert(:num => nums[1], :created_at => Time.now.utc)
-    plugin.run(q)
-    test_table.insert(:num => nums[2], :created_at => Time.now.utc)
-    test_table.insert(:num => nums[3], :created_at => Time.now.utc)
-    test_table.insert(:num => nums[4], :created_at => Time.now.utc)
-    plugin.run(q)
+    before do
+      plugin.register
+    end
 
-
-
-      actual_sum += q.pop['num']
+    after do
+      plugin.teardown
     end
 
-
+    it "should successfully iterate table with respect to field values" do
+      test_table = db[:test_table]
+
+      plugin.run(queue)
+      test_table.insert(:num => nums[0], :created_at => Time.now.utc)
+      test_table.insert(:num => nums[1], :created_at => Time.now.utc)
+      plugin.run(queue)
+      test_table.insert(:num => nums[2], :created_at => Time.now.utc)
+      test_table.insert(:num => nums[3], :created_at => Time.now.utc)
+      test_table.insert(:num => nums[4], :created_at => Time.now.utc)
+      plugin.run(queue)
+
+      actual_sum = 0
+      until queue.empty? do
+        actual_sum += queue.pop['num']
+      end
+
+      expect(actual_sum).to eq(nums.inject{|sum,x| sum + x })
+    end
+  end
+
+  context "when previous runs are to be respected" do
+
+    let(:settings) do
+      { "statement" => "SELECT * FROM test_table",
+        "last_run_metadata_path" => Stud::Temporary.pathname }
+    end
+
+    let(:last_run_time) { Time.at(1).utc }
+
+    before do
+      File.write(settings["last_run_metadata_path"], YAML.dump(last_run_time))
+      plugin.register
+    end
+
+    after do
+      plugin.teardown
+    end
+
+    it "should respect last run metadata" do
+      expect(plugin.instance_variable_get("@sql_last_start")).to eq(last_run_time)
+    end
+  end
+
+  context "when doing a clean run" do
+
+    let(:settings) do
+      {
+        "statement" => "SELECT * FROM test_table",
+        "last_run_metadata_path" => Stud::Temporary.pathname,
+        "clean_run" => true
+      }
+    end
+
+    let(:last_run_time) { Time.at(1).utc }
+
+    before do
+      File.write(settings["last_run_metadata_path"], YAML.dump(last_run_time))
+      plugin.register
+    end
+
+    after do
+      plugin.teardown
+    end
 
-
+    it "should ignore last run metadata if :clean_run set to true" do
+      expect(plugin.instance_variable_get("@sql_last_start")).to eq(Time.at(0).utc)
+    end
+  end
+
+  context "when state is not to be persisted" do
+    let(:settings) do
+      {
+        "statement" => "SELECT * FROM test_table",
+        "last_run_metadata_path" => Stud::Temporary.pathname,
+        "record_last_run" => false
+      }
+    end
+
+    before do
+      plugin.register
+    end
+
+    after do
+      plugin.teardown
+    end
+
+    it "should not save state if :record_last_run is false" do
+      expect(File).not_to exist(settings["last_run_metadata_path"])
+    end
+  end
+
+  context "when setting fetch size" do
+
+    let(:settings) do
+      {
+        "statement" => "SELECT * from test_table",
+        "jdbc_fetch_size" => 1
+      }
+    end
+
+    let(:num_rows) { 10 }
+
+    before do
+      num_rows.times do
+        db[:test_table].insert(:num => 1, :created_at => Time.now.utc)
+      end
+
+      plugin.register
+    end
+
+    after do
+      plugin.teardown
+    end
+
+    it "should fetch all rows" do
+      plugin.run(queue)
+      expect(queue.size).to eq(num_rows)
+    end
+  end
+
+  context "when driver is not found" do
+    let(:settings) { { "statement" => "SELECT * FROM test_table" } }
+
+    before do
+      mixin_settings['jdbc_driver_class'] = "org.not.ExistsDriver"
+    end
+
+    it "should fail" do
+      expect { plugin.register }.to raise_error(LogStash::ConfigurationError)
+    end
   end
 end
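As the new specs exercise, the query can also be supplied from a file instead of inline. A minimal sketch of that variant (driver, connection details, and the query file path are placeholders); `statement` and `statement_filepath` are mutually exclusive, so only one of them may be set:

    input {
      jdbc {
        jdbc_driver_library => "/path/to/driver.jar"
        jdbc_driver_class => "com.mysql.jdbc.Driver"
        jdbc_connection_string => "jdbc:mysql://host:port/database"
        jdbc_user => "user"
        statement_filepath => "/path/to/query.sql"
        record_last_run => false   # do not persist sql_last_start between runs
      }
    }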
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-jdbc
 version: !ruby/object:Gem::Version
-  version: 0.1.3
+  version: 1.0.0
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-
+date: 2015-07-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash-core
@@ -138,6 +138,7 @@ files:
 - CHANGELOG.md
 - Gemfile
 - LICENSE
+- NOTICE.TXT
 - README.md
 - Rakefile
 - lib/logstash/inputs/jdbc.rb
|