logstash-integration-jdbc 5.0.0.alpha1
- checksums.yaml +7 -0
- data/CHANGELOG.md +8 -0
- data/CONTRIBUTORS +22 -0
- data/Gemfile +11 -0
- data/LICENSE +13 -0
- data/NOTICE.TXT +5 -0
- data/README.md +105 -0
- data/docs/filter-jdbc_static.asciidoc +606 -0
- data/docs/filter-jdbc_streaming.asciidoc +317 -0
- data/docs/index.asciidoc +32 -0
- data/docs/input-jdbc.asciidoc +573 -0
- data/lib/logstash/filters/jdbc/basic_database.rb +125 -0
- data/lib/logstash/filters/jdbc/column.rb +39 -0
- data/lib/logstash/filters/jdbc/db_object.rb +101 -0
- data/lib/logstash/filters/jdbc/loader.rb +119 -0
- data/lib/logstash/filters/jdbc/loader_schedule.rb +64 -0
- data/lib/logstash/filters/jdbc/lookup.rb +253 -0
- data/lib/logstash/filters/jdbc/lookup_processor.rb +100 -0
- data/lib/logstash/filters/jdbc/lookup_result.rb +40 -0
- data/lib/logstash/filters/jdbc/read_only_database.rb +57 -0
- data/lib/logstash/filters/jdbc/read_write_database.rb +108 -0
- data/lib/logstash/filters/jdbc/repeating_load_runner.rb +13 -0
- data/lib/logstash/filters/jdbc/single_load_runner.rb +46 -0
- data/lib/logstash/filters/jdbc/validatable.rb +46 -0
- data/lib/logstash/filters/jdbc_static.rb +240 -0
- data/lib/logstash/filters/jdbc_streaming.rb +196 -0
- data/lib/logstash/inputs/jdbc.rb +341 -0
- data/lib/logstash/inputs/tzinfo_jruby_patch.rb +57 -0
- data/lib/logstash/plugin_mixins/jdbc/checked_count_logger.rb +43 -0
- data/lib/logstash/plugin_mixins/jdbc/jdbc.rb +298 -0
- data/lib/logstash/plugin_mixins/jdbc/statement_handler.rb +129 -0
- data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb +140 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/cache_payload.rb +28 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/parameter_handler.rb +64 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/statement_handler.rb +143 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming.rb +100 -0
- data/lib/logstash/plugin_mixins/statement_handler.rb +0 -0
- data/lib/logstash-integration-jdbc_jars.rb +5 -0
- data/logstash-integration-jdbc.gemspec +44 -0
- data/spec/filters/env_helper.rb +10 -0
- data/spec/filters/integration/jdbc_static_spec.rb +154 -0
- data/spec/filters/integration/jdbcstreaming_spec.rb +173 -0
- data/spec/filters/jdbc/column_spec.rb +70 -0
- data/spec/filters/jdbc/db_object_spec.rb +81 -0
- data/spec/filters/jdbc/loader_spec.rb +77 -0
- data/spec/filters/jdbc/lookup_processor_spec.rb +132 -0
- data/spec/filters/jdbc/lookup_spec.rb +253 -0
- data/spec/filters/jdbc/read_only_database_spec.rb +67 -0
- data/spec/filters/jdbc/read_write_database_spec.rb +90 -0
- data/spec/filters/jdbc/repeating_load_runner_spec.rb +24 -0
- data/spec/filters/jdbc/single_load_runner_spec.rb +16 -0
- data/spec/filters/jdbc_static_file_local_spec.rb +83 -0
- data/spec/filters/jdbc_static_spec.rb +162 -0
- data/spec/filters/jdbc_streaming_spec.rb +350 -0
- data/spec/filters/remote_server_helper.rb +24 -0
- data/spec/filters/shared_helpers.rb +34 -0
- data/spec/helpers/WHY-THIS-JAR.txt +4 -0
- data/spec/helpers/derbyrun.jar +0 -0
- data/spec/inputs/integration/integ_spec.rb +78 -0
- data/spec/inputs/jdbc_spec.rb +1431 -0
- data/vendor/jar-dependencies/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar +0 -0
- data/vendor/jar-dependencies/org/apache/derby/derbyclient/10.14.1.0/derbyclient-10.14.1.0.jar +0 -0
- metadata +319 -0
data/lib/logstash/plugin_mixins/jdbc/jdbc.rb
@@ -0,0 +1,298 @@
# encoding: utf-8
# TAKEN FROM WIIBAA
require "logstash/config/mixin"
require "time"
require "date"
require_relative "value_tracking"
require_relative "checked_count_logger"
require_relative "statement_handler"

java_import java.util.concurrent.locks.ReentrantLock

# Tentative abstraction of JDBC logic into a mixin
# for potential reuse in other plugins (input/output)
module LogStash module PluginMixins module Jdbc
  module Jdbc
    # This method is called when someone includes this module
    def self.included(base)
      # Add these methods to the 'base' given.
      base.extend(self)
      base.setup_jdbc_config
    end

    public
    def setup_jdbc_config
      # JDBC driver library path to third party driver library. In case of multiple libraries being
      # required you can pass them separated by a comma.
      #
      # If not provided, the plugin will look for the driver class in the Logstash Java classpath.
      config :jdbc_driver_library, :validate => :string

      # JDBC driver class to load, for example, "org.apache.derby.jdbc.ClientDriver"
      # NB per https://github.com/logstash-plugins/logstash-input-jdbc/issues/43 if you are using
      # the Oracle JDBC driver (ojdbc6.jar) the correct `jdbc_driver_class` is `"Java::oracle.jdbc.driver.OracleDriver"`
      config :jdbc_driver_class, :validate => :string, :required => true

      # JDBC connection string
      config :jdbc_connection_string, :validate => :string, :required => true

      # JDBC user
      config :jdbc_user, :validate => :string, :required => true

      # JDBC password
      config :jdbc_password, :validate => :password

      # JDBC password filename
      config :jdbc_password_filepath, :validate => :path

      # JDBC enable paging
      #
      # This will cause a sql statement to be broken up into multiple queries.
      # Each query will use limits and offsets to collectively retrieve the full
      # result-set. The limit size is set with `jdbc_page_size`.
      #
      # Be aware that ordering is not guaranteed between queries.
      config :jdbc_paging_enabled, :validate => :boolean, :default => false

      # JDBC page size
      config :jdbc_page_size, :validate => :number, :default => 100000

      # JDBC fetch size. If not provided, the respective driver's default will be used.
      config :jdbc_fetch_size, :validate => :number

      # Connection pool configuration.
      # Validate connection before use.
      config :jdbc_validate_connection, :validate => :boolean, :default => false

      # Connection pool configuration.
      # How often to validate a connection (in seconds)
      config :jdbc_validation_timeout, :validate => :number, :default => 3600

      # Connection pool configuration.
      # The number of seconds to wait to acquire a connection before raising a PoolTimeoutError (default 5)
      config :jdbc_pool_timeout, :validate => :number, :default => 5

      # Timezone conversion.
      # SQL does not allow for timezone data in timestamp fields. This plugin will automatically
      # convert your SQL timestamp fields to Logstash timestamps, in relative UTC time in ISO8601 format.
      #
      # Using this setting will manually assign a specified timezone offset, instead
      # of using the timezone setting of the local machine. You must use a canonical
      # timezone, *America/Denver*, for example.
      config :jdbc_default_timezone, :validate => :string

      # General/Vendor-specific Sequel configuration options.
      #
      # An example of an optional connection pool configuration
      # max_connections - The maximum number of connections the connection pool
      #
      # examples of vendor-specific options can be found in this
      # documentation page: https://github.com/jeremyevans/sequel/blob/master/doc/opening_databases.rdoc
      config :sequel_opts, :validate => :hash, :default => {}

      # Log level at which to log SQL queries. The accepted values are the common ones: fatal, error, warn,
      # info and debug. The default value is info.
      config :sql_log_level, :validate => [ "fatal", "error", "warn", "info", "debug" ], :default => "info"

      # Maximum number of times to try connecting to the database
      config :connection_retry_attempts, :validate => :number, :default => 1
      # Number of seconds to sleep between connection attempts
      config :connection_retry_attempts_wait_time, :validate => :number, :default => 0.5

      # give users the ability to force the Sequel application side into using the local timezone
      config :plugin_timezone, :validate => ["local", "utc"], :default => "utc"
    end

    private
    def jdbc_connect
      opts = {
        :user => @jdbc_user,
        :password => @jdbc_password.nil? ? nil : @jdbc_password.value,
        :pool_timeout => @jdbc_pool_timeout,
        :keep_reference => false
      }.merge(@sequel_opts)
      retry_attempts = @connection_retry_attempts
      loop do
        retry_attempts -= 1
        begin
          return Sequel.connect(@jdbc_connection_string, opts)
        rescue Sequel::PoolTimeout => e
          if retry_attempts <= 0
            @logger.error("Failed to connect to database. #{@jdbc_pool_timeout} second timeout exceeded. Tried #{@connection_retry_attempts} times.")
            raise e
          else
            @logger.error("Failed to connect to database. #{@jdbc_pool_timeout} second timeout exceeded. Trying again.")
          end
        # rescue Java::JavaSql::SQLException, ::Sequel::Error => e
        rescue ::Sequel::Error => e
          if retry_attempts <= 0
            @logger.error("Unable to connect to database. Tried #{@connection_retry_attempts} times", :error_message => e.message)
            raise e
          else
            @logger.error("Unable to connect to database. Trying again", :error_message => e.message)
          end
        end
        sleep(@connection_retry_attempts_wait_time)
      end
    end

    private

    def load_driver_jars
      unless @jdbc_driver_library.nil? || @jdbc_driver_library.empty?
        @jdbc_driver_library.split(",").each do |driver_jar|
          begin
            @logger.debug("loading #{driver_jar}")
            # Use https://github.com/jruby/jruby/wiki/CallingJavaFromJRuby#from-jar-files to make classes from jar
            # available
            require driver_jar
          rescue LoadError => e
            raise LogStash::PluginLoadingError, "unable to load #{driver_jar} from :jdbc_driver_library, #{e.message}"
          end
        end
      end
    end

    private
    def open_jdbc_connection
      require "java"
      require "sequel"
      require "sequel/adapters/jdbc"

      Sequel.application_timezone = @plugin_timezone.to_sym
      if @drivers_loaded.false?
        begin
          load_driver_jars
          Sequel::JDBC.load_driver(@jdbc_driver_class)
        rescue LogStash::Error => e
          # raised in load_drivers, e.cause should be the caught Java exceptions
          raise LogStash::PluginLoadingError, "#{e.message} and #{e.cause.message}"
        rescue Sequel::AdapterNotFound => e
          # fix this !!!
          message = if @jdbc_driver_library.nil?
            ":jdbc_driver_library is not set, are you sure you included
            the proper driver client libraries in your classpath?"
          else
            "Are you sure you've included the correct jdbc driver in :jdbc_driver_library?"
          end
          raise LogStash::PluginLoadingError, "#{e}. #{message}"
        end
        @drivers_loaded.make_true
      end
      @database = jdbc_connect()
      @database.extension(:pagination)
      if @jdbc_default_timezone
        @database.extension(:named_timezones)
        @database.timezone = @jdbc_default_timezone
      end
      if @jdbc_validate_connection
        @database.extension(:connection_validator)
        @database.pool.connection_validation_timeout = @jdbc_validation_timeout
      end
      @database.fetch_size = @jdbc_fetch_size unless @jdbc_fetch_size.nil?
      begin
        @database.test_connection
      rescue Java::JavaSql::SQLException => e
        @logger.warn("Failed test_connection with java.sql.SQLException.", :exception => e)
      rescue Sequel::DatabaseConnectionError => e
        @logger.warn("Failed test_connection.", :exception => e)
        close_jdbc_connection

        # TODO return false and let the plugin raise a LogStash::ConfigurationError
        raise e
      end

      @database.sql_log_level = @sql_log_level.to_sym
      @database.logger = @logger

      @database.extension :identifier_mangling

      if @lowercase_column_names
        @database.identifier_output_method = :downcase
      else
        @database.identifier_output_method = :to_s
      end
    end

    public
    def prepare_jdbc_connection
      @connection_lock = ReentrantLock.new
      @drivers_loaded = Concurrent::AtomicBoolean.new
    end

    public
    def close_jdbc_connection
      begin
        # pipeline restarts can also close the jdbc connection, block until the current executing statement is finished to avoid leaking connections
        # connections in use won't really get closed
        @connection_lock.lock
        @database.disconnect if @database
      rescue => e
        @logger.warn("Failed to close connection", :exception => e)
      ensure
        @connection_lock.unlock
      end
    end

    public
    def execute_statement
      success = false
      @connection_lock.lock
      open_jdbc_connection
      begin
        sql_last_value = @use_column_value ? @value_tracker.value : Time.now.utc
        @tracking_column_warning_sent = false
        @statement_handler.perform_query(@database, @value_tracker.value, @jdbc_paging_enabled, @jdbc_page_size) do |row|
          sql_last_value = get_column_value(row) if @use_column_value
          yield extract_values_from(row)
        end
        success = true
      rescue Sequel::DatabaseConnectionError, Sequel::DatabaseError, Java::JavaSql::SQLException => e
        @logger.warn("Exception when executing JDBC query", :exception => e)
      else
        @value_tracker.set_value(sql_last_value)
      ensure
        close_jdbc_connection
        @connection_lock.unlock
      end
      return success
    end

    public
    def get_column_value(row)
      if !row.has_key?(@tracking_column.to_sym)
        if !@tracking_column_warning_sent
          @logger.warn("tracking_column not found in dataset.", :tracking_column => @tracking_column)
          @tracking_column_warning_sent = true
        end
        # If we can't find the tracking column, return the current value in the ivar
        @value_tracker.value
      else
        # Otherwise send the updated tracking column
        row[@tracking_column.to_sym]
      end
    end

    private
    # Stringify row keys and decorate values when necessary
    def extract_values_from(row)
      Hash[row.map { |k, v| [k.to_s, decorate_value(v)] }]
    end

    private
    def decorate_value(value)
      case value
      when Time
        # transform it to LogStash::Timestamp as required by LS
        LogStash::Timestamp.new(value)
      when Date, DateTime
        LogStash::Timestamp.new(value.to_time)
      else
        value
      end
    end
  end
end end end
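The connection handling above is easiest to see in isolation. Below is a minimal, plain-Ruby sketch of the retry shape used by jdbc_connect: connect_with_retries and its stubbed connect lambda are illustrative names standing in for Sequel.connect, and the attempts/wait_time arguments play the role of connection_retry_attempts and connection_retry_attempts_wait_time. As in the mixin, the final failed attempt re-raises instead of swallowing the error.

# Sketch only: a stubbed connect that fails twice before succeeding.
def connect_with_retries(attempts: 3, wait_time: 0.1)
  failures = 0
  connect = lambda do
    failures += 1
    raise "connection refused" if failures < 3
    :connected
  end

  remaining = attempts
  loop do
    remaining -= 1
    begin
      return connect.call
    rescue => e
      raise e if remaining <= 0           # out of attempts: propagate the error
      warn "connect failed (#{e.message}), retrying..."
    end
    sleep(wait_time)                      # pause between attempts
  end
end

p connect_with_retries # => :connected, after two logged failures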
data/lib/logstash/plugin_mixins/jdbc/statement_handler.rb
@@ -0,0 +1,129 @@
# encoding: utf-8

module LogStash module PluginMixins module Jdbc
  class StatementHandler
    def self.build_statement_handler(plugin, logger)
      klass = plugin.use_prepared_statements ? PreparedStatementHandler : NormalStatementHandler
      klass.new(plugin, logger)
    end

    attr_reader :statement, :parameters, :statement_logger

    def initialize(plugin, statement_logger)
      @statement = plugin.statement
      @statement_logger = statement_logger
      post_init(plugin)
    end

    def build_query(db, sql_last_value)
      # override in subclass
    end

    def post_init(plugin)
      # override in subclass, if needed
    end
  end

  class NormalStatementHandler < StatementHandler
    # Performs the query, respecting our pagination settings, yielding once per row of data
    # @param db [Sequel::Database]
    # @param sql_last_value [Integer|DateTime|Time]
    # @yieldparam row [Hash{Symbol=>Object}]
    def perform_query(db, sql_last_value, jdbc_paging_enabled, jdbc_page_size)
      query = build_query(db, sql_last_value)
      if jdbc_paging_enabled
        query.each_page(jdbc_page_size) do |paged_dataset|
          paged_dataset.each do |row|
            yield row
          end
        end
      else
        query.each do |row|
          yield row
        end
      end
    end

    private

    def build_query(db, sql_last_value)
      parameters[:sql_last_value] = sql_last_value
      query = db[statement, parameters]
      statement_logger.log_statement_parameters(statement, parameters, query)
      query
    end

    def post_init(plugin)
      @parameter_keys = ["sql_last_value"] + plugin.parameters.keys
      @parameters = plugin.parameters.inject({}) do |hash,(k,v)|
        case v
        when LogStash::Timestamp
          hash[k.to_sym] = v.time
        else
          hash[k.to_sym] = v
        end
        hash
      end
    end
  end

  class PreparedStatementHandler < StatementHandler
    attr_reader :name, :bind_values_array, :statement_prepared, :prepared

    # Performs the query, ignoring our pagination settings, yielding once per row of data
    # @param db [Sequel::Database]
    # @param sql_last_value [Integer|DateTime|Time]
    # @yieldparam row [Hash{Symbol=>Object}]
    def perform_query(db, sql_last_value, jdbc_paging_enabled, jdbc_page_size)
      query = build_query(db, sql_last_value)
      query.each do |row|
        yield row
      end
    end

    private

    def build_query(db, sql_last_value)
      @parameters = create_bind_values_hash
      if statement_prepared.false?
        prepended = parameters.keys.map{|v| v.to_s.prepend("$").to_sym}
        @prepared = db[statement, *prepended].prepare(:select, name)
        statement_prepared.make_true
      end
      # under the scheduler the Sequel database instance is recreated each time,
      # so the previous prepared statements are lost; add them back
      if db.prepared_statement(name).nil?
        db.set_prepared_statement(name, prepared)
      end
      bind_value_sql_last_value(sql_last_value)
      statement_logger.log_statement_parameters(statement, parameters, nil)
      db.call(name, parameters)
    end

    def post_init(plugin)
      # don't log statement count when using prepared statements for now...
      # needs enhancement to allow user to supply a bindable count prepared statement in settings.
      @statement_logger.disable_count

      @name = plugin.prepared_statement_name.to_sym
      @bind_values_array = plugin.prepared_statement_bind_values
      @parameters = plugin.parameters
      @statement_prepared = Concurrent::AtomicBoolean.new(false)
    end

    def create_bind_values_hash
      hash = {}
      bind_values_array.each_with_index {|v,i| hash[:"p#{i}"] = v}
      hash
    end

    def bind_value_sql_last_value(sql_last_value)
      parameters.keys.each do |key|
        value = parameters[key]
        if value == ":sql_last_value"
          parameters[key] = sql_last_value
        end
      end
    end
  end
end end end
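For the prepared-statement path, the parameter plumbing is the least obvious part. The following plain-Ruby sketch mirrors what create_bind_values_hash and bind_value_sql_last_value do: positional bind values become :p0, :p1, ... keys, and any value given as the literal string ":sql_last_value" is swapped for the tracker's current value just before db.call. The bind_values and sql_last_value variables here are made up for illustration.

bind_values    = [":sql_last_value", 100]   # e.g. from prepared_statement_bind_values
sql_last_value = Time.utc(2020, 1, 1)       # e.g. from the value tracker

# create_bind_values_hash: array elements become :p0, :p1, ...
parameters = {}
bind_values.each_with_index { |v, i| parameters[:"p#{i}"] = v }

# bind_value_sql_last_value: replace the ":sql_last_value" marker with the real value
parameters.keys.each do |key|
  parameters[key] = sql_last_value if parameters[key] == ":sql_last_value"
end

p parameters # => {:p0=>2020-01-01 00:00:00 UTC, :p1=>100}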
data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb
@@ -0,0 +1,140 @@
# encoding: utf-8
require "yaml" # persistence

module LogStash module PluginMixins module Jdbc
  class ValueTracking

    def self.build_last_value_tracker(plugin)
      handler = plugin.record_last_run ? FileHandler.new(plugin.last_run_metadata_path) : NullFileHandler.new(plugin.last_run_metadata_path)
      if plugin.record_last_run
        handler = FileHandler.new(plugin.last_run_metadata_path)
      end
      if plugin.clean_run
        handler.clean
      end

      if plugin.use_column_value && plugin.tracking_column_type == "numeric"
        # use this irrespective of the jdbc_default_timezone setting
        NumericValueTracker.new(handler)
      else
        if plugin.jdbc_default_timezone.nil? || plugin.jdbc_default_timezone.empty?
          # no TZ stuff for Sequel, use Time
          TimeValueTracker.new(handler)
        else
          # Sequel does timezone handling on DateTime only
          DateTimeValueTracker.new(handler)
        end
      end
    end

    attr_reader :value

    def initialize(handler)
      @file_handler = handler
      set_initial
    end

    def set_initial
      # override in subclass
    end

    def set_value(value)
      # override in subclass
    end

    def write
      @file_handler.write(@value)
    end

    private
    def common_set_initial(method_symbol, default)
      persisted = @file_handler.read

      if persisted && persisted.respond_to?(method_symbol)
        @value = persisted
      else
        @file_handler.clean
        @value = default
      end
    end
  end


  class NumericValueTracker < ValueTracking
    def set_initial
      common_set_initial(:gcd, 0)
    end

    def set_value(value)
      return unless value.is_a?(Numeric)
      @value = value
    end
  end

  class DateTimeValueTracker < ValueTracking
    def set_initial
      common_set_initial(:to_datetime, DateTime.new(1970))
    end

    def set_value(value)
      if value.respond_to?(:to_datetime)
        @value = value.to_datetime
      else
        @value = DateTime.parse(value)
      end
    end
  end

  class TimeValueTracker < ValueTracking
    def set_initial
      common_set_initial(:to_time, Time.at(0).utc)
    end

    def set_value(value)
      if value.respond_to?(:to_time)
        @value = value.to_time
      else
        @value = DateTime.parse(value).to_time
      end
    end
  end

  class FileHandler
    attr_reader :path

    def initialize(path)
      @path = path
      @exists = ::File.exist?(@path)
    end

    def clean
      return unless @exists
      ::File.delete(@path)
      @exists = false
    end

    def read
      return unless @exists
      YAML.load(::File.read(@path))
    end

    def write(value)
      ::File.write(@path, YAML.dump(value))
      @exists = true
    end
  end

  class NullFileHandler
    def initialize(path)
    end

    def clean
    end

    def read
    end

    def write(value)
    end
  end
end end end
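The persistence contract between the trackers and FileHandler is just a YAML round trip of a single scalar. A minimal runnable sketch, using a temp file in place of the plugin's last_run_metadata_path and a numeric value such as NumericValueTracker would hold:

require "yaml"
require "tempfile"

tmp  = Tempfile.new("last_run_metadata")   # stands in for last_run_metadata_path
path = tmp.path

value = 42                                 # e.g. the last seen numeric tracking column
File.write(path, YAML.dump(value))         # what FileHandler#write does
persisted = YAML.load(File.read(path))     # what FileHandler#read does on the next run

p persisted # => 42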
data/lib/logstash/plugin_mixins/jdbc_streaming/cache_payload.rb
@@ -0,0 +1,28 @@
# encoding: utf-8

module LogStash module PluginMixins module JdbcStreaming
  class CachePayload
    attr_reader :payload

    def initialize
      @failure = false
      @payload = []
    end

    def push(data)
      @payload << data
    end

    def failed!
      @failure = true
    end

    def failed?
      @failure
    end

    def empty?
      @payload.empty?
    end
  end
end end end
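CachePayload is a small value object for jdbc_streaming lookup results: it accumulates looked-up rows and remembers whether the lookup failed, so a cached entry can distinguish an empty result from an error. A usage sketch, assuming the gem is installed so this file is on the load path:

require "logstash/plugin_mixins/jdbc_streaming/cache_payload"

payload = LogStash::PluginMixins::JdbcStreaming::CachePayload.new
payload.push("id" => 1, "name" => "widget")   # record one looked-up row

p payload.empty?  # => false
p payload.failed? # => false
p payload.payload # => [{"id"=>1, "name"=>"widget"}]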
data/lib/logstash/plugin_mixins/jdbc_streaming/parameter_handler.rb
@@ -0,0 +1,64 @@
# encoding: utf-8

module LogStash module PluginMixins module JdbcStreaming
  class ParameterHandler

    def self.build_parameter_handler(given_value)
      # does it really make sense to deal with normal parameters differently?
      handler = FieldParameter.new(given_value)
      return handler unless given_value.is_a?(String)

      first_percent_curly = given_value.index("%{")
      if first_percent_curly && given_value.index("}", first_percent_curly)
        return InterpolatedParameter.new(given_value)
      end

      handler
    end

    def self.build_bind_value_handler(given_value)
      handler = ConstantParameter.new(given_value)

      return handler unless given_value.is_a?(String) # allow non String constants

      first_percent_curly = given_value.index("%{")
      if first_percent_curly && given_value.index("}", first_percent_curly)
        return InterpolatedParameter.new(given_value)
      end

      if given_value =~ /\A\s*\[[^\]]+\]\s*\z/
        return FieldParameter.new(given_value)
      end

      handler
    end

    attr_reader :given_value

    def initialize(given_value)
      @given_value = given_value
    end

    def extract_from(event)
      # override in subclass
    end
  end

  class InterpolatedParameter < ParameterHandler
    def extract_from(event)
      event.sprintf(@given_value)
    end
  end

  class FieldParameter < ParameterHandler
    def extract_from(event)
      event.get(@given_value)
    end
  end

  class ConstantParameter < ParameterHandler
    def extract_from(event)
      @given_value
    end
  end
end end end
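The two factory methods above differ only in their defaults and in which string shapes they recognise. The sketch below re-implements the classification rules of build_bind_value_handler in plain Ruby so they can be checked in isolation; classify is an illustrative name, not part of the gem.

# Sketch of the dispatch rules: "%{...}" strings are sprintf interpolations,
# strings shaped like a single "[field]" reference are event field lookups,
# and anything else (including non-strings) is passed through as a constant.
def classify(given_value)
  return :constant unless given_value.is_a?(String)

  first = given_value.index("%{")
  return :interpolated if first && given_value.index("}", first)
  return :field if given_value =~ /\A\s*\[[^\]]+\]\s*\z/

  :constant
end

p classify("%{[user][id]}")  # => :interpolated
p classify("[id]")           # => :field
p classify("a constant")     # => :constant
p classify(42)               # => :constant

Note that build_parameter_handler defaults to a field reference while build_bind_value_handler defaults to a constant, which is why the two factories exist side by side.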