logstash-filter-jdbc_static 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +2 -0
- data/CONTRIBUTORS +22 -0
- data/Gemfile +2 -0
- data/LICENSE +13 -0
- data/README.md +94 -0
- data/lib/logstash-filter-jdbc_static_jars.rb +5 -0
- data/lib/logstash/filters/jdbc/basic_database.rb +117 -0
- data/lib/logstash/filters/jdbc/column.rb +38 -0
- data/lib/logstash/filters/jdbc/db_object.rb +103 -0
- data/lib/logstash/filters/jdbc/loader.rb +114 -0
- data/lib/logstash/filters/jdbc/loader_schedule.rb +38 -0
- data/lib/logstash/filters/jdbc/lookup.rb +192 -0
- data/lib/logstash/filters/jdbc/lookup_processor.rb +91 -0
- data/lib/logstash/filters/jdbc/lookup_result.rb +39 -0
- data/lib/logstash/filters/jdbc/read_only_database.rb +57 -0
- data/lib/logstash/filters/jdbc/read_write_database.rb +86 -0
- data/lib/logstash/filters/jdbc/repeating_load_runner.rb +11 -0
- data/lib/logstash/filters/jdbc/single_load_runner.rb +43 -0
- data/lib/logstash/filters/jdbc/validatable.rb +49 -0
- data/lib/logstash/filters/jdbc_static.rb +216 -0
- data/logstash-filter-jdbc_static.gemspec +38 -0
- data/spec/filters/env_helper.rb +10 -0
- data/spec/filters/jdbc/column_spec.rb +70 -0
- data/spec/filters/jdbc/db_object_spec.rb +81 -0
- data/spec/filters/jdbc/loader_spec.rb +76 -0
- data/spec/filters/jdbc/lookup_processor_spec.rb +132 -0
- data/spec/filters/jdbc/lookup_spec.rb +129 -0
- data/spec/filters/jdbc/read_only_database_spec.rb +66 -0
- data/spec/filters/jdbc/read_write_database_spec.rb +89 -0
- data/spec/filters/jdbc/repeating_load_runner_spec.rb +24 -0
- data/spec/filters/jdbc/single_load_runner_spec.rb +16 -0
- data/spec/filters/jdbc_static_file_local_spec.rb +83 -0
- data/spec/filters/jdbc_static_spec.rb +70 -0
- data/spec/filters/remote_server_helper.rb +24 -0
- data/spec/filters/shared_helpers.rb +35 -0
- data/spec/helpers/WHY-THIS-JAR.txt +4 -0
- data/spec/helpers/derbyrun.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/derby-10.14.1.0.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/derbyclient-10.14.1.0.jar +0 -0
- metadata +224 -0
require_relative "validatable"

module LogStash module Filters module Jdbc
  # Validates a cron-style schedule string (via Rufus::Scheduler) and
  # derives how often the scheduler loop should wake up to check for
  # due jobs.
  class LoaderSchedule < Validatable
    attr_reader :schedule_frequency, :loader_schedule

    private

    # Picks the scheduler polling frequency once the options validated.
    # From the Rufus::Scheduler docs:
    # By default, rufus-scheduler sleeps 0.300 second between every step.
    # At each step it checks for jobs to trigger and so on.
    def post_initialize
      return unless valid?
      # A Set of seconds means sub-minute precision was requested, so
      # poll at the scheduler's native 0.3s resolution; otherwise 30s
      # is plenty.
      @schedule_frequency = @cronline.seconds.is_a?(Set) ? 0.3 : 30
    end

    # Records option errors and builds the Rufus cron line from the
    # schedule string; sets @valid when no errors were accumulated.
    def parse_options
      @loader_schedule = @options

      @option_errors << "The loader_schedule option must be a string" unless @loader_schedule.is_a?(String)

      begin
        @cronline = Rufus::Scheduler::CronLine.new(@loader_schedule)
      rescue => e
        @option_errors << "The loader_schedule option is invalid: #{e.message}"
      end

      @valid = @option_errors.empty?
    end
  end
end end end
require_relative "lookup_result"
require "logstash/util/loggable"

module LogStash module Filters module Jdbc
  # A single configured lookup: runs one parameterised SQL query against
  # the local db for each event and writes the result rows to a target
  # field on the event.
  class Lookup
    include LogStash::Util::Loggable

    # Parameter strategy for values containing a %{field} reference:
    # resolved per-event via event.sprintf.
    class Sprintfier
      def initialize(param)
        @param = param
      end

      # Returns the formatted value; records the parameter as invalid on
      # the result when sprintf left it unchanged (field not found).
      def fetch(event, result)
        formatted = event.sprintf(@param)
        if formatted == @param # no field found so no transformation
          result.invalid_parameters_push(@param)
        end
        formatted
      end
    end

    # Parameter strategy for plain field names: resolved per-event via
    # event.get.
    class Getfier
      def initialize(param)
        @param = param
      end

      # Returns the field value; records the parameter as invalid when
      # the value is missing or not a scalar.
      def fetch(event, result)
        value = event.get(@param)
        if value.nil? || value.is_a?(Hash) || value.is_a?(Array) # Array or Hash is not suitable
          result.invalid_parameters_push(@param)
        end
        value
      end
    end

    # Validates an array of lookup option hashes by instantiating each one
    # (with empty globals) and collecting its errors.
    # Returns nil when all are valid, otherwise a "; "-joined error string.
    def self.find_validation_errors(array_of_options)
      if !array_of_options.is_a?(Array)
        return "The options must be an Array"
      end
      errors = []
      array_of_options.each_with_index do |options, i|
        instance = new(options, {}, "lookup-#{i.next}")
        unless instance.valid?
          errors << instance.formatted_errors
        end
      end
      return nil if errors.empty?
      errors.join("; ")
    end

    attr_reader :id, :target, :query, :parameters

    # options    - per-lookup option hash ("id", "target", "query",
    #              "parameters", "default_hash", tag settings)
    # globals    - plugin-level settings used as fallbacks for tags
    # default_id - id assigned when the options do not specify one
    def initialize(options, globals, default_id)
      @id = options["id"] || default_id
      @target = options["target"]
      # When no target is given the id doubles as the target field name;
      # LookupProcessor uses this flag to skip the target-uniqueness check.
      @id_used_as_target = @target.nil?
      if @id_used_as_target
        @target = @id
      end
      @options = options
      @globals = globals
      @valid = false
      @option_errors = []
      @default_result = nil
      parse_options
    end

    def id_used_as_target?
      @id_used_as_target
    end

    def valid?
      @valid
    end

    def formatted_errors
      @option_errors.join(", ")
    end

    # Runs the lookup for one event against `local` (the local database).
    # Tags the event on failure/invalid parameters; on success writes the
    # payload (or the configured default) to the target field.
    # Returns true when the event was enhanced, false otherwise.
    def enhance(local, event)
      result = fetch(local, event) # should return a LookupResult

      if result.failed? || result.parameters_invalid?
        tag_failure(event)
      end

      if result.valid?
        if @use_default && result.empty?
          tag_default(event)
          process_event(event, @default_result)
        else
          process_event(event, result)
        end
        true
      else
        false
      end
    end

    private

    # Applies each configured failure tag to the event.
    def tag_failure(event)
      @tag_on_failure.each do |tag|
        event.tag(tag)
      end
    end

    # Applies each configured default-use tag to the event.
    def tag_default(event)
      @tag_on_default_use.each do |tag|
        event.tag(tag)
      end
    end

    # Resolves the query parameters from the event and executes the query.
    # Returns a LookupResult; it is marked failed on Sequel errors and
    # carries invalid-parameter names when resolution failed.
    def fetch(local, event)
      result = LookupResult.new()
      if @parameters_specified
        params = prepare_parameters_from_event(event, result)
        if result.parameters_invalid?
          logger.warn? && logger.warn("Parameter field not found in event", :lookup_id => @id, :invalid_parameters => result.invalid_parameters)
          return result
        end
      else
        params = {}
      end
      begin
        logger.debug? && logger.debug("Executing Jdbc query", :lookup_id => @id, :statement => query, :parameters => params)
        local.fetch(query, params).each do |row|
          stringified = row.inject({}){|hash,(k,v)| hash[k.to_s] = v; hash} #Stringify row keys
          result.push(stringified)
        end
      rescue ::Sequel::Error => e
        # all sequel errors are a subclass of this, let all other standard or runtime errors bubble up
        result.failed!
        logger.warn? && logger.warn("Exception when executing Jdbc query", :lookup_id => @id, :exception => e.message, :backtrace => e.backtrace.take(8))
      end
      # if either of: no records or a Sequel exception occurs the payload is
      # empty and the default can be substituted later.
      result
    end

    # Writes the result payload to the event's target field.
    def process_event(event, result)
      # use deep clone here so other filter function don't taint the payload by reference
      event.set(@target, ::LogStash::Util.deep_clone(result.payload))
    end

    # Builds the symbol-keyed parameter hash for the query by resolving
    # each Sprintfier/Getfier against the event. Timestamps are converted
    # to Time so the JDBC driver can bind them.
    def prepare_parameters_from_event(event, result)
      @symbol_parameters.inject({}) do |hash,(k,v)|
        value = v.fetch(event, result)
        hash[k] = value.is_a?(::LogStash::Timestamp) ? value.time : value
        hash
      end
    end

    # Chooses the resolution strategy: %{...} references use sprintf,
    # anything else is treated as a plain field name.
    def sprintf_or_get(v)
      v.match(/%{([^}]+)}/) ? Sprintfier.new(v) : Getfier.new(v)
    end

    # Validates and unpacks the option hash; sets @valid at the end.
    def parse_options
      @query = @options["query"]
      unless @query && @query.is_a?(String)
        @option_errors << "The options for '#{@id}' must include a 'query' string"
      end

      @parameters = @options["parameters"]
      @parameters_specified = false
      if @parameters
        if !@parameters.is_a?(Hash)
          @option_errors << "The 'parameters' option for '#{@id}' must be a Hash"
        else
          # this is done once per lookup at start, i.e. Sprintfier.new et.al is done once.
          @symbol_parameters = @parameters.inject({}) {|hash,(k,v)| hash[k.to_sym] = sprintf_or_get(v) ; hash }
          # the user might specify an empty hash parameters => {}
          # maybe due to an unparameterised query
          @parameters_specified = !@symbol_parameters.empty?
        end
      end

      default_hash = @options["default_hash"]
      if default_hash && !default_hash.empty?
        @default_result = LookupResult.new()
        @default_result.push(default_hash)
      end

      @use_default = !@default_result.nil?

      @tag_on_failure = @options["tag_on_failure"] || @globals["tag_on_failure"] || []
      @tag_on_default_use = @options["tag_on_default_use"] || @globals["tag_on_default_use"] || []

      @valid = @option_errors.empty?
    end
  end
end end end
require_relative "lookup"
require_relative "read_write_database"

module LogStash module Filters module Jdbc
  # Coordinates the set of Lookup instances and the shared local (lookup)
  # database they run their queries against. Also validates that lookup
  # ids and explicitly-set target fields are unique across the set.
  class LookupProcessor
    attr_reader :lookups, :local

    CONNECTION_ERROR_MSG = "Connection error when initialising lookup (local) db"
    DISCONNECTION_ERROR_MSG = "Connection error when disconnecting from lookup (local) db"

    # Validates an array of lookup option hashes.
    # Builds a processor with empty globals so no local db connection is
    # attempted, then collects per-lookup and cross-lookup errors.
    # Returns nil when everything is valid, otherwise a "; "-joined string.
    def self.find_validation_errors(array_of_options)
      if !array_of_options.is_a?(Array)
        return "The options must be an Array"
      end
      errors = []
      instance = new(array_of_options, {})
      instance.lookups.each do |lookup|
        unless lookup.valid?
          errors << lookup.formatted_errors
        end
      end
      unless instance.valid?
        errors << instance.formatted_errors
      end
      return nil if errors.empty?
      errors.join("; ")
    end

    # lookups_array - array of per-lookup option hashes
    # globals       - plugin-level settings; when empty (validation mode)
    #                 the local db is neither created nor connected.
    def initialize(lookups_array, globals)
      @lookups_errors = []
      @lookups = lookups_array.map.with_index do |options, i|
        Lookup.new(options, globals, "lookup-#{i.next}")
      end
      validate_lookups
      if @lookups_errors.empty? && !globals.empty?
        @local = ReadWriteDatabase.create(*globals.values_at(
          "lookup_jdbc_connection_string",
          "lookup_jdbc_driver_class",
          "lookup_jdbc_driver_library").compact)
        @local.connect(CONNECTION_ERROR_MSG)
      end
    end

    # Runs every lookup against the local db for the given event.
    # Returns an array of booleans, one per lookup (true on success).
    def enhance(event)
      @lookups.map { |lookup| lookup.enhance(@local, event) }
    end

    def close
      # BUGFIX: @local is never assigned when the processor is built for
      # validation only (empty globals) or when option errors were found,
      # so an unguarded disconnect raised NoMethodError on nil.
      @local.disconnect(DISCONNECTION_ERROR_MSG) if @local
      @local = nil
    end

    def formatted_errors
      @lookups_errors.join(", ")
    end

    def valid?
      @lookups_errors.empty?
    end

    private

    # Ensures lookup ids are unique, and that explicitly-set target fields
    # are not shared between lookups; accumulates messages in
    # @lookups_errors.
    def validate_lookups
      ids = Hash.new(0)
      errors = []
      @lookups.each {|lookup| ids[lookup.id] += 1}
      ids.select{|id, count| count > 1}.each do |id, count|
        errors << "'#{id}' is specified multiple times"
      end
      if !errors.empty?
        errors.unshift("Id setting must be different across all lookups")
      end
      @lookups_errors.concat(errors)
      targets = Hash.new {|h,k| h[k] = []}
      errors = []
      @lookups.each do |lookup|
        # if id was used as target, skip target unique check because id uniqueness is checked already
        next if lookup.id_used_as_target?
        targets[lookup.target] << lookup.id
      end
      targets.select{|_,val| val.size > 1}.each do |target, ids|
        errors << "'#{ids.join("', '")}' have the same target field setting"
      end
      if !errors.empty?
        errors.unshift("Target setting must be different across all lookups")
      end
      @lookups_errors.concat(errors)
    end
  end
end end end
module LogStash module Filters module Jdbc
  # Mutable container for the outcome of one lookup query execution:
  # the fetched rows (payload), any parameters that could not be resolved
  # from the event, and a failure flag for query errors.
  class LookupResult
    attr_reader :payload, :invalid_parameters

    def initialize
      @failure = false
      @payload = []
      @invalid_parameters = []
    end

    # Appends one fetched row to the payload.
    def push(data)
      @payload.push(data)
    end

    # Records a parameter that could not be resolved from the event.
    def invalid_parameters_push(data)
      @invalid_parameters.push(data)
    end

    # Marks the result as failed (e.g. a query raised).
    def failed!
      @failure = true
    end

    # A result is valid when nothing failed and every parameter resolved.
    def valid?
      !(failed? || parameters_invalid?)
    end

    def failed?
      @failure
    end

    def parameters_invalid?
      !@invalid_parameters.empty?
    end

    # True when the query returned no rows (or never ran).
    def empty?
      @payload.empty?
    end
  end
end end end
# encoding: utf-8
require_relative "basic_database"

module LogStash module Filters module Jdbc
  # Read-only access to the remote (loader) database. Connection
  # lifecycle is managed by the caller (see Loader); both query helpers
  # tolerate being called while disconnected and log at debug level.
  class ReadOnlyDatabase < BasicDatabase

    # Returns the number of rows the statement yields, or 0 when not
    # connected. Raises LookupJdbcException on Sequel errors.
    def count(statement)
      result = 0
      debug_log_messages = ["Lookup query count is zero"]
      begin
        # its the responsibility of the caller to manage the connections see Loader
        if connected?
          result = @db[statement].count
        else
          # BUGFIX: was `concat(...)` with a String argument, which raises
          # TypeError (Array#concat expects an Array); append with << instead.
          debug_log_messages << "and there is no connection to the remote db at this time"
        end
      rescue ::Sequel::Error => err
        # a fatal issue
        msg = "Exception occurred when executing loader Jdbc query count"
        logger.error(msg, :exception => err.message, :backtrace => err.backtrace.take(8))
        raise wrap_error(LookupJdbcException, err, msg)
      end
      logger.debug(debug_log_messages.join(' ')) if result.zero?
      result
    end

    # Returns all rows for the statement, or an empty record set when not
    # connected. Raises LookupJdbcException on Sequel errors.
    def query(statement)
      result = empty_record_set
      debug_log_messages = ["Lookup query results are empty"]
      begin
        # its the responsibility of the caller to manage the connections see Loader
        if connected?
          result = @db[statement].all
        else
          # BUGFIX: same Array#concat-with-String TypeError as in #count.
          debug_log_messages << "and there is no connection to the remote db at this time"
        end
      rescue ::Sequel::Error => err
        # a fatal issue
        msg = "Exception occurred when executing loader Jdbc query"
        logger.error(msg, :exception => err.message, :backtrace => err.backtrace.take(8))
        raise wrap_error(LookupJdbcException, err, msg)
      end
      logger.debug(debug_log_messages.join(' ')) if result.empty?
      result
    end

    # Verify the remote connection once at create time; no persistent
    # connection is opened here (the Loader connects/disconnects around
    # each use).
    def post_create(connection_string, driver_class, driver_library, user, password)
      verify_connection(connection_string, driver_class, driver_library, user, password)
    end

    private

    def post_initialize()
      super
    end
  end
end end end
require_relative "basic_database"

module LogStash module Filters module Jdbc
  # The local lookup database. Loader results are bulk-inserted into a
  # temp table which is then atomically rotated with the live table under
  # a write lock; filter-time fetches take the read lock.
  class ReadWriteDatabase < BasicDatabase
    # Refreshes the local tables from every loader's remote query results.
    # (Behaviorally equivalent to the previous 1/2/else case statement,
    # which performed the same per-loader call in every branch.)
    def repopulate_all(loaders)
      loaders.each do |loader|
        fill_and_switch(loader)
      end
    end

    alias populate_all repopulate_all

    # Runs a lookup query under the read lock.
    # Any exceptions should bubble up because the caller needs to set
    # failure tags etc.
    def fetch(statement, parameters)
      @rwlock.readLock().lock()
      @db[statement, parameters].all
    ensure
      @rwlock.readLock().unlock()
    end

    # Builds a table/index object in the local db under the write lock.
    # Connection errors are logged and swallowed (retried on next run);
    # other Sequel errors are wrapped and re-raised.
    def build_db_object(db_object)
      begin
        @rwlock.writeLock().lock()
        db_object.build(@db)
      rescue *CONNECTION_ERRORS => err
        # we do not raise an error when there is a connection error, we hope that the connection works next time
        logger.error("Connection error when initialising lookup db", :db_object => db_object.inspect, :exception => err.message, :backtrace => err.backtrace.take(8))
      rescue ::Sequel::Error => err
        msg = "Exception when initialising lookup db for db object: #{db_object}"
        logger.error(msg, :exception => err.message, :backtrace => err.backtrace.take(8))
        raise wrap_error(LoaderJdbcException, err, msg)
      ensure
        @rwlock.writeLock().unlock()
      end
    end

    # Gives each instance a unique local db name (the "____" placeholder)
    # then verifies and opens the connection.
    def post_create(connection_string, driver_class, driver_library, user, password)
      mutated_connection_string = connection_string.sub("____", unique_db_name)
      verify_connection(mutated_connection_string, driver_class, driver_library, user, password)
      connect("Connection error when connecting to lookup db")
    end

    private

    # Fetches the loader's remote records, bulk-inserts them into the temp
    # table and rotates temp <-> live table names inside one transaction,
    # all under the write lock.
    def fill_and_switch(loader)
      locked = false
      begin
        records = loader.fetch
        return if records.size.zero?
        @rwlock.writeLock().lock()
        locked = true
        tmp = self.class.random_name
        @db.transaction do |conn|
          @db[loader.temp_table].multi_insert(records)
          @db.rename_table(loader.temp_table, tmp)
          @db.rename_table(loader.table, loader.temp_table)
          @db.rename_table(tmp, loader.table)
          @db[loader.temp_table].truncate
        end
      rescue *CONNECTION_ERRORS => err
        # we do not raise an error when there is a connection error, we hope that the connection works next time
        logger.error("Connection error when filling lookup db from loader query results", :exception => err.message, :backtrace => err.backtrace.take(8))
      rescue => err
        # In theory all exceptions in Sequel should be wrapped in Sequel::Error
        # There are cases where exceptions occur in unprotected ensure sections
        msg = "Exception when filling lookup db from loader query results, original exception: #{err.class}, original message: #{err.message}"
        logger.error(msg, :backtrace => err.backtrace.take(16))
        raise wrap_error(LoaderJdbcException, err, msg)
      ensure
        # BUGFIX: the early return on zero records (and a failing fetch)
        # previously reached this ensure and called unlock() on a lock that
        # was never acquired, raising java.lang.IllegalMonitorStateException.
        # Only unlock when the write lock was actually taken.
        @rwlock.writeLock().unlock() if locked
      end
    end

    def post_initialize()
      super
      # get a fair reentrant read write lock
      @rwlock = java.util.concurrent.locks.ReentrantReadWriteLock.new(true)
    end
  end
end end end